1 /*
2  * Copyright (C) 2008 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #ifndef JIT_h
27 #define JIT_h
28
29 #if ENABLE(JIT)
30
31 // Verbose logging of code generation
32 #define ENABLE_JIT_VERBOSE 0
33 // Verbose logging for OSR-related code.
34 #define ENABLE_JIT_VERBOSE_OSR 0
35
36 // We've run into some problems where changing the size of the class JIT leads to
37 // performance fluctuations.  Try forcing alignment in an attempt to stabilize this.
38 #if COMPILER(GCC)
39 #define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
40 #else
41 #define JIT_CLASS_ALIGNMENT
42 #endif
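// The attribute is applied where the class is declared below: the declaration ends with
// "} JIT_CLASS_ALIGNMENT;".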
43
44 #define ASSERT_JIT_OFFSET_UNUSED(variable, actual, expected) ASSERT_WITH_MESSAGE_UNUSED(variable, actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
45 #define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
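// Illustrative use (a sketch, not a definition from this file): after emitting a get_by_id hot
// path, the generated layout can be checked against the architecture-specific constants declared
// further down, along the lines of
//     ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureCheck), patchOffsetGetByIdStructure);
// where hotPathBegin and structureCheck would be labels recorded during code generation.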
46
47 #include "CodeBlock.h"
48 #include "CompactJITCodeMap.h"
49 #include "Interpreter.h"
50 #include "JSInterfaceJIT.h"
51 #include "Opcode.h"
52 #include "Profiler.h"
53 #include <bytecode/SamplingTool.h>
54
55 namespace JSC {
56
57     class CodeBlock;
58     class FunctionExecutable;
59     class JIT;
60     class JSPropertyNameIterator;
61     class Interpreter;
62     class Register;
63     class RegisterFile;
64     class ScopeChainNode;
65     class StructureChain;
66
67     struct CallLinkInfo;
68     struct Instruction;
69     struct OperandTypes;
70     struct PolymorphicAccessStructureList;
71     struct SimpleJumpTable;
72     struct StringJumpTable;
73     struct StructureStubInfo;
74
75     struct CallRecord {
76         MacroAssembler::Call from;
77         unsigned bytecodeOffset;
78         void* to;
79
80         CallRecord()
81         {
82         }
83
84         CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
85             : from(from)
86             , bytecodeOffset(bytecodeOffset)
87             , to(to)
88         {
89         }
90     };
91
92     struct JumpTable {
93         MacroAssembler::Jump from;
94         unsigned toBytecodeOffset;
95
96         JumpTable(MacroAssembler::Jump f, unsigned t)
97             : from(f)
98             , toBytecodeOffset(t)
99         {
100         }
101     };
102
103     struct SlowCaseEntry {
104         MacroAssembler::Jump from;
105         unsigned to;
106         unsigned hint;
107         
108         SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
109             : from(f)
110             , to(t)
111             , hint(h)
112         {
113         }
114     };
115
116     struct SwitchRecord {
117         enum Type {
118             Immediate,
119             Character,
120             String
121         };
122
123         Type type;
124
125         union {
126             SimpleJumpTable* simpleJumpTable;
127             StringJumpTable* stringJumpTable;
128         } jumpTable;
129
130         unsigned bytecodeOffset;
131         unsigned defaultOffset;
132
133         SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
134             : type(type)
135             , bytecodeOffset(bytecodeOffset)
136             , defaultOffset(defaultOffset)
137         {
138             this->jumpTable.simpleJumpTable = jumpTable;
139         }
140
141         SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
142             : type(String)
143             , bytecodeOffset(bytecodeOffset)
144             , defaultOffset(defaultOffset)
145         {
146             this->jumpTable.stringJumpTable = jumpTable;
147         }
148     };
149
150     struct PropertyStubCompilationInfo {
151         unsigned bytecodeIndex;
152         MacroAssembler::Call callReturnLocation;
153         MacroAssembler::Label hotPathBegin;
154         
155 #if !ASSERT_DISABLED
156         PropertyStubCompilationInfo()
157             : bytecodeIndex(std::numeric_limits<unsigned>::max())
158         {
159         }
160 #endif
161     };
162
163     struct StructureStubCompilationInfo {
164         MacroAssembler::DataLabelPtr hotPathBegin;
165         MacroAssembler::Call hotPathOther;
166         MacroAssembler::Call callReturnLocation;
167         CallLinkInfo::CallType callType;
168         unsigned bytecodeIndex;
169     };
170
171     struct MethodCallCompilationInfo {
172         MethodCallCompilationInfo(unsigned bytecodeIndex, unsigned propertyAccessIndex)
173             : bytecodeIndex(bytecodeIndex)
174             , propertyAccessIndex(propertyAccessIndex)
175         {
176         }
177
178         unsigned bytecodeIndex;
179         MacroAssembler::DataLabelPtr structureToCompare;
180         unsigned propertyAccessIndex;
181     };
182
183     // Near calls can only be patched to other JIT code; regular calls can be patched to JIT code or relinked to stub functions.
184     void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
185     void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
186     void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
187
188     class JIT : private JSInterfaceJIT {
189         friend class JITStubCall;
190
191         using MacroAssembler::Jump;
192         using MacroAssembler::JumpList;
193         using MacroAssembler::Label;
194
195         static const int patchGetByIdDefaultStructure = -1;
196         static const int patchGetByIdDefaultOffset = 0;
197         // Magic number - the initial offset must not be representable as a signed 8-bit value, or the
198         // X86Assembler will compress the displacement and we may not be able to fit a patched offset.
199         static const int patchPutByIdDefaultOffset = 256;
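        // For example (an illustrative x86 encoding, not code emitted by this header): a load such as
        // "mov 0x10(%eax), %edx" can be encoded with an 8-bit displacement field, whereas an initial
        // offset of 256 forces a 32-bit displacement, leaving room to patch in the real offset later.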
200
201     public:
202         static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock, CodePtr* functionEntryArityCheck = 0)
203         {
204             return JIT(globalData, codeBlock).privateCompile(functionEntryArityCheck);
205         }
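        // A typical call site looks roughly like this (a sketch; the actual caller lives in the
        // executable/bytecode layer, not in this header):
        //     CodePtr arityCheck;
        //     JITCode jitCode = JIT::compile(globalData, codeBlock, &arityCheck);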
206
207         static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
208         {
209             JIT jit(globalData, codeBlock);
210             jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
211         }
212
213         static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
214         {
215             JIT jit(globalData, codeBlock);
216             jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
217         }
218         static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
219         {
220             JIT jit(globalData, codeBlock);
221             jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
222         }
223         static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
224         {
225             JIT jit(globalData, codeBlock);
226             jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
227         }
228
229         static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
230         {
231             JIT jit(globalData, codeBlock);
232             jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
233         }
234         
235         static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
236         {
237             JIT jit(globalData, codeBlock);
238             jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
239         }
240
241         static PassRefPtr<ExecutableMemoryHandle> compileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure *trampolines)
242         {
243             if (!globalData->canUseJIT())
244                 return 0;
245             JIT jit(globalData, 0);
246             return jit.privateCompileCTIMachineTrampolines(globalData, trampolines);
247         }
248
249         static CodeRef compileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
250         {
251             if (!globalData->canUseJIT())
252                 return CodeRef();
253             JIT jit(globalData, 0);
254             return jit.privateCompileCTINativeCall(globalData, func);
255         }
256
257         static void resetPatchGetById(RepatchBuffer&, StructureStubInfo*);
258         static void resetPatchPutById(RepatchBuffer&, StructureStubInfo*);
259         static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
260         static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
261         static void patchMethodCallProto(JSGlobalData&, CodeBlock* codeblock, MethodCallLinkInfo&, JSObject*, Structure*, JSObject*, ReturnAddressPtr);
262
263         static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
264         {
265             JIT jit(globalData, codeBlock);
266             return jit.privateCompilePatchGetArrayLength(returnAddress);
267         }
268
269         static void linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, JSGlobalData*, CodeSpecializationKind);
270
271     private:
272         struct JSRInfo {
273             DataLabelPtr storeLocation;
274             Label target;
275
276             JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
277                 : storeLocation(storeLocation)
278                 , target(targetLocation)
279             {
280             }
281         };
282
283         JIT(JSGlobalData*, CodeBlock* = 0);
284
285         void privateCompileMainPass();
286         void privateCompileLinkPass();
287         void privateCompileSlowCases();
288         JITCode privateCompile(CodePtr* functionEntryArityCheck);
289         void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
290         void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
291         void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
292         void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
293         void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
294         void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);
295
296         PassRefPtr<ExecutableMemoryHandle> privateCompileCTIMachineTrampolines(JSGlobalData*, TrampolineStructure*);
297         Label privateCompileCTINativeCall(JSGlobalData*, bool isConstruct = false);
298         CodeRef privateCompileCTINativeCall(JSGlobalData*, NativeFunction);
299         void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
300
301         static bool isDirectPutById(StructureStubInfo*);
302
303         void addSlowCase(Jump);
304         void addSlowCase(JumpList);
305         void addSlowCase();
306         void addJump(Jump, int);
307         void emitJumpSlowToHot(Jump, int);
308
309         void compileOpCall(OpcodeID, Instruction*, unsigned callLinkInfoIndex);
310         void compileOpCallSlowCase(OpcodeID, Instruction*, Vector<SlowCaseEntry>::iterator&, unsigned callLinkInfoIndex);
311         void compileLoadVarargs(Instruction*);
312         void compileCallEval();
313         void compileCallEvalSlowCase(Vector<SlowCaseEntry>::iterator&);
314
315         enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
316         void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
317         bool isOperandConstantImmediateDouble(unsigned src);
318         
319         void emitLoadDouble(int index, FPRegisterID value);
320         void emitLoadInt32ToDouble(int index, FPRegisterID value);
321         Jump emitJumpIfNotObject(RegisterID structureReg);
322         Jump emitJumpIfNotType(RegisterID baseReg, RegisterID scratchReg, JSType);
323
324         void testPrototype(JSValue, JumpList& failureCases);
325
326         enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterImmediates };
327         // The value register in the write barrier is used before any scratch registers,
328         // so it may safely be the same as either of the scratch registers.
329         void emitWriteBarrier(RegisterID owner, RegisterID valueTag, RegisterID scratch, RegisterID scratch2, WriteBarrierMode, WriteBarrierUseKind);
330         void emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode, WriteBarrierUseKind);
331
332         template<typename ClassType, typename StructureType> void emitAllocateBasicJSObject(StructureType, void* vtable, RegisterID result, RegisterID storagePtr);
333         template<typename T> void emitAllocateJSFinalObject(T structure, RegisterID result, RegisterID storagePtr);
334         void emitAllocateJSFunction(FunctionExecutable*, RegisterID scopeChain, RegisterID result, RegisterID storagePtr);
335         
336         enum ValueProfilingSiteKind { FirstProfilingSite, SubsequentProfilingSite };
337 #if ENABLE(VALUE_PROFILER)
338         // This assumes that the value to profile is in regT0 and that regT3 is available for
339         // scratch.
340         void emitValueProfilingSite(ValueProfilingSiteKind);
341 #else
342         void emitValueProfilingSite(ValueProfilingSiteKind) { }
343 #endif
344
345 #if USE(JSVALUE32_64)
346         bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);
347
348         void emitLoadTag(int index, RegisterID tag);
349         void emitLoadPayload(int index, RegisterID payload);
350
351         void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
352         void emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
353         void emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2);
354
355         void emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
356         void emitStore(int index, const JSValue constant, RegisterID base = callFrameRegister);
357         void emitStoreInt32(int index, RegisterID payload, bool indexIsInt32 = false);
358         void emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32 = false);
359         void emitStoreAndMapInt32(int index, RegisterID tag, RegisterID payload, bool indexIsInt32, size_t opcodeLength);
360         void emitStoreCell(int index, RegisterID payload, bool indexIsCell = false);
361         void emitStoreBool(int index, RegisterID payload, bool indexIsBool = false);
362         void emitStoreDouble(int index, FPRegisterID value);
363
364         bool isLabeled(unsigned bytecodeOffset);
365         void map(unsigned bytecodeOffset, int virtualRegisterIndex, RegisterID tag, RegisterID payload);
366         void unmap(RegisterID);
367         void unmap();
368         bool isMapped(int virtualRegisterIndex);
369         bool getMappedPayload(int virtualRegisterIndex, RegisterID& payload);
370         bool getMappedTag(int virtualRegisterIndex, RegisterID& tag);
371
372         void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex);
373         void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag);
374
375         void compileGetByIdHotPath();
376         void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
377         void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
378         void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
379         void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset);
380         void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, size_t cachedOffset);
381
382         // Arithmetic opcode helpers
383         void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
384         void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
385         void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
386
387 #if CPU(X86)
388         // These architecture-specific values are used to enable patching - see the comment on op_put_by_id.
389         static const int patchOffsetPutByIdStructure = 7;
390         static const int patchOffsetPutByIdPropertyMapOffset1 = 22;
391         static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
392         // These architecture-specific values are used to enable patching - see the comment on op_get_by_id.
393         static const int patchOffsetGetByIdStructure = 7;
394         static const int patchOffsetGetByIdBranchToSlowCase = 13;
395         static const int patchOffsetGetByIdPropertyMapOffset1 = 19;
396         static const int patchOffsetGetByIdPropertyMapOffset2 = 22;
397         static const int patchOffsetGetByIdPutResult = 22;
398 #if ENABLE(OPCODE_SAMPLING)
399         static const int patchOffsetGetByIdSlowCaseCall = 37;
400 #else
401         static const int patchOffsetGetByIdSlowCaseCall = 33;
402 #endif
403         static const int patchOffsetOpCallCompareToJump = 6;
404
405         static const int patchOffsetMethodCheckProtoObj = 11;
406         static const int patchOffsetMethodCheckProtoStruct = 18;
407         static const int patchOffsetMethodCheckPutFunction = 29;
408 #elif CPU(ARM_TRADITIONAL)
409         // These architecture-specific values are used to enable patching - see the comment on op_put_by_id.
410         static const int patchOffsetPutByIdStructure = 4;
411         static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
412         static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
413         // These architecture-specific values are used to enable patching - see the comment on op_get_by_id.
414         static const int patchOffsetGetByIdStructure = 4;
415         static const int patchOffsetGetByIdBranchToSlowCase = 16;
416         static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
417         static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
418         static const int patchOffsetGetByIdPutResult = 36;
419 #if ENABLE(OPCODE_SAMPLING)
420         #error "OPCODE_SAMPLING is not yet supported"
421 #else
422         static const int patchOffsetGetByIdSlowCaseCall = 40;
423 #endif
424         static const int patchOffsetOpCallCompareToJump = 12;
425
426         static const int patchOffsetMethodCheckProtoObj = 12;
427         static const int patchOffsetMethodCheckProtoStruct = 20;
428         static const int patchOffsetMethodCheckPutFunction = 32;
429
430         // sequenceOpCall
431         static const int sequenceOpCallInstructionSpace = 12;
432         static const int sequenceOpCallConstantSpace = 2;
433         // sequenceMethodCheck
434         static const int sequenceMethodCheckInstructionSpace = 40;
435         static const int sequenceMethodCheckConstantSpace = 6;
436         // sequenceGetByIdHotPath
437         static const int sequenceGetByIdHotPathInstructionSpace = 36;
438         static const int sequenceGetByIdHotPathConstantSpace = 4;
439         // sequenceGetByIdSlowCase
440         static const int sequenceGetByIdSlowCaseInstructionSpace = 56;
441         static const int sequenceGetByIdSlowCaseConstantSpace = 3;
442         // sequencePutById
443         static const int sequencePutByIdInstructionSpace = 36;
444         static const int sequencePutByIdConstantSpace = 4;
445 #elif CPU(ARM_THUMB2)
446         // These architecture-specific values are used to enable patching - see the comment on op_put_by_id.
447         static const int patchOffsetPutByIdStructure = 10;
448         static const int patchOffsetPutByIdPropertyMapOffset1 = 36;
449         static const int patchOffsetPutByIdPropertyMapOffset2 = 48;
450         // These architecture-specific values are used to enable patching - see the comment on op_get_by_id.
451         static const int patchOffsetGetByIdStructure = 10;
452         static const int patchOffsetGetByIdBranchToSlowCase = 26;
453         static const int patchOffsetGetByIdPropertyMapOffset1 = 28;
454         static const int patchOffsetGetByIdPropertyMapOffset2 = 30;
455         static const int patchOffsetGetByIdPutResult = 32;
456 #if ENABLE(OPCODE_SAMPLING)
457         #error "OPCODE_SAMPLING is not yet supported"
458 #else
459         static const int patchOffsetGetByIdSlowCaseCall = 40;
460 #endif
461         static const int patchOffsetOpCallCompareToJump = 16;
462
463         static const int patchOffsetMethodCheckProtoObj = 24;
464         static const int patchOffsetMethodCheckProtoStruct = 34;
465         static const int patchOffsetMethodCheckPutFunction = 58;
466
467         // sequenceOpCall
468         static const int sequenceOpCallInstructionSpace = 12;
469         static const int sequenceOpCallConstantSpace = 2;
470         // sequenceMethodCheck
471         static const int sequenceMethodCheckInstructionSpace = 40;
472         static const int sequenceMethodCheckConstantSpace = 6;
473         // sequenceGetByIdHotPath
474         static const int sequenceGetByIdHotPathInstructionSpace = 36;
475         static const int sequenceGetByIdHotPathConstantSpace = 4;
476         // sequenceGetByIdSlowCase
477         static const int sequenceGetByIdSlowCaseInstructionSpace = 40;
478         static const int sequenceGetByIdSlowCaseConstantSpace = 2;
479         // sequencePutById
480         static const int sequencePutByIdInstructionSpace = 36;
481         static const int sequencePutByIdConstantSpace = 4;
482 #elif CPU(MIPS)
483 #if WTF_MIPS_ISA(1)
484         static const int patchOffsetPutByIdStructure = 16;
485         static const int patchOffsetPutByIdPropertyMapOffset1 = 56;
486         static const int patchOffsetPutByIdPropertyMapOffset2 = 72;
487         static const int patchOffsetGetByIdStructure = 16;
488         static const int patchOffsetGetByIdBranchToSlowCase = 48;
489         static const int patchOffsetGetByIdPropertyMapOffset1 = 56;
490         static const int patchOffsetGetByIdPropertyMapOffset2 = 76;
491         static const int patchOffsetGetByIdPutResult = 96;
492 #if ENABLE(OPCODE_SAMPLING)
493         #error "OPCODE_SAMPLING is not yet supported"
494 #else
495         static const int patchOffsetGetByIdSlowCaseCall = 56;
496 #endif
497         static const int patchOffsetOpCallCompareToJump = 32;
498         static const int patchOffsetMethodCheckProtoObj = 32;
499         static const int patchOffsetMethodCheckProtoStruct = 56;
500         static const int patchOffsetMethodCheckPutFunction = 88;
501 #else // WTF_MIPS_ISA(1)
502         static const int patchOffsetPutByIdStructure = 12;
503         static const int patchOffsetPutByIdPropertyMapOffset1 = 48;
504         static const int patchOffsetPutByIdPropertyMapOffset2 = 64;
505         static const int patchOffsetGetByIdStructure = 12;
506         static const int patchOffsetGetByIdBranchToSlowCase = 44;
507         static const int patchOffsetGetByIdPropertyMapOffset1 = 48;
508         static const int patchOffsetGetByIdPropertyMapOffset2 = 64;
509         static const int patchOffsetGetByIdPutResult = 80;
510 #if ENABLE(OPCODE_SAMPLING)
511         #error "OPCODE_SAMPLING is not yet supported"
512 #else
513         static const int patchOffsetGetByIdSlowCaseCall = 56;
514 #endif
515         static const int patchOffsetOpCallCompareToJump = 32;
516         static const int patchOffsetMethodCheckProtoObj = 32;
517         static const int patchOffsetMethodCheckProtoStruct = 52;
518         static const int patchOffsetMethodCheckPutFunction = 84;
519 #endif
520 #elif CPU(SH4)
521         // These architecture-specific values are used to enable patching - see the comment on op_put_by_id.
522         static const int patchOffsetGetByIdStructure = 6;
523         static const int patchOffsetPutByIdPropertyMapOffset = 24;
524         static const int patchOffsetPutByIdStructure = 6;
525         // These architecture-specific values are used to enable patching - see the comment on op_get_by_id.
526         static const int patchOffsetGetByIdBranchToSlowCase = 10;
527         static const int patchOffsetGetByIdPropertyMapOffset = 24;
528         static const int patchOffsetGetByIdPutResult = 24;
529
530         // sequenceOpCall
531         static const int sequenceOpCallInstructionSpace = 12;
532         static const int sequenceOpCallConstantSpace = 2;
533         // sequenceMethodCheck
534         static const int sequenceMethodCheckInstructionSpace = 40;
535         static const int sequenceMethodCheckConstantSpace = 6;
536         // sequenceGetByIdHotPath
537         static const int sequenceGetByIdHotPathInstructionSpace = 36;
538         static const int sequenceGetByIdHotPathConstantSpace = 5;
539         // sequenceGetByIdSlowCase
540         static const int sequenceGetByIdSlowCaseInstructionSpace = 30;
541         static const int sequenceGetByIdSlowCaseConstantSpace = 3;
542         // sequencePutById
543         static const int sequencePutByIdInstructionSpace = 36;
544         static const int sequencePutByIdConstantSpace = 5;
545
546         static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
547         static const int patchOffsetGetByIdPropertyMapOffset2 = 22;
548
549         static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
550         static const int patchOffsetPutByIdPropertyMapOffset2 = 26;
551
552 #if ENABLE(OPCODE_SAMPLING)
553         static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME
554 #else
555         static const int patchOffsetGetByIdSlowCaseCall = 26;
556 #endif
557         static const int patchOffsetOpCallCompareToJump = 4;
558
559         static const int patchOffsetMethodCheckProtoObj = 12;
560         static const int patchOffsetMethodCheckProtoStruct = 20;
561         static const int patchOffsetMethodCheckPutFunction = 32;
562 #else
563 #error "JSVALUE32_64 not supported on this platform."
564 #endif
565
566 #else // USE(JSVALUE32_64)
567         void emitGetVirtualRegister(int src, RegisterID dst);
568         void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
569         void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
570         void emitStoreCell(unsigned dst, RegisterID payload, bool /* only used in JSVALUE32_64 */ = false)
571         {
572             emitPutVirtualRegister(dst, payload);
573         }
574
575         int32_t getConstantOperandImmediateInt(unsigned src);
576
577         void killLastResultRegister();
578
579         Jump emitJumpIfJSCell(RegisterID);
580         Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
581         void emitJumpSlowCaseIfJSCell(RegisterID);
582         Jump emitJumpIfNotJSCell(RegisterID);
583         void emitJumpSlowCaseIfNotJSCell(RegisterID);
584         void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
585 #if USE(JSVALUE32_64)
586         JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
587         {
588             return emitJumpIfImmediateInteger(reg);
589         }
590         
591         JIT::Jump emitJumpIfNotImmediateNumber(RegisterID reg)
592         {
593             return emitJumpIfNotImmediateInteger(reg);
594         }
595 #endif
596         Jump emitJumpIfImmediateInteger(RegisterID);
597         Jump emitJumpIfNotImmediateInteger(RegisterID);
598         Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
599         void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
600         void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
601         void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
602
603 #if USE(JSVALUE32_64)
604         void emitFastArithDeTagImmediate(RegisterID);
605         Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
606 #endif
607         void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
608         void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);
609
610         void emitTagAsBoolImmediate(RegisterID reg);
611         void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
612 #if USE(JSVALUE64)
613         void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
614 #else
615         void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes);
616 #endif
617
618         void compileGetByIdHotPath(int baseVReg, Identifier*);
619         void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
620         void compileGetDirectOffset(RegisterID base, RegisterID result, size_t cachedOffset);
621         void compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset);
622         void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch);
623         void compilePutDirectOffset(RegisterID base, RegisterID value, size_t cachedOffset);
624
625 #if CPU(X86_64)
626         // These architecture-specific values are used to enable patching - see the comment on op_put_by_id.
627         static const int patchOffsetPutByIdStructure = 10;
628         static const int patchOffsetPutByIdPropertyMapOffset = 31;
629         // These architecture-specific values are used to enable patching - see the comment on op_get_by_id.
630         static const int patchOffsetGetByIdStructure = 10;
631         static const int patchOffsetGetByIdBranchToSlowCase = 20;
632         static const int patchOffsetGetByIdPropertyMapOffset = 28;
633         static const int patchOffsetGetByIdPutResult = 28;
634 #if ENABLE(OPCODE_SAMPLING)
635         static const int patchOffsetGetByIdSlowCaseCall = 64;
636 #else
637         static const int patchOffsetGetByIdSlowCaseCall = 54;
638 #endif
639         static const int patchOffsetOpCallCompareToJump = 9;
640
641         static const int patchOffsetMethodCheckProtoObj = 20;
642         static const int patchOffsetMethodCheckProtoStruct = 30;
643         static const int patchOffsetMethodCheckPutFunction = 50;
644 #elif CPU(X86)
645         // These architecture-specific values are used to enable patching - see the comment on op_put_by_id.
646         static const int patchOffsetPutByIdStructure = 7;
647         static const int patchOffsetPutByIdPropertyMapOffset = 22;
648         // These architecture-specific values are used to enable patching - see the comment on op_get_by_id.
649         static const int patchOffsetGetByIdStructure = 7;
650         static const int patchOffsetGetByIdBranchToSlowCase = 13;
651         static const int patchOffsetGetByIdPropertyMapOffset = 22;
652         static const int patchOffsetGetByIdPutResult = 22;
653 #if ENABLE(OPCODE_SAMPLING)
654         static const int patchOffsetGetByIdSlowCaseCall = 33;
655 #else
656         static const int patchOffsetGetByIdSlowCaseCall = 23;
657 #endif
658         static const int patchOffsetOpCallCompareToJump = 6;
659
660         static const int patchOffsetMethodCheckProtoObj = 11;
661         static const int patchOffsetMethodCheckProtoStruct = 18;
662         static const int patchOffsetMethodCheckPutFunction = 29;
663 #elif CPU(ARM_THUMB2)
664         // These architecture-specific values are used to enable patching - see the comment on op_put_by_id.
665         static const int patchOffsetPutByIdStructure = 10;
666         static const int patchOffsetPutByIdPropertyMapOffset = 46;
667         // These architecture-specific values are used to enable patching - see the comment on op_get_by_id.
668         static const int patchOffsetGetByIdStructure = 10;
669         static const int patchOffsetGetByIdBranchToSlowCase = 26;
670         static const int patchOffsetGetByIdPropertyMapOffset = 46;
671         static const int patchOffsetGetByIdPutResult = 50;
672 #if ENABLE(OPCODE_SAMPLING)
673         static const int patchOffsetGetByIdSlowCaseCall = 0; // FIXME
674 #else
675         static const int patchOffsetGetByIdSlowCaseCall = 28;
676 #endif
677         static const int patchOffsetOpCallCompareToJump = 16;
678
679         static const int patchOffsetMethodCheckProtoObj = 24;
680         static const int patchOffsetMethodCheckProtoStruct = 34;
681         static const int patchOffsetMethodCheckPutFunction = 58;
682 #elif CPU(ARM_TRADITIONAL)
683         // These architecture-specific values are used to enable patching - see the comment on op_put_by_id.
684         static const int patchOffsetPutByIdStructure = 4;
685         static const int patchOffsetPutByIdPropertyMapOffset = 20;
686         // These architecture-specific values are used to enable patching - see the comment on op_get_by_id.
687         static const int patchOffsetGetByIdStructure = 4;
688         static const int patchOffsetGetByIdBranchToSlowCase = 16;
689         static const int patchOffsetGetByIdPropertyMapOffset = 20;
690         static const int patchOffsetGetByIdPutResult = 28;
691 #if ENABLE(OPCODE_SAMPLING)
692         #error "OPCODE_SAMPLING is not yet supported"
693 #else
694         static const int patchOffsetGetByIdSlowCaseCall = 28;
695 #endif
696         static const int patchOffsetOpCallCompareToJump = 12;
697
698         static const int patchOffsetMethodCheckProtoObj = 12;
699         static const int patchOffsetMethodCheckProtoStruct = 20;
700         static const int patchOffsetMethodCheckPutFunction = 32;
701
702         // sequenceOpCall
703         static const int sequenceOpCallInstructionSpace = 12;
704         static const int sequenceOpCallConstantSpace = 2;
705         // sequenceMethodCheck
706         static const int sequenceMethodCheckInstructionSpace = 40;
707         static const int sequenceMethodCheckConstantSpace = 6;
708         // sequenceGetByIdHotPath
709         static const int sequenceGetByIdHotPathInstructionSpace = 28;
710         static const int sequenceGetByIdHotPathConstantSpace = 3;
711         // sequenceGetByIdSlowCase
712         static const int sequenceGetByIdSlowCaseInstructionSpace = 32;
713         static const int sequenceGetByIdSlowCaseConstantSpace = 2;
714         // sequencePutById
715         static const int sequencePutByIdInstructionSpace = 28;
716         static const int sequencePutByIdConstantSpace = 3;
717 #elif CPU(MIPS)
718 #if WTF_MIPS_ISA(1)
719         static const int patchOffsetPutByIdStructure = 16;
720         static const int patchOffsetPutByIdPropertyMapOffset = 68;
721         static const int patchOffsetGetByIdStructure = 16;
722         static const int patchOffsetGetByIdBranchToSlowCase = 48;
723         static const int patchOffsetGetByIdPropertyMapOffset = 68;
724         static const int patchOffsetGetByIdPutResult = 88;
725 #if ENABLE(OPCODE_SAMPLING)
726         #error "OPCODE_SAMPLING is not yet supported"
727 #else
728         static const int patchOffsetGetByIdSlowCaseCall = 40;
729 #endif
730         static const int patchOffsetOpCallCompareToJump = 32;
731         static const int patchOffsetMethodCheckProtoObj = 32;
732         static const int patchOffsetMethodCheckProtoStruct = 56;
733         static const int patchOffsetMethodCheckPutFunction = 88;
734 #else // WTF_MIPS_ISA(1)
735         static const int patchOffsetPutByIdStructure = 12;
736         static const int patchOffsetPutByIdPropertyMapOffset = 60;
737         static const int patchOffsetGetByIdStructure = 12;
738         static const int patchOffsetGetByIdBranchToSlowCase = 44;
739         static const int patchOffsetGetByIdPropertyMapOffset = 60;
740         static const int patchOffsetGetByIdPutResult = 76;
741 #if ENABLE(OPCODE_SAMPLING)
742         #error "OPCODE_SAMPLING is not yet supported"
743 #else
744         static const int patchOffsetGetByIdSlowCaseCall = 40;
745 #endif
746         static const int patchOffsetOpCallCompareToJump = 32;
747         static const int patchOffsetMethodCheckProtoObj = 32;
748         static const int patchOffsetMethodCheckProtoStruct = 52;
749         static const int patchOffsetMethodCheckPutFunction = 84;
750 #endif
751 #endif
752 #endif // USE(JSVALUE32_64)
753
754 #if (defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL)
755 #define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace); } while (false)
756 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace, dst); } while (false)
757 #define END_UNINTERRUPTED_SEQUENCE(name) END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, 0)
758
759         void beginUninterruptedSequence(int, int);
760         void endUninterruptedSequence(int, int, int);
761
762 #else
763 #define BEGIN_UNINTERRUPTED_SEQUENCE(name)  do { beginUninterruptedSequence(); } while (false)
764 #define END_UNINTERRUPTED_SEQUENCE(name)  do { endUninterruptedSequence(); } while (false)
765 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(); } while (false)
766 #endif
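        // Usage sketch (illustrative; the real call sites are in the JIT*.cpp files): a patchable
        // sequence is bracketed so the assembler cannot flush a constant pool into the middle of it,
        // which would invalidate the patch offsets declared above:
        //     BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
        //     // ... emit the get_by_id hot path ...
        //     END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);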
767
768         void emit_compareAndJump(OpcodeID, unsigned op1, unsigned op2, unsigned target, RelationalCondition);
769         void emit_compareAndJumpSlow(unsigned op1, unsigned op2, unsigned target, DoubleCondition, int (JIT_STUB *stub)(STUB_ARGS_DECLARATION), bool invert, Vector<SlowCaseEntry>::iterator&);
770
771         void emit_op_add(Instruction*);
772         void emit_op_bitand(Instruction*);
773         void emit_op_bitnot(Instruction*);
774         void emit_op_bitor(Instruction*);
775         void emit_op_bitxor(Instruction*);
776         void emit_op_call(Instruction*);
777         void emit_op_call_eval(Instruction*);
778         void emit_op_call_varargs(Instruction*);
779         void emit_op_call_put_result(Instruction*);
780         void emit_op_catch(Instruction*);
781         void emit_op_construct(Instruction*);
782         void emit_op_get_callee(Instruction*);
783         void emit_op_create_this(Instruction*);
784         void emit_op_convert_this(Instruction*);
785         void emit_op_create_arguments(Instruction*);
786         void emit_op_debug(Instruction*);
787         void emit_op_del_by_id(Instruction*);
788         void emit_op_div(Instruction*);
789         void emit_op_end(Instruction*);
790         void emit_op_enter(Instruction*);
791         void emit_op_create_activation(Instruction*);
792         void emit_op_eq(Instruction*);
793         void emit_op_eq_null(Instruction*);
794         void emit_op_get_by_id(Instruction*);
795         void emit_op_get_arguments_length(Instruction*);
796         void emit_op_get_by_val(Instruction*);
797         void emit_op_get_argument_by_val(Instruction*);
798         void emit_op_get_by_pname(Instruction*);
799         void emit_op_get_global_var(Instruction*);
800         void emit_op_get_scoped_var(Instruction*);
801         void emit_op_init_lazy_reg(Instruction*);
802         void emit_op_check_has_instance(Instruction*);
803         void emit_op_instanceof(Instruction*);
804         void emit_op_jeq_null(Instruction*);
805         void emit_op_jfalse(Instruction*);
806         void emit_op_jmp(Instruction*);
807         void emit_op_jmp_scopes(Instruction*);
808         void emit_op_jneq_null(Instruction*);
809         void emit_op_jneq_ptr(Instruction*);
810         void emit_op_jless(Instruction*);
811         void emit_op_jlesseq(Instruction*);
812         void emit_op_jgreater(Instruction*);
813         void emit_op_jgreatereq(Instruction*);
814         void emit_op_jnless(Instruction*);
815         void emit_op_jnlesseq(Instruction*);
816         void emit_op_jngreater(Instruction*);
817         void emit_op_jngreatereq(Instruction*);
818         void emit_op_jsr(Instruction*);
819         void emit_op_jtrue(Instruction*);
820         void emit_op_loop(Instruction*);
821         void emit_op_loop_hint(Instruction*);
822         void emit_op_loop_if_less(Instruction*);
823         void emit_op_loop_if_lesseq(Instruction*);
824         void emit_op_loop_if_greater(Instruction*);
825         void emit_op_loop_if_greatereq(Instruction*);
826         void emit_op_loop_if_true(Instruction*);
827         void emit_op_loop_if_false(Instruction*);
828         void emit_op_lshift(Instruction*);
829         void emit_op_method_check(Instruction*);
830         void emit_op_mod(Instruction*);
831         void emit_op_mov(Instruction*);
832         void emit_op_mul(Instruction*);
833         void emit_op_negate(Instruction*);
834         void emit_op_neq(Instruction*);
835         void emit_op_neq_null(Instruction*);
836         void emit_op_new_array(Instruction*);
837         void emit_op_new_array_buffer(Instruction*);
838         void emit_op_new_func(Instruction*);
839         void emit_op_new_func_exp(Instruction*);
840         void emit_op_new_object(Instruction*);
841         void emit_op_new_regexp(Instruction*);
842         void emit_op_get_pnames(Instruction*);
843         void emit_op_next_pname(Instruction*);
844         void emit_op_not(Instruction*);
845         void emit_op_nstricteq(Instruction*);
846         void emit_op_pop_scope(Instruction*);
847         void emit_op_post_dec(Instruction*);
848         void emit_op_post_inc(Instruction*);
849         void emit_op_pre_dec(Instruction*);
850         void emit_op_pre_inc(Instruction*);
851         void emit_op_profile_did_call(Instruction*);
852         void emit_op_profile_will_call(Instruction*);
853         void emit_op_push_new_scope(Instruction*);
854         void emit_op_push_scope(Instruction*);
855         void emit_op_put_by_id(Instruction*);
856         void emit_op_put_by_index(Instruction*);
857         void emit_op_put_by_val(Instruction*);
858         void emit_op_put_getter(Instruction*);
859         void emit_op_put_global_var(Instruction*);
860         void emit_op_put_scoped_var(Instruction*);
861         void emit_op_put_setter(Instruction*);
862         void emit_op_resolve(Instruction*);
863         void emit_op_resolve_base(Instruction*);
864         void emit_op_ensure_property_exists(Instruction*);
865         void emit_op_resolve_global(Instruction*, bool dynamic = false);
866         void emit_op_resolve_global_dynamic(Instruction*);
867         void emit_op_resolve_skip(Instruction*);
868         void emit_op_resolve_with_base(Instruction*);
869         void emit_op_resolve_with_this(Instruction*);
870         void emit_op_ret(Instruction*);
871         void emit_op_ret_object_or_this(Instruction*);
872         void emit_op_rshift(Instruction*);
873         void emit_op_sret(Instruction*);
874         void emit_op_strcat(Instruction*);
875         void emit_op_stricteq(Instruction*);
876         void emit_op_sub(Instruction*);
877         void emit_op_switch_char(Instruction*);
878         void emit_op_switch_imm(Instruction*);
879         void emit_op_switch_string(Instruction*);
880         void emit_op_tear_off_activation(Instruction*);
881         void emit_op_tear_off_arguments(Instruction*);
882         void emit_op_throw(Instruction*);
883         void emit_op_throw_reference_error(Instruction*);
884         void emit_op_to_jsnumber(Instruction*);
885         void emit_op_to_primitive(Instruction*);
886         void emit_op_unexpected_load(Instruction*);
887         void emit_op_urshift(Instruction*);
888 #if ENABLE(JIT_USE_SOFT_MODULO)
889         void softModulo();
890 #endif
891
892         void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
893         void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
894         void emitSlow_op_bitnot(Instruction*, Vector<SlowCaseEntry>::iterator&);
895         void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
896         void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
897         void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
898         void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
899         void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
900         void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
901         void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
902         void emitSlow_op_create_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
903         void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
904         void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
905         void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
906         void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
907         void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
908         void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
909         void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
910         void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
911         void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
912         void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
913         void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
914         void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
915         void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
916         void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
917         void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
918         void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
919         void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
920         void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
921         void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
922         void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
923         void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
924         void emitSlow_op_loop_if_greater(Instruction*, Vector<SlowCaseEntry>::iterator&);
925         void emitSlow_op_loop_if_greatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
926         void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
927         void emitSlow_op_loop_if_false(Instruction*, Vector<SlowCaseEntry>::iterator&);
928         void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
929         void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
930         void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
931         void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
932         void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
933         void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
934         void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
935         void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
936         void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
937         void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
938         void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
939         void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
940         void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
941         void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
942         void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
943         void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
944         void emitSlow_op_resolve_global_dynamic(Instruction*, Vector<SlowCaseEntry>::iterator&);
945         void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
946         void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
947         void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
948         void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
949         void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
950         void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
951         void emitSlow_op_new_func(Instruction*, Vector<SlowCaseEntry>::iterator&);
952         void emitSlow_op_new_func_exp(Instruction*, Vector<SlowCaseEntry>::iterator&);
953
954         
955         void emitRightShift(Instruction*, bool isUnsigned);
956         void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);
957
958         /* This function is deprecated. */
959         void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);
960
961         void emitInitRegister(unsigned dst);
962
963         void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
964         void emitPutCellToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
965         void emitPutIntToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
966         void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
967         void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
968         void emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
969
970         JSValue getConstantOperand(unsigned src);
971         bool isOperandConstantImmediateInt(unsigned src);
972         bool isOperandConstantImmediateChar(unsigned src);
973
974         bool atJumpTarget();
975
976         Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
977         {
978             return iter++->from;
979         }
980         void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
981         {
982             iter->from.link(this);
983             ++iter;
984         }
985         void linkDummySlowCase(Vector<SlowCaseEntry>::iterator& iter)
986         {
987             ASSERT(!iter->from.isSet());
988             ++iter;
989         }
990         void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int virtualRegisterIndex);
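        // Typical slow-case pattern (a sketch of how these helpers are used by the emitSlow_op_*
        // functions): each Jump recorded with addSlowCase() during the main pass is bound here via
        // linkSlowCase(iter), and the operation is then re-dispatched to a C++ stub (see the
        // JITStubCall friend class above).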
991
992         Jump checkStructure(RegisterID reg, Structure* structure);
993
994         void restoreArgumentReference();
995         void restoreArgumentReferenceForTrampoline();
996         void updateTopCallFrame();
997
998         Call emitNakedCall(CodePtr function = CodePtr());
999
1000         void preserveReturnAddressAfterCall(RegisterID);
1001         void restoreReturnAddressBeforeReturn(RegisterID);
1002         void restoreReturnAddressBeforeReturn(Address);
1003
1004         // Loads the character value of a single-character string into dst.
1005         void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);
1006         
1007         enum OptimizationCheckKind { LoopOptimizationCheck, RetOptimizationCheck };
1008 #if ENABLE(DFG_JIT)
1009         void emitOptimizationCheck(OptimizationCheckKind);
1010 #else
1011         void emitOptimizationCheck(OptimizationCheckKind) { }
1012 #endif
1013         
1014         void emitTimeoutCheck();
1015 #ifndef NDEBUG
1016         void printBytecodeOperandTypes(unsigned src1, unsigned src2);
1017 #endif
1018
1019 #if ENABLE(SAMPLING_FLAGS)
1020         void setSamplingFlag(int32_t);
1021         void clearSamplingFlag(int32_t);
1022 #endif
1023
1024 #if ENABLE(SAMPLING_COUNTERS)
1025         void emitCount(AbstractSamplingCounter&, int32_t = 1);
1026 #endif
1027
1028 #if ENABLE(OPCODE_SAMPLING)
1029         void sampleInstruction(Instruction*, bool = false);
1030 #endif
1031
1032 #if ENABLE(CODEBLOCK_SAMPLING)
1033         void sampleCodeBlock(CodeBlock*);
1034 #else
1035         void sampleCodeBlock(CodeBlock*) {}
1036 #endif
1037
1038 #if ENABLE(DFG_JIT)
1039         bool canBeOptimized() { return m_canBeOptimized; }
1040         bool shouldEmitProfiling() { return m_canBeOptimized; }
1041 #else
1042         bool canBeOptimized() { return false; }
1043         // Enables use of the value profiler with tiered compilation turned off,
1044         // in which case all code gets profiled.
1045         bool shouldEmitProfiling() { return true; }
1046 #endif
1047
1048         Interpreter* m_interpreter;
1049         JSGlobalData* m_globalData;
1050         CodeBlock* m_codeBlock;
1051
1052         Vector<CallRecord> m_calls;
1053         Vector<Label> m_labels;
1054         Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
1055         Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
1056         Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
1057         Vector<JumpTable> m_jmpTable;
1058
1059         unsigned m_bytecodeOffset;
1060         Vector<JSRInfo> m_jsrSites;
1061         Vector<SlowCaseEntry> m_slowCases;
1062         Vector<SwitchRecord> m_switches;
1063
1064         unsigned m_propertyAccessInstructionIndex;
1065         unsigned m_globalResolveInfoIndex;
1066         unsigned m_callLinkInfoIndex;
1067
1068 #if USE(JSVALUE32_64)
1069         unsigned m_jumpTargetIndex;
1070         unsigned m_mappedBytecodeOffset;
1071         int m_mappedVirtualRegisterIndex;
1072         RegisterID m_mappedTag;
1073         RegisterID m_mappedPayload;
1074 #else
1075         int m_lastResultBytecodeRegister;
1076 #endif
1077         unsigned m_jumpTargetsPosition;
1078
1079 #ifndef NDEBUG
1080 #if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
1081         Label m_uninterruptedInstructionSequenceBegin;
1082         int m_uninterruptedConstantSequenceBegin;
1083 #endif
1084 #endif
1085         WeakRandom m_randomGenerator;
1086         static CodeRef stringGetByValStubGenerator(JSGlobalData*);
1087
1088 #if ENABLE(VALUE_PROFILER)
1089         bool m_canBeOptimized;
1090 #endif
1091     } JIT_CLASS_ALIGNMENT;
1092
1093     inline void JIT::emit_op_loop(Instruction* currentInstruction)
1094     {
1095         emitTimeoutCheck();
1096         emit_op_jmp(currentInstruction);
1097     }
1098
1099     inline void JIT::emit_op_loop_hint(Instruction*)
1100     {
1101         emitOptimizationCheck(LoopOptimizationCheck);
1102     }
1103
1104     inline void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
1105     {
1106         emitTimeoutCheck();
1107         emit_op_jtrue(currentInstruction);
1108     }
1109
1110     inline void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1111     {
1112         emitSlow_op_jtrue(currentInstruction, iter);
1113     }
1114
1115     inline void JIT::emit_op_loop_if_false(Instruction* currentInstruction)
1116     {
1117         emitTimeoutCheck();
1118         emit_op_jfalse(currentInstruction);
1119     }
1120
1121     inline void JIT::emitSlow_op_loop_if_false(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1122     {
1123         emitSlow_op_jfalse(currentInstruction, iter);
1124     }
1125
1126     inline void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
1127     {
1128         emitTimeoutCheck();
1129         emit_op_jless(currentInstruction);
1130     }
1131
1132     inline void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1133     {
1134         emitSlow_op_jless(currentInstruction, iter);
1135     }
1136
1137     inline void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
1138     {
1139         emitTimeoutCheck();
1140         emit_op_jlesseq(currentInstruction);
1141     }
1142
1143     inline void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1144     {
1145         emitSlow_op_jlesseq(currentInstruction, iter);
1146     }
1147
1148     inline void JIT::emit_op_loop_if_greater(Instruction* currentInstruction)
1149     {
1150         emitTimeoutCheck();
1151         emit_op_jgreater(currentInstruction);
1152     }
1153
1154     inline void JIT::emitSlow_op_loop_if_greater(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1155     {
1156         emitSlow_op_jgreater(currentInstruction, iter);
1157     }
1158
1159     inline void JIT::emit_op_loop_if_greatereq(Instruction* currentInstruction)
1160     {
1161         emitTimeoutCheck();
1162         emit_op_jgreatereq(currentInstruction);
1163     }
1164
1165     inline void JIT::emitSlow_op_loop_if_greatereq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1166     {
1167         emitSlow_op_jgreatereq(currentInstruction, iter);
1168     }
1169
1170 } // namespace JSC
1171
1172 #endif // ENABLE(JIT)
1173
1174 #endif // JIT_h