2 * Copyright (C) 2008 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// --- Build-time configuration for the baseline JIT ---
31 // Verbose logging of code generation
32 #define ENABLE_JIT_VERBOSE 0
33 // Verbose logging for OSR-related code.
34 #define ENABLE_JIT_VERBOSE_OSR 0
36 // We've run into some problems where changing the size of the class JIT leads to
37 // performance fluctuations. Try forcing alignment in an attempt to stabilize this.
// NOTE(review): the next two #defines are the two arms of a configuration
// conditional (#if .../#else/#endif) that has been elided from this listing;
// as shown, the second would simply redefine the first. Confirm against the
// full header before editing.
39 #define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
41 #define JIT_CLASS_ALIGNMENT
// Fails (via ASSERT_WITH_MESSAGE) when a hand-maintained JIT offset constant
// does not match the value actually computed at compile time.
44 #define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
46 #include "CodeBlock.h"
47 #include "CompactJITCodeMap.h"
48 #include "Interpreter.h"
49 #include "JSInterfaceJIT.h"
52 #include <bytecode/SamplingTool.h>
// Forward declarations. The enclosing namespace declaration is not visible in
// this excerpt; these types are defined elsewhere in the project.
57 class FunctionExecutable;
59 class JSPropertyNameIterator;
69 struct PolymorphicAccessStructureList;
70 struct SimpleJumpTable;
71 struct StringJumpTable;
72 struct StructureStubInfo;
// Fragment of CallRecord: pairs an emitted call instruction with the bytecode
// offset it was generated for. NOTE(review): the struct declaration line, a
// member corresponding to the constructor's 'to' parameter, the initializer
// for 'from', and the constructor body/closing brace are elided from this
// listing — confirm against the full header.
75 MacroAssembler::Call from;
76 unsigned bytecodeOffset;
83 CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
85 , bytecodeOffset(bytecodeOffset)
// Fragment of JumpTable: records an emitted jump together with the bytecode
// offset it targets. NOTE(review): the struct declaration line, the
// constructor's initializer list, and the closing brace are elided from this
// listing.
92 MacroAssembler::Jump from;
93 unsigned toBytecodeOffset;
95 JumpTable(MacroAssembler::Jump f, unsigned t)
// Fragment of SlowCaseEntry: records a jump taken from a fast path into
// slow-case code. NOTE(review): members corresponding to the constructor's
// 't' and 'h' parameters, the initializer list, and the closing brace are
// elided from this listing.
102 struct SlowCaseEntry {
103 MacroAssembler::Jump from;
107 SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
// Fragment of SwitchRecord: associates a switch opcode with its jump table.
// NOTE(review): the declaration that groups the two pointers below (assigned
// through 'this->jumpTable' in the constructors — presumably a union), the
// Type enum, a 'type' member matching the first constructor's last parameter,
// constructor braces, and the struct's closing brace are all elided from this
// listing.
115 struct SwitchRecord {
// Exactly one of these is meaningful for a given record; both are written
// through the elided 'jumpTable' member below.
125 SimpleJumpTable* simpleJumpTable;
126 StringJumpTable* stringJumpTable;
129 unsigned bytecodeOffset;
130 unsigned defaultOffset;
// Constructor for a numeric (simple) jump table.
132 SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
134 , bytecodeOffset(bytecodeOffset)
135 , defaultOffset(defaultOffset)
137 this->jumpTable.simpleJumpTable = jumpTable;
// Constructor for a string jump table.
140 SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
142 , bytecodeOffset(bytecodeOffset)
143 , defaultOffset(defaultOffset)
145 this->jumpTable.stringJumpTable = jumpTable;
// Empty tag types used purely to select between the get_by_id and put_by_id
// overloads of PropertyStubCompilationInfo's constructors and slowCaseInfo().
149 enum PropertyStubGetById_T { PropertyStubGetById };
150 enum PropertyStubPutById_T { PropertyStubPutById };
// Collects the assembler labels recorded while emitting the hot and cold
// paths of a property access (get_by_id / put_by_id / method_check) so they
// can later be transferred into a StructureStubInfo (copyToStubInfo below).
// NOTE(review): this excerpt is an elided listing — the #if/#else blocks that
// originally chose between the single-displacement-label and paired-label
// variants are missing, so several adjacent lines below are mutually
// exclusive alternatives rather than sequential code. Confirm against the
// full header before editing.
152 struct PropertyStubCompilationInfo {
153 enum Type { GetById, PutById, MethodCheck } m_type;
155 unsigned bytecodeIndex;
156 MacroAssembler::Call callReturnLocation;
157 MacroAssembler::Label hotPathBegin;
158 MacroAssembler::DataLabelPtr getStructureToCompare;
159 MacroAssembler::PatchableJump getStructureCheck;
// Either one displacement label or a pair of them, depending on an elided
// configuration conditional.
161 MacroAssembler::DataLabelCompact getDisplacementLabel;
163 MacroAssembler::DataLabelCompact getDisplacementLabel1;
164 MacroAssembler::DataLabelCompact getDisplacementLabel2;
166 MacroAssembler::Label getPutResult;
167 MacroAssembler::Label getColdPathBegin;
168 MacroAssembler::DataLabelPtr putStructureToCompare;
// Same single/paired alternative as the get labels above.
170 MacroAssembler::DataLabel32 putDisplacementLabel;
172 MacroAssembler::DataLabel32 putDisplacementLabel1;
173 MacroAssembler::DataLabel32 putDisplacementLabel2;
// Labels filled in by addMethodCheckInfo() below.
175 MacroAssembler::DataLabelPtr methodCheckStructureToCompare;
176 MacroAssembler::DataLabelPtr methodCheckProtoObj;
177 MacroAssembler::DataLabelPtr methodCheckProtoStructureToCompare;
178 MacroAssembler::DataLabelPtr methodCheckPutFunction;
// Default constructor: marks the entry unused via a sentinel bytecode index.
181 PropertyStubCompilationInfo()
182 : bytecodeIndex(std::numeric_limits<unsigned>::max())
// Records the hot-path labels of a get_by_id. The two parameter-list lines
// below are alternative continuations of this signature from an elided
// conditional (one vs. two displacement labels); likewise the initializer
// list mixes both arms.
188 PropertyStubCompilationInfo(PropertyStubGetById_T, unsigned bytecodeIndex, MacroAssembler::Label hotPathBegin,
190 MacroAssembler::DataLabelPtr structureToCompare, MacroAssembler::PatchableJump structureCheck, MacroAssembler::DataLabelCompact displacementLabel, MacroAssembler::Label putResult)
192 MacroAssembler::DataLabelPtr structureToCompare, MacroAssembler::PatchableJump structureCheck, MacroAssembler::DataLabelCompact displacementLabel1, MacroAssembler::DataLabelCompact displacementLabel2, MacroAssembler::Label putResult)
195 , bytecodeIndex(bytecodeIndex)
196 , hotPathBegin(hotPathBegin)
197 , getStructureToCompare(structureToCompare)
198 , getStructureCheck(structureCheck)
200 , getDisplacementLabel(displacementLabel)
202 , getDisplacementLabel1(displacementLabel1)
203 , getDisplacementLabel2(displacementLabel2)
205 , getPutResult(putResult)
// Records the hot-path labels of a put_by_id (same alternative-signature
// caveat as above).
209 PropertyStubCompilationInfo(PropertyStubPutById_T, unsigned bytecodeIndex, MacroAssembler::Label hotPathBegin,
211 MacroAssembler::DataLabelPtr structureToCompare, MacroAssembler::DataLabel32 displacementLabel)
213 MacroAssembler::DataLabelPtr structureToCompare, MacroAssembler::DataLabel32 displacementLabel1, MacroAssembler::DataLabel32 displacementLabel2)
216 , bytecodeIndex(bytecodeIndex)
217 , hotPathBegin(hotPathBegin)
218 , putStructureToCompare(structureToCompare)
220 , putDisplacementLabel(displacementLabel)
222 , putDisplacementLabel1(displacementLabel1)
223 , putDisplacementLabel2(displacementLabel2)
// Records the cold-path label and slow-case call of a get_by_id or
// method_check.
228 void slowCaseInfo(PropertyStubGetById_T, MacroAssembler::Label coldPathBegin, MacroAssembler::Call call)
230 ASSERT(m_type == GetById || m_type == MethodCheck);
231 callReturnLocation = call;
232 getColdPathBegin = coldPathBegin;
// Records the slow-case call of a put_by_id (no cold-path label stored).
235 void slowCaseInfo(PropertyStubPutById_T, MacroAssembler::Call call)
237 ASSERT(m_type == PutById);
238 callReturnLocation = call;
// Upgrades this entry to a MethodCheck and stores the extra labels the
// method-check fast path needs patched.
241 void addMethodCheckInfo(MacroAssembler::DataLabelPtr structureToCompare, MacroAssembler::DataLabelPtr protoObj, MacroAssembler::DataLabelPtr protoStructureToCompare, MacroAssembler::DataLabelPtr putFunction)
243 m_type = MethodCheck;
244 methodCheckStructureToCompare = structureToCompare;
245 methodCheckProtoObj = protoObj;
246 methodCheckProtoStructureToCompare = protoStructureToCompare;
247 methodCheckPutFunction = putFunction;
// Declared here, defined elsewhere: transfers this record into 'info',
// presumably resolving the labels through 'patchBuffer' — verify in the .cpp.
250 void copyToStubInfo(StructureStubInfo& info, LinkBuffer &patchBuffer);
// Labels recorded while emitting a call site, kept so the call can later be
// linked/patched. NOTE(review): the closing brace of this struct is elided
// from this listing.
253 struct StructureStubCompilationInfo {
254 MacroAssembler::DataLabelPtr hotPathBegin;
255 MacroAssembler::Call hotPathOther;
256 MacroAssembler::Call callReturnLocation;
257 CallLinkInfo::CallType callType;
258 unsigned bytecodeIndex;
// Associates a method_check opcode with a property-access record.
// NOTE(review): the constructor's body braces and the struct's closing brace
// are elided from this listing.
261 struct MethodCallCompilationInfo {
262 MethodCallCompilationInfo(unsigned bytecodeIndex, unsigned propertyAccessIndex)
263 : bytecodeIndex(bytecodeIndex)
264 , propertyAccessIndex(propertyAccessIndex)
268 unsigned bytecodeIndex;
269 MacroAssembler::DataLabelPtr structureToCompare;
// Presumably an index into the JIT's property-access info list — verify
// against the callers.
270 unsigned propertyAccessIndex;
// Free-function declarations: repatch an already-emitted call, located via
// the return address it pushes.
273 // Near calls can only be patched to other JIT code, regular calls can be patched to JIT code or relinked to stub functions.
274 void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
275 void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
276 void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
278 class JIT : private JSInterfaceJIT {
279 friend class JITStubCall;
280 friend struct PropertyStubCompilationInfo;
282 using MacroAssembler::Jump;
283 using MacroAssembler::JumpList;
284 using MacroAssembler::Label;
// Placeholder values initially emitted into property-access fast paths;
// presumably overwritten when the access is patched to a real structure and
// offset (the existing comment below documents the put-offset constraint).
286 static const int patchGetByIdDefaultStructure = -1;
287 static const int patchGetByIdDefaultOffset = 0;
288 // Magic number - initial offset cannot be representable as a signed 8bit value, or the X86Assembler
289 // will compress the displacement, and we may not be able to fit a patched offset.
290 static const int patchPutByIdDefaultOffset = 256;
293 static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock, JITCompilationEffort effort, CodePtr* functionEntryArityCheck = 0)
295 return JIT(globalData, codeBlock).privateCompile(functionEntryArityCheck, effort);
298 static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
300 JIT jit(globalData, codeBlock);
301 jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
302 jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
305 static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
307 JIT jit(globalData, codeBlock);
308 jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
309 jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
311 static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
313 JIT jit(globalData, codeBlock);
314 jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
315 jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
317 static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
319 JIT jit(globalData, codeBlock);
320 jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
321 jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
324 static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
326 JIT jit(globalData, codeBlock);
327 jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
328 jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
331 static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
333 JIT jit(globalData, codeBlock);
334 jit.m_bytecodeOffset = stubInfo->bytecodeIndex;
335 jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
338 static PassRefPtr<ExecutableMemoryHandle> compileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure *trampolines)
340 if (!globalData->canUseJIT())
342 JIT jit(globalData, 0);
343 return jit.privateCompileCTIMachineTrampolines(globalData, trampolines);
346 static CodeRef compileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
348 if (!globalData->canUseJIT()) {
350 return CodeRef::createLLIntCodeRef(llint_native_call_trampoline);
355 JIT jit(globalData, 0);
356 return jit.privateCompileCTINativeCall(globalData, func);
359 static void resetPatchGetById(RepatchBuffer&, StructureStubInfo*);
360 static void resetPatchPutById(RepatchBuffer&, StructureStubInfo*);
361 static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
362 static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
363 static void patchMethodCallProto(JSGlobalData&, CodeBlock* codeblock, MethodCallLinkInfo&, StructureStubInfo&, JSObject*, Structure*, JSObject*, ReturnAddressPtr);
365 static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
367 JIT jit(globalData, codeBlock);
368 return jit.privateCompilePatchGetArrayLength(returnAddress);
371 static void linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, JSGlobalData*, CodeSpecializationKind);
374 JIT(JSGlobalData*, CodeBlock* = 0);
376 void privateCompileMainPass();
377 void privateCompileLinkPass();
378 void privateCompileSlowCases();
379 JITCode privateCompile(CodePtr* functionEntryArityCheck, JITCompilationEffort);
380 void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
381 void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
382 void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
383 void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
384 void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
385 void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);
387 PassRefPtr<ExecutableMemoryHandle> privateCompileCTIMachineTrampolines(JSGlobalData*, TrampolineStructure*);
388 Label privateCompileCTINativeCall(JSGlobalData*, bool isConstruct = false);
389 CodeRef privateCompileCTINativeCall(JSGlobalData*, NativeFunction);
390 void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
392 static bool isDirectPutById(StructureStubInfo*);
394 void addSlowCase(Jump);
395 void addSlowCase(JumpList);
397 void addJump(Jump, int);
398 void emitJumpSlowToHot(Jump, int);
400 void compileOpCall(OpcodeID, Instruction*, unsigned callLinkInfoIndex);
401 void compileOpCallSlowCase(OpcodeID, Instruction*, Vector<SlowCaseEntry>::iterator&, unsigned callLinkInfoIndex);
402 void compileLoadVarargs(Instruction*);
403 void compileCallEval();
404 void compileCallEvalSlowCase(Vector<SlowCaseEntry>::iterator&);
406 enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
407 void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
408 bool isOperandConstantImmediateDouble(unsigned src);
410 void emitLoadDouble(int index, FPRegisterID value);
411 void emitLoadInt32ToDouble(int index, FPRegisterID value);
412 Jump emitJumpIfNotObject(RegisterID structureReg);
413 Jump emitJumpIfNotType(RegisterID baseReg, RegisterID scratchReg, JSType);
415 void testPrototype(JSValue, JumpList& failureCases);
417 enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterImmediates };
418 // value register in write barrier is used before any scratch registers
419 // so may safely be the same as either of the scratch registers.
420 void emitWriteBarrier(RegisterID owner, RegisterID valueTag, RegisterID scratch, RegisterID scratch2, WriteBarrierMode, WriteBarrierUseKind);
421 void emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode, WriteBarrierUseKind);
423 template<typename ClassType, bool destructor, typename StructureType> void emitAllocateBasicJSObject(StructureType, RegisterID result, RegisterID storagePtr);
424 void emitAllocateBasicStorage(size_t, RegisterID result, RegisterID storagePtr);
425 template<typename T> void emitAllocateJSFinalObject(T structure, RegisterID result, RegisterID storagePtr);
426 void emitAllocateJSFunction(FunctionExecutable*, RegisterID scopeChain, RegisterID result, RegisterID storagePtr);
427 void emitAllocateJSArray(unsigned valuesRegister, unsigned length, RegisterID cellResult, RegisterID storageResult, RegisterID storagePtr);
429 #if ENABLE(VALUE_PROFILER)
430 // This assumes that the value to profile is in regT0 and that regT3 is available for
432 void emitValueProfilingSite(ValueProfile*);
433 void emitValueProfilingSite(unsigned bytecodeOffset);
434 void emitValueProfilingSite();
436 void emitValueProfilingSite(unsigned) { }
437 void emitValueProfilingSite() { }
440 #if USE(JSVALUE32_64)
441 bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);
443 void emitLoadTag(int index, RegisterID tag);
444 void emitLoadPayload(int index, RegisterID payload);
446 void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
447 void emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
448 void emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2);
450 void emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
451 void emitStore(int index, const JSValue constant, RegisterID base = callFrameRegister);
452 void emitStoreInt32(int index, RegisterID payload, bool indexIsInt32 = false);
453 void emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32 = false);
454 void emitStoreAndMapInt32(int index, RegisterID tag, RegisterID payload, bool indexIsInt32, size_t opcodeLength);
455 void emitStoreCell(int index, RegisterID payload, bool indexIsCell = false);
456 void emitStoreBool(int index, RegisterID payload, bool indexIsBool = false);
457 void emitStoreDouble(int index, FPRegisterID value);
459 bool isLabeled(unsigned bytecodeOffset);
460 void map(unsigned bytecodeOffset, int virtualRegisterIndex, RegisterID tag, RegisterID payload);
461 void unmap(RegisterID);
463 bool isMapped(int virtualRegisterIndex);
464 bool getMappedPayload(int virtualRegisterIndex, RegisterID& payload);
465 bool getMappedTag(int virtualRegisterIndex, RegisterID& tag);
467 void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex);
468 void emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag);
470 void compileGetByIdHotPath();
471 void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
472 void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
473 void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
474 void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset);
475 void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, size_t cachedOffset);
477 // Arithmetic opcode helpers
478 void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
479 void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
480 void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
482 #if CPU(ARM_TRADITIONAL)
484 static const int sequenceOpCallInstructionSpace = 12;
485 static const int sequenceOpCallConstantSpace = 2;
486 // sequenceMethodCheck
487 static const int sequenceMethodCheckInstructionSpace = 40;
488 static const int sequenceMethodCheckConstantSpace = 6;
489 // sequenceGetByIdHotPath
490 static const int sequenceGetByIdHotPathInstructionSpace = 36;
491 static const int sequenceGetByIdHotPathConstantSpace = 4;
492 // sequenceGetByIdSlowCase
493 static const int sequenceGetByIdSlowCaseInstructionSpace = 64;
494 static const int sequenceGetByIdSlowCaseConstantSpace = 4;
496 static const int sequencePutByIdInstructionSpace = 36;
497 static const int sequencePutByIdConstantSpace = 4;
500 static const int sequenceOpCallInstructionSpace = 12;
501 static const int sequenceOpCallConstantSpace = 2;
502 // sequenceMethodCheck
503 static const int sequenceMethodCheckInstructionSpace = 40;
504 static const int sequenceMethodCheckConstantSpace = 6;
505 // sequenceGetByIdHotPath
506 static const int sequenceGetByIdHotPathInstructionSpace = 36;
507 static const int sequenceGetByIdHotPathConstantSpace = 5;
508 // sequenceGetByIdSlowCase
509 static const int sequenceGetByIdSlowCaseInstructionSpace = 38;
510 static const int sequenceGetByIdSlowCaseConstantSpace = 4;
512 static const int sequencePutByIdInstructionSpace = 36;
513 static const int sequencePutByIdConstantSpace = 5;
516 #else // USE(JSVALUE32_64)
517 void emitGetVirtualRegister(int src, RegisterID dst);
518 void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
519 void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
520 void emitStoreCell(unsigned dst, RegisterID payload, bool /* only used in JSValue32_64 */ = false)
522 emitPutVirtualRegister(dst, payload);
525 int32_t getConstantOperandImmediateInt(unsigned src);
527 void killLastResultRegister();
529 Jump emitJumpIfJSCell(RegisterID);
530 Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
531 void emitJumpSlowCaseIfJSCell(RegisterID);
532 Jump emitJumpIfNotJSCell(RegisterID);
533 void emitJumpSlowCaseIfNotJSCell(RegisterID);
534 void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
535 Jump emitJumpIfImmediateInteger(RegisterID);
536 Jump emitJumpIfNotImmediateInteger(RegisterID);
537 Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
538 void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
539 void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
540 void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
542 void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
543 void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);
545 void emitTagAsBoolImmediate(RegisterID reg);
546 void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
547 void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
549 void compileGetByIdHotPath(int baseVReg, Identifier*);
550 void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
551 void compileGetDirectOffset(RegisterID base, RegisterID result, size_t cachedOffset);
552 void compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset);
553 void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch);
554 void compilePutDirectOffset(RegisterID base, RegisterID value, size_t cachedOffset);
556 #endif // USE(JSVALUE32_64)
// Helpers for emitting fixed-size instruction sequences when the assembler
// maintains a constant pool (so the pool cannot be flushed mid-sequence).
// NOTE(review): the #else and #endif that originally separated the two sets
// of macro definitions below are elided from this listing; as shown, the
// second (no-op) set would redefine the first. Confirm against the full
// header.
558 #if (defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL)
559 #define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace); } while (false)
560 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace, dst); } while (false)
561 #define END_UNINTERRUPTED_SEQUENCE(name) END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, 0)
563 void beginUninterruptedSequence(int, int);
564 void endUninterruptedSequence(int, int, int);
567 #define BEGIN_UNINTERRUPTED_SEQUENCE(name)
568 #define END_UNINTERRUPTED_SEQUENCE(name)
569 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst)
572 void emit_compareAndJump(OpcodeID, unsigned op1, unsigned op2, unsigned target, RelationalCondition);
573 void emit_compareAndJumpSlow(unsigned op1, unsigned op2, unsigned target, DoubleCondition, int (JIT_STUB *stub)(STUB_ARGS_DECLARATION), bool invert, Vector<SlowCaseEntry>::iterator&);
575 void emit_op_add(Instruction*);
576 void emit_op_bitand(Instruction*);
577 void emit_op_bitor(Instruction*);
578 void emit_op_bitxor(Instruction*);
579 void emit_op_call(Instruction*);
580 void emit_op_call_eval(Instruction*);
581 void emit_op_call_varargs(Instruction*);
582 void emit_op_call_put_result(Instruction*);
583 void emit_op_catch(Instruction*);
584 void emit_op_construct(Instruction*);
585 void emit_op_create_this(Instruction*);
586 void emit_op_convert_this(Instruction*);
587 void emit_op_create_arguments(Instruction*);
588 void emit_op_debug(Instruction*);
589 void emit_op_del_by_id(Instruction*);
590 void emit_op_div(Instruction*);
591 void emit_op_end(Instruction*);
592 void emit_op_enter(Instruction*);
593 void emit_op_create_activation(Instruction*);
594 void emit_op_eq(Instruction*);
595 void emit_op_eq_null(Instruction*);
596 void emit_op_get_by_id(Instruction*);
597 void emit_op_get_arguments_length(Instruction*);
598 void emit_op_get_by_val(Instruction*);
599 void emit_op_get_argument_by_val(Instruction*);
600 void emit_op_get_by_pname(Instruction*);
601 void emit_op_get_global_var(Instruction*);
602 void emit_op_get_scoped_var(Instruction*);
603 void emit_op_init_lazy_reg(Instruction*);
604 void emit_op_check_has_instance(Instruction*);
605 void emit_op_instanceof(Instruction*);
606 void emit_op_is_undefined(Instruction*);
607 void emit_op_is_boolean(Instruction*);
608 void emit_op_is_number(Instruction*);
609 void emit_op_is_string(Instruction*);
610 void emit_op_jeq_null(Instruction*);
611 void emit_op_jfalse(Instruction*);
612 void emit_op_jmp(Instruction*);
613 void emit_op_jmp_scopes(Instruction*);
614 void emit_op_jneq_null(Instruction*);
615 void emit_op_jneq_ptr(Instruction*);
616 void emit_op_jless(Instruction*);
617 void emit_op_jlesseq(Instruction*);
618 void emit_op_jgreater(Instruction*);
619 void emit_op_jgreatereq(Instruction*);
620 void emit_op_jnless(Instruction*);
621 void emit_op_jnlesseq(Instruction*);
622 void emit_op_jngreater(Instruction*);
623 void emit_op_jngreatereq(Instruction*);
624 void emit_op_jtrue(Instruction*);
625 void emit_op_loop(Instruction*);
626 void emit_op_loop_hint(Instruction*);
627 void emit_op_loop_if_less(Instruction*);
628 void emit_op_loop_if_lesseq(Instruction*);
629 void emit_op_loop_if_greater(Instruction*);
630 void emit_op_loop_if_greatereq(Instruction*);
631 void emit_op_loop_if_true(Instruction*);
632 void emit_op_loop_if_false(Instruction*);
633 void emit_op_lshift(Instruction*);
634 void emit_op_method_check(Instruction*);
635 void emit_op_mod(Instruction*);
636 void emit_op_mov(Instruction*);
637 void emit_op_mul(Instruction*);
638 void emit_op_negate(Instruction*);
639 void emit_op_neq(Instruction*);
640 void emit_op_neq_null(Instruction*);
641 void emit_op_new_array(Instruction*);
642 void emit_op_new_array_buffer(Instruction*);
643 void emit_op_new_func(Instruction*);
644 void emit_op_new_func_exp(Instruction*);
645 void emit_op_new_object(Instruction*);
646 void emit_op_new_regexp(Instruction*);
647 void emit_op_get_pnames(Instruction*);
648 void emit_op_next_pname(Instruction*);
649 void emit_op_not(Instruction*);
650 void emit_op_nstricteq(Instruction*);
651 void emit_op_pop_scope(Instruction*);
652 void emit_op_post_dec(Instruction*);
653 void emit_op_post_inc(Instruction*);
654 void emit_op_pre_dec(Instruction*);
655 void emit_op_pre_inc(Instruction*);
656 void emit_op_profile_did_call(Instruction*);
657 void emit_op_profile_will_call(Instruction*);
658 void emit_op_push_new_scope(Instruction*);
659 void emit_op_push_scope(Instruction*);
660 void emit_op_put_by_id(Instruction*);
661 void emit_op_put_by_index(Instruction*);
662 void emit_op_put_by_val(Instruction*);
663 void emit_op_put_getter_setter(Instruction*);
664 void emit_op_put_global_var(Instruction*);
665 void emit_op_put_scoped_var(Instruction*);
666 void emit_op_resolve(Instruction*);
667 void emit_op_resolve_base(Instruction*);
668 void emit_op_ensure_property_exists(Instruction*);
669 void emit_op_resolve_global(Instruction*, bool dynamic = false);
670 void emit_op_resolve_global_dynamic(Instruction*);
671 void emit_op_resolve_skip(Instruction*);
672 void emit_op_resolve_with_base(Instruction*);
673 void emit_op_resolve_with_this(Instruction*);
674 void emit_op_ret(Instruction*);
675 void emit_op_ret_object_or_this(Instruction*);
676 void emit_op_rshift(Instruction*);
677 void emit_op_strcat(Instruction*);
678 void emit_op_stricteq(Instruction*);
679 void emit_op_sub(Instruction*);
680 void emit_op_switch_char(Instruction*);
681 void emit_op_switch_imm(Instruction*);
682 void emit_op_switch_string(Instruction*);
683 void emit_op_tear_off_activation(Instruction*);
684 void emit_op_tear_off_arguments(Instruction*);
685 void emit_op_throw(Instruction*);
686 void emit_op_throw_reference_error(Instruction*);
687 void emit_op_to_jsnumber(Instruction*);
688 void emit_op_to_primitive(Instruction*);
689 void emit_op_unexpected_load(Instruction*);
690 void emit_op_urshift(Instruction*);
// Slow-path code generators. Each is handed an iterator over the
// SlowCaseEntry records queued by the matching emit_op_* fast path,
// and emits the out-of-line code those slow-case jumps target.
692 void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
693 void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
694 void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
695 void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
696 void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
697 void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
698 void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
699 void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
700 void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
701 void emitSlow_op_create_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
702 void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
703 void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
704 void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
705 void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
706 void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
707 void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
708 void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
709 void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
710 void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
711 void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
712 void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
713 void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
714 void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
715 void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
716 void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
717 void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
718 void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
719 void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
720 void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
// The op_loop_if_* slow paths below are forwarded to the matching
// emitSlow_op_j* generators by the inline definitions after the class.
721 void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
722 void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
723 void emitSlow_op_loop_if_greater(Instruction*, Vector<SlowCaseEntry>::iterator&);
724 void emitSlow_op_loop_if_greatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
725 void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
726 void emitSlow_op_loop_if_false(Instruction*, Vector<SlowCaseEntry>::iterator&);
727 void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
728 void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
729 void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
730 void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
731 void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
732 void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
733 void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
734 void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
735 void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
736 void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
737 void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
738 void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
739 void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
740 void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
741 void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
742 void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
743 void emitSlow_op_resolve_global_dynamic(Instruction*, Vector<SlowCaseEntry>::iterator&);
744 void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
745 void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
746 void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
747 void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
748 void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
749 void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
750 void emitSlow_op_new_func(Instruction*, Vector<SlowCaseEntry>::iterator&);
751 void emitSlow_op_new_func_exp(Instruction*, Vector<SlowCaseEntry>::iterator&);
752 void emitSlow_op_new_array(Instruction*, Vector<SlowCaseEntry>::iterator&);
// Shared right-shift codegen; isUnsigned selects logical (>>>) vs
// arithmetic (>>) behavior. Presumably shared by the rshift/urshift
// emitters above — confirm at the call sites.
754 void emitRightShift(Instruction*, bool isUnsigned);
755 void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);
757 /* This function is deprecated. */
// Copies a JIT stub argument into dst. Deprecated per the comment
// above; prefer the non-stub-arg mechanisms elsewhere in this class.
758 void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);
// Initializes a virtual register slot (e.g. to a known default value).
760 void emitInitRegister(unsigned dst);
// Accessors for CallFrame header slots: the emitPut* variants store a
// register or immediate into the given header entry; the emitGetFrom*
// variants load a pointer-sized or 32-bit value out of it. `from`
// defaults to the current callFrameRegister.
762 void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
763 void emitPutCellToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
764 void emitPutIntToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
765 void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
766 void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
767 void emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
// Compile-time queries about a bytecode operand (`src` is a virtual
// register index): fetch its constant value, or test whether it is a
// constant immediate int / single-character string.
769 JSValue getConstantOperand(unsigned src);
770 bool isOperandConstantImmediateInt(unsigned src);
771 bool isOperandConstantImmediateChar(unsigned src);
// Retrieves the next queued slow-case jump from the iterator.
// (Body not visible in this extraction — presumably consumes one
// SlowCaseEntry and returns its jump; confirm against full source.)
775 Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
// Binds the recorded slow-case jump to the current code location.
779 void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
781 iter->from.link(this);
// Consumes a slow-case entry that must carry no actual jump (the
// ASSERT below checks the jump was never set).
784 void linkDummySlowCase(Vector<SlowCaseEntry>::iterator& iter)
786 ASSERT(!iter->from.isSet());
// Like linkSlowCase, but only links when the operand in the given
// virtual register might not be a JSCell.
789 void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int virtualRegisterIndex);
// Emits a structure check on the cell in `reg`; the returned Jump is
// taken when the structure does not match.
791 Jump checkStructure(RegisterID reg, Structure* structure);
// Re-establishes the argument-passing register state before calling
// out to a stub (regular and trampoline variants).
793 void restoreArgumentReference();
794 void restoreArgumentReferenceForTrampoline();
795 void updateTopCallFrame();
// Emits a direct call that bypasses the usual stub-call machinery.
797 Call emitNakedCall(CodePtr function = CodePtr());
// Return-address bookkeeping around calls, needed on targets where
// the return address must be explicitly saved/restored.
799 void preserveReturnAddressAfterCall(RegisterID);
800 void restoreReturnAddressBeforeReturn(RegisterID);
801 void restoreReturnAddressBeforeReturn(Address);
803 // Loads the character value of a single character string into dst.
804 void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);
// Where an optimization (tier-up) check may be emitted: at loop back
// edges, or at returns.
806 enum OptimizationCheckKind { LoopOptimizationCheck, RetOptimizationCheck };
// Two variants of emitOptimizationCheck: a real one and a no-op stub.
// These are almost certainly alternate branches of a build-config #if
// whose directives were stripped from this extraction — confirm.
808 void emitOptimizationCheck(OptimizationCheckKind);
810 void emitOptimizationCheck(OptimizationCheckKind) { }
813 void emitTimeoutCheck();
// Debug aid: prints the types of the two source operands.
815 void printBytecodeOperandTypes(unsigned src1, unsigned src2);
// Sampling/profiling hooks, each guarded by its own build-time flag.
// (Matching #else/#endif directives are not visible in this
// extraction; e.g. line 834 below is the no-op fallback for
// sampleCodeBlock when CODEBLOCK_SAMPLING is disabled.)
818 #if ENABLE(SAMPLING_FLAGS)
819 void setSamplingFlag(int32_t);
820 void clearSamplingFlag(int32_t);
823 #if ENABLE(SAMPLING_COUNTERS)
824 void emitCount(AbstractSamplingCounter&, int32_t = 1);
827 #if ENABLE(OPCODE_SAMPLING)
828 void sampleInstruction(Instruction*, bool = false);
831 #if ENABLE(CODEBLOCK_SAMPLING)
832 void sampleCodeBlock(CodeBlock*);
834 void sampleCodeBlock(CodeBlock*) {}
// Tiering/profiling predicates. Two variants of each appear here:
// one set reads the m_canBeOptimized* / m_shouldEmitProfiling members,
// the other returns fixed values. These are presumably the two sides
// of a build-config #if (directives stripped in this extraction).
838 bool canBeOptimized() { return m_canBeOptimized; }
839 bool canBeOptimizedOrInlined() { return m_canBeOptimizedOrInlined; }
840 bool shouldEmitProfiling() { return m_shouldEmitProfiling; }
// Fallback variants: never optimize, always profile.
842 bool canBeOptimized() { return false; }
843 bool canBeOptimizedOrInlined() { return false; }
844 // Enables use of value profiler with tiered compilation turned off,
845 // in which case all code gets profiled.
846 bool shouldEmitProfiling() { return true; }
// --- Data members ---
// Non-owning pointers to the compilation context.
849 Interpreter* m_interpreter;
850 JSGlobalData* m_globalData;
851 CodeBlock* m_codeBlock;
// Per-compilation bookkeeping: pending calls, per-bytecode labels,
// inline-cache compilation records, and jump tables.
853 Vector<CallRecord> m_calls;
854 Vector<Label> m_labels;
855 Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
856 Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
857 Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
858 Vector<JumpTable> m_jmpTable;
// Offset of the bytecode instruction currently being compiled.
860 unsigned m_bytecodeOffset;
861 Vector<SlowCaseEntry> m_slowCases;
862 Vector<SwitchRecord> m_switches;
// Running indices into the per-kind info tables during codegen.
864 unsigned m_propertyAccessInstructionIndex;
865 unsigned m_globalResolveInfoIndex;
866 unsigned m_callLinkInfoIndex;
868 #if USE(JSVALUE32_64)
// 32-bit value representation only: tracks which virtual register's
// tag/payload are currently mapped into registers, to avoid reloads.
869 unsigned m_jumpTargetIndex;
870 unsigned m_mappedBytecodeOffset;
871 int m_mappedVirtualRegisterIndex;
872 RegisterID m_mappedTag;
873 RegisterID m_mappedPayload;
875 int m_lastResultBytecodeRegister;
877 unsigned m_jumpTargetsPosition;
880 #if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
// Marks a region that must not be split by constant-pool flushes.
881 Label m_uninterruptedInstructionSequenceBegin;
882 int m_uninterruptedConstantSequenceBegin;
885 WeakRandom m_randomGenerator;
886 static CodeRef stringGetByValStubGenerator(JSGlobalData*);
888 #if ENABLE(VALUE_PROFILER)
889 bool m_canBeOptimized;
890 bool m_canBeOptimizedOrInlined;
891 bool m_shouldEmitProfiling;
// JIT_CLASS_ALIGNMENT (see file header) force-aligns the class to
// stabilize performance against size-dependent fluctuations.
893 } JIT_CLASS_ALIGNMENT;
// op_loop is an unconditional backward branch; it reuses op_jmp's codegen.
895 inline void JIT::emit_op_loop(Instruction* currentInstruction)
898 emit_op_jmp(currentInstruction);
// op_loop_hint marks a loop back edge: emit the loop-kind optimization
// (tier-up) check there.
901 inline void JIT::emit_op_loop_hint(Instruction*)
903 emitOptimizationCheck(LoopOptimizationCheck);
// Loop-conditional branch; shares codegen with the plain jtrue opcode.
906 inline void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
909 emit_op_jtrue(currentInstruction);
// Slow path forwards to the jtrue slow path.
912 inline void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
914 emitSlow_op_jtrue(currentInstruction, iter);
// Loop-conditional branch; shares codegen with the plain jfalse opcode.
917 inline void JIT::emit_op_loop_if_false(Instruction* currentInstruction)
920 emit_op_jfalse(currentInstruction);
// Slow path forwards to the jfalse slow path.
923 inline void JIT::emitSlow_op_loop_if_false(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
925 emitSlow_op_jfalse(currentInstruction, iter);
// Loop-conditional branch; shares codegen with the plain jless opcode.
928 inline void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
931 emit_op_jless(currentInstruction);
// Slow path forwards to the jless slow path.
934 inline void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
936 emitSlow_op_jless(currentInstruction, iter);
// Loop-conditional branch; shares codegen with the plain jlesseq opcode.
939 inline void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
942 emit_op_jlesseq(currentInstruction);
// Slow path forwards to the jlesseq slow path.
945 inline void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
947 emitSlow_op_jlesseq(currentInstruction, iter);
// Loop-conditional branch; shares codegen with the plain jgreater opcode.
950 inline void JIT::emit_op_loop_if_greater(Instruction* currentInstruction)
953 emit_op_jgreater(currentInstruction);
// Slow path forwards to the jgreater slow path.
956 inline void JIT::emitSlow_op_loop_if_greater(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
958 emitSlow_op_jgreater(currentInstruction, iter);
// Loop-conditional branch; shares codegen with the plain jgreatereq opcode.
961 inline void JIT::emit_op_loop_if_greatereq(Instruction* currentInstruction)
964 emit_op_jgreatereq(currentInstruction);
// Slow path forwards to the jgreatereq slow path.
967 inline void JIT::emitSlow_op_loop_if_greatereq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
969 emitSlow_op_jgreatereq(currentInstruction, iter);
974 #endif // ENABLE(JIT)