/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
31 // Verbose logging of code generation
32 #define ENABLE_JIT_VERBOSE 0
33 // Verbose logging for OSR-related code.
34 #define ENABLE_JIT_VERBOSE_OSR 0
36 // We've run into some problems where changing the size of the class JIT leads to
37 // performance fluctuations. Try forcing alignment in an attempt to stabalize this.
39 #define JIT_CLASS_ALIGNMENT __attribute__ ((aligned (32)))
41 #define JIT_CLASS_ALIGNMENT
44 #define ASSERT_JIT_OFFSET_UNUSED(variable, actual, expected) ASSERT_WITH_MESSAGE_UNUSED(variable, actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
45 #define ASSERT_JIT_OFFSET(actual, expected) ASSERT_WITH_MESSAGE(actual == expected, "JIT Offset \"%s\" should be %d, not %d.\n", #expected, static_cast<int>(expected), static_cast<int>(actual));
47 #include "CodeBlock.h"
48 #include "CompactJITCodeMap.h"
49 #include "Interpreter.h"
50 #include "JSInterfaceJIT.h"
53 #include <bytecode/SamplingTool.h>
// Forward declarations: these types are referenced only by pointer/reference
// in this header, so their full definitions are not required here.
class FunctionExecutable;
class JSPropertyNameIterator;

struct PolymorphicAccessStructureList;
struct SimpleJumpTable;
struct StringJumpTable;
struct StructureStubInfo;
76 MacroAssembler::Call from;
77 unsigned bytecodeOffset;
84 CallRecord(MacroAssembler::Call from, unsigned bytecodeOffset, void* to = 0)
86 , bytecodeOffset(bytecodeOffset)
93 MacroAssembler::Jump from;
94 unsigned toBytecodeOffset;
96 JumpTable(MacroAssembler::Jump f, unsigned t)
103 struct SlowCaseEntry {
104 MacroAssembler::Jump from;
108 SlowCaseEntry(MacroAssembler::Jump f, unsigned t, unsigned h = 0)
116 struct SwitchRecord {
126 SimpleJumpTable* simpleJumpTable;
127 StringJumpTable* stringJumpTable;
130 unsigned bytecodeOffset;
131 unsigned defaultOffset;
133 SwitchRecord(SimpleJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset, Type type)
135 , bytecodeOffset(bytecodeOffset)
136 , defaultOffset(defaultOffset)
138 this->jumpTable.simpleJumpTable = jumpTable;
141 SwitchRecord(StringJumpTable* jumpTable, unsigned bytecodeOffset, unsigned defaultOffset)
143 , bytecodeOffset(bytecodeOffset)
144 , defaultOffset(defaultOffset)
146 this->jumpTable.stringJumpTable = jumpTable;
150 struct PropertyStubCompilationInfo {
151 unsigned bytecodeIndex;
152 MacroAssembler::Call callReturnLocation;
153 MacroAssembler::Label hotPathBegin;
156 PropertyStubCompilationInfo()
157 : bytecodeIndex(std::numeric_limits<unsigned>::max())
163 struct StructureStubCompilationInfo {
164 MacroAssembler::DataLabelPtr hotPathBegin;
165 MacroAssembler::Call hotPathOther;
166 MacroAssembler::Call callReturnLocation;
168 unsigned bytecodeIndex;
171 struct MethodCallCompilationInfo {
172 MethodCallCompilationInfo(unsigned bytecodeIndex, unsigned propertyAccessIndex)
173 : bytecodeIndex(bytecodeIndex)
174 , propertyAccessIndex(propertyAccessIndex)
178 unsigned bytecodeIndex;
179 MacroAssembler::DataLabelPtr structureToCompare;
180 unsigned propertyAccessIndex;
183 // Near calls can only be patched to other JIT code, regular calls can be patched to JIT code or relinked to stub functions.
184 void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
185 void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
186 void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
188 class JIT : private JSInterfaceJIT {
189 friend class JITStubCall;
191 using MacroAssembler::Jump;
192 using MacroAssembler::JumpList;
193 using MacroAssembler::Label;
195 static const int patchGetByIdDefaultStructure = -1;
196 static const int patchGetByIdDefaultOffset = 0;
197 // Magic number - initial offset cannot be representable as a signed 8bit value, or the X86Assembler
198 // will compress the displacement, and we may not be able to fit a patched offset.
199 static const int patchPutByIdDefaultOffset = 256;
202 static JITCode compile(JSGlobalData* globalData, CodeBlock* codeBlock, CodePtr* functionEntryArityCheck = 0)
204 return JIT(globalData, codeBlock).privateCompile(functionEntryArityCheck);
207 static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
209 JIT jit(globalData, codeBlock);
210 jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
213 static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
215 JIT jit(globalData, codeBlock);
216 jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
218 static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
220 JIT jit(globalData, codeBlock);
221 jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
223 static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
225 JIT jit(globalData, codeBlock);
226 jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
229 static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
231 JIT jit(globalData, codeBlock);
232 jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
235 static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
237 JIT jit(globalData, codeBlock);
238 jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
241 static PassRefPtr<ExecutableMemoryHandle> compileCTIMachineTrampolines(JSGlobalData* globalData, TrampolineStructure *trampolines)
243 if (!globalData->canUseJIT())
245 JIT jit(globalData, 0);
246 return jit.privateCompileCTIMachineTrampolines(globalData, trampolines);
249 static CodeRef compileCTINativeCall(JSGlobalData* globalData, NativeFunction func)
251 if (!globalData->canUseJIT())
253 JIT jit(globalData, 0);
254 return jit.privateCompileCTINativeCall(globalData, func);
257 static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
258 static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
259 static void patchMethodCallProto(JSGlobalData&, CodeBlock* codeblock, MethodCallLinkInfo&, JSObject*, Structure*, JSObject*, ReturnAddressPtr);
261 static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
263 JIT jit(globalData, codeBlock);
264 return jit.privateCompilePatchGetArrayLength(returnAddress);
267 static void linkFor(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, int callerArgCount, JSGlobalData*, CodeSpecializationKind);
271 DataLabelPtr storeLocation;
274 JSRInfo(DataLabelPtr storeLocation, Label targetLocation)
275 : storeLocation(storeLocation)
276 , target(targetLocation)
281 JIT(JSGlobalData*, CodeBlock* = 0);
283 void privateCompileMainPass();
284 void privateCompileLinkPass();
285 void privateCompileSlowCases();
286 JITCode privateCompile(CodePtr* functionEntryArityCheck);
287 void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
288 void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
289 void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
290 void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
291 void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
292 void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);
294 PassRefPtr<ExecutableMemoryHandle> privateCompileCTIMachineTrampolines(JSGlobalData*, TrampolineStructure*);
295 Label privateCompileCTINativeCall(JSGlobalData*, bool isConstruct = false);
296 CodeRef privateCompileCTINativeCall(JSGlobalData*, NativeFunction);
297 void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
299 void addSlowCase(Jump);
300 void addSlowCase(JumpList);
302 void addJump(Jump, int);
303 void emitJumpSlowToHot(Jump, int);
305 void compileOpCall(OpcodeID, Instruction* instruction, unsigned callLinkInfoIndex);
306 void compileOpCallVarargs(Instruction* instruction);
307 void compileOpCallInitializeCallFrame();
308 void compileOpCallSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex, OpcodeID opcodeID);
309 void compileOpCallVarargsSlowCase(Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter);
311 enum CompileOpStrictEqType { OpStrictEq, OpNStrictEq };
312 void compileOpStrictEq(Instruction* instruction, CompileOpStrictEqType type);
313 bool isOperandConstantImmediateDouble(unsigned src);
315 void emitLoadDouble(unsigned index, FPRegisterID value);
316 void emitLoadInt32ToDouble(unsigned index, FPRegisterID value);
317 Jump emitJumpIfNotObject(RegisterID structureReg);
318 Jump emitJumpIfNotType(RegisterID baseReg, RegisterID scratchReg, JSType);
320 void testPrototype(JSValue, JumpList& failureCases);
322 enum WriteBarrierMode { UnconditionalWriteBarrier, ShouldFilterImmediates };
323 // value register in write barrier is used before any scratch registers
324 // so may safely be the same as either of the scratch registers.
325 void emitWriteBarrier(RegisterID owner, RegisterID valueTag, RegisterID scratch, RegisterID scratch2, WriteBarrierMode, WriteBarrierUseKind);
326 void emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode, WriteBarrierUseKind);
328 template<typename ClassType, typename StructureType> void emitAllocateBasicJSObject(StructureType, void* vtable, RegisterID result, RegisterID storagePtr);
329 template<typename T> void emitAllocateJSFinalObject(T structure, RegisterID result, RegisterID storagePtr);
330 void emitAllocateJSFunction(FunctionExecutable*, RegisterID scopeChain, RegisterID result, RegisterID storagePtr);
332 enum ValueProfilingSiteKind { FirstProfilingSite, SubsequentProfilingSite };
333 #if ENABLE(VALUE_PROFILER)
334 // This assumes that the value to profile is in regT0 and that regT3 is available for
336 void emitValueProfilingSite(ValueProfilingSiteKind);
338 void emitValueProfilingSite(ValueProfilingSiteKind) { }
341 #if USE(JSVALUE32_64)
342 bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);
344 void emitLoadTag(unsigned index, RegisterID tag);
345 void emitLoadPayload(unsigned index, RegisterID payload);
347 void emitLoad(const JSValue& v, RegisterID tag, RegisterID payload);
348 void emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
349 void emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2);
351 void emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base = callFrameRegister);
352 void emitStore(unsigned index, const JSValue constant, RegisterID base = callFrameRegister);
353 void emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32 = false);
354 void emitStoreInt32(unsigned index, TrustedImm32 payload, bool indexIsInt32 = false);
355 void emitStoreAndMapInt32(unsigned index, RegisterID tag, RegisterID payload, bool indexIsInt32, size_t opcodeLength);
356 void emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell = false);
357 void emitStoreBool(unsigned index, RegisterID payload, bool indexIsBool = false);
358 void emitStoreDouble(unsigned index, FPRegisterID value);
360 bool isLabeled(unsigned bytecodeOffset);
361 void map(unsigned bytecodeOffset, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload);
362 void unmap(RegisterID);
364 bool isMapped(unsigned virtualRegisterIndex);
365 bool getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload);
366 bool getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag);
368 void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex);
369 void emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag);
370 void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, unsigned virtualRegisterIndex);
372 void compileGetByIdHotPath();
373 void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
374 void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
375 void compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset);
376 void compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset);
377 void compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, size_t cachedOffset);
379 // Arithmetic opcode helpers
380 void emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
381 void emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType);
382 void emitBinaryDoubleOp(OpcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters = true, bool op2IsInRegisters = true);
// These architecture-specific values are used to enable patching - see comment on op_put_by_id.
386 static const int patchOffsetPutByIdStructure = 7;
387 static const int patchOffsetPutByIdPropertyMapOffset1 = 22;
388 static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
// These architecture-specific values are used to enable patching - see comment on op_get_by_id.
390 static const int patchOffsetGetByIdStructure = 7;
391 static const int patchOffsetGetByIdBranchToSlowCase = 13;
392 static const int patchOffsetGetByIdPropertyMapOffset1 = 19;
393 static const int patchOffsetGetByIdPropertyMapOffset2 = 22;
394 static const int patchOffsetGetByIdPutResult = 22;
395 #if ENABLE(OPCODE_SAMPLING)
396 static const int patchOffsetGetByIdSlowCaseCall = 37;
398 static const int patchOffsetGetByIdSlowCaseCall = 33;
400 static const int patchOffsetOpCallCompareToJump = 6;
402 static const int patchOffsetMethodCheckProtoObj = 11;
403 static const int patchOffsetMethodCheckProtoStruct = 18;
404 static const int patchOffsetMethodCheckPutFunction = 29;
405 #elif CPU(ARM_TRADITIONAL)
// These architecture-specific values are used to enable patching - see comment on op_put_by_id.
407 static const int patchOffsetPutByIdStructure = 4;
408 static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
409 static const int patchOffsetPutByIdPropertyMapOffset2 = 28;
// These architecture-specific values are used to enable patching - see comment on op_get_by_id.
411 static const int patchOffsetGetByIdStructure = 4;
412 static const int patchOffsetGetByIdBranchToSlowCase = 16;
413 static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
414 static const int patchOffsetGetByIdPropertyMapOffset2 = 28;
415 static const int patchOffsetGetByIdPutResult = 36;
416 #if ENABLE(OPCODE_SAMPLING)
417 #error "OPCODE_SAMPLING is not yet supported"
419 static const int patchOffsetGetByIdSlowCaseCall = 40;
421 static const int patchOffsetOpCallCompareToJump = 12;
423 static const int patchOffsetMethodCheckProtoObj = 12;
424 static const int patchOffsetMethodCheckProtoStruct = 20;
425 static const int patchOffsetMethodCheckPutFunction = 32;
428 static const int sequenceOpCallInstructionSpace = 12;
429 static const int sequenceOpCallConstantSpace = 2;
430 // sequenceMethodCheck
431 static const int sequenceMethodCheckInstructionSpace = 40;
432 static const int sequenceMethodCheckConstantSpace = 6;
433 // sequenceGetByIdHotPath
434 static const int sequenceGetByIdHotPathInstructionSpace = 36;
435 static const int sequenceGetByIdHotPathConstantSpace = 4;
436 // sequenceGetByIdSlowCase
437 static const int sequenceGetByIdSlowCaseInstructionSpace = 56;
438 static const int sequenceGetByIdSlowCaseConstantSpace = 3;
440 static const int sequencePutByIdInstructionSpace = 36;
441 static const int sequencePutByIdConstantSpace = 4;
442 #elif CPU(ARM_THUMB2)
// These architecture-specific values are used to enable patching - see comment on op_put_by_id.
444 static const int patchOffsetPutByIdStructure = 10;
445 static const int patchOffsetPutByIdPropertyMapOffset1 = 36;
446 static const int patchOffsetPutByIdPropertyMapOffset2 = 48;
// These architecture-specific values are used to enable patching - see comment on op_get_by_id.
448 static const int patchOffsetGetByIdStructure = 10;
449 static const int patchOffsetGetByIdBranchToSlowCase = 26;
450 static const int patchOffsetGetByIdPropertyMapOffset1 = 28;
451 static const int patchOffsetGetByIdPropertyMapOffset2 = 30;
452 static const int patchOffsetGetByIdPutResult = 32;
453 #if ENABLE(OPCODE_SAMPLING)
454 #error "OPCODE_SAMPLING is not yet supported"
456 static const int patchOffsetGetByIdSlowCaseCall = 40;
458 static const int patchOffsetOpCallCompareToJump = 16;
460 static const int patchOffsetMethodCheckProtoObj = 24;
461 static const int patchOffsetMethodCheckProtoStruct = 34;
462 static const int patchOffsetMethodCheckPutFunction = 58;
465 static const int sequenceOpCallInstructionSpace = 12;
466 static const int sequenceOpCallConstantSpace = 2;
467 // sequenceMethodCheck
468 static const int sequenceMethodCheckInstructionSpace = 40;
469 static const int sequenceMethodCheckConstantSpace = 6;
470 // sequenceGetByIdHotPath
471 static const int sequenceGetByIdHotPathInstructionSpace = 36;
472 static const int sequenceGetByIdHotPathConstantSpace = 4;
473 // sequenceGetByIdSlowCase
474 static const int sequenceGetByIdSlowCaseInstructionSpace = 40;
475 static const int sequenceGetByIdSlowCaseConstantSpace = 2;
477 static const int sequencePutByIdInstructionSpace = 36;
478 static const int sequencePutByIdConstantSpace = 4;
481 static const int patchOffsetPutByIdStructure = 16;
482 static const int patchOffsetPutByIdPropertyMapOffset1 = 56;
483 static const int patchOffsetPutByIdPropertyMapOffset2 = 72;
484 static const int patchOffsetGetByIdStructure = 16;
485 static const int patchOffsetGetByIdBranchToSlowCase = 48;
486 static const int patchOffsetGetByIdPropertyMapOffset1 = 56;
487 static const int patchOffsetGetByIdPropertyMapOffset2 = 76;
488 static const int patchOffsetGetByIdPutResult = 96;
489 #if ENABLE(OPCODE_SAMPLING)
490 #error "OPCODE_SAMPLING is not yet supported"
492 static const int patchOffsetGetByIdSlowCaseCall = 56;
494 static const int patchOffsetOpCallCompareToJump = 32;
495 static const int patchOffsetMethodCheckProtoObj = 32;
496 static const int patchOffsetMethodCheckProtoStruct = 56;
497 static const int patchOffsetMethodCheckPutFunction = 88;
498 #else // WTF_MIPS_ISA(1)
499 static const int patchOffsetPutByIdStructure = 12;
500 static const int patchOffsetPutByIdPropertyMapOffset1 = 48;
501 static const int patchOffsetPutByIdPropertyMapOffset2 = 64;
502 static const int patchOffsetGetByIdStructure = 12;
503 static const int patchOffsetGetByIdBranchToSlowCase = 44;
504 static const int patchOffsetGetByIdPropertyMapOffset1 = 48;
505 static const int patchOffsetGetByIdPropertyMapOffset2 = 64;
506 static const int patchOffsetGetByIdPutResult = 80;
507 #if ENABLE(OPCODE_SAMPLING)
508 #error "OPCODE_SAMPLING is not yet supported"
510 static const int patchOffsetGetByIdSlowCaseCall = 56;
512 static const int patchOffsetOpCallCompareToJump = 32;
513 static const int patchOffsetMethodCheckProtoObj = 32;
514 static const int patchOffsetMethodCheckProtoStruct = 52;
515 static const int patchOffsetMethodCheckPutFunction = 84;
// These architecture-specific values are used to enable patching - see comment on op_put_by_id.
519 static const int patchOffsetGetByIdStructure = 6;
520 static const int patchOffsetPutByIdPropertyMapOffset = 24;
521 static const int patchOffsetPutByIdStructure = 6;
// These architecture-specific values are used to enable patching - see comment on op_get_by_id.
523 static const int patchOffsetGetByIdBranchToSlowCase = 10;
524 static const int patchOffsetGetByIdPropertyMapOffset = 24;
525 static const int patchOffsetGetByIdPutResult = 24;
528 static const int sequenceOpCallInstructionSpace = 12;
529 static const int sequenceOpCallConstantSpace = 2;
530 // sequenceMethodCheck
531 static const int sequenceMethodCheckInstructionSpace = 40;
532 static const int sequenceMethodCheckConstantSpace = 6;
533 // sequenceGetByIdHotPath
534 static const int sequenceGetByIdHotPathInstructionSpace = 36;
535 static const int sequenceGetByIdHotPathConstantSpace = 5;
536 // sequenceGetByIdSlowCase
537 static const int sequenceGetByIdSlowCaseInstructionSpace = 30;
538 static const int sequenceGetByIdSlowCaseConstantSpace = 3;
540 static const int sequencePutByIdInstructionSpace = 36;
541 static const int sequencePutByIdConstantSpace = 5;
543 static const int patchOffsetGetByIdPropertyMapOffset1 = 20;
544 static const int patchOffsetGetByIdPropertyMapOffset2 = 22;
546 static const int patchOffsetPutByIdPropertyMapOffset1 = 20;
547 static const int patchOffsetPutByIdPropertyMapOffset2 = 26;
549 #if ENABLE(OPCODE_SAMPLING)
550 static const int patchOffsetGetByIdSlowCaseCall = 0; // FIMXE
552 static const int patchOffsetGetByIdSlowCaseCall = 26;
554 static const int patchOffsetOpCallCompareToJump = 4;
556 static const int patchOffsetMethodCheckProtoObj = 12;
557 static const int patchOffsetMethodCheckProtoStruct = 20;
558 static const int patchOffsetMethodCheckPutFunction = 32;
560 #error "JSVALUE32_64 not supported on this platform."
563 #else // USE(JSVALUE32_64)
564 void emitGetVirtualRegister(int src, RegisterID dst);
565 void emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2);
566 void emitPutVirtualRegister(unsigned dst, RegisterID from = regT0);
567 void emitStoreCell(unsigned dst, RegisterID payload, bool /* only used in JSValue32_64 */ = false)
569 emitPutVirtualRegister(dst, payload);
572 int32_t getConstantOperandImmediateInt(unsigned src);
574 void killLastResultRegister();
576 Jump emitJumpIfJSCell(RegisterID);
577 Jump emitJumpIfBothJSCells(RegisterID, RegisterID, RegisterID);
578 void emitJumpSlowCaseIfJSCell(RegisterID);
579 Jump emitJumpIfNotJSCell(RegisterID);
580 void emitJumpSlowCaseIfNotJSCell(RegisterID);
581 void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
582 #if USE(JSVALUE32_64)
583 JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
585 return emitJumpIfImmediateInteger(reg);
588 JIT::Jump emitJumpIfNotImmediateNumber(RegisterID reg)
590 return emitJumpIfNotImmediateInteger(reg);
593 Jump emitJumpIfImmediateInteger(RegisterID);
594 Jump emitJumpIfNotImmediateInteger(RegisterID);
595 Jump emitJumpIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
596 void emitJumpSlowCaseIfNotImmediateInteger(RegisterID);
597 void emitJumpSlowCaseIfNotImmediateNumber(RegisterID);
598 void emitJumpSlowCaseIfNotImmediateIntegers(RegisterID, RegisterID, RegisterID);
600 #if USE(JSVALUE32_64)
601 void emitFastArithDeTagImmediate(RegisterID);
602 Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
604 void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
605 void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);
607 void emitTagAsBoolImmediate(RegisterID reg);
608 void compileBinaryArithOp(OpcodeID, unsigned dst, unsigned src1, unsigned src2, OperandTypes opi);
610 void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes, bool op1HasImmediateIntFastCase, bool op2HasImmediateIntFastCase);
612 void compileBinaryArithOpSlowCase(OpcodeID, Vector<SlowCaseEntry>::iterator&, unsigned dst, unsigned src1, unsigned src2, OperandTypes);
615 void compileGetByIdHotPath(int baseVReg, Identifier*);
616 void compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck = false);
617 void compileGetDirectOffset(RegisterID base, RegisterID result, size_t cachedOffset);
618 void compileGetDirectOffset(JSObject* base, RegisterID result, size_t cachedOffset);
619 void compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch);
620 void compilePutDirectOffset(RegisterID base, RegisterID value, size_t cachedOffset);
// These architecture-specific values are used to enable patching - see comment on op_put_by_id.
624 static const int patchOffsetPutByIdStructure = 10;
625 static const int patchOffsetPutByIdPropertyMapOffset = 31;
// These architecture-specific values are used to enable patching - see comment on op_get_by_id.
627 static const int patchOffsetGetByIdStructure = 10;
628 static const int patchOffsetGetByIdBranchToSlowCase = 20;
629 static const int patchOffsetGetByIdPropertyMapOffset = 28;
630 static const int patchOffsetGetByIdPutResult = 28;
631 #if ENABLE(OPCODE_SAMPLING)
632 static const int patchOffsetGetByIdSlowCaseCall = 64;
634 static const int patchOffsetGetByIdSlowCaseCall = 54;
636 static const int patchOffsetOpCallCompareToJump = 9;
638 static const int patchOffsetMethodCheckProtoObj = 20;
639 static const int patchOffsetMethodCheckProtoStruct = 30;
640 static const int patchOffsetMethodCheckPutFunction = 50;
// These architecture-specific values are used to enable patching - see comment on op_put_by_id.
643 static const int patchOffsetPutByIdStructure = 7;
644 static const int patchOffsetPutByIdPropertyMapOffset = 22;
// These architecture-specific values are used to enable patching - see comment on op_get_by_id.
646 static const int patchOffsetGetByIdStructure = 7;
647 static const int patchOffsetGetByIdBranchToSlowCase = 13;
648 static const int patchOffsetGetByIdPropertyMapOffset = 22;
649 static const int patchOffsetGetByIdPutResult = 22;
650 #if ENABLE(OPCODE_SAMPLING)
651 static const int patchOffsetGetByIdSlowCaseCall = 33;
653 static const int patchOffsetGetByIdSlowCaseCall = 23;
655 static const int patchOffsetOpCallCompareToJump = 6;
657 static const int patchOffsetMethodCheckProtoObj = 11;
658 static const int patchOffsetMethodCheckProtoStruct = 18;
659 static const int patchOffsetMethodCheckPutFunction = 29;
660 #elif CPU(ARM_THUMB2)
// These architecture-specific values are used to enable patching - see comment on op_put_by_id.
662 static const int patchOffsetPutByIdStructure = 10;
663 static const int patchOffsetPutByIdPropertyMapOffset = 46;
// These architecture-specific values are used to enable patching - see comment on op_get_by_id.
665 static const int patchOffsetGetByIdStructure = 10;
666 static const int patchOffsetGetByIdBranchToSlowCase = 26;
667 static const int patchOffsetGetByIdPropertyMapOffset = 46;
668 static const int patchOffsetGetByIdPutResult = 50;
669 #if ENABLE(OPCODE_SAMPLING)
670 static const int patchOffsetGetByIdSlowCaseCall = 0; // FIMXE
672 static const int patchOffsetGetByIdSlowCaseCall = 28;
674 static const int patchOffsetOpCallCompareToJump = 16;
676 static const int patchOffsetMethodCheckProtoObj = 24;
677 static const int patchOffsetMethodCheckProtoStruct = 34;
678 static const int patchOffsetMethodCheckPutFunction = 58;
679 #elif CPU(ARM_TRADITIONAL)
// These architecture-specific values are used to enable patching - see comment on op_put_by_id.
681 static const int patchOffsetPutByIdStructure = 4;
682 static const int patchOffsetPutByIdPropertyMapOffset = 20;
// These architecture-specific values are used to enable patching - see comment on op_get_by_id.
684 static const int patchOffsetGetByIdStructure = 4;
685 static const int patchOffsetGetByIdBranchToSlowCase = 16;
686 static const int patchOffsetGetByIdPropertyMapOffset = 20;
687 static const int patchOffsetGetByIdPutResult = 28;
688 #if ENABLE(OPCODE_SAMPLING)
689 #error "OPCODE_SAMPLING is not yet supported"
691 static const int patchOffsetGetByIdSlowCaseCall = 28;
693 static const int patchOffsetOpCallCompareToJump = 12;
695 static const int patchOffsetMethodCheckProtoObj = 12;
696 static const int patchOffsetMethodCheckProtoStruct = 20;
697 static const int patchOffsetMethodCheckPutFunction = 32;
// Instruction-space / constant-pool-space budgets reserved for each
// "uninterrupted sequence" (consumed via the name##InstructionSpace /
// name##ConstantSpace token-pasting in BEGIN/END_UNINTERRUPTED_SEQUENCE).
// sequenceOpCall
700 static const int sequenceOpCallInstructionSpace = 12;
701 static const int sequenceOpCallConstantSpace = 2;
702 // sequenceMethodCheck
703 static const int sequenceMethodCheckInstructionSpace = 40;
704 static const int sequenceMethodCheckConstantSpace = 6;
705 // sequenceGetByIdHotPath
706 static const int sequenceGetByIdHotPathInstructionSpace = 28;
707 static const int sequenceGetByIdHotPathConstantSpace = 3;
708 // sequenceGetByIdSlowCase
709 static const int sequenceGetByIdSlowCaseInstructionSpace = 32;
710 static const int sequenceGetByIdSlowCaseConstantSpace = 2;
// sequencePutById
712 static const int sequencePutByIdInstructionSpace = 28;
713 static const int sequencePutByIdConstantSpace = 3;
// MIPS patch offsets: first branch is for the newer ISA encoding, the
// #else branch covers WTF_MIPS_ISA(1). Same patching scheme as above.
716 static const int patchOffsetPutByIdStructure = 16;
717 static const int patchOffsetPutByIdPropertyMapOffset = 68;
718 static const int patchOffsetGetByIdStructure = 16;
719 static const int patchOffsetGetByIdBranchToSlowCase = 48;
720 static const int patchOffsetGetByIdPropertyMapOffset = 68;
721 static const int patchOffsetGetByIdPutResult = 88;
722 #if ENABLE(OPCODE_SAMPLING)
723 #error "OPCODE_SAMPLING is not yet supported"
725 static const int patchOffsetGetByIdSlowCaseCall = 40;
727 static const int patchOffsetOpCallCompareToJump = 32;
728 static const int patchOffsetMethodCheckProtoObj = 32;
729 static const int patchOffsetMethodCheckProtoStruct = 56;
730 static const int patchOffsetMethodCheckPutFunction = 88;
731 #else // WTF_MIPS_ISA(1)
732 static const int patchOffsetPutByIdStructure = 12;
733 static const int patchOffsetPutByIdPropertyMapOffset = 60;
734 static const int patchOffsetGetByIdStructure = 12;
735 static const int patchOffsetGetByIdBranchToSlowCase = 44;
736 static const int patchOffsetGetByIdPropertyMapOffset = 60;
737 static const int patchOffsetGetByIdPutResult = 76;
738 #if ENABLE(OPCODE_SAMPLING)
739 #error "OPCODE_SAMPLING is not yet supported"
741 static const int patchOffsetGetByIdSlowCaseCall = 40;
743 static const int patchOffsetOpCallCompareToJump = 32;
744 static const int patchOffsetMethodCheckProtoObj = 32;
745 static const int patchOffsetMethodCheckProtoStruct = 52;
746 static const int patchOffsetMethodCheckPutFunction = 84;
749 #endif // USE(JSVALUE32_64)
// On assemblers with a constant pool the begin/end calls are passed the
// per-sequence space budgets (presumably so a pool flush cannot land inside
// the sequence and perturb the patch offsets - TODO confirm against the
// assembler implementation); otherwise they take no arguments.
751 #if (defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL)
752 #define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace); } while (false)
753 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(name ## InstructionSpace, name ## ConstantSpace, dst); } while (false)
754 #define END_UNINTERRUPTED_SEQUENCE(name) END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, 0)
756 void beginUninterruptedSequence(int, int);
757 void endUninterruptedSequence(int, int, int);
// No constant pool: the sequence bracketing carries no space arguments.
760 #define BEGIN_UNINTERRUPTED_SEQUENCE(name) do { beginUninterruptedSequence(); } while (false)
761 #define END_UNINTERRUPTED_SEQUENCE(name) do { endUninterruptedSequence(); } while (false)
762 #define END_UNINTERRUPTED_SEQUENCE_FOR_PUT(name, dst) do { endUninterruptedSequence(); } while (false)
// Per-opcode code emitters, one per bytecode Instruction; the matching
// slow cases are generated by the emitSlow_op_* family declared later.
765 void emit_compareAndJump(OpcodeID, unsigned op1, unsigned op2, unsigned target, RelationalCondition);
766 void emit_compareAndJumpSlow(unsigned op1, unsigned op2, unsigned target, DoubleCondition, int (JIT_STUB *stub)(STUB_ARGS_DECLARATION), bool invert, Vector<SlowCaseEntry>::iterator&);
768 void emit_op_add(Instruction*);
769 void emit_op_bitand(Instruction*);
770 void emit_op_bitnot(Instruction*);
771 void emit_op_bitor(Instruction*);
772 void emit_op_bitxor(Instruction*);
773 void emit_op_call(Instruction*);
774 void emit_op_call_eval(Instruction*);
775 void emit_op_call_varargs(Instruction*);
776 void emit_op_call_put_result(Instruction*);
777 void emit_op_catch(Instruction*);
778 void emit_op_construct(Instruction*);
779 void emit_op_get_callee(Instruction*);
780 void emit_op_create_this(Instruction*);
781 void emit_op_convert_this(Instruction*);
782 void emit_op_create_arguments(Instruction*);
783 void emit_op_debug(Instruction*);
784 void emit_op_del_by_id(Instruction*);
785 void emit_op_div(Instruction*);
786 void emit_op_end(Instruction*);
787 void emit_op_enter(Instruction*);
788 void emit_op_create_activation(Instruction*);
789 void emit_op_eq(Instruction*);
790 void emit_op_eq_null(Instruction*);
791 void emit_op_get_by_id(Instruction*);
792 void emit_op_get_arguments_length(Instruction*);
793 void emit_op_get_by_val(Instruction*);
794 void emit_op_get_argument_by_val(Instruction*);
795 void emit_op_get_by_pname(Instruction*);
796 void emit_op_get_global_var(Instruction*);
797 void emit_op_get_scoped_var(Instruction*);
798 void emit_op_init_lazy_reg(Instruction*);
799 void emit_op_check_has_instance(Instruction*);
800 void emit_op_instanceof(Instruction*);
801 void emit_op_jeq_null(Instruction*);
802 void emit_op_jfalse(Instruction*);
803 void emit_op_jmp(Instruction*);
804 void emit_op_jmp_scopes(Instruction*);
805 void emit_op_jneq_null(Instruction*);
806 void emit_op_jneq_ptr(Instruction*);
807 void emit_op_jless(Instruction*);
808 void emit_op_jlesseq(Instruction*);
809 void emit_op_jgreater(Instruction*);
810 void emit_op_jgreatereq(Instruction*);
811 void emit_op_jnless(Instruction*);
812 void emit_op_jnlesseq(Instruction*);
813 void emit_op_jngreater(Instruction*);
814 void emit_op_jngreatereq(Instruction*);
815 void emit_op_jsr(Instruction*);
816 void emit_op_jtrue(Instruction*);
817 void emit_op_load_varargs(Instruction*);
818 void emit_op_loop(Instruction*);
819 void emit_op_loop_hint(Instruction*);
820 void emit_op_loop_if_less(Instruction*);
821 void emit_op_loop_if_lesseq(Instruction*);
822 void emit_op_loop_if_greater(Instruction*);
823 void emit_op_loop_if_greatereq(Instruction*);
824 void emit_op_loop_if_true(Instruction*);
825 void emit_op_loop_if_false(Instruction*);
826 void emit_op_lshift(Instruction*);
827 void emit_op_method_check(Instruction*);
828 void emit_op_mod(Instruction*);
829 void emit_op_mov(Instruction*);
830 void emit_op_mul(Instruction*);
831 void emit_op_negate(Instruction*);
832 void emit_op_neq(Instruction*);
833 void emit_op_neq_null(Instruction*);
834 void emit_op_new_array(Instruction*);
835 void emit_op_new_array_buffer(Instruction*);
836 void emit_op_new_func(Instruction*);
837 void emit_op_new_func_exp(Instruction*);
838 void emit_op_new_object(Instruction*);
839 void emit_op_new_regexp(Instruction*);
840 void emit_op_get_pnames(Instruction*);
841 void emit_op_next_pname(Instruction*);
842 void emit_op_not(Instruction*);
843 void emit_op_nstricteq(Instruction*);
844 void emit_op_pop_scope(Instruction*);
845 void emit_op_post_dec(Instruction*);
846 void emit_op_post_inc(Instruction*);
847 void emit_op_pre_dec(Instruction*);
848 void emit_op_pre_inc(Instruction*);
849 void emit_op_profile_did_call(Instruction*);
850 void emit_op_profile_will_call(Instruction*);
851 void emit_op_push_new_scope(Instruction*);
852 void emit_op_push_scope(Instruction*);
853 void emit_op_put_by_id(Instruction*);
854 void emit_op_put_by_index(Instruction*);
855 void emit_op_put_by_val(Instruction*);
856 void emit_op_put_getter(Instruction*);
857 void emit_op_put_global_var(Instruction*);
858 void emit_op_put_scoped_var(Instruction*);
859 void emit_op_put_setter(Instruction*);
860 void emit_op_resolve(Instruction*);
861 void emit_op_resolve_base(Instruction*);
862 void emit_op_ensure_property_exists(Instruction*);
863 void emit_op_resolve_global(Instruction*, bool dynamic = false);
864 void emit_op_resolve_global_dynamic(Instruction*);
865 void emit_op_resolve_skip(Instruction*);
866 void emit_op_resolve_with_base(Instruction*);
867 void emit_op_resolve_with_this(Instruction*);
868 void emit_op_ret(Instruction*);
869 void emit_op_ret_object_or_this(Instruction*);
870 void emit_op_rshift(Instruction*);
871 void emit_op_sret(Instruction*);
872 void emit_op_strcat(Instruction*);
873 void emit_op_stricteq(Instruction*);
874 void emit_op_sub(Instruction*);
875 void emit_op_switch_char(Instruction*);
876 void emit_op_switch_imm(Instruction*);
877 void emit_op_switch_string(Instruction*);
878 void emit_op_tear_off_activation(Instruction*);
879 void emit_op_tear_off_arguments(Instruction*);
880 void emit_op_throw(Instruction*);
881 void emit_op_throw_reference_error(Instruction*);
882 void emit_op_to_jsnumber(Instruction*);
883 void emit_op_to_primitive(Instruction*);
884 void emit_op_unexpected_load(Instruction*);
885 void emit_op_urshift(Instruction*);
886 #if ENABLE(JIT_USE_SOFT_MODULO)
// NOTE(review): the body of this conditional block is not visible in this
// excerpt - confirm against the full header.
// Slow-case emitters: each consumes its pending SlowCaseEntry records
// through the iterator parameter.
890 void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
891 void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
892 void emitSlow_op_bitnot(Instruction*, Vector<SlowCaseEntry>::iterator&);
893 void emitSlow_op_bitor(Instruction*, Vector<SlowCaseEntry>::iterator&);
894 void emitSlow_op_bitxor(Instruction*, Vector<SlowCaseEntry>::iterator&);
895 void emitSlow_op_call(Instruction*, Vector<SlowCaseEntry>::iterator&);
896 void emitSlow_op_call_eval(Instruction*, Vector<SlowCaseEntry>::iterator&);
897 void emitSlow_op_call_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
898 void emitSlow_op_construct(Instruction*, Vector<SlowCaseEntry>::iterator&);
899 void emitSlow_op_convert_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
900 void emitSlow_op_create_this(Instruction*, Vector<SlowCaseEntry>::iterator&);
901 void emitSlow_op_div(Instruction*, Vector<SlowCaseEntry>::iterator&);
902 void emitSlow_op_eq(Instruction*, Vector<SlowCaseEntry>::iterator&);
903 void emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
904 void emitSlow_op_get_arguments_length(Instruction*, Vector<SlowCaseEntry>::iterator&);
905 void emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
906 void emitSlow_op_get_argument_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
907 void emitSlow_op_get_by_pname(Instruction*, Vector<SlowCaseEntry>::iterator&);
908 void emitSlow_op_check_has_instance(Instruction*, Vector<SlowCaseEntry>::iterator&);
909 void emitSlow_op_instanceof(Instruction*, Vector<SlowCaseEntry>::iterator&);
910 void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
911 void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
912 void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
913 void emitSlow_op_jgreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
914 void emitSlow_op_jgreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
915 void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
916 void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
917 void emitSlow_op_jngreater(Instruction*, Vector<SlowCaseEntry>::iterator&);
918 void emitSlow_op_jngreatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
919 void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
920 void emitSlow_op_load_varargs(Instruction*, Vector<SlowCaseEntry>::iterator&);
921 void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
922 void emitSlow_op_loop_if_lesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
923 void emitSlow_op_loop_if_greater(Instruction*, Vector<SlowCaseEntry>::iterator&);
924 void emitSlow_op_loop_if_greatereq(Instruction*, Vector<SlowCaseEntry>::iterator&);
925 void emitSlow_op_loop_if_true(Instruction*, Vector<SlowCaseEntry>::iterator&);
926 void emitSlow_op_loop_if_false(Instruction*, Vector<SlowCaseEntry>::iterator&);
927 void emitSlow_op_lshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
928 void emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&);
929 void emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&);
930 void emitSlow_op_mul(Instruction*, Vector<SlowCaseEntry>::iterator&);
931 void emitSlow_op_negate(Instruction*, Vector<SlowCaseEntry>::iterator&);
932 void emitSlow_op_neq(Instruction*, Vector<SlowCaseEntry>::iterator&);
933 void emitSlow_op_new_object(Instruction*, Vector<SlowCaseEntry>::iterator&);
934 void emitSlow_op_not(Instruction*, Vector<SlowCaseEntry>::iterator&);
935 void emitSlow_op_nstricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
936 void emitSlow_op_post_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
937 void emitSlow_op_post_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
938 void emitSlow_op_pre_dec(Instruction*, Vector<SlowCaseEntry>::iterator&);
939 void emitSlow_op_pre_inc(Instruction*, Vector<SlowCaseEntry>::iterator&);
940 void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
941 void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
942 void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
943 void emitSlow_op_resolve_global_dynamic(Instruction*, Vector<SlowCaseEntry>::iterator&);
944 void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
945 void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
946 void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
947 void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
948 void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
949 void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
950 void emitSlow_op_new_func(Instruction*, Vector<SlowCaseEntry>::iterator&);
951 void emitSlow_op_new_func_exp(Instruction*, Vector<SlowCaseEntry>::iterator&);
// Shared helper for op_rshift / op_urshift (isUnsigned selects which).
954 void emitRightShift(Instruction*, bool isUnsigned);
955 void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);
957 /* This function is deprecated. */
958 void emitGetJITStubArg(unsigned argumentNumber, RegisterID dst);
960 void emitInitRegister(unsigned dst);
// Accessors for slots in the CallFrame header.
962 void emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry);
963 void emitPutCellToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
964 void emitPutIntToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry);
965 void emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry);
966 void emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
967 void emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from = callFrameRegister);
// Constant-operand queries for a bytecode virtual register index.
969 JSValue getConstantOperand(unsigned src);
970 bool isOperandConstantImmediateInt(unsigned src);
971 bool isOperandConstantImmediateChar(unsigned src);
// Slow-case bookkeeping. (The bodies of these three inline helpers are
// truncated in this excerpt; only token-preserving comments are added.)
975 Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
979 void linkSlowCase(Vector<SlowCaseEntry>::iterator& iter)
// Bind the recorded slow-case jump to the current code location.
981 iter->from.link(this);
// A dummy entry has no jump recorded; assert that before consuming it.
984 void linkDummySlowCase(Vector<SlowCaseEntry>::iterator& iter)
986 ASSERT(!iter->from.isSet());
989 void linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator&, int vReg);
991 Jump checkStructure(RegisterID reg, Structure* structure);
993 void restoreArgumentReference();
994 void restoreArgumentReferenceForTrampoline();
995 void updateTopCallFrame();
997 Call emitNakedCall(CodePtr function = CodePtr());
999 void preserveReturnAddressAfterCall(RegisterID);
1000 void restoreReturnAddressBeforeReturn(RegisterID);
1001 void restoreReturnAddressBeforeReturn(Address);
1003 // Loads the character value of a single character string into dst.
1004 void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);
1006 enum OptimizationCheckKind { LoopOptimizationCheck, RetOptimizationCheck };
// Two conditionally-compiled variants: a real check, and a no-op stub.
1008 void emitOptimizationCheck(OptimizationCheckKind);
1010 void emitOptimizationCheck(OptimizationCheckKind) { }
1013 void emitTimeoutCheck();
1015 void printBytecodeOperandTypes(unsigned src1, unsigned src2);
// Optional sampling / profiling hooks; each pair below has a real
// implementation under its ENABLE(...) flag and (where shown) an empty
// inline stub otherwise.
1018 #if ENABLE(SAMPLING_FLAGS)
1019 void setSamplingFlag(int32_t);
1020 void clearSamplingFlag(int32_t);
1023 #if ENABLE(SAMPLING_COUNTERS)
1024 void emitCount(AbstractSamplingCounter&, uint32_t = 1);
1027 #if ENABLE(OPCODE_SAMPLING)
1028 void sampleInstruction(Instruction*, bool = false);
1031 #if ENABLE(CODEBLOCK_SAMPLING)
1032 void sampleCodeBlock(CodeBlock*);
1034 void sampleCodeBlock(CodeBlock*) {}
// With the value profiler: profiling is emitted only when the code block
// can later be optimized. Without it: never optimizable, always profile.
1038 bool canBeOptimized() { return m_canBeOptimized; }
1039 bool shouldEmitProfiling() { return m_canBeOptimized; }
1041 bool canBeOptimized() { return false; }
1042 // Enables use of value profiler with tiered compilation turned off,
1043 // in which case all code gets profiled.
1044 bool shouldEmitProfiling() { return true; }
// Per-compilation state. Non-owning pointers to the VM objects being
// compiled against:
1047 Interpreter* m_interpreter;
1048 JSGlobalData* m_globalData;
1049 CodeBlock* m_codeBlock;
// Records accumulated during the main pass, resolved at link time:
1051 Vector<CallRecord> m_calls;
1052 Vector<Label> m_labels;
1053 Vector<PropertyStubCompilationInfo> m_propertyAccessCompilationInfo;
1054 Vector<StructureStubCompilationInfo> m_callStructureStubCompilationInfo;
1055 Vector<MethodCallCompilationInfo> m_methodCallCompilationInfo;
1056 Vector<JumpTable> m_jmpTable;
1058 unsigned m_bytecodeOffset;
1059 Vector<JSRInfo> m_jsrSites;
1060 Vector<SlowCaseEntry> m_slowCases;
1061 Vector<SwitchRecord> m_switches;
// Cursors into the per-kind info vectors during the slow-case pass:
1063 unsigned m_propertyAccessInstructionIndex;
1064 unsigned m_globalResolveInfoIndex;
1065 unsigned m_callLinkInfoIndex;
1067 #if USE(JSVALUE32_64)
// 32/64 split-value builds cache the tag/payload registers mapped to the
// most recently loaded virtual register:
1068 unsigned m_jumpTargetIndex;
1069 unsigned m_mappedBytecodeOffset;
1070 unsigned m_mappedVirtualRegisterIndex;
1071 RegisterID m_mappedTag;
1072 RegisterID m_mappedPayload;
1074 int m_lastResultBytecodeRegister;
1076 unsigned m_jumpTargetsPosition;
1079 #if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
// Start markers checked by endUninterruptedSequence:
1080 Label m_uninterruptedInstructionSequenceBegin;
1081 int m_uninterruptedConstantSequenceBegin;
1084 WeakRandom m_randomGenerator;
1085 static CodeRef stringGetByValStubGenerator(JSGlobalData*);
1087 #if ENABLE(VALUE_PROFILER)
1088 bool m_canBeOptimized;
1090 #if ENABLE(DFG_JIT) || ENABLE(JIT_VERBOSE)
1091 Label m_startOfCode;
1094 CompactJITCodeMap::Encoder m_jitCodeMapEncoder;
// Class alignment forced to stabilize performance (see header comment).
1096 } JIT_CLASS_ALIGNMENT;
// The op_loop_* opcodes generate exactly the same code as their plain
// jump/branch counterparts; each wrapper below simply forwards.
// op_loop_hint instead emits an optimization check (which may be a no-op
// depending on build configuration). Brace lines of these inline
// definitions are not visible in this excerpt.
1098 inline void JIT::emit_op_loop(Instruction* currentInstruction)
1101 emit_op_jmp(currentInstruction);
1104 inline void JIT::emit_op_loop_hint(Instruction*)
1106 emitOptimizationCheck(LoopOptimizationCheck);
1109 inline void JIT::emit_op_loop_if_true(Instruction* currentInstruction)
1112 emit_op_jtrue(currentInstruction);
1115 inline void JIT::emitSlow_op_loop_if_true(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1117 emitSlow_op_jtrue(currentInstruction, iter);
1120 inline void JIT::emit_op_loop_if_false(Instruction* currentInstruction)
1123 emit_op_jfalse(currentInstruction);
1126 inline void JIT::emitSlow_op_loop_if_false(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1128 emitSlow_op_jfalse(currentInstruction, iter);
1131 inline void JIT::emit_op_loop_if_less(Instruction* currentInstruction)
1134 emit_op_jless(currentInstruction);
1137 inline void JIT::emitSlow_op_loop_if_less(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1139 emitSlow_op_jless(currentInstruction, iter);
1142 inline void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
1145 emit_op_jlesseq(currentInstruction);
1148 inline void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1150 emitSlow_op_jlesseq(currentInstruction, iter);
1153 inline void JIT::emit_op_loop_if_greater(Instruction* currentInstruction)
1156 emit_op_jgreater(currentInstruction);
1159 inline void JIT::emitSlow_op_loop_if_greater(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1161 emitSlow_op_jgreater(currentInstruction, iter);
1164 inline void JIT::emit_op_loop_if_greatereq(Instruction* currentInstruction)
1167 emit_op_jgreatereq(currentInstruction);
1170 inline void JIT::emitSlow_op_loop_if_greatereq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1172 emitSlow_op_jgreatereq(currentInstruction, iter);
1177 #endif // ENABLE(JIT)