dfg/DFGFixupPhase.cpp
dfg/DFGGraph.cpp
dfg/DFGJITCompiler.cpp
+ dfg/DFGMinifiedNode.cpp
dfg/DFGNodeFlags.cpp
dfg/DFGOSREntry.cpp
dfg/DFGOSRExit.cpp
dfg/DFGSpeculativeJIT32_64.cpp
dfg/DFGSpeculativeJIT64.cpp
dfg/DFGThunks.cpp
+ dfg/DFGValueSource.cpp
+ dfg/DFGVariableEvent.cpp
+ dfg/DFGVariableEventStream.cpp
dfg/DFGValidate.cpp
dfg/DFGVirtualRegisterAllocationPhase.cpp
2012-07-02 Filip Pizlo <fpizlo@apple.com>
+ DFG OSR exit value recoveries should be computed lazily
+ https://bugs.webkit.org/show_bug.cgi?id=82155
+
+ Reviewed by Gavin Barraclough.
+
+ This change aims to reduce one aspect of DFG compile times: the fact
+ that we currently compute the value recoveries for each local and
+ argument on every speculation check. We compile many speculation checks,
+ so this can add up quickly. The strategy that this change takes is to
+ have the DFG save just enough information about how the compiler is
+ choosing to represent state, that the DFG::OSRExitCompiler can reify
+ the value recoveries lazily.
+
+ This appears to be a 0.3% SunSpider speed-up and is neutral elsewhere.
+
+ I also took the opportunity to fix the sampling regions profiler (it
+ was missing an export macro) and to put in more sampling regions in
+ the DFG (which are disabled so long as ENABLE(SAMPLING_REGIONS) is
+ false).
+
+ * CMakeLists.txt:
+ * GNUmakefile.list.am:
+ * JavaScriptCore.xcodeproj/project.pbxproj:
+ * Target.pri:
+ * bytecode/CodeBlock.cpp:
+ (JSC):
+ (JSC::CodeBlock::shrinkDFGDataToFit):
+ * bytecode/CodeBlock.h:
+ (CodeBlock):
+ (JSC::CodeBlock::minifiedDFG):
+ (JSC::CodeBlock::variableEventStream):
+ (DFGData):
+ * bytecode/Operands.h:
+ (JSC::Operands::hasOperand):
+ (Operands):
+ (JSC::Operands::size):
+ (JSC::Operands::at):
+ (JSC::Operands::operator[]):
+ (JSC::Operands::isArgument):
+ (JSC::Operands::isVariable):
+ (JSC::Operands::argumentForIndex):
+ (JSC::Operands::variableForIndex):
+ (JSC::Operands::operandForIndex):
+ (JSC):
+ (JSC::dumpOperands):
+ * bytecode/SamplingTool.h:
+ (SamplingRegion):
+ * dfg/DFGByteCodeParser.cpp:
+ (JSC::DFG::parse):
+ * dfg/DFGCFAPhase.cpp:
+ (JSC::DFG::performCFA):
+ * dfg/DFGCSEPhase.cpp:
+ (JSC::DFG::performCSE):
+ * dfg/DFGFixupPhase.cpp:
+ (JSC::DFG::performFixup):
+ * dfg/DFGGenerationInfo.h:
+ (JSC::DFG::GenerationInfo::GenerationInfo):
+ (JSC::DFG::GenerationInfo::initConstant):
+ (JSC::DFG::GenerationInfo::initInteger):
+ (JSC::DFG::GenerationInfo::initJSValue):
+ (JSC::DFG::GenerationInfo::initCell):
+ (JSC::DFG::GenerationInfo::initBoolean):
+ (JSC::DFG::GenerationInfo::initDouble):
+ (JSC::DFG::GenerationInfo::initStorage):
+ (GenerationInfo):
+ (JSC::DFG::GenerationInfo::noticeOSRBirth):
+ (JSC::DFG::GenerationInfo::use):
+ (JSC::DFG::GenerationInfo::spill):
+ (JSC::DFG::GenerationInfo::setSpilled):
+ (JSC::DFG::GenerationInfo::fillJSValue):
+ (JSC::DFG::GenerationInfo::fillCell):
+ (JSC::DFG::GenerationInfo::fillInteger):
+ (JSC::DFG::GenerationInfo::fillBoolean):
+ (JSC::DFG::GenerationInfo::fillDouble):
+ (JSC::DFG::GenerationInfo::fillStorage):
+ (JSC::DFG::GenerationInfo::appendFill):
+ (JSC::DFG::GenerationInfo::appendSpill):
+ * dfg/DFGJITCompiler.cpp:
+ (JSC::DFG::JITCompiler::link):
+ (JSC::DFG::JITCompiler::compile):
+ (JSC::DFG::JITCompiler::compileFunction):
+ * dfg/DFGMinifiedGraph.h: Added.
+ (DFG):
+ (MinifiedGraph):
+ (JSC::DFG::MinifiedGraph::MinifiedGraph):
+ (JSC::DFG::MinifiedGraph::at):
+ (JSC::DFG::MinifiedGraph::append):
+ (JSC::DFG::MinifiedGraph::prepareAndShrink):
+ (JSC::DFG::MinifiedGraph::setOriginalGraphSize):
+ (JSC::DFG::MinifiedGraph::originalGraphSize):
+ * dfg/DFGMinifiedNode.cpp: Added.
+ (DFG):
+ (JSC::DFG::MinifiedNode::fromNode):
+ * dfg/DFGMinifiedNode.h: Added.
+ (DFG):
+ (JSC::DFG::belongsInMinifiedGraph):
+ (MinifiedNode):
+ (JSC::DFG::MinifiedNode::MinifiedNode):
+ (JSC::DFG::MinifiedNode::index):
+ (JSC::DFG::MinifiedNode::op):
+ (JSC::DFG::MinifiedNode::hasChild1):
+ (JSC::DFG::MinifiedNode::child1):
+ (JSC::DFG::MinifiedNode::hasConstant):
+ (JSC::DFG::MinifiedNode::hasConstantNumber):
+ (JSC::DFG::MinifiedNode::constantNumber):
+ (JSC::DFG::MinifiedNode::hasWeakConstant):
+ (JSC::DFG::MinifiedNode::weakConstant):
+ (JSC::DFG::MinifiedNode::getIndex):
+ (JSC::DFG::MinifiedNode::compareByNodeIndex):
+ (JSC::DFG::MinifiedNode::hasChild):
+ * dfg/DFGNode.h:
+ (Node):
+ * dfg/DFGOSRExit.cpp:
+ (JSC::DFG::OSRExit::OSRExit):
+ * dfg/DFGOSRExit.h:
+ (OSRExit):
+ * dfg/DFGOSRExitCompiler.cpp:
+ * dfg/DFGOSRExitCompiler.h:
+ (OSRExitCompiler):
+ * dfg/DFGOSRExitCompiler32_64.cpp:
+ (JSC::DFG::OSRExitCompiler::compileExit):
+ * dfg/DFGOSRExitCompiler64.cpp:
+ (JSC::DFG::OSRExitCompiler::compileExit):
+ * dfg/DFGPredictionPropagationPhase.cpp:
+ (JSC::DFG::performPredictionPropagation):
+ * dfg/DFGRedundantPhiEliminationPhase.cpp:
+ (JSC::DFG::performRedundantPhiElimination):
+ * dfg/DFGSpeculativeJIT.cpp:
+ (JSC::DFG::SpeculativeJIT::SpeculativeJIT):
+ (DFG):
+ (JSC::DFG::SpeculativeJIT::fillStorage):
+ (JSC::DFG::SpeculativeJIT::noticeOSRBirth):
+ (JSC::DFG::SpeculativeJIT::compileMovHint):
+ (JSC::DFG::SpeculativeJIT::compile):
+ (JSC::DFG::SpeculativeJIT::computeValueRecoveryFor):
+ * dfg/DFGSpeculativeJIT.h:
+ (DFG):
+ (JSC::DFG::SpeculativeJIT::use):
+ (SpeculativeJIT):
+ (JSC::DFG::SpeculativeJIT::spill):
+ (JSC::DFG::SpeculativeJIT::speculationCheck):
+ (JSC::DFG::SpeculativeJIT::forwardSpeculationCheck):
+ (JSC::DFG::SpeculativeJIT::recordSetLocal):
+ * dfg/DFGSpeculativeJIT32_64.cpp:
+ (JSC::DFG::SpeculativeJIT::fillInteger):
+ (JSC::DFG::SpeculativeJIT::fillDouble):
+ (JSC::DFG::SpeculativeJIT::fillJSValue):
+ (JSC::DFG::SpeculativeJIT::fillSpeculateIntInternal):
+ (JSC::DFG::SpeculativeJIT::fillSpeculateDouble):
+ (JSC::DFG::SpeculativeJIT::fillSpeculateCell):
+ (JSC::DFG::SpeculativeJIT::fillSpeculateBoolean):
+ (JSC::DFG::SpeculativeJIT::compile):
+ * dfg/DFGSpeculativeJIT64.cpp:
+ (JSC::DFG::SpeculativeJIT::fillInteger):
+ (JSC::DFG::SpeculativeJIT::fillDouble):
+ (JSC::DFG::SpeculativeJIT::fillJSValue):
+ (JSC::DFG::SpeculativeJIT::fillSpeculateIntInternal):
+ (JSC::DFG::SpeculativeJIT::fillSpeculateDouble):
+ (JSC::DFG::SpeculativeJIT::fillSpeculateCell):
+ (JSC::DFG::SpeculativeJIT::fillSpeculateBoolean):
+ (JSC::DFG::SpeculativeJIT::compile):
+ * dfg/DFGValueRecoveryOverride.h: Added.
+ (DFG):
+ (ValueRecoveryOverride):
+ (JSC::DFG::ValueRecoveryOverride::ValueRecoveryOverride):
+ * dfg/DFGValueSource.cpp: Added.
+ (DFG):
+ (JSC::DFG::ValueSource::dump):
+ * dfg/DFGValueSource.h: Added.
+ (DFG):
+ (JSC::DFG::dataFormatToValueSourceKind):
+ (JSC::DFG::valueSourceKindToDataFormat):
+ (JSC::DFG::isInRegisterFile):
+ (ValueSource):
+ (JSC::DFG::ValueSource::ValueSource):
+ (JSC::DFG::ValueSource::forPrediction):
+ (JSC::DFG::ValueSource::forDataFormat):
+ (JSC::DFG::ValueSource::isSet):
+ (JSC::DFG::ValueSource::kind):
+ (JSC::DFG::ValueSource::isInRegisterFile):
+ (JSC::DFG::ValueSource::dataFormat):
+ (JSC::DFG::ValueSource::valueRecovery):
+ (JSC::DFG::ValueSource::nodeIndex):
+ (JSC::DFG::ValueSource::nodeIndexFromKind):
+ (JSC::DFG::ValueSource::kindFromNodeIndex):
+ * dfg/DFGVariableEvent.cpp: Added.
+ (DFG):
+ (JSC::DFG::VariableEvent::dump):
+ (JSC::DFG::VariableEvent::dumpFillInfo):
+ (JSC::DFG::VariableEvent::dumpSpillInfo):
+ * dfg/DFGVariableEvent.h: Added.
+ (DFG):
+ (VariableEvent):
+ (JSC::DFG::VariableEvent::VariableEvent):
+ (JSC::DFG::VariableEvent::reset):
+ (JSC::DFG::VariableEvent::fillGPR):
+ (JSC::DFG::VariableEvent::fillPair):
+ (JSC::DFG::VariableEvent::fillFPR):
+ (JSC::DFG::VariableEvent::spill):
+ (JSC::DFG::VariableEvent::death):
+ (JSC::DFG::VariableEvent::setLocal):
+ (JSC::DFG::VariableEvent::movHint):
+ (JSC::DFG::VariableEvent::kind):
+ (JSC::DFG::VariableEvent::nodeIndex):
+ (JSC::DFG::VariableEvent::dataFormat):
+ (JSC::DFG::VariableEvent::gpr):
+ (JSC::DFG::VariableEvent::tagGPR):
+ (JSC::DFG::VariableEvent::payloadGPR):
+ (JSC::DFG::VariableEvent::fpr):
+ (JSC::DFG::VariableEvent::virtualRegister):
+ (JSC::DFG::VariableEvent::operand):
+ (JSC::DFG::VariableEvent::variableRepresentation):
+ * dfg/DFGVariableEventStream.cpp: Added.
+ (DFG):
+ (JSC::DFG::VariableEventStream::logEvent):
+ (MinifiedGenerationInfo):
+ (JSC::DFG::MinifiedGenerationInfo::MinifiedGenerationInfo):
+ (JSC::DFG::MinifiedGenerationInfo::update):
+ (JSC::DFG::VariableEventStream::reconstruct):
+ * dfg/DFGVariableEventStream.h: Added.
+ (DFG):
+ (VariableEventStream):
+ (JSC::DFG::VariableEventStream::appendAndLog):
+ * dfg/DFGVirtualRegisterAllocationPhase.cpp:
+ (JSC::DFG::performVirtualRegisterAllocation):
+
+2012-07-02 Filip Pizlo <fpizlo@apple.com>
+
DFG::ArgumentsSimplificationPhase should assert that the PhantomArguments nodes it creates are not shouldGenerate()
https://bugs.webkit.org/show_bug.cgi?id=90407
Source/JavaScriptCore/dfg/DFGInsertionSet.h \
Source/JavaScriptCore/dfg/DFGJITCompiler.cpp \
Source/JavaScriptCore/dfg/DFGJITCompiler.h \
+ Source/JavaScriptCore/dfg/DFGMinifiedGraph.h \
+ Source/JavaScriptCore/dfg/DFGMinifiedNode.cpp \
+ Source/JavaScriptCore/dfg/DFGMinifiedNode.h \
Source/JavaScriptCore/dfg/DFGNode.h \
Source/JavaScriptCore/dfg/DFGNodeFlags.cpp \
Source/JavaScriptCore/dfg/DFGNodeFlags.h \
Source/JavaScriptCore/dfg/DFGSpeculativeJIT.h \
Source/JavaScriptCore/dfg/DFGThunks.cpp \
Source/JavaScriptCore/dfg/DFGThunks.h \
+ Source/JavaScriptCore/dfg/DFGValueRecoveryOverride.h \
+ Source/JavaScriptCore/dfg/DFGValueSource.cpp \
+ Source/JavaScriptCore/dfg/DFGValueSource.h \
+ Source/JavaScriptCore/dfg/DFGVariableEvent.cpp \
+ Source/JavaScriptCore/dfg/DFGVariableEvent.h \
+ Source/JavaScriptCore/dfg/DFGVariableEventStream.cpp \
+ Source/JavaScriptCore/dfg/DFGVariableEventStream.h \
Source/JavaScriptCore/dfg/DFGValidate.cpp \
Source/JavaScriptCore/dfg/DFGValidate.h \
Source/JavaScriptCore/dfg/DFGVariableAccessData.h \
0F2BDC16151C5D4F00CD8910 /* DFGFixupPhase.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2BDC13151C5D4A00CD8910 /* DFGFixupPhase.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F2BDC21151E803B00CD8910 /* DFGInsertionSet.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2BDC1F151E803800CD8910 /* DFGInsertionSet.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F2BDC2C151FDE9100CD8910 /* Operands.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2BDC2B151FDE8B00CD8910 /* Operands.h */; settings = {ATTRIBUTES = (Private, ); }; };
+ 0F2BDC451522801B00CD8910 /* DFGMinifiedGraph.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2BDC3D1522801700CD8910 /* DFGMinifiedGraph.h */; settings = {ATTRIBUTES = (Private, ); }; };
+ 0F2BDC461522802000CD8910 /* DFGMinifiedNode.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2BDC3E1522801700CD8910 /* DFGMinifiedNode.h */; settings = {ATTRIBUTES = (Private, ); }; };
+ 0F2BDC471522802500CD8910 /* DFGValueRecoveryOverride.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2BDC3F1522801700CD8910 /* DFGValueRecoveryOverride.h */; settings = {ATTRIBUTES = (Private, ); }; };
+ 0F2BDC481522802900CD8910 /* DFGValueSource.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2BDC401522801700CD8910 /* DFGValueSource.h */; settings = {ATTRIBUTES = (Private, ); }; };
+ 0F2BDC491522809600CD8910 /* DFGVariableEvent.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2BDC411522801700CD8910 /* DFGVariableEvent.h */; settings = {ATTRIBUTES = (Private, ); }; };
+ 0F2BDC4A1522809A00CD8910 /* DFGVariableEventStream.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F2BDC421522801700CD8910 /* DFGVariableEventStream.cpp */; };
+ 0F2BDC4B1522809D00CD8910 /* DFGVariableEventStream.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2BDC431522801700CD8910 /* DFGVariableEventStream.h */; settings = {ATTRIBUTES = (Private, ); }; };
+ 0F2BDC4D1522818600CD8910 /* DFGMinifiedNode.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F2BDC4C1522818300CD8910 /* DFGMinifiedNode.cpp */; };
+ 0F2BDC4F15228BF300CD8910 /* DFGValueSource.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F2BDC4E15228BE700CD8910 /* DFGValueSource.cpp */; };
+ 0F2BDC5115228FFD00CD8910 /* DFGVariableEvent.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F2BDC5015228FFA00CD8910 /* DFGVariableEvent.cpp */; };
0F2C556F14738F3100121E4F /* DFGCodeBlocks.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2C556E14738F2E00121E4F /* DFGCodeBlocks.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F2C557014738F3500121E4F /* DFGCodeBlocks.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F2C556D14738F2E00121E4F /* DFGCodeBlocks.cpp */; };
0F3B3A1A153E68F2003ED0FF /* DFGConstantFoldingPhase.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F3B3A17153E68EF003ED0FF /* DFGConstantFoldingPhase.cpp */; };
0F2BDC13151C5D4A00CD8910 /* DFGFixupPhase.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGFixupPhase.h; path = dfg/DFGFixupPhase.h; sourceTree = "<group>"; };
0F2BDC1F151E803800CD8910 /* DFGInsertionSet.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGInsertionSet.h; path = dfg/DFGInsertionSet.h; sourceTree = "<group>"; };
0F2BDC2B151FDE8B00CD8910 /* Operands.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Operands.h; sourceTree = "<group>"; };
+ 0F2BDC3D1522801700CD8910 /* DFGMinifiedGraph.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGMinifiedGraph.h; path = dfg/DFGMinifiedGraph.h; sourceTree = "<group>"; };
+ 0F2BDC3E1522801700CD8910 /* DFGMinifiedNode.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGMinifiedNode.h; path = dfg/DFGMinifiedNode.h; sourceTree = "<group>"; };
+ 0F2BDC3F1522801700CD8910 /* DFGValueRecoveryOverride.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGValueRecoveryOverride.h; path = dfg/DFGValueRecoveryOverride.h; sourceTree = "<group>"; };
+ 0F2BDC401522801700CD8910 /* DFGValueSource.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGValueSource.h; path = dfg/DFGValueSource.h; sourceTree = "<group>"; };
+ 0F2BDC411522801700CD8910 /* DFGVariableEvent.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGVariableEvent.h; path = dfg/DFGVariableEvent.h; sourceTree = "<group>"; };
+ 0F2BDC421522801700CD8910 /* DFGVariableEventStream.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = DFGVariableEventStream.cpp; path = dfg/DFGVariableEventStream.cpp; sourceTree = "<group>"; };
+ 0F2BDC431522801700CD8910 /* DFGVariableEventStream.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGVariableEventStream.h; path = dfg/DFGVariableEventStream.h; sourceTree = "<group>"; };
+ 0F2BDC4C1522818300CD8910 /* DFGMinifiedNode.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = DFGMinifiedNode.cpp; path = dfg/DFGMinifiedNode.cpp; sourceTree = "<group>"; };
+ 0F2BDC4E15228BE700CD8910 /* DFGValueSource.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = DFGValueSource.cpp; path = dfg/DFGValueSource.cpp; sourceTree = "<group>"; };
+ 0F2BDC5015228FFA00CD8910 /* DFGVariableEvent.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = DFGVariableEvent.cpp; path = dfg/DFGVariableEvent.cpp; sourceTree = "<group>"; };
0F2C556D14738F2E00121E4F /* DFGCodeBlocks.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = DFGCodeBlocks.cpp; sourceTree = "<group>"; };
0F2C556E14738F2E00121E4F /* DFGCodeBlocks.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DFGCodeBlocks.h; sourceTree = "<group>"; };
0F3B3A17153E68EF003ED0FF /* DFGConstantFoldingPhase.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = DFGConstantFoldingPhase.cpp; path = dfg/DFGConstantFoldingPhase.cpp; sourceTree = "<group>"; };
0F2BDC1F151E803800CD8910 /* DFGInsertionSet.h */,
86EC9DBB1328DF82002B2AD7 /* DFGJITCompiler.cpp */,
86EC9DBC1328DF82002B2AD7 /* DFGJITCompiler.h */,
+ 0F2BDC3D1522801700CD8910 /* DFGMinifiedGraph.h */,
+ 0F2BDC4C1522818300CD8910 /* DFGMinifiedNode.cpp */,
+ 0F2BDC3E1522801700CD8910 /* DFGMinifiedNode.h */,
86ECA3E9132DEF1C002B2AD7 /* DFGNode.h */,
0FA581B7150E952A00B9A2D9 /* DFGNodeFlags.cpp */,
0FA581B8150E952A00B9A2D9 /* DFGNodeFlags.h */,
86880F4C14353B2100B08D42 /* DFGSpeculativeJIT64.cpp */,
0FC0979F146B28C700CF2442 /* DFGThunks.cpp */,
0FC097A0146B28C700CF2442 /* DFGThunks.h */,
+ 0F2BDC3F1522801700CD8910 /* DFGValueRecoveryOverride.h */,
+ 0F2BDC4E15228BE700CD8910 /* DFGValueSource.cpp */,
+ 0F2BDC401522801700CD8910 /* DFGValueSource.h */,
0F3B3A2915474FF4003ED0FF /* DFGValidate.cpp */,
0F3B3A2A15474FF4003ED0FF /* DFGValidate.h */,
0F620172143FCD2F0068B77C /* DFGVariableAccessData.h */,
+ 0F2BDC5015228FFA00CD8910 /* DFGVariableEvent.cpp */,
+ 0F2BDC411522801700CD8910 /* DFGVariableEvent.h */,
+ 0F2BDC421522801700CD8910 /* DFGVariableEventStream.cpp */,
+ 0F2BDC431522801700CD8910 /* DFGVariableEventStream.h */,
0FFFC95314EF909500C72532 /* DFGVirtualRegisterAllocationPhase.cpp */,
0FFFC95414EF909500C72532 /* DFGVirtualRegisterAllocationPhase.h */,
);
0F2BDC16151C5D4F00CD8910 /* DFGFixupPhase.h in Headers */,
0F2BDC21151E803B00CD8910 /* DFGInsertionSet.h in Headers */,
0F2BDC2C151FDE9100CD8910 /* Operands.h in Headers */,
+ 0F2BDC451522801B00CD8910 /* DFGMinifiedGraph.h in Headers */,
+ 0F2BDC461522802000CD8910 /* DFGMinifiedNode.h in Headers */,
+ 0F2BDC471522802500CD8910 /* DFGValueRecoveryOverride.h in Headers */,
+ 0F2BDC481522802900CD8910 /* DFGValueSource.h in Headers */,
+ 0F2BDC491522809600CD8910 /* DFGVariableEvent.h in Headers */,
+ 0F2BDC4B1522809D00CD8910 /* DFGVariableEventStream.h in Headers */,
8612E4CD152389EC00C836BE /* MatchResult.h in Headers */,
0F1E3A461534CBAF000F9456 /* DFGArgumentPosition.h in Headers */,
0F3B3A1B153E68F4003ED0FF /* DFGConstantFoldingPhase.h in Headers */,
8642C510151C06A90046D4EF /* RegExpCachedResult.cpp in Sources */,
8642C512151C083D0046D4EF /* RegExpMatchesArray.cpp in Sources */,
863C6D9C1521111A00585E4E /* YarrCanonicalizeUCS2.cpp in Sources */,
+ 0F2BDC4A1522809A00CD8910 /* DFGVariableEventStream.cpp in Sources */,
+ 0F2BDC4D1522818600CD8910 /* DFGMinifiedNode.cpp in Sources */,
+ 0F2BDC4F15228BF300CD8910 /* DFGValueSource.cpp in Sources */,
+ 0F2BDC5115228FFD00CD8910 /* DFGVariableEvent.cpp in Sources */,
14816E1B154CC56C00B8054C /* BlockAllocator.cpp in Sources */,
86EBF2FF1560F06A008E9222 /* NameConstructor.cpp in Sources */,
86EBF3011560F06A008E9222 /* NameInstance.cpp in Sources */,
dfg/DFGFixupPhase.cpp \
dfg/DFGGraph.cpp \
dfg/DFGJITCompiler.cpp \
+ dfg/DFGMinifiedNode.cpp \
dfg/DFGNodeFlags.cpp \
dfg/DFGOperations.cpp \
dfg/DFGOSREntry.cpp \
dfg/DFGSpeculativeJIT32_64.cpp \
dfg/DFGSpeculativeJIT64.cpp \
dfg/DFGThunks.cpp \
+ dfg/DFGValueSource.cpp \
+ dfg/DFGVariableEvent.cpp \
+ dfg/DFGVariableEventStream.cpp \
dfg/DFGValidate.cpp \
dfg/DFGVirtualRegisterAllocationPhase.cpp \
interpreter/AbstractPC.cpp \
m_dfgData->speculationRecovery.shrinkToFit();
m_dfgData->weakReferences.shrinkToFit();
m_dfgData->transitions.shrinkToFit();
+ m_dfgData->minifiedDFG.prepareAndShrink();
+ m_dfgData->variableEventStream.shrinkToFit();
}
#endif
}
dataLog(" bc = %d: %u\n", profile->m_bytecodeOffset, profile->m_counter);
}
}
-#endif
+#endif // ENABLE(VERBOSE_VALUE_PROFILE)
size_t CodeBlock::predictedMachineCodeSize()
{
#include "DFGCodeBlocks.h"
#include "DFGCommon.h"
#include "DFGExitProfile.h"
+#include "DFGMinifiedGraph.h"
#include "DFGOSREntry.h"
#include "DFGOSRExit.h"
+#include "DFGVariableEventStream.h"
#include "EvalCodeCache.h"
#include "ExecutionCounter.h"
#include "ExpressionRangeInfo.h"
m_dfgData->transitions.append(
WeakReferenceTransition(*globalData(), ownerExecutable(), codeOrigin, from, to));
}
+
+ DFG::MinifiedGraph& minifiedDFG()
+ {
+ createDFGDataIfNecessary();
+ return m_dfgData->minifiedDFG;
+ }
+
+ DFG::VariableEventStream& variableEventStream()
+ {
+ createDFGDataIfNecessary();
+ return m_dfgData->variableEventStream;
+ }
#endif
unsigned bytecodeOffset(Instruction* returnAddress)
SegmentedVector<Watchpoint, 1, 0> watchpoints;
Vector<WeakReferenceTransition> transitions;
Vector<WriteBarrier<JSCell> > weakReferences;
+ DFG::VariableEventStream variableEventStream;
+ DFG::MinifiedGraph minifiedDFG;
bool mayBeExecuting;
bool isJettisoned;
bool livenessHasBeenProved; // Initialized and used on every GC.
DataFormatJSInteger = DataFormatJS | DataFormatInteger,
DataFormatJSDouble = DataFormatJS | DataFormatDouble,
DataFormatJSCell = DataFormatJS | DataFormatCell,
- DataFormatJSBoolean = DataFormatJS | DataFormatBoolean
+ DataFormatJSBoolean = DataFormatJS | DataFormatBoolean,
+
+ // Marker deliminating ordinary data formats and OSR-only data formats.
+ DataFormatOSRMarker = 32,
+
+ // Special data formats used only for OSR.
+ DataFormatDead = 33, // Implies jsUndefined().
+ DataFormatArguments = 34 // Implies that the arguments object must be reified.
};
inline const char* dataFormatToString(DataFormat dataFormat)
const T& operand(int operand) const { return const_cast<const T&>(const_cast<Operands*>(this)->operand(operand)); }
+ bool hasOperand(int operand) const
+ {
+ if (operandIsArgument(operand))
+ return true;
+ return static_cast<size_t>(operand) < numberOfLocals();
+ }
+
void setOperand(int operand, const T& value)
{
if (operandIsArgument(operand)) {
setLocal(operand, value);
}
+ size_t size() const { return numberOfArguments() + numberOfLocals(); }
+ const T& at(size_t index) const
+ {
+ if (index < numberOfArguments())
+ return m_arguments[index];
+ return m_locals[index - numberOfArguments()];
+ }
+ T& at(size_t index)
+ {
+ if (index < numberOfArguments())
+ return m_arguments[index];
+ return m_locals[index - numberOfArguments()];
+ }
+ const T& operator[](size_t index) const { return at(index); }
+ T& operator[](size_t index) { return at(index); }
+
+ bool isArgument(size_t index) const { return index < numberOfArguments(); }
+ bool isVariable(size_t index) const { return !isArgument(index); }
+ int argumentForIndex(size_t index) const
+ {
+ return index;
+ }
+ int variableForIndex(size_t index) const
+ {
+ return index - m_arguments.size();
+ }
+ int operandForIndex(size_t index) const
+ {
+ if (index < numberOfArguments())
+ return argumentToOperand(index);
+ return index - numberOfArguments();
+ }
+
void setOperandFirstTime(int operand, const T& value)
{
if (operandIsArgument(operand)) {
}
}
+template<typename T, typename Traits>
+void dumpOperands(const Operands<T, Traits>& operands, FILE* out)
+{
+ // Use const-cast because:
+ // 1) I don't feel like writing this code twice, and
+ // 2) Some dump() methods may not be const, and I don't really care if that's
+ // the case.
+ dumpOperands(*const_cast<Operands<T, Traits>*>(&operands), out);
+}
+
} // namespace JSC
#endif // Operands_h
bool performArgumentsSimplification(Graph& graph)
{
+ SamplingRegion samplingRegion("DFG Arguments Simplification Phase");
return runPhase<ArgumentsSimplificationPhase>(graph);
}
bool parse(ExecState* exec, Graph& graph)
{
+ SamplingRegion samplingRegion("DFG Parsing");
#if DFG_DEBUG_LOCAL_DISBALE
UNUSED_PARAM(exec);
UNUSED_PARAM(graph);
bool performCFA(Graph& graph)
{
+ SamplingRegion samplingRegion("DFG CFA Phase");
return runPhase<CFAPhase>(graph);
}
bool performCFGSimplification(Graph& graph)
{
+ SamplingRegion samplingRegion("DFG CFG Simplification Phase");
return runPhase<CFGSimplificationPhase>(graph);
}
bool performCSE(Graph& graph, OptimizationFixpointState fixpointState)
{
+ SamplingRegion samplingRegion("DFG CSE Phase");
return runPhase<CSEPhase>(graph, fixpointState);
}
bool performConstantFolding(Graph& graph)
{
+ SamplingRegion samplingRegion("DFG Constant Folding Phase");
return runPhase<ConstantFoldingPhase>(graph);
}
bool performFixup(Graph& graph)
{
+ SamplingRegion samplingRegion("DFG Fixup Phase");
return runPhase<FixupPhase>(graph);
}
#if ENABLE(DFG_JIT)
+#include "DFGJITCompiler.h"
+#include "DFGVariableEvent.h"
+#include "DFGVariableEventStream.h"
#include "DataFormat.h"
-#include <dfg/DFGJITCompiler.h>
namespace JSC { namespace DFG {
, m_registerFormat(DataFormatNone)
, m_spillFormat(DataFormatNone)
, m_canFill(false)
+ , m_bornForOSR(false)
{
}
m_registerFormat = DataFormatNone;
m_spillFormat = DataFormatNone;
m_canFill = true;
+ m_bornForOSR = false;
ASSERT(m_useCount);
}
void initInteger(NodeIndex nodeIndex, uint32_t useCount, GPRReg gpr)
m_spillFormat = DataFormatNone;
m_canFill = false;
u.gpr = gpr;
+ m_bornForOSR = false;
ASSERT(m_useCount);
}
#if USE(JSVALUE64)
m_spillFormat = DataFormatNone;
m_canFill = false;
u.gpr = gpr;
+ m_bornForOSR = false;
ASSERT(m_useCount);
}
#elif USE(JSVALUE32_64)
m_canFill = false;
u.v.tagGPR = tagGPR;
u.v.payloadGPR = payloadGPR;
+ m_bornForOSR = false;
ASSERT(m_useCount);
}
#endif
m_spillFormat = DataFormatNone;
m_canFill = false;
u.gpr = gpr;
+ m_bornForOSR = false;
ASSERT(m_useCount);
}
void initBoolean(NodeIndex nodeIndex, uint32_t useCount, GPRReg gpr)
m_spillFormat = DataFormatNone;
m_canFill = false;
u.gpr = gpr;
+ m_bornForOSR = false;
ASSERT(m_useCount);
}
void initDouble(NodeIndex nodeIndex, uint32_t useCount, FPRReg fpr)
m_spillFormat = DataFormatNone;
m_canFill = false;
u.fpr = fpr;
+ m_bornForOSR = false;
ASSERT(m_useCount);
}
void initStorage(NodeIndex nodeIndex, uint32_t useCount, GPRReg gpr)
m_spillFormat = DataFormatNone;
m_canFill = false;
u.gpr = gpr;
+ m_bornForOSR = false;
ASSERT(m_useCount);
}
// Get the index of the node that produced this value.
NodeIndex nodeIndex() { return m_nodeIndex; }
+
+ void noticeOSRBirth(VariableEventStream& stream, NodeIndex nodeIndex, VirtualRegister virtualRegister)
+ {
+ if (m_nodeIndex != nodeIndex)
+ return;
+ if (!alive())
+ return;
+ if (m_bornForOSR)
+ return;
+
+ m_bornForOSR = true;
+
+ if (m_registerFormat != DataFormatNone)
+ appendFill(BirthToFill, stream);
+ else if (m_spillFormat != DataFormatNone)
+ appendSpill(BirthToSpill, stream, virtualRegister);
+ }
// Mark the value as having been used (decrement the useCount).
// Returns true if this was the last use of the value, and any
// associated machine registers may be freed.
- bool use()
+ bool use(VariableEventStream& stream)
{
ASSERT(m_useCount);
- return !--m_useCount;
+ bool result = !--m_useCount;
+
+ if (result && m_bornForOSR) {
+ ASSERT(m_nodeIndex != NoNode);
+ stream.appendAndLog(VariableEvent::death(m_nodeIndex));
+ }
+
+ return result;
}
// Used to check the operands of operations to see if they are on
}
// Called when a VirtualRegister is being spilled to the RegisterFile for the first time.
- void spill(DataFormat spillFormat)
+ void spill(VariableEventStream& stream, VirtualRegister virtualRegister, DataFormat spillFormat)
{
// We shouldn't be spill values that don't need spilling.
ASSERT(!m_canFill);
m_registerFormat = DataFormatNone;
m_spillFormat = spillFormat;
m_canFill = true;
+
+ if (m_bornForOSR)
+ appendSpill(Spill, stream, virtualRegister);
}
// Called on values that don't need spilling (constants and values that have
// already been spilled), to mark them as no longer being in machine registers.
- void setSpilled()
+ void setSpilled(VariableEventStream& stream, VirtualRegister virtualRegister)
{
// Should only be called on values that don't need spilling, and are currently in registers.
ASSERT(m_canFill && m_registerFormat != DataFormatNone);
m_registerFormat = DataFormatNone;
+
+ if (m_bornForOSR)
+ appendSpill(Spill, stream, virtualRegister);
}
void killSpilled()
// Record that this value is filled into machine registers,
// tracking which registers, and what format the value has.
#if USE(JSVALUE64)
- void fillJSValue(GPRReg gpr, DataFormat format = DataFormatJS)
+ void fillJSValue(VariableEventStream& stream, GPRReg gpr, DataFormat format = DataFormatJS)
{
ASSERT(format & DataFormatJS);
m_registerFormat = format;
u.gpr = gpr;
+
+ if (m_bornForOSR)
+ appendFill(Fill, stream);
}
#elif USE(JSVALUE32_64)
- void fillJSValue(GPRReg tagGPR, GPRReg payloadGPR, DataFormat format = DataFormatJS)
+ void fillJSValue(VariableEventStream& stream, GPRReg tagGPR, GPRReg payloadGPR, DataFormat format = DataFormatJS)
{
ASSERT(format & DataFormatJS);
m_registerFormat = format;
u.v.tagGPR = tagGPR; // FIXME: for JSValues with known type (boolean, integer, cell etc.) no tagGPR is needed?
u.v.payloadGPR = payloadGPR;
+
+ if (m_bornForOSR)
+ appendFill(Fill, stream);
}
- void fillCell(GPRReg gpr)
+ void fillCell(VariableEventStream& stream, GPRReg gpr)
{
m_registerFormat = DataFormatCell;
u.gpr = gpr;
+
+ if (m_bornForOSR)
+ appendFill(Fill, stream);
}
#endif
- void fillInteger(GPRReg gpr)
+ void fillInteger(VariableEventStream& stream, GPRReg gpr)
{
m_registerFormat = DataFormatInteger;
u.gpr = gpr;
+
+ if (m_bornForOSR)
+ appendFill(Fill, stream);
}
- void fillBoolean(GPRReg gpr)
+ void fillBoolean(VariableEventStream& stream, GPRReg gpr)
{
m_registerFormat = DataFormatBoolean;
u.gpr = gpr;
+
+ if (m_bornForOSR)
+ appendFill(Fill, stream);
}
- void fillDouble(FPRReg fpr)
+ void fillDouble(VariableEventStream& stream, FPRReg fpr)
{
ASSERT(fpr != InvalidFPRReg);
m_registerFormat = DataFormatDouble;
u.fpr = fpr;
+
+ if (m_bornForOSR)
+ appendFill(Fill, stream);
}
- void fillStorage(GPRReg gpr)
+ void fillStorage(VariableEventStream& stream, GPRReg gpr)
{
m_registerFormat = DataFormatStorage;
u.gpr = gpr;
+
+ if (m_bornForOSR)
+ appendFill(Fill, stream);
}
bool alive()
}
private:
+ void appendFill(VariableEventKind kind, VariableEventStream& stream)
+ {
+ if (m_registerFormat == DataFormatDouble) {
+ stream.appendAndLog(VariableEvent::fillFPR(kind, m_nodeIndex, u.fpr));
+ return;
+ }
+#if USE(JSVALUE32_64)
+ if (m_registerFormat & DataFormatJS) {
+ stream.appendAndLog(VariableEvent::fillPair(kind, m_nodeIndex, u.v.tagGPR, u.v.payloadGPR));
+ return;
+ }
+#endif
+ stream.appendAndLog(VariableEvent::fillGPR(kind, m_nodeIndex, u.gpr, m_registerFormat));
+ }
+
+ void appendSpill(VariableEventKind kind, VariableEventStream& stream, VirtualRegister virtualRegister)
+ {
+ stream.appendAndLog(VariableEvent::spill(kind, m_nodeIndex, virtualRegister, m_spillFormat));
+ }
+
// The index of the node whose result is stored in this virtual register.
NodeIndex m_nodeIndex;
uint32_t m_useCount;
DataFormat m_registerFormat;
DataFormat m_spillFormat;
bool m_canFill;
+ bool m_bornForOSR;
union {
GPRReg gpr;
FPRReg fpr;
codeBlock()->watchpoint(exit.m_watchpointIndex).correctLabels(linkBuffer);
}
+ codeBlock()->minifiedDFG().setOriginalGraphSize(m_graph.size());
codeBlock()->shrinkToFit(CodeBlock::LateShrink);
}
bool JITCompiler::compile(JITCode& entry)
{
+ SamplingRegion samplingRegion("DFG Backend");
+
setStartOfCode();
compileEntry();
SpeculativeJIT speculative(*this);
bool JITCompiler::compileFunction(JITCode& entry, MacroAssemblerCodePtr& entryWithArityCheck)
{
+ SamplingRegion samplingRegion("DFG Backend");
+
setStartOfCode();
compileEntry();
--- /dev/null
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef DFGMinifiedGraph_h
+#define DFGMinifiedGraph_h
+
+#include <wtf/Platform.h>
+
+#if ENABLE(DFG_JIT)
+
+#include "DFGMinifiedNode.h"
+#include <algorithm>
+#include <wtf/StdLibExtras.h>
+#include <wtf/Vector.h>
+
+namespace JSC { namespace DFG {
+
+// A stripped-down copy of the DFG graph, retaining only the nodes that the
+// OSR exit machinery needs (see belongsInMinifiedGraph) after the full
+// compile-time graph has been thrown away.
+class MinifiedGraph {
+public:
+ MinifiedGraph()
+ : m_size(0) // Fix: m_size was uninitialized; originalGraphSize() would
+ { // return garbage if read before setOriginalGraphSize().
+ }
+
+ // Returns the recorded node with the given index, or 0 if that node was
+ // not kept in the minified graph. Requires prepareAndShrink() to have
+ // sorted m_list first, since this does a binary search.
+ MinifiedNode* at(NodeIndex nodeIndex)
+ {
+ if (!m_list.size())
+ return 0;
+ MinifiedNode* entry =
+ binarySearch<MinifiedNode, NodeIndex, MinifiedNode::getIndex>(
+ m_list.begin(), m_list.size(), nodeIndex, WTF::KeyMustNotBePresentInArray);
+ if (entry->index() != nodeIndex)
+ return 0;
+ return entry;
+ }
+
+ void append(const MinifiedNode& node)
+ {
+ m_list.append(node);
+ }
+
+ // Sorts the list by node index so at() can binary-search, and releases
+ // excess vector capacity. Call once, after all append()s.
+ void prepareAndShrink()
+ {
+ std::sort(m_list.begin(), m_list.end(), MinifiedNode::compareByNodeIndex);
+ m_list.shrinkToFit();
+ }
+
+ // The number of nodes in the original (full) graph, not in m_list.
+ void setOriginalGraphSize(size_t size) { m_size = size; }
+
+ size_t originalGraphSize() const { return m_size; }
+
+private:
+ Vector<MinifiedNode> m_list;
+ size_t m_size;
+};
+
+} } // namespace JSC::DFG
+
+#endif // ENABLE(DFG_JIT)
+
+#endif // DFGMinifiedGraph_h
+
--- /dev/null
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "DFGMinifiedNode.h"
+
+#if ENABLE(DFG_JIT)
+
+#include "DFGNode.h"
+
+namespace JSC { namespace DFG {
+
+// Captures the subset of a full DFG Node's state that OSR exit needs:
+// the node's index, its opcode, and — packed into a single word — either
+// its one child, its constant-pool number, or its weak JSCell constant.
+// Only node types for which belongsInMinifiedGraph() is true may be passed.
+MinifiedNode MinifiedNode::fromNode(NodeIndex nodeIndex, Node& node)
+{
+ ASSERT(belongsInMinifiedGraph(node.op()));
+ MinifiedNode result;
+ result.m_index = nodeIndex;
+ result.m_op = node.op();
+ if (hasChild(node.op()))
+ result.m_childOrInfo = node.child1().index();
+ else if (hasConstantNumber(node.op()))
+ result.m_childOrInfo = node.constantNumber();
+ else if (hasWeakConstant(node.op()))
+ result.m_childOrInfo = bitwise_cast<uintptr_t>(node.weakConstant());
+ else {
+ // PhantomArguments is the only remaining op; it carries no payload.
+ ASSERT(node.op() == PhantomArguments);
+ result.m_childOrInfo = 0;
+ }
+ return result;
+}
+
+} } // namespace JSC::DFG
+
+#endif // ENABLE(DFG_JIT)
+
--- /dev/null
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef DFGMinifiedNode_h
+#define DFGMinifiedNode_h
+
+#include <wtf/Platform.h>
+
+#if ENABLE(DFG_JIT)
+
+#include "DFGCommon.h"
+#include "DFGNodeType.h"
+
+namespace JSC { namespace DFG {
+
+struct Node;
+
+// Returns true for the node types that must survive into the minified
+// graph: constants and the int/double conversion nodes (plus
+// PhantomArguments) whose results OSR exit may need to reconstruct.
+inline bool belongsInMinifiedGraph(NodeType type)
+{
+ switch (type) {
+ case JSConstant:
+ case WeakJSConstant:
+ case ValueToInt32:
+ case Int32ToDouble:
+ case UInt32ToNumber:
+ case DoubleAsInt32:
+ case PhantomArguments:
+ return true;
+ default:
+ return false;
+ }
+}
+
+// A compact record of one DFG node, keeping only what OSR exit needs:
+// index, opcode, and one word that holds the child index, the constant
+// number, or a weak JSCell pointer, depending on the opcode.
+class MinifiedNode {
+public:
+ MinifiedNode() { }
+
+ static MinifiedNode fromNode(NodeIndex, Node&);
+
+ NodeIndex index() const { return m_index; }
+ NodeType op() const { return m_op; }
+
+ bool hasChild1() const { return hasChild(m_op); }
+
+ NodeIndex child1() const
+ {
+ ASSERT(hasChild(m_op));
+ return m_childOrInfo;
+ }
+
+ bool hasConstant() const { return hasConstantNumber() || hasWeakConstant(); }
+
+ bool hasConstantNumber() const { return hasConstantNumber(m_op); }
+
+ unsigned constantNumber() const
+ {
+ ASSERT(hasConstantNumber(m_op));
+ return m_childOrInfo;
+ }
+
+ bool hasWeakConstant() const { return hasWeakConstant(m_op); }
+
+ JSCell* weakConstant() const
+ {
+ ASSERT(hasWeakConstant(m_op));
+ return bitwise_cast<JSCell*>(m_childOrInfo);
+ }
+
+ // Key extractor for WTF::binarySearch (used by MinifiedGraph::at).
+ static NodeIndex getIndex(MinifiedNode* node) { return node->index(); }
+ // Ordering predicate for std::sort (used by MinifiedGraph::prepareAndShrink).
+ static bool compareByNodeIndex(const MinifiedNode& a, const MinifiedNode& b)
+ {
+ return a.m_index < b.m_index;
+ }
+
+private:
+ // The conversion ops are the only minified node types with a child.
+ static bool hasChild(NodeType type)
+ {
+ switch (type) {
+ case ValueToInt32:
+ case Int32ToDouble:
+ case UInt32ToNumber:
+ case DoubleAsInt32:
+ return true;
+ default:
+ return false;
+ }
+ }
+ static bool hasConstantNumber(NodeType type)
+ {
+ return type == JSConstant;
+ }
+ static bool hasWeakConstant(NodeType type)
+ {
+ return type == WeakJSConstant;
+ }
+
+ NodeIndex m_index;
+ NodeType m_op;
+ uintptr_t m_childOrInfo; // Nodes in the minified graph have only one child each.
+};
+
+} } // namespace JSC::DFG
+
+#endif // ENABLE(DFG_JIT)
+
+#endif // DFGMinifiedNode_h
+
#include "JSValue.h"
#include "Operands.h"
#include "SpeculatedType.h"
+#include "StructureSet.h"
#include "ValueProfile.h"
namespace JSC { namespace DFG {
ASSERT(m_virtualRegister != InvalidVirtualRegister);
return m_virtualRegister;
}
-
+
void setVirtualRegister(VirtualRegister virtualRegister)
{
ASSERT(hasResult());
namespace JSC { namespace DFG {
-static unsigned computeNumVariablesForCodeOrigin(
- CodeBlock* codeBlock, const CodeOrigin& codeOrigin)
-{
- if (!codeOrigin.inlineCallFrame)
- return codeBlock->m_numCalleeRegisters;
- return
- codeOrigin.inlineCallFrame->stackOffset +
- baselineCodeBlockForInlineCallFrame(codeOrigin.inlineCallFrame)->m_numCalleeRegisters;
-}
-
-OSRExit::OSRExit(ExitKind kind, JSValueSource jsValueSource, MethodOfGettingAValueProfile valueProfile, MacroAssembler::Jump check, SpeculativeJIT* jit, unsigned recoveryIndex)
+OSRExit::OSRExit(ExitKind kind, JSValueSource jsValueSource, MethodOfGettingAValueProfile valueProfile, MacroAssembler::Jump check, SpeculativeJIT* jit, unsigned streamIndex, unsigned recoveryIndex)
: m_jsValueSource(jsValueSource)
, m_valueProfile(valueProfile)
, m_check(check)
, m_watchpointIndex(std::numeric_limits<unsigned>::max())
, m_kind(kind)
, m_count(0)
- , m_arguments(jit->m_arguments.size())
- , m_variables(computeNumVariablesForCodeOrigin(jit->m_jit.graph().m_profiledBlock, jit->m_codeOriginForOSR))
+ , m_streamIndex(streamIndex)
, m_lastSetOperand(jit->m_lastSetOperand)
{
ASSERT(m_codeOrigin.isSet());
- for (unsigned argument = 0; argument < m_arguments.size(); ++argument)
- m_arguments[argument] = jit->computeValueRecoveryFor(jit->m_arguments[argument]);
- for (unsigned variable = 0; variable < m_variables.size(); ++variable)
- m_variables[variable] = jit->computeValueRecoveryFor(jit->m_variables[variable]);
-}
-
-void OSRExit::dump(FILE* out) const
-{
- for (unsigned argument = 0; argument < m_arguments.size(); ++argument)
- m_arguments[argument].dump(out);
- fprintf(out, " : ");
- for (unsigned variable = 0; variable < m_variables.size(); ++variable)
- m_variables[variable].dump(out);
}
bool OSRExit::considerAddingAsFrequentExitSiteSlow(CodeBlock* dfgCodeBlock, CodeBlock* profiledCodeBlock)
#include "DFGCorrectableJumpPoint.h"
#include "DFGExitProfile.h"
#include "DFGGPRInfo.h"
+#include "DFGValueRecoveryOverride.h"
#include "MacroAssembler.h"
#include "MethodOfGettingAValueProfile.h"
#include "Operands.h"
// This structure describes how to exit the speculative path by
// going into baseline code.
struct OSRExit {
- OSRExit(ExitKind, JSValueSource, MethodOfGettingAValueProfile, MacroAssembler::Jump, SpeculativeJIT*, unsigned recoveryIndex = 0);
+ OSRExit(ExitKind, JSValueSource, MethodOfGettingAValueProfile, MacroAssembler::Jump, SpeculativeJIT*, unsigned streamIndex, unsigned recoveryIndex = 0);
MacroAssemblerCodeRef m_code;
ExitKind m_kind;
uint32_t m_count;
- // Convenient way of iterating over ValueRecoveries while being
- // generic over argument versus variable.
- int numberOfRecoveries() const { return m_arguments.size() + m_variables.size(); }
- const ValueRecovery& valueRecovery(int index) const
- {
- if (index < (int)m_arguments.size())
- return m_arguments[index];
- return m_variables[index - m_arguments.size()];
- }
- ValueRecovery& valueRecoveryForOperand(int operand)
- {
- if (operandIsArgument(operand))
- return m_arguments[operandToArgument(operand)];
- return m_variables[operand];
- }
- bool isArgument(int index) const { return index < (int)m_arguments.size(); }
- bool isVariable(int index) const { return !isArgument(index); }
- int argumentForIndex(int index) const
- {
- return index;
- }
- int variableForIndex(int index) const
- {
- return index - m_arguments.size();
- }
- int operandForIndex(int index) const
- {
- if (index < (int)m_arguments.size())
- return operandToArgument(index);
- return index - m_arguments.size();
- }
-
bool considerAddingAsFrequentExitSite(CodeBlock* dfgCodeBlock, CodeBlock* profiledCodeBlock)
{
if (!m_count || !exitKindIsCountable(m_kind))
return considerAddingAsFrequentExitSiteSlow(dfgCodeBlock, profiledCodeBlock);
}
- void dump(FILE* out) const;
-
- Vector<ValueRecovery, 0> m_arguments;
- Vector<ValueRecovery, 0> m_variables;
+ unsigned m_streamIndex;
int m_lastSetOperand;
+
+ RefPtr<ValueRecoveryOverride> m_valueRecoveryOverride;
private:
bool considerAddingAsFrequentExitSiteSlow(CodeBlock* dfgCodeBlock, CodeBlock* profiledCodeBlock);
void compileOSRExit(ExecState* exec)
{
+ SamplingRegion samplingRegion("DFG OSR Exit Compilation");
+
CodeBlock* codeBlock = exec->codeBlock();
ASSERT(codeBlock);
->jitCompile(exec);
}
+ // Compute the value recoveries.
+ Operands<ValueRecovery> operands;
+ codeBlock->variableEventStream().reconstruct(codeBlock, exit.m_codeOrigin, codeBlock->minifiedDFG(), exit.m_streamIndex, operands);
+
+ // There may be an override, for forward speculations.
+ if (!!exit.m_valueRecoveryOverride) {
+ operands.setOperand(
+ exit.m_valueRecoveryOverride->operand, exit.m_valueRecoveryOverride->recovery);
+ }
+
SpeculationRecovery* recovery = 0;
if (exit.m_recoveryIndex)
recovery = &codeBlock->speculationRecovery(exit.m_recoveryIndex - 1);
#if DFG_ENABLE(DEBUG_VERBOSE)
- dataLog("Generating OSR exit #%u (bc#%u, @%u, %s) for code block %p.\n", exitIndex, exit.m_codeOrigin.bytecodeIndex, exit.m_nodeIndex, exitKindToString(exit.m_kind), codeBlock);
+ dataLog("Generating OSR exit #%u (seq#%u, bc#%u, @%u, %s) for code block %p.\n", exitIndex, exit.m_streamIndex, exit.m_codeOrigin.bytecodeIndex, exit.m_nodeIndex, exitKindToString(exit.m_kind), codeBlock);
#endif
{
OSRExitCompiler exitCompiler(jit);
jit.jitAssertHasValidCallFrame();
- exitCompiler.compileExit(exit, recovery);
+ exitCompiler.compileExit(exit, operands, recovery);
LinkBuffer patchBuffer(*globalData, &jit, codeBlock);
exit.m_code = FINALIZE_CODE_IF(
{
}
- void compileExit(const OSRExit&, SpeculationRecovery*);
+ void compileExit(const OSRExit&, const Operands<ValueRecovery>&, SpeculationRecovery*);
private:
#if !ASSERT_DISABLED
#if ENABLE(DFG_JIT) && USE(JSVALUE32_64)
#include "DFGOperations.h"
+#include <wtf/DataLog.h>
namespace JSC { namespace DFG {
-void OSRExitCompiler::compileExit(const OSRExit& exit, SpeculationRecovery* recovery)
+void OSRExitCompiler::compileExit(const OSRExit& exit, const Operands<ValueRecovery>& operands, SpeculationRecovery* recovery)
{
// 1) Pro-forma stuff.
#if DFG_ENABLE(DEBUG_VERBOSE)
dataLog(" -> %p ", codeOrigin.inlineCallFrame->executable.get());
}
dataLog(") at JIT offset 0x%x ", m_jit.debugOffset());
- exit.dump(WTF::dataFile());
+ dumpOperands(operands, WTF::dataFile());
#endif
#if DFG_ENABLE(VERBOSE_SPECULATION_FAILURE)
SpeculationFailureDebugInfo* debugInfo = new SpeculationFailureDebugInfo;
// GPRInfo::numberOfRegisters of them. Also see if there are any constants,
// any undefined slots, any FPR slots, and any unboxed ints.
- Vector<bool> poisonedVirtualRegisters(exit.m_variables.size());
+ Vector<bool> poisonedVirtualRegisters(operands.numberOfLocals());
for (unsigned i = 0; i < poisonedVirtualRegisters.size(); ++i)
poisonedVirtualRegisters[i] = false;
bool haveUndefined = false;
bool haveArguments = false;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case DisplacedInRegisterFile:
case Int32DisplacedInRegisterFile:
// to ensure this happens efficiently. Note that we expect this case
// to be rare, so the handling of it is optimized for the cases in
// which it does not happen.
- if (recovery.virtualRegister() < (int)exit.m_variables.size()) {
- switch (exit.m_variables[recovery.virtualRegister()].technique()) {
+ if (recovery.virtualRegister() < (int)operands.numberOfLocals()) {
+ switch (operands.local(recovery.virtualRegister()).technique()) {
case InGPR:
case UnboxedInt32InGPR:
case UnboxedBooleanInGPR:
// 5) Perform all reboxing of integers and cells, except for those in registers.
if (haveUnboxedInt32InRegisterFile || haveUnboxedCellInRegisterFile || haveUnboxedBooleanInRegisterFile) {
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case AlreadyInRegisterFileAsUnboxedInt32:
- m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), AssemblyHelpers::tagFor(static_cast<VirtualRegister>(exit.operandForIndex(index))));
+ m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), AssemblyHelpers::tagFor(static_cast<VirtualRegister>(operands.operandForIndex(index))));
break;
case AlreadyInRegisterFileAsUnboxedCell:
- m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::CellTag), AssemblyHelpers::tagFor(static_cast<VirtualRegister>(exit.operandForIndex(index))));
+ m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::CellTag), AssemblyHelpers::tagFor(static_cast<VirtualRegister>(operands.operandForIndex(index))));
break;
case AlreadyInRegisterFileAsUnboxedBoolean:
- m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::BooleanTag), AssemblyHelpers::tagFor(static_cast<VirtualRegister>(exit.operandForIndex(index))));
+ m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::BooleanTag), AssemblyHelpers::tagFor(static_cast<VirtualRegister>(operands.operandForIndex(index))));
break;
default:
// Note that GPRs do not have a fast change (like haveFPRs) because we expect that
// most OSR failure points will have at least one GPR that needs to be dumped.
- initializePoisoned(exit.m_variables.size());
+ initializePoisoned(operands.numberOfLocals());
unsigned currentPoisonIndex = 0;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
- int operand = exit.operandForIndex(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
+ int operand = operands.operandForIndex(index);
switch (recovery.technique()) {
case InGPR:
case UnboxedInt32InGPR:
case UnboxedBooleanInGPR:
- if (exit.isVariable(index) && poisonedVirtualRegisters[exit.variableForIndex(index)]) {
+ if (operands.isVariable(index) && poisonedVirtualRegisters[operands.variableForIndex(index)]) {
m_jit.store32(recovery.gpr(), reinterpret_cast<char*>(scratchDataBuffer + currentPoisonIndex) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
- m_poisonScratchIndices[exit.variableForIndex(index)] = currentPoisonIndex;
+ m_poisonScratchIndices[operands.variableForIndex(index)] = currentPoisonIndex;
currentPoisonIndex++;
} else {
uint32_t tag = JSValue::EmptyValueTag;
}
break;
case InPair:
- if (exit.isVariable(index) && poisonedVirtualRegisters[exit.variableForIndex(index)]) {
+ if (operands.isVariable(index) && poisonedVirtualRegisters[operands.variableForIndex(index)]) {
m_jit.store32(recovery.tagGPR(), reinterpret_cast<char*>(scratchDataBuffer + currentPoisonIndex) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
m_jit.store32(recovery.payloadGPR(), reinterpret_cast<char*>(scratchDataBuffer + currentPoisonIndex) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
- m_poisonScratchIndices[exit.variableForIndex(index)] = currentPoisonIndex;
+ m_poisonScratchIndices[operands.variableForIndex(index)] = currentPoisonIndex;
currentPoisonIndex++;
} else {
m_jit.store32(recovery.tagGPR(), AssemblyHelpers::tagFor((VirtualRegister)operand));
m_jit.convertInt32ToDouble(recovery.gpr(), FPRInfo::fpRegT0);
m_jit.addDouble(AssemblyHelpers::AbsoluteAddress(&AssemblyHelpers::twoToThe32), FPRInfo::fpRegT0);
- if (exit.isVariable(index) && poisonedVirtualRegisters[exit.variableForIndex(index)]) {
+ if (operands.isVariable(index) && poisonedVirtualRegisters[operands.variableForIndex(index)]) {
m_jit.move(AssemblyHelpers::TrustedImmPtr(scratchDataBuffer + currentPoisonIndex), addressGPR);
m_jit.storeDouble(FPRInfo::fpRegT0, addressGPR);
} else
positive.link(&m_jit);
- if (exit.isVariable(index) && poisonedVirtualRegisters[exit.variableForIndex(index)]) {
+ if (operands.isVariable(index) && poisonedVirtualRegisters[operands.variableForIndex(index)]) {
m_jit.store32(recovery.gpr(), reinterpret_cast<char*>(scratchDataBuffer + currentPoisonIndex) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), reinterpret_cast<char*>(scratchDataBuffer + currentPoisonIndex) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
} else {
m_jit.loadDouble(addressGPR, FPRInfo::fpRegT0);
m_jit.loadPtr(myScratch, addressGPR);
- if (exit.isVariable(index) && poisonedVirtualRegisters[exit.variableForIndex(index)]) {
- m_poisonScratchIndices[exit.variableForIndex(index)] = currentPoisonIndex;
+ if (operands.isVariable(index) && poisonedVirtualRegisters[operands.variableForIndex(index)]) {
+ m_poisonScratchIndices[operands.variableForIndex(index)] = currentPoisonIndex;
currentPoisonIndex++;
}
break;
// 7) Dump all doubles into the register file, or to the scratch storage if the
// destination virtual register is poisoned.
if (haveFPRs) {
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
if (recovery.technique() != InFPR)
continue;
- if (exit.isVariable(index) && poisonedVirtualRegisters[exit.variableForIndex(index)]) {
+ if (operands.isVariable(index) && poisonedVirtualRegisters[operands.variableForIndex(index)]) {
m_jit.storeDouble(recovery.fpr(), scratchDataBuffer + currentPoisonIndex);
- m_poisonScratchIndices[exit.variableForIndex(index)] = currentPoisonIndex;
+ m_poisonScratchIndices[operands.variableForIndex(index)] = currentPoisonIndex;
currentPoisonIndex++;
} else
- m_jit.storeDouble(recovery.fpr(), AssemblyHelpers::addressFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.storeDouble(recovery.fpr(), AssemblyHelpers::addressFor((VirtualRegister)operands.operandForIndex(index)));
}
}
// that is far from guaranteed.
unsigned displacementIndex = 0;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case DisplacedInRegisterFile:
m_jit.load32(AssemblyHelpers::payloadFor(recovery.virtualRegister()), GPRInfo::toRegister(displacementIndex++));
}
displacementIndex = 0;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case DisplacedInRegisterFile:
case Int32DisplacedInRegisterFile:
case CellDisplacedInRegisterFile:
case BooleanDisplacedInRegisterFile:
- m_jit.store32(GPRInfo::toRegister(displacementIndex++), AssemblyHelpers::payloadFor((VirtualRegister)exit.operandForIndex(index)));
- m_jit.store32(GPRInfo::toRegister(displacementIndex++), AssemblyHelpers::tagFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.store32(GPRInfo::toRegister(displacementIndex++), AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
+ m_jit.store32(GPRInfo::toRegister(displacementIndex++), AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
break;
default:
break;
// to their new (old JIT) locations.
unsigned scratchIndex = numberOfPoisonedVirtualRegisters;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case DisplacedInRegisterFile:
m_jit.load32(AssemblyHelpers::payloadFor(recovery.virtualRegister()), GPRInfo::regT0);
}
scratchIndex = numberOfPoisonedVirtualRegisters;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case DisplacedInRegisterFile:
m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + scratchIndex) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), GPRInfo::regT0);
m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + scratchIndex) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag), GPRInfo::regT1);
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)exit.operandForIndex(index)));
- m_jit.store32(GPRInfo::regT1, AssemblyHelpers::tagFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
+ m_jit.store32(GPRInfo::regT1, AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
scratchIndex++;
break;
case Int32DisplacedInRegisterFile:
m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + scratchIndex++) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), GPRInfo::regT0);
- m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), AssemblyHelpers::tagFor((VirtualRegister)exit.operandForIndex(index)));
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
+ m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
break;
case CellDisplacedInRegisterFile:
m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + scratchIndex++) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), GPRInfo::regT0);
- m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::CellTag), AssemblyHelpers::tagFor((VirtualRegister)exit.operandForIndex(index)));
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::CellTag), AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
+ m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
break;
case BooleanDisplacedInRegisterFile:
m_jit.load32(reinterpret_cast<char*>(scratchDataBuffer + scratchIndex++) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), GPRInfo::regT0);
- m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::BooleanTag), AssemblyHelpers::tagFor((VirtualRegister)exit.operandForIndex(index)));
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.store32(AssemblyHelpers::TrustedImm32(JSValue::BooleanTag), AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
+ m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
break;
default:
break;
// 9) Dump all poisoned virtual registers.
if (numberOfPoisonedVirtualRegisters) {
- for (int virtualRegister = 0; virtualRegister < (int)exit.m_variables.size(); ++virtualRegister) {
+ for (int virtualRegister = 0; virtualRegister < (int)operands.numberOfLocals(); ++virtualRegister) {
if (!poisonedVirtualRegisters[virtualRegister])
continue;
- const ValueRecovery& recovery = exit.m_variables[virtualRegister];
+ const ValueRecovery& recovery = operands.local(virtualRegister);
switch (recovery.technique()) {
case InGPR:
case UnboxedInt32InGPR:
m_jit.move(AssemblyHelpers::TrustedImm32(jsUndefined().tag()), GPRInfo::regT1);
}
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
if (recovery.technique() != Constant)
continue;
if (recovery.constant().isUndefined()) {
- m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)exit.operandForIndex(index)));
- m_jit.store32(GPRInfo::regT1, AssemblyHelpers::tagFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.store32(GPRInfo::regT0, AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
+ m_jit.store32(GPRInfo::regT1, AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
} else {
- m_jit.store32(AssemblyHelpers::TrustedImm32(recovery.constant().payload()), AssemblyHelpers::payloadFor((VirtualRegister)exit.operandForIndex(index)));
- m_jit.store32(AssemblyHelpers::TrustedImm32(recovery.constant().tag()), AssemblyHelpers::tagFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.store32(AssemblyHelpers::TrustedImm32(recovery.constant().payload()), AssemblyHelpers::payloadFor((VirtualRegister)operands.operandForIndex(index)));
+ m_jit.store32(AssemblyHelpers::TrustedImm32(recovery.constant().tag()), AssemblyHelpers::tagFor((VirtualRegister)operands.operandForIndex(index)));
}
}
}
// registers.
if (haveArguments) {
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
if (recovery.technique() != ArgumentsThatWereNotCreated)
continue;
- int operand = exit.operandForIndex(index);
+ int operand = operands.operandForIndex(index);
// Find the right inline call frame.
InlineCallFrame* inlineCallFrame = 0;
for (InlineCallFrame* current = exit.m_codeOrigin.inlineCallFrame;
#if ENABLE(DFG_JIT) && USE(JSVALUE64)
#include "DFGOperations.h"
+#include <wtf/DataLog.h>
namespace JSC { namespace DFG {
-void OSRExitCompiler::compileExit(const OSRExit& exit, SpeculationRecovery* recovery)
+void OSRExitCompiler::compileExit(const OSRExit& exit, const Operands<ValueRecovery>& operands, SpeculationRecovery* recovery)
{
// 1) Pro-forma stuff.
#if DFG_ENABLE(DEBUG_VERBOSE)
dataLog(" -> %p ", codeOrigin.inlineCallFrame->executable.get());
}
dataLog(") ");
- exit.dump(WTF::dataFile());
+ dumpOperands(operands, WTF::dataFile());
#endif
#if DFG_ENABLE(VERBOSE_SPECULATION_FAILURE)
SpeculationFailureDebugInfo* debugInfo = new SpeculationFailureDebugInfo;
// GPRInfo::numberOfRegisters of them. Also see if there are any constants,
// any undefined slots, any FPR slots, and any unboxed ints.
- Vector<bool> poisonedVirtualRegisters(exit.m_variables.size());
+ Vector<bool> poisonedVirtualRegisters(operands.numberOfLocals());
for (unsigned i = 0; i < poisonedVirtualRegisters.size(); ++i)
poisonedVirtualRegisters[i] = false;
bool haveUInt32s = false;
bool haveArguments = false;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case Int32DisplacedInRegisterFile:
case DoubleDisplacedInRegisterFile:
// to ensure this happens efficiently. Note that we expect this case
// to be rare, so the handling of it is optimized for the cases in
// which it does not happen.
- if (recovery.virtualRegister() < (int)exit.m_variables.size()) {
- switch (exit.m_variables[recovery.virtualRegister()].technique()) {
+ if (recovery.virtualRegister() < (int)operands.numberOfLocals()) {
+ switch (operands.local(recovery.virtualRegister()).technique()) {
case InGPR:
case UnboxedInt32InGPR:
case UInt32InGPR:
// 5) Perform all reboxing of integers.
if (haveUnboxedInt32s || haveUInt32s) {
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case UnboxedInt32InGPR:
if (recovery.gpr() != alreadyBoxed)
break;
case AlreadyInRegisterFileAsUnboxedInt32:
- m_jit.store32(AssemblyHelpers::TrustedImm32(static_cast<uint32_t>(TagTypeNumber >> 32)), AssemblyHelpers::tagFor(static_cast<VirtualRegister>(exit.operandForIndex(index))));
+ m_jit.store32(AssemblyHelpers::TrustedImm32(static_cast<uint32_t>(TagTypeNumber >> 32)), AssemblyHelpers::tagFor(static_cast<VirtualRegister>(operands.operandForIndex(index))));
break;
case UInt32InGPR: {
// Note that GPRs do not have a fast change (like haveFPRs) because we expect that
// most OSR failure points will have at least one GPR that needs to be dumped.
- initializePoisoned(exit.m_variables.size());
+ initializePoisoned(operands.numberOfLocals());
unsigned currentPoisonIndex = 0;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
- int operand = exit.operandForIndex(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
+ int operand = operands.operandForIndex(index);
switch (recovery.technique()) {
case InGPR:
case UnboxedInt32InGPR:
case UInt32InGPR:
- if (exit.isVariable(index) && poisonedVirtualRegisters[exit.variableForIndex(index)]) {
+ if (operands.isVariable(index) && poisonedVirtualRegisters[operands.variableForIndex(index)]) {
m_jit.storePtr(recovery.gpr(), scratchDataBuffer + currentPoisonIndex);
- m_poisonScratchIndices[exit.variableForIndex(index)] = currentPoisonIndex;
+ m_poisonScratchIndices[operands.variableForIndex(index)] = currentPoisonIndex;
currentPoisonIndex++;
} else
m_jit.storePtr(recovery.gpr(), AssemblyHelpers::addressFor((VirtualRegister)operand));
if (haveFPRs) {
// 7) Box all doubles (relies on there being more GPRs than FPRs)
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
if (recovery.technique() != InFPR)
continue;
FPRReg fpr = recovery.fpr();
// 8) Dump all doubles into the register file, or to the scratch storage if
// the destination virtual register is poisoned.
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
if (recovery.technique() != InFPR)
continue;
GPRReg gpr = GPRInfo::toRegister(FPRInfo::toIndex(recovery.fpr()));
- if (exit.isVariable(index) && poisonedVirtualRegisters[exit.variableForIndex(index)]) {
+ if (operands.isVariable(index) && poisonedVirtualRegisters[operands.variableForIndex(index)]) {
m_jit.storePtr(gpr, scratchDataBuffer + currentPoisonIndex);
- m_poisonScratchIndices[exit.variableForIndex(index)] = currentPoisonIndex;
+ m_poisonScratchIndices[operands.variableForIndex(index)] = currentPoisonIndex;
currentPoisonIndex++;
} else
- m_jit.storePtr(gpr, AssemblyHelpers::addressFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.storePtr(gpr, AssemblyHelpers::addressFor((VirtualRegister)operands.operandForIndex(index)));
}
}
// 9) Box all unboxed doubles in the register file.
if (haveUnboxedDoubles) {
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
if (recovery.technique() != AlreadyInRegisterFileAsUnboxedDouble)
continue;
- m_jit.loadDouble(AssemblyHelpers::addressFor((VirtualRegister)exit.operandForIndex(index)), FPRInfo::fpRegT0);
+ m_jit.loadDouble(AssemblyHelpers::addressFor((VirtualRegister)operands.operandForIndex(index)), FPRInfo::fpRegT0);
m_jit.boxDouble(FPRInfo::fpRegT0, GPRInfo::regT0);
- m_jit.storePtr(GPRInfo::regT0, AssemblyHelpers::addressFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.storePtr(GPRInfo::regT0, AssemblyHelpers::addressFor((VirtualRegister)operands.operandForIndex(index)));
}
}
// that is far from guaranteed.
unsigned displacementIndex = 0;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case DisplacedInRegisterFile:
m_jit.loadPtr(AssemblyHelpers::addressFor(recovery.virtualRegister()), GPRInfo::toRegister(displacementIndex++));
}
displacementIndex = 0;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case DisplacedInRegisterFile:
case Int32DisplacedInRegisterFile:
case DoubleDisplacedInRegisterFile:
- m_jit.storePtr(GPRInfo::toRegister(displacementIndex++), AssemblyHelpers::addressFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.storePtr(GPRInfo::toRegister(displacementIndex++), AssemblyHelpers::addressFor((VirtualRegister)operands.operandForIndex(index)));
break;
default:
// to their new (old JIT) locations.
unsigned scratchIndex = numberOfPoisonedVirtualRegisters;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case DisplacedInRegisterFile:
}
scratchIndex = numberOfPoisonedVirtualRegisters;
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
switch (recovery.technique()) {
case DisplacedInRegisterFile:
case Int32DisplacedInRegisterFile:
case DoubleDisplacedInRegisterFile:
m_jit.loadPtr(scratchDataBuffer + scratchIndex++, GPRInfo::regT0);
- m_jit.storePtr(GPRInfo::regT0, AssemblyHelpers::addressFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.storePtr(GPRInfo::regT0, AssemblyHelpers::addressFor((VirtualRegister)operands.operandForIndex(index)));
break;
default:
// 11) Dump all poisoned virtual registers.
if (numberOfPoisonedVirtualRegisters) {
- for (int virtualRegister = 0; virtualRegister < (int)exit.m_variables.size(); ++virtualRegister) {
+ for (int virtualRegister = 0; virtualRegister < (int)operands.numberOfLocals(); ++virtualRegister) {
if (!poisonedVirtualRegisters[virtualRegister])
continue;
- const ValueRecovery& recovery = exit.m_variables[virtualRegister];
+ const ValueRecovery& recovery = operands.local(virtualRegister);
switch (recovery.technique()) {
case InGPR:
case UnboxedInt32InGPR:
if (haveUndefined)
m_jit.move(AssemblyHelpers::TrustedImmPtr(JSValue::encode(jsUndefined())), GPRInfo::regT0);
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
if (recovery.technique() != Constant)
continue;
if (recovery.constant().isUndefined())
- m_jit.storePtr(GPRInfo::regT0, AssemblyHelpers::addressFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.storePtr(GPRInfo::regT0, AssemblyHelpers::addressFor((VirtualRegister)operands.operandForIndex(index)));
else
- m_jit.storePtr(AssemblyHelpers::TrustedImmPtr(JSValue::encode(recovery.constant())), AssemblyHelpers::addressFor((VirtualRegister)exit.operandForIndex(index)));
+ m_jit.storePtr(AssemblyHelpers::TrustedImmPtr(JSValue::encode(recovery.constant())), AssemblyHelpers::addressFor((VirtualRegister)operands.operandForIndex(index)));
}
}
// registers.
if (haveArguments) {
- for (int index = 0; index < exit.numberOfRecoveries(); ++index) {
- const ValueRecovery& recovery = exit.valueRecovery(index);
+ for (size_t index = 0; index < operands.size(); ++index) {
+ const ValueRecovery& recovery = operands[index];
if (recovery.technique() != ArgumentsThatWereNotCreated)
continue;
- int operand = exit.operandForIndex(index);
+ int operand = operands.operandForIndex(index);
// Find the right inline call frame.
InlineCallFrame* inlineCallFrame = 0;
for (InlineCallFrame* current = exit.m_codeOrigin.inlineCallFrame;
bool performPredictionPropagation(Graph& graph)
{
+ // Mark this phase for the sampling profiler; per the ChangeLog this is a
+ // no-op unless ENABLE(SAMPLING_REGIONS) is turned on.
+ SamplingRegion samplingRegion("DFG Prediction Propagation Phase");
return runPhase<PredictionPropagationPhase>(graph);
}
bool performRedundantPhiElimination(Graph& graph)
{
+ // Mark this phase for the sampling profiler; disabled (no-op) unless
+ // ENABLE(SAMPLING_REGIONS) is set.
+ SamplingRegion samplingRegion("DFG Redundant Phi Elimination Phase");
return runPhase<RedundantPhiEliminationPhase>(graph);
}
, m_variables(jit.graph().m_localVars)
, m_lastSetOperand(std::numeric_limits<int>::max())
, m_state(m_jit.graph())
+ , m_stream(&jit.codeBlock()->variableEventStream())
+ , m_minifiedGraph(&jit.codeBlock()->minifiedDFG())
, m_isCheckingArgumentTypes(false)
{
}
GPRReg gpr = allocate();
m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
m_jit.loadPtr(JITCompiler::addressFor(virtualRegister), gpr);
- info.fillStorage(gpr);
+ info.fillStorage(*m_stream, gpr);
return gpr;
}
}
#endif
-void ValueSource::dump(FILE* out) const
-{
- switch (kind()) {
- case SourceNotSet:
- fprintf(out, "NotSet");
- break;
- case SourceIsDead:
- fprintf(out, "IsDead");
- break;
- case ValueInRegisterFile:
- fprintf(out, "InRegFile");
- break;
- case Int32InRegisterFile:
- fprintf(out, "Int32");
- break;
- case CellInRegisterFile:
- fprintf(out, "Cell");
- break;
- case BooleanInRegisterFile:
- fprintf(out, "Bool");
- break;
- case DoubleInRegisterFile:
- fprintf(out, "Double");
- break;
- case ArgumentsSource:
- fprintf(out, "Arguments");
- break;
- case HaveNode:
- fprintf(out, "Node(%d)", m_nodeIndex);
- break;
- }
-}
-
void SpeculativeJIT::compilePeepHoleDoubleBranch(Node& node, NodeIndex branchNodeIndex, JITCompiler::DoubleCondition condition)
{
Node& branchNode = at(branchNodeIndex);
return false;
}
+// Records into the variable event stream that the given node's value has
+// become relevant for OSR exit, so the exit compiler can later reconstruct
+// its recovery lazily instead of us computing it eagerly here.
+void SpeculativeJIT::noticeOSRBirth(NodeIndex nodeIndex, Node& node)
+{
+ // Nodes with no virtual register produce no tracked machine value.
+ if (!node.hasVirtualRegister())
+ return;
+
+ VirtualRegister virtualRegister = node.virtualRegister();
+ GenerationInfo& info = m_generationInfo[virtualRegister];
+
+ // GenerationInfo appends the appropriate birth event(s) to the stream.
+ info.noticeOSRBirth(*m_stream, nodeIndex, virtualRegister);
+}
+
void SpeculativeJIT::compileMovHint(Node& node)
{
ASSERT(node.op() == SetLocal);
- setNodeIndexForOperand(node.child1().index(), node.local());
m_lastSetOperand = node.local();
+
+ // Instead of eagerly recording the node-for-operand mapping (the removed
+ // setNodeIndexForOperand call above), emit events into the variable event
+ // stream so the OSR exit compiler can reconstruct recoveries on demand.
+ Node& child = at(node.child1());
+ noticeOSRBirth(node.child1().index(), child);
+
+ // For UInt32ToNumber, its operand carries the underlying value as well;
+ // note its birth too so it is available at exit time.
+ if (child.op() == UInt32ToNumber)
+ noticeOSRBirth(child.child1().index(), at(child.child1()));
+
+ m_stream->appendAndLog(VariableEvent::movHint(node.child1().index(), node.local()));
}
void SpeculativeJIT::compile(BasicBlock& block)
m_jit.breakpoint();
#endif
+#if DFG_ENABLE(DEBUG_VERBOSE)
+ dataLog("Setting up state for block #%u: ", m_block);
+#endif
+
+ m_stream->appendAndLog(VariableEvent::reset());
+
m_jit.jitAssertHasValidCallFrame();
ASSERT(m_arguments.size() == block.variablesAtHead.numberOfArguments());
- for (size_t i = 0; i < m_arguments.size(); ++i)
- m_arguments[i] = ValueSource(ValueInRegisterFile);
+ for (size_t i = 0; i < m_arguments.size(); ++i) {
+ ValueSource valueSource = ValueSource(ValueInRegisterFile);
+ m_arguments[i] = valueSource;
+ m_stream->appendAndLog(VariableEvent::setLocal(argumentToOperand(i), valueSource.dataFormat()));
+ }
m_state.reset();
m_state.beginBasicBlock(&block);
ASSERT(m_variables.size() == block.variablesAtHead.numberOfLocals());
for (size_t i = 0; i < m_variables.size(); ++i) {
NodeIndex nodeIndex = block.variablesAtHead.local(i);
+ ValueSource valueSource;
if (nodeIndex == NoNode)
- m_variables[i] = ValueSource(SourceIsDead);
+ valueSource = ValueSource(SourceIsDead);
else if (at(nodeIndex).variableAccessData()->isArgumentsAlias())
- m_variables[i] = ValueSource(ArgumentsSource);
+ valueSource = ValueSource(ArgumentsSource);
else if (at(nodeIndex).variableAccessData()->isCaptured())
- m_variables[i] = ValueSource(ValueInRegisterFile);
+ valueSource = ValueSource(ValueInRegisterFile);
else if (!at(nodeIndex).refCount())
- m_variables[i] = ValueSource(SourceIsDead);
+ valueSource = ValueSource(SourceIsDead);
else if (at(nodeIndex).variableAccessData()->shouldUseDoubleFormat())
- m_variables[i] = ValueSource(DoubleInRegisterFile);
+ valueSource = ValueSource(DoubleInRegisterFile);
else
- m_variables[i] = ValueSource::forSpeculation(at(nodeIndex).variableAccessData()->argumentAwarePrediction());
+ valueSource = ValueSource::forSpeculation(at(nodeIndex).variableAccessData()->argumentAwarePrediction());
+ m_variables[i] = valueSource;
+ m_stream->appendAndLog(VariableEvent::setLocal(i, valueSource.dataFormat()));
}
m_lastSetOperand = std::numeric_limits<int>::max();
verificationSucceeded.link(&m_jit);
}
+#if DFG_ENABLE(DEBUG_VERBOSE)
+ dataLog("\n");
+#endif
+
for (m_indexInBlock = 0; m_indexInBlock < block.size(); ++m_indexInBlock) {
m_compileIndex = block[m_indexInBlock];
m_jit.setForNode(m_compileIndex);
dataLog("SpeculativeJIT skipping Node @%d (bc#%u) at JIT offset 0x%x ", (int)m_compileIndex, node.codeOrigin.bytecodeIndex, m_jit.debugOffset());
#endif
switch (node.op()) {
+ case JSConstant:
+ m_minifiedGraph->append(MinifiedNode::fromNode(m_compileIndex, node));
+ break;
+
+ case WeakJSConstant:
+ m_jit.addWeakReference(node.weakConstant());
+ m_minifiedGraph->append(MinifiedNode::fromNode(m_compileIndex, node));
+ break;
+
case SetLocal:
compileMovHint(node);
break;
break;
}
- case WeakJSConstant:
- m_jit.addWeakReference(node.weakConstant());
- break;
-
default:
+ if (belongsInMinifiedGraph(node.op()))
+ m_minifiedGraph->append(MinifiedNode::fromNode(m_compileIndex, node));
break;
}
} else {
return;
}
+ if (belongsInMinifiedGraph(node.op())) {
+ m_minifiedGraph->append(MinifiedNode::fromNode(m_compileIndex, node));
+ noticeOSRBirth(m_compileIndex, node);
+ }
+
#if DFG_ENABLE(DEBUG_VERBOSE)
if (node.hasResult()) {
GenerationInfo& info = m_generationInfo[node.virtualRegister()];
#endif
}
-#if DFG_ENABLE(VERBOSE_VALUE_RECOVERIES)
- for (size_t i = 0; i < m_arguments.size(); ++i)
- computeValueRecoveryFor(argumentToOperand(i)).dump(stderr);
-
- dataLog(" : ");
-
- for (int operand = 0; operand < (int)m_variables.size(); ++operand)
- computeValueRecoveryFor(operand).dump(stderr);
-#endif
-
#if DFG_ENABLE(DEBUG_VERBOSE)
dataLog("\n");
#endif
ValueRecovery SpeculativeJIT::computeValueRecoveryFor(const ValueSource& valueSource)
{
+ // Fast path: sources that state "the value is already in the register
+ // file in some format" carry their recovery directly.
- switch (valueSource.kind()) {
- case SourceIsDead:
- return ValueRecovery::constant(jsUndefined());
-
- case ValueInRegisterFile:
- return ValueRecovery::alreadyInRegisterFile();
-
- case Int32InRegisterFile:
- return ValueRecovery::alreadyInRegisterFileAsUnboxedInt32();
-
- case CellInRegisterFile:
- return ValueRecovery::alreadyInRegisterFileAsUnboxedCell();
-
- case BooleanInRegisterFile:
- return ValueRecovery::alreadyInRegisterFileAsUnboxedBoolean();
-
- case DoubleInRegisterFile:
- return ValueRecovery::alreadyInRegisterFileAsUnboxedDouble();
-
- case ArgumentsSource:
- return ValueRecovery::argumentsThatWereNotCreated();
-
- case HaveNode: {
- Node* nodePtr = &at(valueSource.nodeIndex());
-
- if (nodePtr->isPhantomArguments())
- return ValueRecovery::argumentsThatWereNotCreated();
+ if (valueSource.isInRegisterFile())
+ return valueSource.valueRecovery();
- if (nodePtr->hasConstant())
- return ValueRecovery::constant(valueOfJSConstant(valueSource.nodeIndex()));
-
- GenerationInfo* infoPtr;
- if (nodePtr->shouldGenerate())
- infoPtr = &m_generationInfo[nodePtr->virtualRegister()];
- else
- infoPtr = 0;
- if (!infoPtr || !infoPtr->alive() || infoPtr->nodeIndex() != valueSource.nodeIndex()) {
- // Try to see if there is an alternate node that would contain the value we want.
- // There are four possibilities:
- //
- // Int32ToDouble: We can use this in place of the original node, but
- // we'd rather not; so we use it only if it is the only remaining
- // live version.
- //
- // ValueToInt32: If the only remaining live version of the value is
- // ValueToInt32, then we can use it.
- //
- // UInt32ToNumber: If the only live version of the value is a UInt32ToNumber
- // then the only remaining uses are ones that want a properly formed number
- // rather than a UInt32 intermediate.
- //
- // The reverse of the above: This node could be a UInt32ToNumber, but its
- // alternative is still alive. This means that the only remaining uses of
- // the number would be fine with a UInt32 intermediate.
- //
- // DoubleAsInt32: Same as UInt32ToNumber.
- //
-
- bool found = false;
-
- if (nodePtr->op() == UInt32ToNumber || nodePtr->op() == DoubleAsInt32) {
- NodeIndex nodeIndex = nodePtr->child1().index();
- nodePtr = &at(nodeIndex);
- if (nodePtr->shouldGenerate()) {
- infoPtr = &m_generationInfo[nodePtr->virtualRegister()];
- if (infoPtr->alive() && infoPtr->nodeIndex() == nodeIndex)
- found = true;
- }
- }
-
- if (!found) {
- NodeIndex int32ToDoubleIndex = NoNode;
- NodeIndex valueToInt32Index = NoNode;
- NodeIndex uint32ToNumberIndex = NoNode;
- NodeIndex doubleAsInt32Index = NoNode;
-
- for (unsigned virtualRegister = 0; virtualRegister < m_generationInfo.size(); ++virtualRegister) {
- GenerationInfo& info = m_generationInfo[virtualRegister];
- if (!info.alive())
- continue;
- if (info.nodeIndex() == NoNode)
- continue;
- Node& node = at(info.nodeIndex());
- if (node.child1Unchecked() != valueSource.nodeIndex())
- continue;
- switch (node.op()) {
- case Int32ToDouble:
- int32ToDoubleIndex = info.nodeIndex();
- break;
- case ValueToInt32:
- valueToInt32Index = info.nodeIndex();
- break;
- case UInt32ToNumber:
- uint32ToNumberIndex = info.nodeIndex();
- break;
- case DoubleAsInt32:
- doubleAsInt32Index = info.nodeIndex();
- default:
- break;
- }
- }
-
- NodeIndex nodeIndexToUse;
- if (doubleAsInt32Index != NoNode)
- nodeIndexToUse = doubleAsInt32Index;
- else if (int32ToDoubleIndex != NoNode)
- nodeIndexToUse = int32ToDoubleIndex;
- else if (valueToInt32Index != NoNode)
- nodeIndexToUse = valueToInt32Index;
- else if (uint32ToNumberIndex != NoNode)
- nodeIndexToUse = uint32ToNumberIndex;
- else
- nodeIndexToUse = NoNode;
-
- if (nodeIndexToUse != NoNode) {
- nodePtr = &at(nodeIndexToUse);
- if (nodePtr->shouldGenerate()) {
- infoPtr = &m_generationInfo[nodePtr->virtualRegister()];
- ASSERT(infoPtr->alive() && infoPtr->nodeIndex() == nodeIndexToUse);
- found = true;
- }
- }
- }
-
- if (!found)
- return ValueRecovery::constant(jsUndefined());
- }
-
- ASSERT(infoPtr->alive());
-
- if (infoPtr->registerFormat() != DataFormatNone) {
- if (infoPtr->registerFormat() == DataFormatDouble)
- return ValueRecovery::inFPR(infoPtr->fpr());
-#if USE(JSVALUE32_64)
- if (infoPtr->registerFormat() & DataFormatJS)
- return ValueRecovery::inPair(infoPtr->tagGPR(), infoPtr->payloadGPR());
-#endif
- return ValueRecovery::inGPR(infoPtr->gpr(), infoPtr->registerFormat());
- }
- if (infoPtr->spillFormat() != DataFormatNone)
- return ValueRecovery::displacedInRegisterFile(static_cast<VirtualRegister>(nodePtr->virtualRegister()), infoPtr->spillFormat());
+ // Otherwise the value is tied to a DFG node. Under the lazy scheme only
+ // constants are resolved here; the old register/spill-walking logic
+ // (deleted above) presumably moves to the OSR exit compiler, which
+ // replays the minified graph and variable event stream — confirm
+ // against DFGOSRExitCompiler/DFGVariableEventStream.
+ ASSERT(valueSource.kind() == HaveNode);
+ if (isConstant(valueSource.nodeIndex()))
+ return ValueRecovery::constant(valueOfJSConstant(valueSource.nodeIndex()));
- ASSERT_NOT_REACHED();
- return ValueRecovery();
- }
-
- default:
- ASSERT_NOT_REACHED();
- return ValueRecovery();
- }
+ // Non-constant node values get an empty (unset) recovery here.
+ return ValueRecovery();
}
void SpeculativeJIT::compileGetCharCodeAt(Node& node)
case DataFormatJSDouble:
case DataFormatDouble:
return GeneratedOperandDouble;
+
+ default:
+ ASSERT_NOT_REACHED();
+ return GeneratedOperandTypeUnknown;
}
-
- ASSERT_NOT_REACHED();
- return GeneratedOperandTypeUnknown;
}
void SpeculativeJIT::compileValueToInt32(Node& node)
/*
- * Copyright (C) 2011 Apple Inc. All rights reserved.
+ * Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
#ifndef DFGSpeculativeJIT_h
#define DFGSpeculativeJIT_h
+#include <wtf/Platform.h>
+
#if ENABLE(DFG_JIT)
#include "DFGAbstractState.h"
#include "DFGOSRExit.h"
#include "DFGOperations.h"
#include "DFGSilentRegisterSavePlan.h"
+#include "DFGValueSource.h"
#include "MarkedAllocator.h"
#include "ValueRecovery.h"
class SpeculateCellOperand;
class SpeculateBooleanOperand;
-
-enum ValueSourceKind {
- SourceNotSet,
- ValueInRegisterFile,
- Int32InRegisterFile,
- CellInRegisterFile,
- BooleanInRegisterFile,
- DoubleInRegisterFile,
- ArgumentsSource,
- SourceIsDead,
- HaveNode
-};
-
-class ValueSource {
-public:
- ValueSource()
- : m_nodeIndex(nodeIndexFromKind(SourceNotSet))
- {
- }
-
- explicit ValueSource(ValueSourceKind valueSourceKind)
- : m_nodeIndex(nodeIndexFromKind(valueSourceKind))
- {
- ASSERT(kind() != SourceNotSet);
- ASSERT(kind() != HaveNode);
- }
-
- explicit ValueSource(NodeIndex nodeIndex)
- : m_nodeIndex(nodeIndex)
- {
- ASSERT(kind() == HaveNode);
- }
-
- static ValueSource forSpeculation(SpeculatedType prediction)
- {
- if (isInt32Speculation(prediction))
- return ValueSource(Int32InRegisterFile);
- if (isArraySpeculation(prediction))
- return ValueSource(CellInRegisterFile);
- if (isBooleanSpeculation(prediction))
- return ValueSource(BooleanInRegisterFile);
- return ValueSource(ValueInRegisterFile);
- }
-
- bool isSet() const
- {
- return kindFromNodeIndex(m_nodeIndex) != SourceNotSet;
- }
-
- ValueSourceKind kind() const
- {
- return kindFromNodeIndex(m_nodeIndex);
- }
-
- NodeIndex nodeIndex() const
- {
- ASSERT(kind() == HaveNode);
- return m_nodeIndex;
- }
-
- void dump(FILE* out) const;
-
-private:
- static NodeIndex nodeIndexFromKind(ValueSourceKind kind)
- {
- ASSERT(kind >= SourceNotSet && kind < HaveNode);
- return NoNode - kind;
- }
-
- static ValueSourceKind kindFromNodeIndex(NodeIndex nodeIndex)
- {
- unsigned kind = static_cast<unsigned>(NoNode - nodeIndex);
- if (kind >= static_cast<unsigned>(HaveNode))
- return HaveNode;
- return static_cast<ValueSourceKind>(kind);
- }
-
- NodeIndex m_nodeIndex;
-};
-
-
enum GeneratedOperandType { GeneratedOperandTypeUnknown, GeneratedOperandInteger, GeneratedOperandDouble, GeneratedOperandJSValue};
// === SpeculativeJIT ===
// use() returns true when the value becomes dead, and any
// associated resources may be freed.
- if (!info.use())
+ if (!info.use(*m_stream))
return;
// Release the associated machine registers.
void runSlowPathGenerators();
void compile(Node&);
+ void noticeOSRBirth(NodeIndex, Node&);
void compileMovHint(Node&);
void compile(BasicBlock&);
// Check the GenerationInfo to see if this value need writing
// to the RegisterFile - if not, mark it as spilled & return.
if (!info.needsSpill()) {
- info.setSpilled();
+ info.setSpilled(*m_stream, spillMe);
return;
}
// This is special, since it's not a JS value - as in it's not visible to JS
// code.
m_jit.storePtr(info.gpr(), JITCompiler::addressFor(spillMe));
- info.spill(DataFormatStorage);
+ info.spill(*m_stream, spillMe, DataFormatStorage);
return;
}
case DataFormatInteger: {
m_jit.store32(info.gpr(), JITCompiler::payloadFor(spillMe));
- info.spill(DataFormatInteger);
+ info.spill(*m_stream, spillMe, DataFormatInteger);
return;
}
#if USE(JSVALUE64)
case DataFormatDouble: {
m_jit.storeDouble(info.fpr(), JITCompiler::addressFor(spillMe));
- info.spill(DataFormatDouble);
+ info.spill(*m_stream, spillMe, DataFormatDouble);
return;
}
// Spill the value, and record it as spilled in its boxed form.
m_jit.storePtr(reg, JITCompiler::addressFor(spillMe));
- info.spill((DataFormat)(spillFormat | DataFormatJS));
+ info.spill(*m_stream, spillMe, (DataFormat)(spillFormat | DataFormatJS));
return;
#elif USE(JSVALUE32_64)
case DataFormatCell:
case DataFormatBoolean: {
m_jit.store32(info.gpr(), JITCompiler::payloadFor(spillMe));
- info.spill(spillFormat);
+ info.spill(*m_stream, spillMe, spillFormat);
return;
}
case DataFormatJSDouble: {
// On JSVALUE32_64 boxing a double is a no-op.
m_jit.storeDouble(info.fpr(), JITCompiler::addressFor(spillMe));
- info.spill(DataFormatJSDouble);
+ info.spill(*m_stream, spillMe, DataFormatJSDouble);
return;
}
ASSERT(spillFormat & DataFormatJS);
m_jit.store32(info.tagGPR(), JITCompiler::tagFor(spillMe));
m_jit.store32(info.payloadGPR(), JITCompiler::payloadFor(spillMe));
- info.spill(spillFormat);
+ info.spill(*m_stream, spillMe, spillFormat);
return;
#endif
}
if (!m_compileOkay)
return;
ASSERT(at(m_compileIndex).canExit() || m_isCheckingArgumentTypes);
- m_jit.codeBlock()->appendOSRExit(OSRExit(kind, jsValueSource, m_jit.graph().methodOfGettingAValueProfileFor(nodeIndex), jumpToFail, this));
+ m_jit.codeBlock()->appendOSRExit(OSRExit(kind, jsValueSource, m_jit.graph().methodOfGettingAValueProfileFor(nodeIndex), jumpToFail, this, m_stream->size()));
}
void speculationCheck(ExitKind kind, JSValueSource jsValueSource, Edge nodeUse, MacroAssembler::Jump jumpToFail)
{
return;
ASSERT(at(m_compileIndex).canExit() || m_isCheckingArgumentTypes);
m_jit.codeBlock()->appendSpeculationRecovery(recovery);
- m_jit.codeBlock()->appendOSRExit(OSRExit(kind, jsValueSource, m_jit.graph().methodOfGettingAValueProfileFor(nodeIndex), jumpToFail, this, m_jit.codeBlock()->numberOfSpeculationRecoveries()));
+ m_jit.codeBlock()->appendOSRExit(OSRExit(kind, jsValueSource, m_jit.graph().methodOfGettingAValueProfileFor(nodeIndex), jumpToFail, this, m_stream->size(), m_jit.codeBlock()->numberOfSpeculationRecoveries()));
}
void speculationCheck(ExitKind kind, JSValueSource jsValueSource, Edge nodeUse, MacroAssembler::Jump jumpToFail, const SpeculationRecovery& recovery)
{
m_jit.codeBlock()->appendOSRExit(
OSRExit(kind, jsValueSource,
m_jit.graph().methodOfGettingAValueProfileFor(nodeIndex),
- JITCompiler::Jump(), this)));
+ JITCompiler::Jump(), this, m_stream->size())));
exit.m_watchpointIndex = m_jit.codeBlock()->appendWatchpoint(
Watchpoint(m_jit.watchpointLabel()));
return &m_jit.codeBlock()->watchpoint(exit.m_watchpointIndex);
exit.m_codeOrigin = nextNode->codeOrigin;
exit.m_lastSetOperand = setLocal->local();
- exit.valueRecoveryForOperand(setLocal->local()) = valueRecovery;
+ exit.m_valueRecoveryOverride = adoptRef(
+ new ValueRecoveryOverride(setLocal->local(), valueRecovery));
}
void forwardSpeculationCheck(ExitKind kind, JSValueSource jsValueSource, NodeIndex nodeIndex, MacroAssembler::JumpList& jumpsToFail, const ValueRecovery& valueRecovery)
{
return m_variables[operand];
}
+ // Updates the compiler's record of where this operand's value lives and
+ // mirrors that fact into the variable event stream so the OSR exit
+ // compiler can reconstruct the recovery lazily.
+ void recordSetLocal(int operand, ValueSource valueSource)
+ {
+ valueSourceReferenceForOperand(operand) = valueSource;
+ m_stream->appendAndLog(VariableEvent::setLocal(operand, valueSource.dataFormat()));
+ }
+
+ // The JIT, which also provides MacroAssembler functionality.
JITCompiler& m_jit;
// The current node being generated.
AbstractState m_state;
+ VariableEventStream* m_stream;
+ MinifiedGraph* m_minifiedGraph;
+
bool m_isCheckingArgumentTypes;
Vector<SlowPathGenerator*, 8> m_slowPathGenerators; // doesn't use OwnPtr<> because I don't want to include DFGSlowPathGenerator.h
m_jit.load32(JITCompiler::payloadFor(virtualRegister), gpr);
}
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
returnFormat = DataFormatInteger;
return gpr;
}
m_gprs.release(tagGPR);
m_gprs.release(payloadGPR);
m_gprs.retain(payloadGPR, virtualRegister, SpillOrderInteger);
- info.fillInteger(payloadGPR);
+ info.fillInteger(*m_stream, payloadGPR);
returnFormat = DataFormatInteger;
return payloadGPR;
}
returnFormat = DataFormatInteger;
return gpr;
}
- }
- ASSERT_NOT_REACHED();
- return InvalidGPRReg;
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidGPRReg;
+ }
}
FPRReg SpeculativeJIT::fillDouble(NodeIndex nodeIndex)
GPRReg gpr = allocate();
m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(nodeIndex)), gpr);
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
unlock(gpr);
} else if (isNumberConstant(nodeIndex)) {
FPRReg fpr = fprAllocate();
m_jit.loadDouble(addressOfDoubleConstant(nodeIndex), fpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
} else {
// FIXME: should not be reachable?
FPRReg fpr = fprAllocate();
m_jit.loadDouble(JITCompiler::addressFor(virtualRegister), fpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderSpilled);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
}
hasUnboxedDouble.link(&m_jit);
m_fprs.retain(fpr, virtualRegister, SpillOrderSpilled);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
}
}
m_gprs.unlock(tagGPR);
m_gprs.unlock(payloadGPR);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
info.killSpilled();
return fpr;
}
m_fprs.lock(fpr);
return fpr;
}
- }
- ASSERT_NOT_REACHED();
- return InvalidFPRReg;
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidFPRReg;
+ }
}
bool SpeculativeJIT::fillJSValue(NodeIndex nodeIndex, GPRReg& tagGPR, GPRReg& payloadGPR, FPRReg& fpr)
m_jit.move(Imm32(valueOfJSConstant(nodeIndex).payload()), payloadGPR);
m_gprs.retain(tagGPR, virtualRegister, SpillOrderConstant);
m_gprs.retain(payloadGPR, virtualRegister, SpillOrderConstant);
- info.fillJSValue(tagGPR, payloadGPR, isInt32Constant(nodeIndex) ? DataFormatJSInteger : DataFormatJS);
+ info.fillJSValue(*m_stream, tagGPR, payloadGPR, isInt32Constant(nodeIndex) ? DataFormatJSInteger : DataFormatJS);
} else {
DataFormat spillFormat = info.spillFormat();
ASSERT(spillFormat != DataFormatNone && spillFormat != DataFormatStorage);
m_jit.load32(JITCompiler::payloadFor(virtualRegister), payloadGPR);
m_gprs.retain(tagGPR, virtualRegister, SpillOrderSpilled);
m_gprs.retain(payloadGPR, virtualRegister, SpillOrderSpilled);
- info.fillJSValue(tagGPR, payloadGPR, spillFormat == DataFormatJSDouble ? DataFormatJS : spillFormat);
+ info.fillJSValue(*m_stream, tagGPR, payloadGPR, spillFormat == DataFormatJSDouble ? DataFormatJS : spillFormat);
}
return true;
m_gprs.release(gpr);
m_gprs.retain(tagGPR, virtualRegister, SpillOrderJS);
m_gprs.retain(payloadGPR, virtualRegister, SpillOrderJS);
- info.fillJSValue(tagGPR, payloadGPR, fillFormat);
+ info.fillJSValue(*m_stream, tagGPR, payloadGPR, fillFormat);
return true;
}
m_fprs.release(oldFPR);
m_gprs.retain(tagGPR, virtualRegister, SpillOrderJS);
m_gprs.retain(payloadGPR, virtualRegister, SpillOrderJS);
- info.fillJSValue(tagGPR, payloadGPR, DataFormatJS);
+ info.fillJSValue(*m_stream, tagGPR, payloadGPR, DataFormatJS);
return true;
}
case DataFormatStorage:
// this type currently never occurs
ASSERT_NOT_REACHED();
- }
- ASSERT_NOT_REACHED();
- return true;
+ default:
+ ASSERT_NOT_REACHED();
+ return true;
+ }
}
class ValueToNumberSlowPathGenerator
GPRReg gpr = allocate();
m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(nodeIndex)), gpr);
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
returnFormat = DataFormatInteger;
return gpr;
}
GPRReg gpr = allocate();
m_jit.load32(JITCompiler::payloadFor(virtualRegister), gpr);
m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
returnFormat = DataFormatInteger;
return gpr;
}
m_gprs.release(tagGPR);
m_gprs.release(payloadGPR);
m_gprs.retain(payloadGPR, virtualRegister, SpillOrderInteger);
- info.fillInteger(payloadGPR);
+ info.fillInteger(*m_stream, payloadGPR);
// If !strict we're done, return.
returnFormat = DataFormatInteger;
return payloadGPR;
case DataFormatJSBoolean:
case DataFormatStorage:
ASSERT_NOT_REACHED();
- }
- ASSERT_NOT_REACHED();
- return InvalidGPRReg;
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidGPRReg;
+ }
}
GPRReg SpeculativeJIT::fillSpeculateInt(NodeIndex nodeIndex, DataFormat& returnFormat)
GPRReg gpr = allocate();
m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(nodeIndex)), gpr);
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
unlock(gpr);
} else if (isNumberConstant(nodeIndex)) {
FPRReg fpr = fprAllocate();
m_jit.loadDouble(addressOfDoubleConstant(nodeIndex), fpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderConstant);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
} else
ASSERT_NOT_REACHED();
FPRReg fpr = fprAllocate();
m_jit.loadDouble(JITCompiler::addressFor(virtualRegister), fpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderSpilled);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
}
hasUnboxedDouble.link(&m_jit);
m_fprs.retain(fpr, virtualRegister, SpillOrderSpilled);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
info.killSpilled();
return fpr;
}
m_gprs.unlock(tagGPR);
m_gprs.unlock(payloadGPR);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
info.killSpilled();
return fpr;
}
case DataFormatBoolean:
case DataFormatJSBoolean:
ASSERT_NOT_REACHED();
- }
- ASSERT_NOT_REACHED();
- return InvalidFPRReg;
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidFPRReg;
+ }
}
GPRReg SpeculativeJIT::fillSpeculateCell(NodeIndex nodeIndex)
GPRReg gpr = allocate();
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
m_jit.move(MacroAssembler::TrustedImmPtr(jsValue.asCell()), gpr);
- info.fillCell(gpr);
+ info.fillCell(*m_stream, gpr);
return gpr;
}
GPRReg gpr = allocate();
m_jit.load32(JITCompiler::payloadFor(virtualRegister), gpr);
m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
- info.fillCell(gpr);
+ info.fillCell(*m_stream, gpr);
return gpr;
}
m_gprs.release(tagGPR);
m_gprs.release(payloadGPR);
m_gprs.retain(payloadGPR, virtualRegister, SpillOrderCell);
- info.fillCell(payloadGPR);
+ info.fillCell(*m_stream, payloadGPR);
return payloadGPR;
}
case DataFormatBoolean:
case DataFormatStorage:
ASSERT_NOT_REACHED();
- }
- ASSERT_NOT_REACHED();
- return InvalidGPRReg;
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidGPRReg;
+ }
}
GPRReg SpeculativeJIT::fillSpeculateBoolean(NodeIndex nodeIndex)
GPRReg gpr = allocate();
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
m_jit.move(MacroAssembler::TrustedImm32(jsValue.asBoolean()), gpr);
- info.fillBoolean(gpr);
+ info.fillBoolean(*m_stream, gpr);
return gpr;
}
GPRReg gpr = allocate();
m_jit.load32(JITCompiler::payloadFor(virtualRegister), gpr);
m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
- info.fillBoolean(gpr);
+ info.fillBoolean(*m_stream, gpr);
return gpr;
}
m_gprs.release(tagGPR);
m_gprs.release(payloadGPR);
m_gprs.retain(payloadGPR, virtualRegister, SpillOrderBoolean);
- info.fillBoolean(payloadGPR);
+ info.fillBoolean(*m_stream, payloadGPR);
return payloadGPR;
}
case DataFormatCell:
case DataFormatStorage:
ASSERT_NOT_REACHED();
- }
- ASSERT_NOT_REACHED();
- return InvalidGPRReg;
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidGPRReg;
+ }
}
JITCompiler::Jump SpeculativeJIT::convertToDouble(JSValueOperand& op, FPRReg result)
// Indicate that it's no longer necessary to retrieve the value of
// this bytecode variable from registers or other locations in the register file,
// but that it is stored as a double.
- valueSourceReferenceForOperand(node.local()) = ValueSource(DoubleInRegisterFile);
+ recordSetLocal(node.local(), ValueSource(DoubleInRegisterFile));
break;
}
SpeculatedType predictedType = node.variableAccessData()->argumentAwarePrediction();
DoubleOperand value(this, node.child1());
m_jit.storeDouble(value.fpr(), JITCompiler::addressFor(node.local()));
noResult(m_compileIndex);
- valueSourceReferenceForOperand(node.local()) = ValueSource(DoubleInRegisterFile);
+ recordSetLocal(node.local(), ValueSource(DoubleInRegisterFile));
break;
}
if (isInt32Speculation(predictedType)) {
SpeculateIntegerOperand value(this, node.child1());
m_jit.store32(value.gpr(), JITCompiler::payloadFor(node.local()));
noResult(m_compileIndex);
- valueSourceReferenceForOperand(node.local()) = ValueSource(Int32InRegisterFile);
+ recordSetLocal(node.local(), ValueSource(Int32InRegisterFile));
break;
}
if (isArraySpeculation(predictedType)) {
speculationCheck(BadType, JSValueSource::unboxedCell(cellGPR), node.child1(), m_jit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(cellGPR, JSCell::classInfoOffset()), MacroAssembler::TrustedImmPtr(&JSArray::s_info)));
m_jit.storePtr(cellGPR, JITCompiler::payloadFor(node.local()));
noResult(m_compileIndex);
- valueSourceReferenceForOperand(node.local()) = ValueSource(CellInRegisterFile);
+ recordSetLocal(node.local(), ValueSource(CellInRegisterFile));
break;
}
if (isBooleanSpeculation(predictedType)) {
SpeculateBooleanOperand value(this, node.child1());
m_jit.store32(value.gpr(), JITCompiler::payloadFor(node.local()));
noResult(m_compileIndex);
- valueSourceReferenceForOperand(node.local()) = ValueSource(BooleanInRegisterFile);
+ recordSetLocal(node.local(), ValueSource(BooleanInRegisterFile));
break;
}
}
m_jit.store32(value.payloadGPR(), JITCompiler::payloadFor(node.local()));
m_jit.store32(value.tagGPR(), JITCompiler::tagFor(node.local()));
noResult(m_compileIndex);
- valueSourceReferenceForOperand(node.local()) = ValueSource(ValueInRegisterFile);
+ recordSetLocal(node.local(), ValueSource(ValueInRegisterFile));
break;
}
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
if (isInt32Constant(nodeIndex)) {
m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(nodeIndex)), gpr);
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
returnFormat = DataFormatInteger;
return gpr;
}
// Since we statically know that we're filling an integer, and values
// in the RegisterFile are boxed, this must be DataFormatJSInteger.
// We will check this with a jitAssert below.
- info.fillJSValue(gpr, DataFormatJSInteger);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
unlock(gpr);
}
returnFormat = DataFormatInteger;
return gpr;
}
+
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidGPRReg;
}
-
- ASSERT_NOT_REACHED();
- return InvalidGPRReg;
}
FPRReg SpeculativeJIT::fillDouble(NodeIndex nodeIndex)
// FIXME: should not be reachable?
m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(nodeIndex)), gpr);
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
unlock(gpr);
} else if (isNumberConstant(nodeIndex)) {
FPRReg fpr = fprAllocate();
unlock(gpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
} else {
// FIXME: should not be reachable?
JSValue jsValue = valueOfJSConstant(nodeIndex);
m_jit.move(MacroAssembler::TrustedImmPtr(JSValue::encode(jsValue)), gpr);
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
- info.fillJSValue(gpr, DataFormatJS);
+ info.fillJSValue(*m_stream, gpr, DataFormatJS);
unlock(gpr);
}
} else {
FPRReg fpr = fprAllocate();
m_jit.loadDouble(JITCompiler::addressFor(virtualRegister), fpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
}
m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
unlock(gpr);
break;
}
ASSERT(spillFormat & DataFormatJS);
m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
m_jit.loadPtr(JITCompiler::addressFor(virtualRegister), gpr);
- info.fillJSValue(gpr, spillFormat);
+ info.fillJSValue(*m_stream, gpr, spillFormat);
unlock(gpr);
break;
}
m_gprs.unlock(jsValueGpr);
m_gprs.unlock(tempGpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
info.killSpilled();
return fpr;
}
m_gprs.release(gpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
}
m_fprs.lock(fpr);
return fpr;
}
+
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidFPRReg;
}
-
- ASSERT_NOT_REACHED();
- return InvalidFPRReg;
}
GPRReg SpeculativeJIT::fillJSValue(NodeIndex nodeIndex)
if (node.hasConstant()) {
if (isInt32Constant(nodeIndex)) {
- info.fillJSValue(gpr, DataFormatJSInteger);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
JSValue jsValue = jsNumber(valueOfInt32Constant(nodeIndex));
m_jit.move(MacroAssembler::ImmPtr(JSValue::encode(jsValue)), gpr);
} else if (isNumberConstant(nodeIndex)) {
- info.fillJSValue(gpr, DataFormatJSDouble);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSDouble);
JSValue jsValue(JSValue::EncodeAsDouble, valueOfNumberConstant(nodeIndex));
m_jit.move(MacroAssembler::ImmPtr(JSValue::encode(jsValue)), gpr);
} else {
ASSERT(isJSConstant(nodeIndex));
JSValue jsValue = valueOfJSConstant(nodeIndex);
m_jit.move(MacroAssembler::TrustedImmPtr(JSValue::encode(jsValue)), gpr);
- info.fillJSValue(gpr, DataFormatJS);
+ info.fillJSValue(*m_stream, gpr, DataFormatJS);
}
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
} else
ASSERT(spillFormat & DataFormatJS);
}
- info.fillJSValue(gpr, spillFormat);
+ info.fillJSValue(*m_stream, gpr, spillFormat);
}
return gpr;
}
}
m_gprs.lock(gpr);
m_jit.orPtr(GPRInfo::tagTypeNumberRegister, gpr);
- info.fillJSValue(gpr, DataFormatJSInteger);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
return gpr;
}
GPRReg gpr = boxDouble(fpr);
// Update all info
- info.fillJSValue(gpr, DataFormatJSDouble);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSDouble);
m_fprs.release(fpr);
m_gprs.retain(gpr, virtualRegister, SpillOrderJS);
case DataFormatStorage:
// this type currently never occurs
ASSERT_NOT_REACHED();
+
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidGPRReg;
}
-
- ASSERT_NOT_REACHED();
- return InvalidGPRReg;
}
class ValueToNumberSlowPathGenerator
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
ASSERT(isInt32Constant(nodeIndex));
m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(nodeIndex)), gpr);
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
returnFormat = DataFormatInteger;
return gpr;
}
// If we know this was spilled as an integer we can fill without checking.
if (strict) {
m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
returnFormat = DataFormatInteger;
return gpr;
}
m_jit.orPtr(GPRInfo::tagTypeNumberRegister, gpr);
} else
m_jit.loadPtr(JITCompiler::addressFor(virtualRegister), gpr);
- info.fillJSValue(gpr, DataFormatJSInteger);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
returnFormat = DataFormatJSInteger;
return gpr;
}
m_jit.loadPtr(JITCompiler::addressFor(virtualRegister), gpr);
// Fill as JSValue, and fall through.
- info.fillJSValue(gpr, DataFormatJSInteger);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
m_gprs.unlock(gpr);
}
m_gprs.lock(gpr);
if (!isInt32Speculation(type))
speculationCheck(BadType, JSValueRegs(gpr), nodeIndex, m_jit.branchPtr(MacroAssembler::Below, gpr, GPRInfo::tagTypeNumberRegister));
- info.fillJSValue(gpr, DataFormatJSInteger);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
// If !strict we're done, return.
if (!strict) {
returnFormat = DataFormatJSInteger;
result = allocate();
else {
m_gprs.lock(gpr);
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
result = gpr;
}
m_jit.zeroExtend32ToPtr(gpr, result);
case DataFormatStorage:
ASSERT_NOT_REACHED();
+
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidGPRReg;
}
-
- ASSERT_NOT_REACHED();
- return InvalidGPRReg;
}
GPRReg SpeculativeJIT::fillSpeculateInt(NodeIndex nodeIndex, DataFormat& returnFormat)
unlock(gpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
}
if (isNumberConstant(nodeIndex)) {
unlock(gpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
}
terminateSpeculativeExecution(Uncountable, JSValueRegs(), NoNode);
FPRReg fpr = fprAllocate();
m_jit.loadDouble(JITCompiler::addressFor(virtualRegister), fpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
}
m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
- info.fillInteger(gpr);
+ info.fillInteger(*m_stream, gpr);
unlock(gpr);
break;
}
ASSERT(spillFormat & DataFormatJS);
m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
m_jit.loadPtr(JITCompiler::addressFor(virtualRegister), gpr);
- info.fillJSValue(gpr, spillFormat);
+ info.fillJSValue(*m_stream, gpr, spillFormat);
unlock(gpr);
break;
}
m_gprs.unlock(jsValueGpr);
m_gprs.unlock(tempGpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
info.killSpilled();
return fpr;
}
m_gprs.release(gpr);
m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
- info.fillDouble(fpr);
+ info.fillDouble(*m_stream, fpr);
return fpr;
}
m_fprs.lock(fpr);
return fpr;
}
+
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidFPRReg;
}
-
- ASSERT_NOT_REACHED();
- return InvalidFPRReg;
}
GPRReg SpeculativeJIT::fillSpeculateCell(NodeIndex nodeIndex)
if (jsValue.isCell()) {
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
m_jit.move(MacroAssembler::TrustedImmPtr(jsValue.asCell()), gpr);
- info.fillJSValue(gpr, DataFormatJSCell);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
return gpr;
}
terminateSpeculativeExecution(Uncountable, JSValueRegs(), NoNode);
m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
m_jit.loadPtr(JITCompiler::addressFor(virtualRegister), gpr);
- info.fillJSValue(gpr, DataFormatJS);
+ info.fillJSValue(*m_stream, gpr, DataFormatJS);
if (!isCellSpeculation(type))
speculationCheck(BadType, JSValueRegs(gpr), nodeIndex, m_jit.branchTestPtr(MacroAssembler::NonZero, gpr, GPRInfo::tagMaskRegister));
- info.fillJSValue(gpr, DataFormatJSCell);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
return gpr;
}
m_gprs.lock(gpr);
if (!isCellSpeculation(type))
speculationCheck(BadType, JSValueRegs(gpr), nodeIndex, m_jit.branchTestPtr(MacroAssembler::NonZero, gpr, GPRInfo::tagMaskRegister));
- info.fillJSValue(gpr, DataFormatJSCell);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
return gpr;
}
case DataFormatStorage:
ASSERT_NOT_REACHED();
+
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidGPRReg;
}
-
- ASSERT_NOT_REACHED();
- return InvalidGPRReg;
}
GPRReg SpeculativeJIT::fillSpeculateBoolean(NodeIndex nodeIndex)
if (jsValue.isBoolean()) {
m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
m_jit.move(MacroAssembler::TrustedImmPtr(JSValue::encode(jsValue)), gpr);
- info.fillJSValue(gpr, DataFormatJSBoolean);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
return gpr;
}
terminateSpeculativeExecution(Uncountable, JSValueRegs(), NoNode);
m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
m_jit.loadPtr(JITCompiler::addressFor(virtualRegister), gpr);
- info.fillJSValue(gpr, DataFormatJS);
+ info.fillJSValue(*m_stream, gpr, DataFormatJS);
if (!isBooleanSpeculation(type)) {
m_jit.xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
speculationCheck(BadType, JSValueRegs(gpr), nodeIndex, m_jit.branchTestPtr(MacroAssembler::NonZero, gpr, TrustedImm32(static_cast<int32_t>(~1))), SpeculationRecovery(BooleanSpeculationCheck, gpr, InvalidGPRReg));
m_jit.xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
}
- info.fillJSValue(gpr, DataFormatJSBoolean);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
return gpr;
}
speculationCheck(BadType, JSValueRegs(gpr), nodeIndex, m_jit.branchTestPtr(MacroAssembler::NonZero, gpr, TrustedImm32(static_cast<int32_t>(~1))), SpeculationRecovery(BooleanSpeculationCheck, gpr, InvalidGPRReg));
m_jit.xorPtr(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
}
- info.fillJSValue(gpr, DataFormatJSBoolean);
+ info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
return gpr;
}
case DataFormatStorage:
ASSERT_NOT_REACHED();
+
+ default:
+ ASSERT_NOT_REACHED();
+ return InvalidGPRReg;
}
-
- ASSERT_NOT_REACHED();
- return InvalidGPRReg;
}
JITCompiler::Jump SpeculativeJIT::convertToDouble(GPRReg value, FPRReg result, GPRReg tmp)
// Indicate that it's no longer necessary to retrieve the value of
// this bytecode variable from registers or other locations in the register file,
// but that it is stored as a double.
- valueSourceReferenceForOperand(node.local()) = ValueSource(DoubleInRegisterFile);
+ recordSetLocal(node.local(), ValueSource(DoubleInRegisterFile));
break;
}
SpeculateIntegerOperand value(this, node.child1());
m_jit.store32(value.gpr(), JITCompiler::payloadFor(node.local()));
noResult(m_compileIndex);
- valueSourceReferenceForOperand(node.local()) = ValueSource(Int32InRegisterFile);
+ recordSetLocal(node.local(), ValueSource(Int32InRegisterFile));
break;
}
if (isArraySpeculation(predictedType)) {
speculationCheck(BadType, JSValueRegs(cellGPR), node.child1(), m_jit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(cellGPR, JSCell::classInfoOffset()), MacroAssembler::TrustedImmPtr(&JSArray::s_info)));
m_jit.storePtr(cellGPR, JITCompiler::addressFor(node.local()));
noResult(m_compileIndex);
- valueSourceReferenceForOperand(node.local()) = ValueSource(CellInRegisterFile);
+ recordSetLocal(node.local(), ValueSource(CellInRegisterFile));
break;
}
if (isBooleanSpeculation(predictedType)) {
SpeculateBooleanOperand boolean(this, node.child1());
m_jit.storePtr(boolean.gpr(), JITCompiler::addressFor(node.local()));
noResult(m_compileIndex);
- valueSourceReferenceForOperand(node.local()) = ValueSource(BooleanInRegisterFile);
+ recordSetLocal(node.local(), ValueSource(BooleanInRegisterFile));
break;
}
}
m_jit.storePtr(value.gpr(), JITCompiler::addressFor(node.local()));
noResult(m_compileIndex);
- valueSourceReferenceForOperand(node.local()) = ValueSource(ValueInRegisterFile);
+ recordSetLocal(node.local(), ValueSource(ValueInRegisterFile));
break;
}
--- /dev/null
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef DFGValueRecoveryOverride_h
+#define DFGValueRecoveryOverride_h
+
+#include <wtf/Platform.h>
+
+#if ENABLE(DFG_JIT)
+
+#include "ValueRecovery.h"
+#include <wtf/RefCounted.h>
+
+namespace JSC { namespace DFG {
+
+class ValueRecoveryOverride : public RefCounted<ValueRecoveryOverride> {
+public:
+ ValueRecoveryOverride() { }
+
+ ValueRecoveryOverride(int operand, const ValueRecovery& recovery)
+ : operand(operand)
+ , recovery(recovery)
+ {
+ }
+
+ int operand;
+ ValueRecovery recovery;
+};
+
+} } // namespace JSC::DFG
+
+#endif // ENABLE(DFG_JIT)
+
+#endif // DFGValueRecoveryOverride_h
+
--- /dev/null
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "DFGValueSource.h"
+
+#if ENABLE(DFG_JIT)
+
+namespace JSC { namespace DFG {
+
+void ValueSource::dump(FILE* out) const
+{
+ switch (kind()) {
+ case SourceNotSet:
+ fprintf(out, "NotSet");
+ break;
+ case SourceIsDead:
+ fprintf(out, "IsDead");
+ break;
+ case ValueInRegisterFile:
+ fprintf(out, "InRegFile");
+ break;
+ case Int32InRegisterFile:
+ fprintf(out, "Int32");
+ break;
+ case CellInRegisterFile:
+ fprintf(out, "Cell");
+ break;
+ case BooleanInRegisterFile:
+ fprintf(out, "Bool");
+ break;
+ case DoubleInRegisterFile:
+ fprintf(out, "Double");
+ break;
+ case ArgumentsSource:
+ fprintf(out, "Arguments");
+ break;
+ case HaveNode:
+ fprintf(out, "Node(%d)", m_nodeIndex);
+ break;
+ }
+}
+
+} } // namespace JSC::DFG
+
+#endif // ENABLE(DFG_JIT)
+
--- /dev/null
+/*
+ * Copyright (C) 2011 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef DFGValueSource_h
+#define DFGValueSource_h
+
+#include <wtf/Platform.h>
+
+#if ENABLE(DFG_JIT)
+
+#include "DFGCommon.h"
+#include "DataFormat.h"
+#include "SpeculatedType.h"
+#include "ValueRecovery.h"
+
+namespace JSC { namespace DFG {
+
+enum ValueSourceKind {
+ SourceNotSet,
+ ValueInRegisterFile,
+ Int32InRegisterFile,
+ CellInRegisterFile,
+ BooleanInRegisterFile,
+ DoubleInRegisterFile,
+ ArgumentsSource,
+ SourceIsDead,
+ HaveNode
+};
+
+static inline ValueSourceKind dataFormatToValueSourceKind(DataFormat dataFormat)
+{
+ switch (dataFormat) {
+ case DataFormatInteger:
+ return Int32InRegisterFile;
+ case DataFormatDouble:
+ return DoubleInRegisterFile;
+ case DataFormatBoolean:
+ return BooleanInRegisterFile;
+ case DataFormatCell:
+ return CellInRegisterFile;
+ case DataFormatDead:
+ return SourceIsDead;
+ case DataFormatArguments:
+ return ArgumentsSource;
+ default:
+ ASSERT(dataFormat & DataFormatJS);
+ return ValueInRegisterFile;
+ }
+}
+
+static inline DataFormat valueSourceKindToDataFormat(ValueSourceKind kind)
+{
+ switch (kind) {
+ case ValueInRegisterFile:
+ return DataFormatJS;
+ case Int32InRegisterFile:
+ return DataFormatInteger;
+ case CellInRegisterFile:
+ return DataFormatCell;
+ case BooleanInRegisterFile:
+ return DataFormatBoolean;
+ case DoubleInRegisterFile:
+ return DataFormatDouble;
+ case ArgumentsSource:
+ return DataFormatArguments;
+ case SourceIsDead:
+ return DataFormatDead;
+ default:
+ return DataFormatNone;
+ }
+}
+
+static inline bool isInRegisterFile(ValueSourceKind kind)
+{
+ DataFormat format = valueSourceKindToDataFormat(kind);
+ return format != DataFormatNone && format < DataFormatOSRMarker;
+}
+
+// Can this value be recovered without having to look at register allocation state or
+// DFG node liveness?
+static inline bool isTriviallyRecoverable(ValueSourceKind kind)
+{
+ return valueSourceKindToDataFormat(kind) != DataFormatNone;
+}
+
+class ValueSource {
+public:
+ ValueSource()
+ : m_nodeIndex(nodeIndexFromKind(SourceNotSet))
+ {
+ }
+
+ explicit ValueSource(ValueSourceKind valueSourceKind)
+ : m_nodeIndex(nodeIndexFromKind(valueSourceKind))
+ {
+ ASSERT(kind() != SourceNotSet);
+ ASSERT(kind() != HaveNode);
+ }
+
+ explicit ValueSource(NodeIndex nodeIndex)
+ : m_nodeIndex(nodeIndex)
+ {
+ ASSERT(nodeIndex != NoNode);
+ ASSERT(kind() == HaveNode);
+ }
+
+ static ValueSource forSpeculation(SpeculatedType prediction)
+ {
+ if (isInt32Speculation(prediction))
+ return ValueSource(Int32InRegisterFile);
+ if (isArraySpeculation(prediction))
+ return ValueSource(CellInRegisterFile);
+ if (isBooleanSpeculation(prediction))
+ return ValueSource(BooleanInRegisterFile);
+ return ValueSource(ValueInRegisterFile);
+ }
+
+ static ValueSource forDataFormat(DataFormat dataFormat)
+ {
+ return ValueSource(dataFormatToValueSourceKind(dataFormat));
+ }
+
+ bool isSet() const
+ {
+ return kindFromNodeIndex(m_nodeIndex) != SourceNotSet;
+ }
+
+ ValueSourceKind kind() const
+ {
+ return kindFromNodeIndex(m_nodeIndex);
+ }
+
+ bool isInRegisterFile() const { return JSC::DFG::isInRegisterFile(kind()); }
+ bool isTriviallyRecoverable() const { return JSC::DFG::isTriviallyRecoverable(kind()); }
+
+ DataFormat dataFormat() const
+ {
+ return valueSourceKindToDataFormat(kind());
+ }
+
+ ValueRecovery valueRecovery() const
+ {
+ ASSERT(isTriviallyRecoverable());
+ switch (kind()) {
+ case ValueInRegisterFile:
+ return ValueRecovery::alreadyInRegisterFile();
+
+ case Int32InRegisterFile:
+ return ValueRecovery::alreadyInRegisterFileAsUnboxedInt32();
+
+ case CellInRegisterFile:
+ return ValueRecovery::alreadyInRegisterFileAsUnboxedCell();
+
+ case BooleanInRegisterFile:
+ return ValueRecovery::alreadyInRegisterFileAsUnboxedBoolean();
+
+ case DoubleInRegisterFile:
+ return ValueRecovery::alreadyInRegisterFileAsUnboxedDouble();
+
+ case SourceIsDead:
+ return ValueRecovery::constant(jsUndefined());
+
+ case ArgumentsSource:
+ return ValueRecovery::argumentsThatWereNotCreated();
+
+ default:
+ ASSERT_NOT_REACHED();
+ return ValueRecovery();
+ }
+ }
+
+ NodeIndex nodeIndex() const
+ {
+ ASSERT(kind() == HaveNode);
+ return m_nodeIndex;
+ }
+
+ void dump(FILE* out) const;
+
+private:
+ static NodeIndex nodeIndexFromKind(ValueSourceKind kind)
+ {
+ ASSERT(kind >= SourceNotSet && kind < HaveNode);
+ return NoNode - kind;
+ }
+
+ static ValueSourceKind kindFromNodeIndex(NodeIndex nodeIndex)
+ {
+ unsigned kind = static_cast<unsigned>(NoNode - nodeIndex);
+ if (kind >= static_cast<unsigned>(HaveNode))
+ return HaveNode;
+ return static_cast<ValueSourceKind>(kind);
+ }
+
+ NodeIndex m_nodeIndex;
+};
+
+} } // namespace JSC::DFG
+
+#endif // ENABLE(DFG_JIT)
+
+#endif // DFGValueSource_h
+
--- /dev/null
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "DFGVariableEvent.h"
+
+#if ENABLE(DFG_JIT)
+
+#include "DFGFPRInfo.h"
+#include "DFGGPRInfo.h"
+
+namespace JSC { namespace DFG {
+
+void VariableEvent::dump(FILE* out) const
+{
+ switch (kind()) {
+ case Reset:
+ fprintf(out, "Reset");
+ break;
+ case BirthToFill:
+ dumpFillInfo("BirthToFill", out);
+ break;
+ case BirthToSpill:
+ dumpSpillInfo("BirthToSpill", out);
+ break;
+ case Fill:
+ dumpFillInfo("Fill", out);
+ break;
+ case Spill:
+ dumpSpillInfo("Spill", out);
+ break;
+ case Death:
+ fprintf(out, "Death(@%u)", nodeIndex());
+ break;
+ case MovHint:
+ fprintf(out, "MovHint(@%u, r%d)", nodeIndex(), operand());
+ break;
+ case SetLocalEvent:
+ fprintf(out, "SetLocal(r%d, %s)", operand(), dataFormatToString(dataFormat()));
+ break;
+ default:
+ ASSERT_NOT_REACHED();
+ break;
+ }
+}
+
+// Dumps a fill-style event (BirthToFill/Fill): the node index, the
+// register(s) holding the value, and the data format.
+void VariableEvent::dumpFillInfo(const char* name, FILE* out) const
+{
+ fprintf(out, "%s(@%u, ", name, nodeIndex());
+ if (dataFormat() == DataFormatDouble)
+ fprintf(out, "%s", FPRInfo::debugName(fpr()));
+#if USE(JSVALUE32_64)
+ else if (dataFormat() & DataFormatJS)
+ // On 32-bit, a boxed JS value occupies a tag/payload register pair.
+ fprintf(out, "%s:%s", GPRInfo::debugName(tagGPR()), GPRInfo::debugName(payloadGPR()));
+#endif
+ else
+ fprintf(out, "%s", GPRInfo::debugName(gpr()));
+ fprintf(out, ", %s)", dataFormatToString(dataFormat()));
+}
+
+// Dumps a spill-style event (BirthToSpill/Spill): the node index, the spill
+// slot's virtual register, and the data format.
+void VariableEvent::dumpSpillInfo(const char* name, FILE* out) const
+{
+ fprintf(out, "%s(@%u, r%d, %s)", name, nodeIndex(), virtualRegister(), dataFormatToString(dataFormat()));
+}
+
+} } // namespace JSC::DFG
+
+#endif // ENABLE(DFG_JIT)
+
--- /dev/null
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef DFGVariableEvent_h
+#define DFGVariableEvent_h
+
+#include <wtf/Platform.h>
+
+#if ENABLE(DFG_JIT)
+
+#include "DFGCommon.h"
+#include "DataFormat.h"
+#include "MacroAssembler.h"
+#include <stdio.h>
+
+namespace JSC { namespace DFG {
+
+// The kinds of entries that can appear in a VariableEventStream. The stream
+// is a replay log of the compiler's representation decisions; interpreting
+// the events from the most recent Reset reconstructs the state needed for
+// OSR exit value recoveries.
+enum VariableEventKind {
+ // Marks the beginning of a checkpoint. If you interpret the variable
+ // events starting at a Reset point then you'll get everything you need.
+ Reset,
+
+ // Node births. Points in the code where a node becomes relevant for OSR.
+ // It may be the point where it is actually born (i.e. assigned) or it may
+ // be a later point, if it's only later in the sequence of instructions
+ // that we start to care about this node.
+ BirthToFill,
+ BirthToSpill,
+
+ // Events related to how a node is represented.
+ Fill,
+ Spill,
+
+ // Death of a node - after this we no longer care about this node.
+ Death,
+
+ // A MovHint means that a node is being associated with a bytecode operand,
+ // but that it has not been stored into that operand.
+ MovHint,
+
+ // A SetLocalEvent means that a node's value has actually been stored into the
+ // bytecode operand that it's associated with.
+ SetLocalEvent,
+
+ // Used to indicate an uninitialized VariableEvent. Don't use for other
+ // purposes.
+ InvalidEventKind
+};
+
+// Where a value lives: a GPR, an FPR, a tag/payload GPR pair (32-bit only),
+// or a virtual register number (spill slot or bytecode operand). Which
+// member is active is implied by the owning event's kind and data format.
+union VariableRepresentation {
+ MacroAssembler::RegisterID gpr;
+ MacroAssembler::FPRegisterID fpr;
+#if USE(JSVALUE32_64)
+ struct {
+ MacroAssembler::RegisterID tagGPR;
+ MacroAssembler::RegisterID payloadGPR;
+ } pair;
+#endif
+ int32_t virtualReg;
+};
+
+// A single entry in a VariableEventStream. Events are compact records of the
+// compiler's representation decisions; which fields are meaningful depends on
+// the kind (see the comment on |u| below). Instances are built through the
+// static factory methods and are not mutated afterwards.
+class VariableEvent {
+public:
+ // Constructs an invalid event; only valid as a placeholder.
+ VariableEvent()
+ : m_kind(InvalidEventKind)
+ {
+ }
+
+ // Marks a checkpoint: replaying events from here yields complete state.
+ static VariableEvent reset()
+ {
+ VariableEvent event;
+ event.m_kind = Reset;
+ return event;
+ }
+
+ // Records that |nodeIndex| now lives in |gpr| with the given (non-double,
+ // and on 32-bit non-boxed) format.
+ static VariableEvent fillGPR(VariableEventKind kind, NodeIndex nodeIndex, MacroAssembler::RegisterID gpr, DataFormat dataFormat)
+ {
+ ASSERT(kind == BirthToFill || kind == Fill);
+ ASSERT(dataFormat != DataFormatDouble);
+#if USE(JSVALUE32_64)
+ ASSERT(!(dataFormat & DataFormatJS));
+#endif
+ VariableEvent event;
+ event.m_index = nodeIndex;
+ event.u.gpr = gpr;
+ event.m_kind = kind;
+ event.m_dataFormat = dataFormat;
+ return event;
+ }
+
+#if USE(JSVALUE32_64)
+ // Records that |nodeIndex| now lives in a boxed tag/payload register pair.
+ static VariableEvent fillPair(VariableEventKind kind, NodeIndex nodeIndex, MacroAssembler::RegisterID tagGPR, MacroAssembler::RegisterID payloadGPR)
+ {
+ ASSERT(kind == BirthToFill || kind == Fill);
+ VariableEvent event;
+ event.m_index = nodeIndex;
+ event.u.pair.tagGPR = tagGPR;
+ event.u.pair.payloadGPR = payloadGPR;
+ event.m_kind = kind;
+ event.m_dataFormat = DataFormatJS;
+ return event;
+ }
+#endif // USE(JSVALUE32_64)
+
+ // Records that |nodeIndex| now lives in |fpr| as a double.
+ static VariableEvent fillFPR(VariableEventKind kind, NodeIndex nodeIndex, MacroAssembler::FPRegisterID fpr)
+ {
+ ASSERT(kind == BirthToFill || kind == Fill);
+ VariableEvent event;
+ event.m_index = nodeIndex;
+ event.u.fpr = fpr;
+ event.m_kind = kind;
+ event.m_dataFormat = DataFormatDouble;
+ return event;
+ }
+
+ // Records that |nodeIndex| has been spilled to |virtualRegister| in the
+ // given format.
+ static VariableEvent spill(VariableEventKind kind, NodeIndex nodeIndex, VirtualRegister virtualRegister, DataFormat format)
+ {
+ ASSERT(kind == BirthToSpill || kind == Spill);
+ VariableEvent event;
+ event.m_index = nodeIndex;
+ event.u.virtualReg = virtualRegister;
+ event.m_kind = kind;
+ event.m_dataFormat = format;
+ return event;
+ }
+
+ // Records that |nodeIndex| is no longer relevant for OSR.
+ static VariableEvent death(NodeIndex nodeIndex)
+ {
+ VariableEvent event;
+ event.m_index = nodeIndex;
+ event.m_kind = Death;
+ return event;
+ }
+
+ // Records that a value in the given format has been stored into bytecode
+ // |operand|. Note that no node index is recorded for this kind.
+ static VariableEvent setLocal(int operand, DataFormat format)
+ {
+ VariableEvent event;
+ event.u.virtualReg = operand;
+ event.m_kind = SetLocalEvent;
+ event.m_dataFormat = format;
+ return event;
+ }
+
+ // Records that |nodeIndex| is associated with bytecode |operand| without
+ // having been stored into it yet.
+ static VariableEvent movHint(NodeIndex nodeIndex, int operand)
+ {
+ VariableEvent event;
+ event.m_index = nodeIndex;
+ event.u.virtualReg = operand;
+ event.m_kind = MovHint;
+ return event;
+ }
+
+ VariableEventKind kind() const
+ {
+ return static_cast<VariableEventKind>(m_kind);
+ }
+
+ // Valid for all kinds that track a node (i.e. not Reset/SetLocalEvent).
+ NodeIndex nodeIndex() const
+ {
+ ASSERT(m_kind == BirthToFill || m_kind == Fill
+ || m_kind == BirthToSpill || m_kind == Spill
+ || m_kind == Death || m_kind == MovHint);
+ return m_index;
+ }
+
+ // Valid for fill, spill, and SetLocal events.
+ DataFormat dataFormat() const
+ {
+ ASSERT(m_kind == BirthToFill || m_kind == Fill
+ || m_kind == BirthToSpill || m_kind == Spill
+ || m_kind == SetLocalEvent);
+ return static_cast<DataFormat>(m_dataFormat);
+ }
+
+ // Valid for fill events whose format is neither double nor (32-bit) boxed.
+ MacroAssembler::RegisterID gpr() const
+ {
+ ASSERT(m_kind == BirthToFill || m_kind == Fill);
+ ASSERT(m_dataFormat);
+ ASSERT(m_dataFormat != DataFormatDouble);
+#if USE(JSVALUE32_64)
+ ASSERT(!(m_dataFormat & DataFormatJS));
+#endif
+ return u.gpr;
+ }
+
+#if USE(JSVALUE32_64)
+ // Valid for fill events with a boxed (DataFormatJS*) format.
+ MacroAssembler::RegisterID tagGPR() const
+ {
+ ASSERT(m_kind == BirthToFill || m_kind == Fill);
+ ASSERT(m_dataFormat & DataFormatJS);
+ return u.pair.tagGPR;
+ }
+ MacroAssembler::RegisterID payloadGPR() const
+ {
+ ASSERT(m_kind == BirthToFill || m_kind == Fill);
+ ASSERT(m_dataFormat & DataFormatJS);
+ return u.pair.payloadGPR;
+ }
+#endif // USE(JSVALUE32_64)
+
+ // Valid for fill events with DataFormatDouble.
+ MacroAssembler::FPRegisterID fpr() const
+ {
+ ASSERT(m_kind == BirthToFill || m_kind == Fill);
+ ASSERT(m_dataFormat == DataFormatDouble);
+ return u.fpr;
+ }
+
+ // Valid for spill events: the spill slot's virtual register.
+ VirtualRegister virtualRegister() const
+ {
+ ASSERT(m_kind == BirthToSpill || m_kind == Spill);
+ return static_cast<VirtualRegister>(u.virtualReg);
+ }
+
+ // Valid for MovHint/SetLocal events: the bytecode operand.
+ int operand() const
+ {
+ ASSERT(m_kind == SetLocalEvent || m_kind == MovHint);
+ return u.virtualReg;
+ }
+
+ // Unchecked access to the raw union; callers must know which member is
+ // active from the kind/format (see MinifiedGenerationInfo::update()).
+ const VariableRepresentation& variableRepresentation() const { return u; }
+
+ void dump(FILE*) const;
+
+private:
+ void dumpFillInfo(const char* name, FILE*) const;
+ void dumpSpillInfo(const char* name, FILE*) const;
+
+ NodeIndex m_index;
+
+ // For BirthToFill, Fill:
+ // - The GPR or FPR, or a GPR pair.
+ // For BirthToSpill, Spill:
+ // - The virtual register.
+ // For MovHint, SetLocalEvent:
+ // - The bytecode operand.
+ // For Death:
+ // - Unused.
+ VariableRepresentation u;
+
+ // Stored as int8_t to keep the event small; see kind()/dataFormat() for
+ // the typed views.
+ int8_t m_kind;
+ int8_t m_dataFormat;
+};
+
+} } // namespace JSC::DFG
+
+#endif // ENABLE(DFG_JIT)
+
+#endif // DFGVariableEvent_h
+
--- /dev/null
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "DFGVariableEventStream.h"
+
+#if ENABLE(DFG_JIT)
+
+#include "CodeBlock.h"
+#include "DFGValueSource.h"
+#include <wtf/DataLog.h>
+
+namespace JSC { namespace DFG {
+
+// Logs |event| prefixed with its sequence number (its index in this stream).
+// Reached via appendAndLog() when DFG_ENABLE(DEBUG_VERBOSE) is set.
+void VariableEventStream::logEvent(const VariableEvent& event)
+{
+ dataLog("seq#%u:", static_cast<unsigned>(size()));
+ event.dump(WTF::dataFile());
+ dataLog(" ");
+}
+
+// A cut-down stand-in for the DFG's generation info: tracks, for one node,
+// whether its value is currently in register(s) ("filled") or spilled, plus
+// where it is and in what format. format == DataFormatNone means the node is
+// not (or no longer) live. Rebuilt by replaying a slice of the event stream.
+struct MinifiedGenerationInfo {
+ bool filled; // true -> in gpr/fpr/pair, false -> spilled
+ VariableRepresentation u;
+ DataFormat format;
+
+ MinifiedGenerationInfo()
+ : format(DataFormatNone)
+ {
+ }
+
+ // Applies one event to this info. Fill/spill events record the new
+ // location and format; Death marks the node dead; other kinds are ignored.
+ void update(const VariableEvent& event)
+ {
+ switch (event.kind()) {
+ case BirthToFill:
+ case Fill:
+ filled = true;
+ break;
+ case BirthToSpill:
+ case Spill:
+ filled = false;
+ break;
+ case Death:
+ format = DataFormatNone;
+ return;
+ default:
+ return;
+ }
+
+ u = event.variableRepresentation();
+ format = event.dataFormat();
+ }
+};
+
+// Lazily computes the ValueRecovery for every argument and local at the OSR
+// exit identified by |index| (a position in this event stream), by replaying
+// the events from the preceding Reset checkpoint. This replaces the old
+// scheme of eagerly computing recoveries at every speculation check.
+void VariableEventStream::reconstruct(
+ CodeBlock* codeBlock, CodeOrigin codeOrigin, MinifiedGraph& graph,
+ unsigned index, Operands<ValueRecovery>& valueRecoveries) const
+{
+ ASSERT(codeBlock->getJITType() == JITCode::DFGJIT);
+ CodeBlock* baselineCodeBlock = codeBlock->baselineVersion();
+
+ // The number of baseline locals we must recover, offset into the inline
+ // frame's register window when the exit origin is inlined.
+ unsigned numVariables;
+ if (codeOrigin.inlineCallFrame)
+ numVariables = baselineCodeBlockForInlineCallFrame(codeOrigin.inlineCallFrame)->m_numCalleeRegisters + codeOrigin.inlineCallFrame->stackOffset;
+ else
+ numVariables = baselineCodeBlock->m_numCalleeRegisters;
+
+ // Crazy special case: if we're at index == 0 then this must be an argument check
+ // failure, in which case all variables are already set up. The recoveries should
+ // reflect this.
+ if (!index) {
+ valueRecoveries = Operands<ValueRecovery>(codeBlock->numParameters(), numVariables);
+ for (size_t i = 0; i < valueRecoveries.size(); ++i)
+ valueRecoveries[i] = ValueRecovery::alreadyInRegisterFile();
+ return;
+ }
+
+ // Step 1: Find the last checkpoint, and figure out the number of virtual registers as we go.
+ unsigned startIndex = index - 1;
+ while (at(startIndex).kind() != Reset)
+ startIndex--;
+
+ // Step 2: Create a mock-up of the DFG's state and execute the events.
+ Operands<ValueSource> operandSources(codeBlock->numParameters(), numVariables);
+ Vector<MinifiedGenerationInfo, 32> generationInfos(graph.originalGraphSize());
+ for (unsigned i = startIndex; i < index; ++i) {
+ const VariableEvent& event = at(i);
+ switch (event.kind()) {
+ case Reset:
+ // nothing to do.
+ break;
+ case BirthToFill:
+ case BirthToSpill:
+ case Fill:
+ case Spill:
+ case Death:
+ generationInfos[event.nodeIndex()].update(event);
+ break;
+ case MovHint:
+ // Operands outside our window (e.g. an inlinee's — TODO confirm)
+ // are ignored.
+ if (operandSources.hasOperand(event.operand()))
+ operandSources.setOperand(event.operand(), ValueSource(event.nodeIndex()));
+ break;
+ case SetLocalEvent:
+ if (operandSources.hasOperand(event.operand()))
+ operandSources.setOperand(event.operand(), ValueSource::forDataFormat(event.dataFormat()));
+ break;
+ default:
+ ASSERT_NOT_REACHED();
+ break;
+ }
+ }
+
+ // Step 3: Record the things that are live, so we can get to them more quickly.
+ Vector<unsigned, 16> indicesOfLiveThings;
+ for (unsigned i = 0; i < generationInfos.size(); ++i) {
+ if (generationInfos[i].format != DataFormatNone)
+ indicesOfLiveThings.append(i);
+ }
+
+ // Step 4: Compute value recoveries!
+ valueRecoveries = Operands<ValueRecovery>(codeBlock->numParameters(), numVariables);
+ for (unsigned i = 0; i < operandSources.size(); ++i) {
+ ValueSource& source = operandSources[i];
+ // Sources set by SetLocalEvent recover directly from the register file.
+ if (source.isTriviallyRecoverable()) {
+ valueRecoveries[i] = source.valueRecovery();
+ continue;
+ }
+
+ ASSERT(source.kind() == HaveNode);
+ // Constants and phantom arguments recover without any live state.
+ MinifiedNode* node = graph.at(source.nodeIndex());
+ if (node) {
+ if (node->hasConstantNumber()) {
+ valueRecoveries[i] = ValueRecovery::constant(
+ codeBlock->constantRegister(
+ FirstConstantRegisterIndex + node->constantNumber()).get());
+ continue;
+ }
+ if (node->hasWeakConstant()) {
+ valueRecoveries[i] = ValueRecovery::constant(node->weakConstant());
+ continue;
+ }
+ if (node->op() == PhantomArguments) {
+ valueRecoveries[i] = ValueRecovery::argumentsThatWereNotCreated();
+ continue;
+ }
+ }
+
+ MinifiedGenerationInfo* info = &generationInfos[source.nodeIndex()];
+ if (info->format == DataFormatNone) {
+ // Try to see if there is an alternate node that would contain the value we want.
+ // There are four possibilities:
+ //
+ // Int32ToDouble: We can use this in place of the original node, but
+ // we'd rather not; so we use it only if it is the only remaining
+ // live version.
+ //
+ // ValueToInt32: If the only remaining live version of the value is
+ // ValueToInt32, then we can use it.
+ //
+ // UInt32ToNumber: If the only live version of the value is a UInt32ToNumber
+ // then the only remaining uses are ones that want a properly formed number
+ // rather than a UInt32 intermediate.
+ //
+ // DoubleAsInt32: Same as UInt32ToNumber.
+ //
+ // The reverse of the above: This node could be a UInt32ToNumber, but its
+ // alternative is still alive. This means that the only remaining uses of
+ // the number would be fine with a UInt32 intermediate.
+
+ bool found = false;
+
+ if (node && node->op() == UInt32ToNumber) {
+ NodeIndex nodeIndex = node->child1();
+ node = graph.at(nodeIndex);
+ info = &generationInfos[nodeIndex];
+ if (info->format != DataFormatNone)
+ found = true;
+ }
+
+ if (!found) {
+ NodeIndex int32ToDoubleIndex = NoNode;
+ NodeIndex valueToInt32Index = NoNode;
+ NodeIndex uint32ToNumberIndex = NoNode;
+ NodeIndex doubleAsInt32Index = NoNode;
+
+ // Note: this inner |i| shadows the outer operand index; it only
+ // walks the live-node list built in Step 3.
+ for (unsigned i = 0; i < indicesOfLiveThings.size(); ++i) {
+ NodeIndex nodeIndex = indicesOfLiveThings[i];
+ node = graph.at(nodeIndex);
+ if (!node)
+ continue;
+ if (!node->hasChild1())
+ continue;
+ if (node->child1() != source.nodeIndex())
+ continue;
+ ASSERT(generationInfos[nodeIndex].format != DataFormatNone);
+ switch (node->op()) {
+ case Int32ToDouble:
+ int32ToDoubleIndex = nodeIndex;
+ break;
+ case ValueToInt32:
+ valueToInt32Index = nodeIndex;
+ break;
+ case UInt32ToNumber:
+ uint32ToNumberIndex = nodeIndex;
+ break;
+ case DoubleAsInt32:
+ doubleAsInt32Index = nodeIndex;
+ break;
+ default:
+ break;
+ }
+ }
+
+ // Preference order among the candidates found above.
+ NodeIndex nodeIndexToUse;
+ if (doubleAsInt32Index != NoNode)
+ nodeIndexToUse = doubleAsInt32Index;
+ else if (int32ToDoubleIndex != NoNode)
+ nodeIndexToUse = int32ToDoubleIndex;
+ else if (valueToInt32Index != NoNode)
+ nodeIndexToUse = valueToInt32Index;
+ else if (uint32ToNumberIndex != NoNode)
+ nodeIndexToUse = uint32ToNumberIndex;
+ else
+ nodeIndexToUse = NoNode;
+
+ if (nodeIndexToUse != NoNode) {
+ info = &generationInfos[nodeIndexToUse];
+ ASSERT(info->format != DataFormatNone);
+ found = true;
+ }
+ }
+
+ // Neither the node nor any alternate form of it is live, so the
+ // value is unrecoverable; report undefined.
+ if (!found) {
+ valueRecoveries[i] = ValueRecovery::constant(jsUndefined());
+ continue;
+ }
+ }
+
+ ASSERT(info->format != DataFormatNone);
+
+ // The value is live: recover from the register(s) or the spill slot.
+ if (info->filled) {
+ if (info->format == DataFormatDouble) {
+ valueRecoveries[i] = ValueRecovery::inFPR(info->u.fpr);
+ continue;
+ }
+#if USE(JSVALUE32_64)
+ if (info->format & DataFormatJS) {
+ valueRecoveries[i] = ValueRecovery::inPair(info->u.pair.tagGPR, info->u.pair.payloadGPR);
+ continue;
+ }
+#endif
+ valueRecoveries[i] = ValueRecovery::inGPR(info->u.gpr, info->format);
+ continue;
+ }
+
+ valueRecoveries[i] =
+ ValueRecovery::displacedInRegisterFile(static_cast<VirtualRegister>(info->u.virtualReg), info->format);
+ }
+}
+
+} } // namespace JSC::DFG
+
+#endif // ENABLE(DFG_JIT)
+
--- /dev/null
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef DFGVariableEventStream_h
+#define DFGVariableEventStream_h
+
+#include <wtf/Platform.h>
+
+#if ENABLE(DFG_JIT)
+
+#include "DFGCommon.h"
+#include "DFGMinifiedGraph.h"
+#include "DFGVariableEvent.h"
+#include "Operands.h"
+#include <wtf/Vector.h>
+
+namespace JSC { namespace DFG {
+
+// An append-only log of the events (births, fills, spills, deaths, MovHints,
+// SetLocals) describing how the DFG represented values over time.
+// reconstruct() replays a slice of this stream to compute OSR exit value
+// recoveries lazily, instead of computing them at every speculation check.
+class VariableEventStream : public Vector<VariableEvent> {
+public:
+ void appendAndLog(const VariableEvent& event)
+ {
+#if DFG_ENABLE(DEBUG_VERBOSE)
+ logEvent(event);
+#endif
+ append(event);
+ }
+
+ // Fills |valueRecoveries| with the recovery for each argument/local at
+ // stream position |index|; see DFGVariableEventStream.cpp for details.
+ void reconstruct(
+ CodeBlock*, CodeOrigin, MinifiedGraph&,
+ unsigned index, Operands<ValueRecovery>&) const;
+
+private:
+ void logEvent(const VariableEvent&);
+};
+
+} } // namespace JSC::DFG
+
+#endif // ENABLE(DFG_JIT)
+
+#endif // DFGVariableEventStream_h
+
 bool performVirtualRegisterAllocation(Graph& graph)
 {
+ // Scoped profiling marker; a no-op unless ENABLE(SAMPLING_REGIONS) is set.
+ SamplingRegion samplingRegion("DFG Virtual Register Allocation Phase");
 return runPhase<VirtualRegisterAllocationPhase>(graph);
 }