FTL B3 should be able to make JS->JS calls
author: fpizlo@apple.com <fpizlo@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Mon, 7 Dec 2015 19:17:56 +0000 (19:17 +0000)
committer: fpizlo@apple.com <fpizlo@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Mon, 7 Dec 2015 19:17:56 +0000 (19:17 +0000)
https://bugs.webkit.org/show_bug.cgi?id=151901

Reviewed by Saam Barati.

This adds support for the Call and InvalidationPoint opcodes in DFG IR. This required doing some
clean-up in the OSR exit code. We don't want the B3 FTL to use a bunch of vectors to hold
side-state, so the use of OSRExitDescriptorImpl is not right. It makes sense in the LLVM FTL
because that code needs some way of saving some state from LowerDFGToLLVM to compile(), but
that's not how B3 FTL works. It turns out that for B3 FTL, there isn't anything in
OSRExitDescriptorImpl that the code in LowerDFGToLLVM can't just capture in a lambda.

This also simplifies some stackmap-related APIs, since I got tired of writing boilerplate.

* CMakeLists.txt:
* JavaScriptCore.xcodeproj/project.pbxproj:
* assembler/AbstractMacroAssembler.h:
(JSC::AbstractMacroAssembler::replaceWithAddressComputation):
(JSC::AbstractMacroAssembler::addLinkTask):
* b3/B3CheckSpecial.cpp:
(JSC::B3::CheckSpecial::generate):
* b3/B3Effects.h:
* b3/B3PatchpointSpecial.cpp:
(JSC::B3::PatchpointSpecial::generate):
* b3/B3Procedure.cpp:
(JSC::B3::Procedure::addDataSection):
(JSC::B3::Procedure::callArgAreaSize):
(JSC::B3::Procedure::requestCallArgAreaSize):
(JSC::B3::Procedure::frameSize):
* b3/B3Procedure.h:
(JSC::B3::Procedure::releaseByproducts):
(JSC::B3::Procedure::code):
* b3/B3StackmapGenerationParams.cpp: Added.
(JSC::B3::StackmapGenerationParams::usedRegisters):
(JSC::B3::StackmapGenerationParams::proc):
(JSC::B3::StackmapGenerationParams::StackmapGenerationParams):
* b3/B3StackmapGenerationParams.h: Added.
(JSC::B3::StackmapGenerationParams::value):
(JSC::B3::StackmapGenerationParams::reps):
(JSC::B3::StackmapGenerationParams::size):
(JSC::B3::StackmapGenerationParams::at):
(JSC::B3::StackmapGenerationParams::operator[]):
(JSC::B3::StackmapGenerationParams::begin):
(JSC::B3::StackmapGenerationParams::end):
(JSC::B3::StackmapGenerationParams::context):
(JSC::B3::StackmapGenerationParams::addLatePath):
* b3/B3StackmapValue.h:
* b3/B3ValueRep.h:
(JSC::B3::ValueRep::doubleValue):
(JSC::B3::ValueRep::withOffset):
* b3/air/AirGenerationContext.h:
* b3/testb3.cpp:
(JSC::B3::testSimplePatchpoint):
(JSC::B3::testSimplePatchpointWithoutOuputClobbersGPArgs):
(JSC::B3::testSimplePatchpointWithOuputClobbersGPArgs):
(JSC::B3::testSimplePatchpointWithoutOuputClobbersFPArgs):
(JSC::B3::testSimplePatchpointWithOuputClobbersFPArgs):
(JSC::B3::testPatchpointWithEarlyClobber):
(JSC::B3::testPatchpointCallArg):
(JSC::B3::testPatchpointFixedRegister):
(JSC::B3::testPatchpointAny):
(JSC::B3::testPatchpointLotsOfLateAnys):
(JSC::B3::testPatchpointAnyImm):
(JSC::B3::testPatchpointManyImms):
(JSC::B3::testPatchpointWithRegisterResult):
(JSC::B3::testPatchpointWithStackArgumentResult):
(JSC::B3::testPatchpointWithAnyResult):
(JSC::B3::testSimpleCheck):
(JSC::B3::testCheckLessThan):
(JSC::B3::testCheckMegaCombo):
(JSC::B3::testCheckAddImm):
(JSC::B3::testCheckAddImmCommute):
(JSC::B3::testCheckAddImmSomeRegister):
(JSC::B3::testCheckAdd):
(JSC::B3::testCheckAdd64):
(JSC::B3::testCheckSubImm):
(JSC::B3::testCheckSubBadImm):
(JSC::B3::testCheckSub):
(JSC::B3::testCheckSub64):
(JSC::B3::testCheckNeg):
(JSC::B3::testCheckNeg64):
(JSC::B3::testCheckMul):
(JSC::B3::testCheckMulMemory):
(JSC::B3::testCheckMul2):
(JSC::B3::testCheckMul64):
(JSC::B3::genericTestCompare):
* ftl/FTLExceptionHandlerManager.cpp:
* ftl/FTLExceptionHandlerManager.h:
* ftl/FTLJSCall.cpp:
* ftl/FTLJSCall.h:
* ftl/FTLJSCallBase.cpp:
(JSC::FTL::JSCallBase::emit):
* ftl/FTLJSCallBase.h:
* ftl/FTLJSCallVarargs.cpp:
* ftl/FTLJSCallVarargs.h:
* ftl/FTLJSTailCall.cpp:
(JSC::FTL::DFG::getRegisterWithAddend):
(JSC::FTL::JSTailCall::emit):
(JSC::FTL::JSTailCall::JSTailCall): Deleted.
* ftl/FTLJSTailCall.h:
(JSC::FTL::JSTailCall::stackmapID):
(JSC::FTL::JSTailCall::estimatedSize):
(JSC::FTL::JSTailCall::operator<):
(JSC::FTL::JSTailCall::patchpoint): Deleted.
* ftl/FTLLowerDFGToLLVM.cpp:
(JSC::FTL::DFG::LowerDFGToLLVM::compileCallOrConstruct):
(JSC::FTL::DFG::LowerDFGToLLVM::compileInvalidationPoint):
(JSC::FTL::DFG::LowerDFGToLLVM::lazySlowPath):
(JSC::FTL::DFG::LowerDFGToLLVM::callCheck):
(JSC::FTL::DFG::LowerDFGToLLVM::appendOSRExitArgumentsForPatchpointIfWillCatchException):
(JSC::FTL::DFG::LowerDFGToLLVM::emitBranchToOSRExitIfWillCatchException):
(JSC::FTL::DFG::LowerDFGToLLVM::lowBlock):
(JSC::FTL::DFG::LowerDFGToLLVM::appendOSRExitDescriptor):
(JSC::FTL::DFG::LowerDFGToLLVM::appendOSRExit):
(JSC::FTL::DFG::LowerDFGToLLVM::blessSpeculation):
(JSC::FTL::DFG::LowerDFGToLLVM::emitOSRExitCall):
(JSC::FTL::DFG::LowerDFGToLLVM::buildExitArguments):
(JSC::FTL::DFG::LowerDFGToLLVM::exitValueForNode):
* ftl/FTLOSRExit.cpp:
(JSC::FTL::OSRExitDescriptor::OSRExitDescriptor):
(JSC::FTL::OSRExitDescriptor::emitOSRExit):
(JSC::FTL::OSRExitDescriptor::emitOSRExitLater):
(JSC::FTL::OSRExitDescriptor::prepareOSRExitHandle):
(JSC::FTL::OSRExit::OSRExit):
(JSC::FTL::OSRExit::codeLocationForRepatch):
(JSC::FTL::OSRExit::recoverRegistersFromSpillSlot):
(JSC::FTL::OSRExit::willArriveAtExitFromIndirectExceptionCheck):
(JSC::FTL::OSRExit::needsRegisterRecoveryOnGenericUnwindOSRExitPath):
* ftl/FTLOSRExit.h:
(JSC::FTL::OSRExitDescriptorImpl::OSRExitDescriptorImpl):
(JSC::FTL::OSRExit::considerAddingAsFrequentExitSite):
* ftl/FTLOSRExitCompiler.cpp:
(JSC::FTL::compileStub):
(JSC::FTL::compileFTLOSRExit):
* ftl/FTLState.h:

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@193640 268f45cc-cd09-0410-ab3c-d52691b4dbfc

30 files changed:
Source/JavaScriptCore/CMakeLists.txt
Source/JavaScriptCore/ChangeLog
Source/JavaScriptCore/JavaScriptCore.xcodeproj/project.pbxproj
Source/JavaScriptCore/assembler/AbstractMacroAssembler.h
Source/JavaScriptCore/b3/B3CheckSpecial.cpp
Source/JavaScriptCore/b3/B3Effects.h
Source/JavaScriptCore/b3/B3PatchpointSpecial.cpp
Source/JavaScriptCore/b3/B3Procedure.cpp
Source/JavaScriptCore/b3/B3Procedure.h
Source/JavaScriptCore/b3/B3StackmapGenerationParams.cpp [new file with mode: 0644]
Source/JavaScriptCore/b3/B3StackmapGenerationParams.h [new file with mode: 0644]
Source/JavaScriptCore/b3/B3StackmapValue.h
Source/JavaScriptCore/b3/B3ValueRep.h
Source/JavaScriptCore/b3/air/AirGenerationContext.h
Source/JavaScriptCore/b3/testb3.cpp
Source/JavaScriptCore/ftl/FTLExceptionHandlerManager.cpp
Source/JavaScriptCore/ftl/FTLExceptionHandlerManager.h
Source/JavaScriptCore/ftl/FTLJSCall.cpp
Source/JavaScriptCore/ftl/FTLJSCall.h
Source/JavaScriptCore/ftl/FTLJSCallBase.cpp
Source/JavaScriptCore/ftl/FTLJSCallBase.h
Source/JavaScriptCore/ftl/FTLJSCallVarargs.cpp
Source/JavaScriptCore/ftl/FTLJSCallVarargs.h
Source/JavaScriptCore/ftl/FTLJSTailCall.cpp
Source/JavaScriptCore/ftl/FTLJSTailCall.h
Source/JavaScriptCore/ftl/FTLLowerDFGToLLVM.cpp
Source/JavaScriptCore/ftl/FTLOSRExit.cpp
Source/JavaScriptCore/ftl/FTLOSRExit.h
Source/JavaScriptCore/ftl/FTLOSRExitCompiler.cpp
Source/JavaScriptCore/ftl/FTLState.h

index da6b8f3..cea6665 100644 (file)
@@ -125,6 +125,7 @@ set(JavaScriptCore_SOURCES
     b3/B3PhiChildren.cpp
     b3/B3Procedure.cpp
     b3/B3ReduceStrength.cpp
+    b3/B3StackmapGenerationParams.cpp
     b3/B3StackmapSpecial.cpp
     b3/B3StackmapValue.cpp
     b3/B3StackSlotKind.cpp
index 0e7259e..69c1bd7 100644 (file)
@@ -1,3 +1,141 @@
+2015-12-06  Filip Pizlo  <fpizlo@apple.com>
+
+        FTL B3 should be able to make JS->JS calls
+        https://bugs.webkit.org/show_bug.cgi?id=151901
+
+        Reviewed by Saam Barati.
+
+        This adds support for the Call and InvalidationPoint opcodes in DFG IR. This required doing some
+        clean-up in the OSR exit code. We don't want the B3 FTL to use a bunch of vectors to hold
+        side-state, so the use of OSRExitDescriptorImpl is not right. It makes sense in the LLVM FTL
+        because that code needs some way of saving some state from LowerDFGToLLVM to compile(), but
+        that's not how B3 FTL works. It turns out that for B3 FTL, there isn't anything in
+        OSRExitDescriptorImpl that the code in LowerDFGToLLVM can't just capture in a lambda.
+
+        This also simplifies some stackmap-related APIs, since I got tired of writing boilerplate.
+
+        * CMakeLists.txt:
+        * JavaScriptCore.xcodeproj/project.pbxproj:
+        * assembler/AbstractMacroAssembler.h:
+        (JSC::AbstractMacroAssembler::replaceWithAddressComputation):
+        (JSC::AbstractMacroAssembler::addLinkTask):
+        * b3/B3CheckSpecial.cpp:
+        (JSC::B3::CheckSpecial::generate):
+        * b3/B3Effects.h:
+        * b3/B3PatchpointSpecial.cpp:
+        (JSC::B3::PatchpointSpecial::generate):
+        * b3/B3Procedure.cpp:
+        (JSC::B3::Procedure::addDataSection):
+        (JSC::B3::Procedure::callArgAreaSize):
+        (JSC::B3::Procedure::requestCallArgAreaSize):
+        (JSC::B3::Procedure::frameSize):
+        * b3/B3Procedure.h:
+        (JSC::B3::Procedure::releaseByproducts):
+        (JSC::B3::Procedure::code):
+        * b3/B3StackmapGenerationParams.cpp: Added.
+        (JSC::B3::StackmapGenerationParams::usedRegisters):
+        (JSC::B3::StackmapGenerationParams::proc):
+        (JSC::B3::StackmapGenerationParams::StackmapGenerationParams):
+        * b3/B3StackmapGenerationParams.h: Added.
+        (JSC::B3::StackmapGenerationParams::value):
+        (JSC::B3::StackmapGenerationParams::reps):
+        (JSC::B3::StackmapGenerationParams::size):
+        (JSC::B3::StackmapGenerationParams::at):
+        (JSC::B3::StackmapGenerationParams::operator[]):
+        (JSC::B3::StackmapGenerationParams::begin):
+        (JSC::B3::StackmapGenerationParams::end):
+        (JSC::B3::StackmapGenerationParams::context):
+        (JSC::B3::StackmapGenerationParams::addLatePath):
+        * b3/B3StackmapValue.h:
+        * b3/B3ValueRep.h:
+        (JSC::B3::ValueRep::doubleValue):
+        (JSC::B3::ValueRep::withOffset):
+        * b3/air/AirGenerationContext.h:
+        * b3/testb3.cpp:
+        (JSC::B3::testSimplePatchpoint):
+        (JSC::B3::testSimplePatchpointWithoutOuputClobbersGPArgs):
+        (JSC::B3::testSimplePatchpointWithOuputClobbersGPArgs):
+        (JSC::B3::testSimplePatchpointWithoutOuputClobbersFPArgs):
+        (JSC::B3::testSimplePatchpointWithOuputClobbersFPArgs):
+        (JSC::B3::testPatchpointWithEarlyClobber):
+        (JSC::B3::testPatchpointCallArg):
+        (JSC::B3::testPatchpointFixedRegister):
+        (JSC::B3::testPatchpointAny):
+        (JSC::B3::testPatchpointLotsOfLateAnys):
+        (JSC::B3::testPatchpointAnyImm):
+        (JSC::B3::testPatchpointManyImms):
+        (JSC::B3::testPatchpointWithRegisterResult):
+        (JSC::B3::testPatchpointWithStackArgumentResult):
+        (JSC::B3::testPatchpointWithAnyResult):
+        (JSC::B3::testSimpleCheck):
+        (JSC::B3::testCheckLessThan):
+        (JSC::B3::testCheckMegaCombo):
+        (JSC::B3::testCheckAddImm):
+        (JSC::B3::testCheckAddImmCommute):
+        (JSC::B3::testCheckAddImmSomeRegister):
+        (JSC::B3::testCheckAdd):
+        (JSC::B3::testCheckAdd64):
+        (JSC::B3::testCheckSubImm):
+        (JSC::B3::testCheckSubBadImm):
+        (JSC::B3::testCheckSub):
+        (JSC::B3::testCheckSub64):
+        (JSC::B3::testCheckNeg):
+        (JSC::B3::testCheckNeg64):
+        (JSC::B3::testCheckMul):
+        (JSC::B3::testCheckMulMemory):
+        (JSC::B3::testCheckMul2):
+        (JSC::B3::testCheckMul64):
+        (JSC::B3::genericTestCompare):
+        * ftl/FTLExceptionHandlerManager.cpp:
+        * ftl/FTLExceptionHandlerManager.h:
+        * ftl/FTLJSCall.cpp:
+        * ftl/FTLJSCall.h:
+        * ftl/FTLJSCallBase.cpp:
+        (JSC::FTL::JSCallBase::emit):
+        * ftl/FTLJSCallBase.h:
+        * ftl/FTLJSCallVarargs.cpp:
+        * ftl/FTLJSCallVarargs.h:
+        * ftl/FTLJSTailCall.cpp:
+        (JSC::FTL::DFG::getRegisterWithAddend):
+        (JSC::FTL::JSTailCall::emit):
+        (JSC::FTL::JSTailCall::JSTailCall): Deleted.
+        * ftl/FTLJSTailCall.h:
+        (JSC::FTL::JSTailCall::stackmapID):
+        (JSC::FTL::JSTailCall::estimatedSize):
+        (JSC::FTL::JSTailCall::operator<):
+        (JSC::FTL::JSTailCall::patchpoint): Deleted.
+        * ftl/FTLLowerDFGToLLVM.cpp:
+        (JSC::FTL::DFG::LowerDFGToLLVM::compileCallOrConstruct):
+        (JSC::FTL::DFG::LowerDFGToLLVM::compileInvalidationPoint):
+        (JSC::FTL::DFG::LowerDFGToLLVM::lazySlowPath):
+        (JSC::FTL::DFG::LowerDFGToLLVM::callCheck):
+        (JSC::FTL::DFG::LowerDFGToLLVM::appendOSRExitArgumentsForPatchpointIfWillCatchException):
+        (JSC::FTL::DFG::LowerDFGToLLVM::emitBranchToOSRExitIfWillCatchException):
+        (JSC::FTL::DFG::LowerDFGToLLVM::lowBlock):
+        (JSC::FTL::DFG::LowerDFGToLLVM::appendOSRExitDescriptor):
+        (JSC::FTL::DFG::LowerDFGToLLVM::appendOSRExit):
+        (JSC::FTL::DFG::LowerDFGToLLVM::blessSpeculation):
+        (JSC::FTL::DFG::LowerDFGToLLVM::emitOSRExitCall):
+        (JSC::FTL::DFG::LowerDFGToLLVM::buildExitArguments):
+        (JSC::FTL::DFG::LowerDFGToLLVM::exitValueForNode):
+        * ftl/FTLOSRExit.cpp:
+        (JSC::FTL::OSRExitDescriptor::OSRExitDescriptor):
+        (JSC::FTL::OSRExitDescriptor::emitOSRExit):
+        (JSC::FTL::OSRExitDescriptor::emitOSRExitLater):
+        (JSC::FTL::OSRExitDescriptor::prepareOSRExitHandle):
+        (JSC::FTL::OSRExit::OSRExit):
+        (JSC::FTL::OSRExit::codeLocationForRepatch):
+        (JSC::FTL::OSRExit::recoverRegistersFromSpillSlot):
+        (JSC::FTL::OSRExit::willArriveAtExitFromIndirectExceptionCheck):
+        (JSC::FTL::OSRExit::needsRegisterRecoveryOnGenericUnwindOSRExitPath):
+        * ftl/FTLOSRExit.h:
+        (JSC::FTL::OSRExitDescriptorImpl::OSRExitDescriptorImpl):
+        (JSC::FTL::OSRExit::considerAddingAsFrequentExitSite):
+        * ftl/FTLOSRExitCompiler.cpp:
+        (JSC::FTL::compileStub):
+        (JSC::FTL::compileFTLOSRExit):
+        * ftl/FTLState.h:
+
 2015-12-07  Saam barati  <sbarati@apple.com>
 
         Rename Watchdog::didFire to Watchdog::shouldTerminate because that's what didFire really meant
index 43139fe..a7ee808 100644 (file)
                0F338E1C1BF286EA0013C88F /* B3BlockInsertionSet.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F338E181BF286EA0013C88F /* B3BlockInsertionSet.h */; };
                0F338E1D1BF286EA0013C88F /* B3LowerMacros.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F338E191BF286EA0013C88F /* B3LowerMacros.cpp */; };
                0F338E1E1BF286EA0013C88F /* B3LowerMacros.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F338E1A1BF286EA0013C88F /* B3LowerMacros.h */; };
+               0F33FCF71C136E2500323F67 /* B3StackmapGenerationParams.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F33FCF51C136E2500323F67 /* B3StackmapGenerationParams.cpp */; };
+               0F33FCF81C136E2500323F67 /* B3StackmapGenerationParams.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F33FCF61C136E2500323F67 /* B3StackmapGenerationParams.h */; };
                0F34B14916D42010001CDA5A /* DFGUseKind.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F34B14716D4200E001CDA5A /* DFGUseKind.cpp */; };
                0F34B14A16D42013001CDA5A /* DFGUseKind.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F34B14816D4200E001CDA5A /* DFGUseKind.h */; };
                0F37308C1C0BD29100052BFA /* B3PhiChildren.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F37308A1C0BD29100052BFA /* B3PhiChildren.cpp */; };
                0F338E181BF286EA0013C88F /* B3BlockInsertionSet.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = B3BlockInsertionSet.h; path = b3/B3BlockInsertionSet.h; sourceTree = "<group>"; };
                0F338E191BF286EA0013C88F /* B3LowerMacros.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = B3LowerMacros.cpp; path = b3/B3LowerMacros.cpp; sourceTree = "<group>"; };
                0F338E1A1BF286EA0013C88F /* B3LowerMacros.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = B3LowerMacros.h; path = b3/B3LowerMacros.h; sourceTree = "<group>"; };
+               0F33FCF51C136E2500323F67 /* B3StackmapGenerationParams.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = B3StackmapGenerationParams.cpp; path = b3/B3StackmapGenerationParams.cpp; sourceTree = "<group>"; };
+               0F33FCF61C136E2500323F67 /* B3StackmapGenerationParams.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = B3StackmapGenerationParams.h; path = b3/B3StackmapGenerationParams.h; sourceTree = "<group>"; };
                0F34B14716D4200E001CDA5A /* DFGUseKind.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = DFGUseKind.cpp; path = dfg/DFGUseKind.cpp; sourceTree = "<group>"; };
                0F34B14816D4200E001CDA5A /* DFGUseKind.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGUseKind.h; path = dfg/DFGUseKind.h; sourceTree = "<group>"; };
                0F37308A1C0BD29100052BFA /* B3PhiChildren.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = B3PhiChildren.cpp; path = b3/B3PhiChildren.cpp; sourceTree = "<group>"; };
                                0FEC84E31BDACDAC0080FF74 /* B3ProcedureInlines.h */,
                                0FEC85B71BE1462F0080FF74 /* B3ReduceStrength.cpp */,
                                0FEC85B81BE1462F0080FF74 /* B3ReduceStrength.h */,
+                               0F33FCF51C136E2500323F67 /* B3StackmapGenerationParams.cpp */,
+                               0F33FCF61C136E2500323F67 /* B3StackmapGenerationParams.h */,
                                0FEC84E61BDACDAC0080FF74 /* B3StackmapSpecial.cpp */,
                                0FEC84E71BDACDAC0080FF74 /* B3StackmapSpecial.h */,
                                0F338DEF1BE93AD10013C88F /* B3StackmapValue.cpp */,
                                0FD3E4021B618AAF00C80E1E /* DFGAdaptiveInferredPropertyValueWatchpoint.h in Headers */,
                                0F18D3D01B55A6E0002C5C9F /* DFGAdaptiveStructureWatchpoint.h in Headers */,
                                998ED6751BED768C00DD8017 /* RemoteControllableTarget.h in Headers */,
+                               0F33FCF81C136E2500323F67 /* B3StackmapGenerationParams.h in Headers */,
                                0F66E16B14DF3F1600B7B2E4 /* DFGAdjacencyList.h in Headers */,
                                0FFB921816D02EB20055A5DB /* DFGAllocator.h in Headers */,
                                0F1E3A461534CBAF000F9456 /* DFGArgumentPosition.h in Headers */,
                                0F8F943C1667631300D61971 /* CodeSpecializationKind.cpp in Sources */,
                                0F8F94421667633500D61971 /* CodeType.cpp in Sources */,
                                147F39C1107EC37600427A48 /* CommonIdentifiers.cpp in Sources */,
+                               0F33FCF71C136E2500323F67 /* B3StackmapGenerationParams.cpp in Sources */,
                                A709F2F217A0AC2A00512E98 /* CommonSlowPaths.cpp in Sources */,
                                6553A33117A1F1EE008CF6F3 /* CommonSlowPathsExceptions.cpp in Sources */,
                                A7E5A3A71797432D00E893C0 /* CompilationResult.cpp in Sources */,
index cc995a2..e056cfd 100644 (file)
@@ -1016,11 +1016,6 @@ public:
         AssemblerType::replaceWithAddressComputation(label.dataLocation());
     }
 
-    void addLinkTask(RefPtr<SharedTask<void(LinkBuffer&)>> task)
-    {
-        m_linkTasks.append(task);
-    }
-
     template<typename Functor>
     void addLinkTask(const Functor& functor)
     {
index 87219d8..7e746c3 100644 (file)
@@ -31,6 +31,7 @@
 #include "AirCode.h"
 #include "AirGenerationContext.h"
 #include "AirInstInlines.h"
+#include "B3StackmapGenerationParams.h"
 #include "B3ValueInlines.h"
 
 namespace JSC { namespace B3 {
@@ -208,13 +209,7 @@ CCallHelpers::Jump CheckSpecial::generate(Inst& inst, CCallHelpers& jit, Generat
                     break;
                 }
                 
-                StackmapGenerationParams params;
-                params.value = value;
-                params.reps = reps;
-                params.usedRegisters = value->m_usedRegisters;
-                params.context = &context;
-
-                value->m_generator->run(jit, params);
+                value->m_generator->run(jit, StackmapGenerationParams(value, reps, context));
             }));
 
     return CCallHelpers::Jump(); // As far as Air thinks, we are not a terminal.
index b7e858c..768f3c7 100644 (file)
@@ -37,7 +37,11 @@ struct Effects {
     // True if this cannot continue execution in the current block.
     bool terminal { false };
 
-    // True if this value can cause execution to terminate abruptly.
+    // True if this value can cause execution to terminate abruptly, and that this abrupt termination is
+    // observable. Note that if exitsSideways is set to true but reads is bottom, then B3 is free to
+    // assume that after abrupt termination of this procedure, none of the heap will be read. That's
+    // usually false, so make sure that reads corresponds to the set of things that are readable after
+    // this function terminates abruptly.
     bool exitsSideways { false };
 
     // True if the instruction may change semantics if hoisted above some control flow.
index 9785f83..0f760a7 100644 (file)
@@ -29,6 +29,7 @@
 #if ENABLE(B3_JIT)
 
 #include "AirGenerationContext.h"
+#include "B3StackmapGenerationParams.h"
 #include "B3ValueInlines.h"
 
 namespace JSC { namespace B3 {
@@ -107,13 +108,7 @@ CCallHelpers::Jump PatchpointSpecial::generate(
         reps.append(repForArg(*context.code, inst.args[offset++]));
     appendRepsImpl(context, offset, inst, reps);
     
-    StackmapGenerationParams params;
-    params.value = value;
-    params.reps = reps;
-    params.usedRegisters = value->m_usedRegisters;
-    params.context = &context;
-
-    value->m_generator->run(jit, params);
+    value->m_generator->run(jit, StackmapGenerationParams(value, reps, context));
 
     return CCallHelpers::Jump();
 }
index 9b4ecf8..0f8ebb4 100644 (file)
@@ -148,6 +148,16 @@ void* Procedure::addDataSection(size_t size)
     return result;
 }
 
+unsigned Procedure::callArgAreaSize() const
+{
+    return code().callArgAreaSize();
+}
+
+void Procedure::requestCallArgAreaSize(unsigned size)
+{
+    code().requestCallArgAreaSize(size);
+}
+
 unsigned Procedure::frameSize() const
 {
     return code().frameSize();
index 5293cee..44f075a 100644 (file)
@@ -221,9 +221,14 @@ public:
     // that API, then you don't have to worry about this.
     std::unique_ptr<OpaqueByproducts> releaseByproducts() { return WTF::move(m_byproducts); }
 
+    // This gives you direct access to Code. However, the idea is that clients of B3 shouldn't have to
+    // call this. So, Procedure has some methods (below) that expose some Air::Code functionality.
     const Air::Code& code() const { return *m_code; }
     Air::Code& code() { return *m_code; }
 
+    unsigned callArgAreaSize() const;
+    void requestCallArgAreaSize(unsigned size);
+
     JS_EXPORT_PRIVATE unsigned frameSize() const;
     const RegisterAtOffsetList& calleeSaveRegisters() const;
 
diff --git a/Source/JavaScriptCore/b3/B3StackmapGenerationParams.cpp b/Source/JavaScriptCore/b3/B3StackmapGenerationParams.cpp
new file mode 100644 (file)
index 0000000..6114cb0
--- /dev/null
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2015 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */
+
+#include "config.h"
+#include "B3StackmapGenerationParams.h"
+
+#if ENABLE(B3_JIT)
+
+#include "AirCode.h"
+#include "AirGenerationContext.h"
+#include "B3StackmapValue.h"
+
+namespace JSC { namespace B3 {
+
+using namespace Air;
+
+const RegisterSet& StackmapGenerationParams::usedRegisters() const
+{
+    return m_value->m_usedRegisters;
+}
+
+Procedure& StackmapGenerationParams::proc() const
+{
+    return m_context.code->proc();
+}
+
+StackmapGenerationParams::StackmapGenerationParams(
+    StackmapValue* value, const Vector<ValueRep>& reps, Air::GenerationContext& context)
+    : m_value(value)
+    , m_reps(reps)
+    , m_context(context)
+{
+}
+
+} } // namespace JSC::B3
+
+#endif // ENABLE(B3_JIT)
+
diff --git a/Source/JavaScriptCore/b3/B3StackmapGenerationParams.h b/Source/JavaScriptCore/b3/B3StackmapGenerationParams.h
new file mode 100644 (file)
index 0000000..e6bc3f5
--- /dev/null
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2015 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */
+
+#ifndef B3StackmapGenerationParams_h
+#define B3StackmapGenerationParams_h
+
+#if ENABLE(B3_JIT)
+
+#include "AirGenerationContext.h"
+#include "B3ValueRep.h"
+#include "RegisterSet.h"
+
+namespace JSC { namespace B3 {
+
+class CheckSpecial;
+class PatchpointSpecial;
+class Procedure;
+class StackmapValue;
+
+class StackmapGenerationParams {
+public:
+    // This is the stackmap value that we're generating.
+    StackmapValue* value() const { return m_value; }
+    
+    // This tells you the actual value representations that were chosen. This is usually different
+    // from the constraints we supplied.
+    const Vector<ValueRep>& reps() const { return m_reps; };
+
+    // Usually we wish to access the reps. We make this easy by making ourselves appear to be a
+    // collection of reps.
+    unsigned size() const { return m_reps.size(); }
+    const ValueRep& at(unsigned index) const { return m_reps[index]; }
+    const ValueRep& operator[](unsigned index) const { return at(index); }
+    Vector<ValueRep>::const_iterator begin() const { return m_reps.begin(); }
+    Vector<ValueRep>::const_iterator end() const { return m_reps.end(); }
+    
+    // This tells you the registers that were used.
+    const RegisterSet& usedRegisters() const;
+
+    // This is provided for convenience; it means that you don't have to capture it if you don't want to.
+    Procedure& proc() const;
+    
+    // The Air::GenerationContext gives you even more power.
+    Air::GenerationContext& context() const { return m_context; };
+
+    template<typename Functor>
+    void addLatePath(const Functor& functor) const
+    {
+        context().latePaths.append(
+            createSharedTask<Air::GenerationContext::LatePathFunction>(
+                [=] (CCallHelpers& jit, Air::GenerationContext&) {
+                    functor(jit);
+                }));
+    }
+
+private:
+    friend class CheckSpecial;
+    friend class PatchpointSpecial;
+    
+    StackmapGenerationParams(StackmapValue*, const Vector<ValueRep>& reps, Air::GenerationContext&);
+
+    StackmapValue* m_value;
+    Vector<ValueRep> m_reps;
+    Air::GenerationContext& m_context;
+};
+
+} } // namespace JSC::B3
+
+#endif // ENABLE(B3_JIT)
+
+#endif // B3StackmapGenerationParams_h
+
index 39aa13f..97cf027 100644 (file)
 
 namespace JSC { namespace B3 {
 
-class StackmapValue;
-
-namespace Air {
-struct GenerationContext;
-}
-
-struct StackmapGenerationParams {
-    // This is the stackmap value that we're generating.
-    StackmapValue* value;
-    
-    // This tells you the actual value representations that were chosen. This is usually different
-    // from the constraints we supplied.
-    Vector<ValueRep> reps;
-    
-    // This tells you the registers that were used.
-    RegisterSet usedRegisters;
-
-    // The Air::GenerationContext gives you even more power.
-    Air::GenerationContext* context;
-};
+class StackmapGenerationParams;
 
 typedef void StackmapGeneratorFunction(CCallHelpers&, const StackmapGenerationParams&);
 typedef SharedTask<StackmapGeneratorFunction> StackmapGenerator;
@@ -84,6 +65,13 @@ public:
     // children().append(). That will work fine, but it's not recommended.
     void append(const ConstrainedValue&);
 
+    template<typename VectorType>
+    void appendVector(const VectorType& vector)
+    {
+        for (const auto& value : vector)
+            append(value);
+    }
+
     // Helper for appending cold any's. This is often used by clients to implement OSR.
     template<typename VectorType>
     void appendColdAnys(const VectorType& vector)
@@ -285,6 +273,7 @@ protected:
 private:
     friend class CheckSpecial;
     friend class PatchpointSpecial;
+    friend class StackmapGenerationParams;
     friend class StackmapSpecial;
     
     Vector<ValueRep> m_reps;
index 0bb0a06..5d07b7b 100644 (file)
@@ -30,6 +30,7 @@
 
 #include "FPRInfo.h"
 #include "GPRInfo.h"
+#include "JSCJSValue.h"
 #include "Reg.h"
 #include <wtf/PrintStream.h>
 
@@ -201,6 +202,18 @@ public:
         return bitwise_cast<double>(value());
     }
 
+    ValueRep withOffset(intptr_t offset)
+    {
+        switch (kind()) {
+        case Stack:
+            return stack(offsetFromFP() + offset);
+        case StackArgument:
+            return stackArgument(offsetFromSP() + offset);
+        default:
+            return *this;
+        }
+    }
+
     JS_EXPORT_PRIVATE void dump(PrintStream&) const;
 
 private:
index b6a27e9..50f75e1 100644 (file)
 #include <wtf/SharedTask.h>
 #include <wtf/Vector.h>
 
-namespace JSC { namespace B3 { namespace Air {
+namespace JSC {
+
+class CCallHelpers;
+
+namespace B3 { namespace Air {
 
 class Code;
 
index d5eb5be..490a439 100644 (file)
@@ -37,6 +37,7 @@
 #include "B3MemoryValue.h"
 #include "B3Procedure.h"
 #include "B3StackSlotValue.h"
+#include "B3StackmapGenerationParams.h"
 #include "B3SwitchValue.h"
 #include "B3UpsilonValue.h"
 #include "B3ValueInlines.h"
@@ -3413,11 +3414,11 @@ void testSimplePatchpoint()
     patchpoint->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[0].isGPR());
-            CHECK(params.reps[1].isGPR());
-            CHECK(params.reps[2].isGPR());
-            add32(jit, params.reps[1].gpr(), params.reps[2].gpr(), params.reps[0].gpr());
+            CHECK(params.size() == 3);
+            CHECK(params[0].isGPR());
+            CHECK(params[1].isGPR());
+            CHECK(params[2].isGPR());
+            add32(jit, params[1].gpr(), params[2].gpr(), params[0].gpr());
         });
     root->appendNew<ControlValue>(proc, Return, Origin(), patchpoint);
 
@@ -3440,11 +3441,11 @@ void testSimplePatchpointWithoutOuputClobbersGPArgs()
     patchpoint->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 2);
-            CHECK(params.reps[0].isGPR());
-            CHECK(params.reps[1].isGPR());
-            jit.move(CCallHelpers::TrustedImm32(0x00ff00ff), params.reps[0].gpr());
-            jit.move(CCallHelpers::TrustedImm32(0x00ff00ff), params.reps[1].gpr());
+            CHECK(params.size() == 2);
+            CHECK(params[0].isGPR());
+            CHECK(params[1].isGPR());
+            jit.move(CCallHelpers::TrustedImm32(0x00ff00ff), params[0].gpr());
+            jit.move(CCallHelpers::TrustedImm32(0x00ff00ff), params[1].gpr());
             jit.move(CCallHelpers::TrustedImm32(0x00ff00ff), GPRInfo::argumentGPR0);
             jit.move(CCallHelpers::TrustedImm32(0x00ff00ff), GPRInfo::argumentGPR1);
         });
@@ -3485,12 +3486,12 @@ void testSimplePatchpointWithOuputClobbersGPArgs()
     patchpoint->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[0].isGPR());
-            CHECK(params.reps[1].isGPR());
-            CHECK(params.reps[2].isGPR());
-            jit.move(params.reps[1].gpr(), params.reps[0].gpr());
-            jit.add64(params.reps[2].gpr(), params.reps[0].gpr());
+            CHECK(params.size() == 3);
+            CHECK(params[0].isGPR());
+            CHECK(params[1].isGPR());
+            CHECK(params[2].isGPR());
+            jit.move(params[1].gpr(), params[0].gpr());
+            jit.add64(params[2].gpr(), params[0].gpr());
 
             clobberAll.forEach([&] (Reg reg) {
                 jit.move(CCallHelpers::TrustedImm32(0x00ff00ff), reg.gpr());
@@ -3520,11 +3521,11 @@ void testSimplePatchpointWithoutOuputClobbersFPArgs()
     patchpoint->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 2);
-            CHECK(params.reps[0].isFPR());
-            CHECK(params.reps[1].isFPR());
-            jit.moveZeroToDouble(params.reps[0].fpr());
-            jit.moveZeroToDouble(params.reps[1].fpr());
+            CHECK(params.size() == 2);
+            CHECK(params[0].isFPR());
+            CHECK(params[1].isFPR());
+            jit.moveZeroToDouble(params[0].fpr());
+            jit.moveZeroToDouble(params[1].fpr());
             jit.moveZeroToDouble(FPRInfo::argumentFPR0);
             jit.moveZeroToDouble(FPRInfo::argumentFPR1);
         });
@@ -3558,11 +3559,11 @@ void testSimplePatchpointWithOuputClobbersFPArgs()
     patchpoint->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[0].isFPR());
-            CHECK(params.reps[1].isFPR());
-            CHECK(params.reps[2].isFPR());
-            jit.addDouble(params.reps[1].fpr(), params.reps[2].fpr(), params.reps[0].fpr());
+            CHECK(params.size() == 3);
+            CHECK(params[0].isFPR());
+            CHECK(params[1].isFPR());
+            CHECK(params[2].isFPR());
+            jit.addDouble(params[1].fpr(), params[2].fpr(), params[0].fpr());
 
             clobberAll.forEach([&] (Reg reg) {
                 jit.moveZeroToDouble(reg.fpr());
@@ -3590,10 +3591,10 @@ void testPatchpointWithEarlyClobber()
         patchpoint->clobberEarly(RegisterSet(registerToClobber));
         patchpoint->setGenerator(
             [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
-                CHECK((params.reps[1].gpr() == GPRInfo::argumentGPR0) == arg1InArgGPR);
-                CHECK((params.reps[2].gpr() == GPRInfo::argumentGPR1) == arg2InArgGPR);
+                CHECK((params[1].gpr() == GPRInfo::argumentGPR0) == arg1InArgGPR);
+                CHECK((params[2].gpr() == GPRInfo::argumentGPR1) == arg2InArgGPR);
                 
-                add32(jit, params.reps[1].gpr(), params.reps[2].gpr(), params.reps[0].gpr());
+                add32(jit, params[1].gpr(), params[2].gpr(), params[0].gpr());
             });
 
         root->appendNew<ControlValue>(proc, Return, Origin(), patchpoint);
@@ -3618,16 +3619,16 @@ void testPatchpointCallArg()
     patchpoint->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[0].isGPR());
-            CHECK(params.reps[1].isStack());
-            CHECK(params.reps[2].isStack());
+            CHECK(params.size() == 3);
+            CHECK(params[0].isGPR());
+            CHECK(params[1].isStack());
+            CHECK(params[2].isStack());
             jit.load32(
-                CCallHelpers::Address(GPRInfo::callFrameRegister, params.reps[1].offsetFromFP()),
-                params.reps[0].gpr());
+                CCallHelpers::Address(GPRInfo::callFrameRegister, params[1].offsetFromFP()),
+                params[0].gpr());
             jit.add32(
-                CCallHelpers::Address(GPRInfo::callFrameRegister, params.reps[2].offsetFromFP()),
-                params.reps[0].gpr());
+                CCallHelpers::Address(GPRInfo::callFrameRegister, params[2].offsetFromFP()),
+                params[0].gpr());
         });
     root->appendNew<ControlValue>(proc, Return, Origin(), patchpoint);
 
@@ -3646,11 +3647,11 @@ void testPatchpointFixedRegister()
     patchpoint->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[0].isGPR());
-            CHECK(params.reps[1] == ValueRep(GPRInfo::regT0));
-            CHECK(params.reps[2] == ValueRep(GPRInfo::regT1));
-            add32(jit, GPRInfo::regT0, GPRInfo::regT1, params.reps[0].gpr());
+            CHECK(params.size() == 3);
+            CHECK(params[0].isGPR());
+            CHECK(params[1] == ValueRep(GPRInfo::regT0));
+            CHECK(params[2] == ValueRep(GPRInfo::regT1));
+            add32(jit, GPRInfo::regT0, GPRInfo::regT1, params[0].gpr());
         });
     root->appendNew<ControlValue>(proc, Return, Origin(), patchpoint);
 
@@ -3670,11 +3671,11 @@ void testPatchpointAny(ValueRep rep)
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
             // We shouldn't have spilled the inputs, so we assert that they're in registers.
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[0].isGPR());
-            CHECK(params.reps[1].isGPR());
-            CHECK(params.reps[2].isGPR());
-            add32(jit, params.reps[1].gpr(), params.reps[2].gpr(), params.reps[0].gpr());
+            CHECK(params.size() == 3);
+            CHECK(params[0].isGPR());
+            CHECK(params[1].isGPR());
+            CHECK(params[2].isGPR());
+            add32(jit, params[1].gpr(), params[2].gpr(), params[0].gpr());
         });
     root->appendNew<ControlValue>(proc, Return, Origin(), patchpoint);
 
@@ -3704,16 +3705,16 @@ void testPatchpointLotsOfLateAnys()
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
             // We shouldn't have spilled the inputs, so we assert that they're in registers.
-            CHECK(params.reps.size() == things.size() + 1);
-            CHECK(params.reps[0].isGPR());
-            jit.move(CCallHelpers::TrustedImm32(0), params.reps[0].gpr());
-            for (unsigned i = 1; i < params.reps.size(); ++i) {
-                if (params.reps[i].isGPR()) {
-                    CHECK(params.reps[i] != params.reps[0]);
-                    jit.add32(params.reps[i].gpr(), params.reps[0].gpr());
+            CHECK(params.size() == things.size() + 1);
+            CHECK(params[0].isGPR());
+            jit.move(CCallHelpers::TrustedImm32(0), params[0].gpr());
+            for (unsigned i = 1; i < params.size(); ++i) {
+                if (params[i].isGPR()) {
+                    CHECK(params[i] != params[0]);
+                    jit.add32(params[i].gpr(), params[0].gpr());
                 } else {
-                    CHECK(params.reps[i].isStack());
-                    jit.add32(CCallHelpers::Address(GPRInfo::callFrameRegister, params.reps[i].offsetFromFP()), params.reps[0].gpr());
+                    CHECK(params[i].isStack());
+                    jit.add32(CCallHelpers::Address(GPRInfo::callFrameRegister, params[i].offsetFromFP()), params[0].gpr());
                 }
             }
         });
@@ -3736,14 +3737,14 @@ void testPatchpointAnyImm(ValueRep rep)
     patchpoint->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[0].isGPR());
-            CHECK(params.reps[1].isGPR());
-            CHECK(params.reps[2].isConstant());
-            CHECK(params.reps[2].value() == 42);
+            CHECK(params.size() == 3);
+            CHECK(params[0].isGPR());
+            CHECK(params[1].isGPR());
+            CHECK(params[2].isConstant());
+            CHECK(params[2].value() == 42);
             jit.add32(
-                CCallHelpers::TrustedImm32(static_cast<int32_t>(params.reps[2].value())),
-                params.reps[1].gpr(), params.reps[0].gpr());
+                CCallHelpers::TrustedImm32(static_cast<int32_t>(params[2].value())),
+                params[1].gpr(), params[0].gpr());
         });
     root->appendNew<ControlValue>(proc, Return, Origin(), patchpoint);
 
@@ -3765,11 +3766,11 @@ void testPatchpointManyImms()
     patchpoint->append(ConstrainedValue(arg4, ValueRep::WarmAny));
     patchpoint->setGenerator(
         [&] (CCallHelpers&, const StackmapGenerationParams& params) {
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[0] == ValueRep::constant(42));
-            CHECK(params.reps[1] == ValueRep::constant(43));
-            CHECK(params.reps[2] == ValueRep::constant(43000000000000ll));
-            CHECK(params.reps[3] == ValueRep::constant(bitwise_cast<int64_t>(42.5)));
+            CHECK(params.size() == 4);
+            CHECK(params[0] == ValueRep::constant(42));
+            CHECK(params[1] == ValueRep::constant(43));
+            CHECK(params[2] == ValueRep::constant(43000000000000ll));
+            CHECK(params[3] == ValueRep::constant(bitwise_cast<int64_t>(42.5)));
         });
     root->appendNew<ControlValue>(
         proc, Return, Origin(),
@@ -3791,11 +3792,11 @@ void testPatchpointWithRegisterResult()
     patchpoint->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[0] == ValueRep::reg(GPRInfo::nonArgGPR0));
-            CHECK(params.reps[1].isGPR());
-            CHECK(params.reps[2].isGPR());
-            add32(jit, params.reps[1].gpr(), params.reps[2].gpr(), GPRInfo::nonArgGPR0);
+            CHECK(params.size() == 3);
+            CHECK(params[0] == ValueRep::reg(GPRInfo::nonArgGPR0));
+            CHECK(params[1].isGPR());
+            CHECK(params[2].isGPR());
+            add32(jit, params[1].gpr(), params[2].gpr(), GPRInfo::nonArgGPR0);
         });
     root->appendNew<ControlValue>(proc, Return, Origin(), patchpoint);
 
@@ -3816,12 +3817,12 @@ void testPatchpointWithStackArgumentResult()
     patchpoint->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[0] == ValueRep::stack(-static_cast<intptr_t>(proc.frameSize())));
-            CHECK(params.reps[1].isGPR());
-            CHECK(params.reps[2].isGPR());
-            jit.store32(params.reps[1].gpr(), CCallHelpers::Address(CCallHelpers::stackPointerRegister, 0));
-            jit.add32(params.reps[2].gpr(), CCallHelpers::Address(CCallHelpers::stackPointerRegister, 0));
+            CHECK(params.size() == 3);
+            CHECK(params[0] == ValueRep::stack(-static_cast<intptr_t>(proc.frameSize())));
+            CHECK(params[1].isGPR());
+            CHECK(params[2].isGPR());
+            jit.store32(params[1].gpr(), CCallHelpers::Address(CCallHelpers::stackPointerRegister, 0));
+            jit.add32(params[2].gpr(), CCallHelpers::Address(CCallHelpers::stackPointerRegister, 0));
         });
     root->appendNew<ControlValue>(proc, Return, Origin(), patchpoint);
 
@@ -3844,13 +3845,13 @@ void testPatchpointWithAnyResult()
     patchpoint->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[0].isStack());
-            CHECK(params.reps[1].isGPR());
-            CHECK(params.reps[2].isGPR());
-            add32(jit, params.reps[1].gpr(), params.reps[2].gpr(), GPRInfo::regT0);
+            CHECK(params.size() == 3);
+            CHECK(params[0].isStack());
+            CHECK(params[1].isGPR());
+            CHECK(params[2].isGPR());
+            add32(jit, params[1].gpr(), params[2].gpr(), GPRInfo::regT0);
             jit.convertInt32ToDouble(GPRInfo::regT0, FPRInfo::fpRegT0);
-            jit.storeDouble(FPRInfo::fpRegT0, CCallHelpers::Address(GPRInfo::callFrameRegister, params.reps[0].offsetFromFP()));
+            jit.storeDouble(FPRInfo::fpRegT0, CCallHelpers::Address(GPRInfo::callFrameRegister, params[0].offsetFromFP()));
         });
     root->appendNew<ControlValue>(proc, Return, Origin(), patchpoint);
 
@@ -3866,7 +3867,7 @@ void testSimpleCheck()
     check->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 1);
+            CHECK(params.size() == 1);
 
             // This should always work because a function this simple should never have callee
             // saves.
@@ -3898,7 +3899,7 @@ void testCheckLessThan()
     check->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 1);
+            CHECK(params.size() == 1);
 
             // This should always work because a function this simple should never have callee
             // saves.
@@ -3944,7 +3945,7 @@ void testCheckMegaCombo()
     check->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 1);
+            CHECK(params.size() == 1);
 
             // This should always work because a function this simple should never have callee
             // saves.
@@ -3984,11 +3985,11 @@ void testCheckAddImm()
     checkAdd->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isConstant());
-            CHECK(params.reps[3].value() == 42);
-            jit.convertInt32ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isConstant());
+            CHECK(params[3].value() == 42);
+            jit.convertInt32ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
             jit.convertInt32ToDouble(CCallHelpers::TrustedImm32(42), FPRInfo::fpRegT1);
             jit.addDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
@@ -4020,11 +4021,11 @@ void testCheckAddImmCommute()
     checkAdd->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isConstant());
-            CHECK(params.reps[3].value() == 42);
-            jit.convertInt32ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isConstant());
+            CHECK(params[3].value() == 42);
+            jit.convertInt32ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
             jit.convertInt32ToDouble(CCallHelpers::TrustedImm32(42), FPRInfo::fpRegT1);
             jit.addDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
@@ -4056,11 +4057,11 @@ void testCheckAddImmSomeRegister()
     checkAdd->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isGPR());
-            jit.convertInt32ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
-            jit.convertInt32ToDouble(params.reps[3].gpr(), FPRInfo::fpRegT1);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isGPR());
+            jit.convertInt32ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
+            jit.convertInt32ToDouble(params[3].gpr(), FPRInfo::fpRegT1);
             jit.addDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
             jit.ret();
@@ -4093,11 +4094,11 @@ void testCheckAdd()
     checkAdd->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isGPR());
-            jit.convertInt32ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
-            jit.convertInt32ToDouble(params.reps[3].gpr(), FPRInfo::fpRegT1);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isGPR());
+            jit.convertInt32ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
+            jit.convertInt32ToDouble(params[3].gpr(), FPRInfo::fpRegT1);
             jit.addDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
             jit.ret();
@@ -4126,11 +4127,11 @@ void testCheckAdd64()
     checkAdd->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isGPR());
-            jit.convertInt64ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
-            jit.convertInt64ToDouble(params.reps[3].gpr(), FPRInfo::fpRegT1);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isGPR());
+            jit.convertInt64ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
+            jit.convertInt64ToDouble(params[3].gpr(), FPRInfo::fpRegT1);
             jit.addDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
             jit.ret();
@@ -4200,11 +4201,11 @@ void testCheckSubImm()
     checkSub->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isConstant());
-            CHECK(params.reps[3].value() == 42);
-            jit.convertInt32ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isConstant());
+            CHECK(params[3].value() == 42);
+            jit.convertInt32ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
             jit.convertInt32ToDouble(CCallHelpers::TrustedImm32(42), FPRInfo::fpRegT1);
             jit.subDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
@@ -4237,11 +4238,11 @@ void testCheckSubBadImm()
     checkSub->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isConstant());
-            CHECK(params.reps[3].value() == badImm);
-            jit.convertInt32ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isConstant());
+            CHECK(params[3].value() == badImm);
+            jit.convertInt32ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
             jit.convertInt32ToDouble(CCallHelpers::TrustedImm32(badImm), FPRInfo::fpRegT1);
             jit.subDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
@@ -4275,11 +4276,11 @@ void testCheckSub()
     checkSub->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isGPR());
-            jit.convertInt32ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
-            jit.convertInt32ToDouble(params.reps[3].gpr(), FPRInfo::fpRegT1);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isGPR());
+            jit.convertInt32ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
+            jit.convertInt32ToDouble(params[3].gpr(), FPRInfo::fpRegT1);
             jit.subDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
             jit.ret();
@@ -4313,11 +4314,11 @@ void testCheckSub64()
     checkSub->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isGPR());
-            jit.convertInt64ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
-            jit.convertInt64ToDouble(params.reps[3].gpr(), FPRInfo::fpRegT1);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isGPR());
+            jit.convertInt64ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
+            jit.convertInt64ToDouble(params[3].gpr(), FPRInfo::fpRegT1);
             jit.subDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
             jit.ret();
@@ -4386,9 +4387,9 @@ void testCheckNeg()
     checkNeg->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[2].isGPR());
-            jit.convertInt32ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT1);
+            CHECK(params.size() == 3);
+            CHECK(params[2].isGPR());
+            jit.convertInt32ToDouble(params[2].gpr(), FPRInfo::fpRegT1);
             jit.negateDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
             jit.ret();
@@ -4416,9 +4417,9 @@ void testCheckNeg64()
     checkNeg->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 3);
-            CHECK(params.reps[2].isGPR());
-            jit.convertInt64ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT1);
+            CHECK(params.size() == 3);
+            CHECK(params[2].isGPR());
+            jit.convertInt64ToDouble(params[2].gpr(), FPRInfo::fpRegT1);
             jit.negateDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
             jit.ret();
@@ -4451,11 +4452,11 @@ void testCheckMul()
     checkMul->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isGPR());
-            jit.convertInt32ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
-            jit.convertInt32ToDouble(params.reps[3].gpr(), FPRInfo::fpRegT1);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isGPR());
+            jit.convertInt32ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
+            jit.convertInt32ToDouble(params[3].gpr(), FPRInfo::fpRegT1);
             jit.mulDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
             jit.ret();
@@ -4492,11 +4493,11 @@ void testCheckMulMemory()
     checkMul->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isGPR());
-            jit.convertInt32ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
-            jit.convertInt32ToDouble(params.reps[3].gpr(), FPRInfo::fpRegT1);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isGPR());
+            jit.convertInt32ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
+            jit.convertInt32ToDouble(params[3].gpr(), FPRInfo::fpRegT1);
             jit.mulDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
             jit.ret();
@@ -4538,11 +4539,11 @@ void testCheckMul2()
     checkMul->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isConstant());
-            CHECK(params.reps[3].value() == 2);
-            jit.convertInt32ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isConstant());
+            CHECK(params[3].value() == 2);
+            jit.convertInt32ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
             jit.convertInt32ToDouble(CCallHelpers::TrustedImm32(2), FPRInfo::fpRegT1);
             jit.mulDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
@@ -4572,11 +4573,11 @@ void testCheckMul64()
     checkMul->setGenerator(
         [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
             AllowMacroScratchRegisterUsage allowScratch(jit);
-            CHECK(params.reps.size() == 4);
-            CHECK(params.reps[2].isGPR());
-            CHECK(params.reps[3].isGPR());
-            jit.convertInt64ToDouble(params.reps[2].gpr(), FPRInfo::fpRegT0);
-            jit.convertInt64ToDouble(params.reps[3].gpr(), FPRInfo::fpRegT1);
+            CHECK(params.size() == 4);
+            CHECK(params[2].isGPR());
+            CHECK(params[3].isGPR());
+            jit.convertInt64ToDouble(params[2].gpr(), FPRInfo::fpRegT0);
+            jit.convertInt64ToDouble(params[3].gpr(), FPRInfo::fpRegT1);
             jit.mulDouble(FPRInfo::fpRegT1, FPRInfo::fpRegT0);
             jit.emitFunctionEpilogue();
             jit.ret();
@@ -4675,9 +4676,9 @@ void genericTestCompare(
         patchpoint->setGenerator(
             [&] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 AllowMacroScratchRegisterUsage allowScratch(jit);
-                CHECK(params.reps.size() == 1);
-                CHECK(params.reps[0].isGPR());
-                jit.move(CCallHelpers::TrustedImm32(1), params.reps[0].gpr());
+                CHECK(params.size() == 1);
+                CHECK(params[0].isGPR());
+                jit.move(CCallHelpers::TrustedImm32(1), params[0].gpr());
             });
         thenCase->appendNew<ControlValue>(proc, Return, Origin(), patchpoint);
 
index 11db48c..f31e2d6 100644 (file)
@@ -26,7 +26,7 @@
 #include "config.h"
 #include "FTLExceptionHandlerManager.h"
 
-#if ENABLE(FTL_JIT)
+#if ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #include "FTLState.h"
 
@@ -164,4 +164,4 @@ CallSiteIndex ExceptionHandlerManager::procureCallSiteIndex(uint32_t stackmapRec
 
 } } // namespace JSC::FTL
 
-#endif // ENABLE(FTL_JIT)
+#endif // ENABLE(FTL_JIT) && !FTL_USES_B3
index 5406d54..048c236 100644 (file)
@@ -26,7 +26,9 @@
 #ifndef FTLExceptionHandlerManager_h
 #define FTLExceptionHandlerManager_h
 
-#if ENABLE(FTL_JIT)
+#include "DFGCommon.h"
+
+#if ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #include "CallFrame.h"
 #include "FTLJSCall.h"
@@ -83,6 +85,6 @@ private:
 
 } } // namespace JSC::FTL
 
-#endif // ENABLE(FTL_JIT)
+#endif // ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #endif // FTLExceptionHandlerManager_h
index 476b5e7..d4c1f0b 100644 (file)
@@ -26,7 +26,7 @@
 #include "config.h"
 #include "FTLJSCall.h"
 
-#if ENABLE(FTL_JIT)
+#if ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #include "DFGNode.h"
 #include "FTLState.h"
@@ -63,5 +63,5 @@ void JSCall::emit(CCallHelpers& jit, State& state, int32_t osrExitFromGenericUnw
 
 } } // namespace JSC::FTL
 
-#endif // ENABLE(FTL_JIT)
+#endif // ENABLE(FTL_JIT) && !FTL_USES_B3
 
index 18beb25..8df1ab5 100644 (file)
@@ -26,7 +26,9 @@
 #ifndef FTLJSCall_h
 #define FTLJSCall_h
 
-#if ENABLE(FTL_JIT)
+#include "DFGCommon.h"
+
+#if ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #include "FTLJSCallBase.h"
 
@@ -61,7 +63,7 @@ public:
 
 } } // namespace JSC::FTL
 
-#endif // ENABLE(FTL_JIT)
+#endif // ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #endif // FTLJSCall_h
 
index 11d6de2..cc6b45d 100644 (file)
@@ -26,7 +26,7 @@
 #include "config.h"
 #include "FTLJSCallBase.h"
 
-#if ENABLE(FTL_JIT)
+#if ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #include "DFGNode.h"
 #include "FTLState.h"
@@ -55,12 +55,8 @@ void JSCallBase::emit(CCallHelpers& jit, State& /*state*/, int32_t osrExitFromGe
 {
     RELEASE_ASSERT(!!m_callSiteIndex);
 
-#if FTL_USES_B3
-    UNUSED_PARAM(osrExitFromGenericUnwindStackSpillSlot);
-#else // FTL_USES_B3
     if (m_correspondingGenericUnwindOSRExit)
         m_correspondingGenericUnwindOSRExit->spillRegistersToSpillSlot(jit, osrExitFromGenericUnwindStackSpillSlot);
-#endif // FTL_USES_B3
 
     jit.store32(CCallHelpers::TrustedImm32(m_callSiteIndex.bits()), CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
 
@@ -107,5 +103,5 @@ void JSCallBase::link(VM& vm, LinkBuffer& linkBuffer)
 
 } } // namespace JSC::FTL
 
-#endif // ENABLE(FTL_JIT)
+#endif // ENABLE(FTL_JIT) && !FTL_USES_B3
 
index 68f7fb2..0751c83 100644 (file)
@@ -26,7 +26,9 @@
 #ifndef FTLJSCallBase_h
 #define FTLJSCallBase_h
 
-#if ENABLE(FTL_JIT)
+#include "DFGCommon.h"
+
+#if ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #include "CCallHelpers.h"
 #include "CallLinkInfo.h"
@@ -73,7 +75,7 @@ public:
 
 } } // namespace JSC::FTL
 
-#endif // ENABLE(FTL_JIT)
+#endif // ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #endif // FTLJSCallBase_h
 
index 626a15f..41c0676 100644 (file)
@@ -26,7 +26,7 @@
 #include "config.h"
 #include "FTLJSCallVarargs.h"
 
-#if ENABLE(FTL_JIT)
+#if ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #include "DFGNode.h"
 #include "DFGOperations.h"
@@ -225,5 +225,5 @@ void JSCallVarargs::link(VM& vm, LinkBuffer& linkBuffer, CodeLocationLabel excep
 
 } } // namespace JSC::FTL
 
-#endif // ENABLE(FTL_JIT)
+#endif // ENABLE(FTL_JIT) && !FTL_USES_B3
 
index 54f0536..9461202 100644 (file)
@@ -26,7 +26,9 @@
 #ifndef FTLJSCallVarargs_h
 #define FTLJSCallVarargs_h
 
-#if ENABLE(FTL_JIT)
+#include "DFGCommon.h"
+
+#if ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #include "FTLJSCallBase.h"
 
@@ -75,7 +77,7 @@ public:
 
 } } // namespace JSC::FTL
 
-#endif // ENABLE(FTL_JIT)
+#endif // ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #endif // FTLJSCallVarargs_h
 
index ba11225..d29ea63 100644 (file)
@@ -26,7 +26,7 @@
 #include "config.h"
 #include "FTLJSTailCall.h"
 
-#if ENABLE(FTL_JIT)
+#if ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #include "CallFrameShuffler.h"
 #include "DFGNode.h"
 
 namespace JSC { namespace FTL {
 
-using namespace B3;
 using namespace DFG;
 
-#if FTL_USES_B3
-
-JSTailCall::JSTailCall(PatchpointValue* patchpoint, Node* node, const Vector<ExitValue>& arguments)
-    : JSCallBase(CallLinkInfo::TailCall, node->origin.semantic, node->origin.semantic)
-    , m_patchpoint(patchpoint)
-    , m_arguments(arguments)
-    , m_instructionOffset(0)
-{
-    UNREACHABLE_FOR_PLATFORM();
-}
-
-void JSTailCall::emit(JITCode&, CCallHelpers&)
-{
-    UNREACHABLE_FOR_PLATFORM();
-}
-
-#else // FTL_USES_B3
-
 namespace {
 
 FTL::Location getRegisterWithAddend(const ExitValue& value, StackMaps::Record& record, StackMaps& stackmaps)
@@ -342,8 +323,6 @@ void JSTailCall::emit(JITCode& jitCode, CCallHelpers& jit)
     m_callLinkInfo->setUpCall(m_type, m_semanticeOrigin, calleeGPR);
 }
 
-#endif // FTL_USES_B3
-
 } } // namespace JSC::FTL
 
-#endif // ENABLE(FTL_JIT)
+#endif // ENABLE(FTL_JIT) && !FTL_USES_B3
index a2d5451..cc824f1 100644 (file)
@@ -26,7 +26,9 @@
 #ifndef FTLJSTailCall_h
 #define FTLJSTailCall_h
 
-#if ENABLE(FTL_JIT)
+#include "DFGCommon.h"
+
+#if ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #include "B3PatchpointValue.h"
 #include "DFGCommon.h"
@@ -45,20 +47,12 @@ namespace FTL {
 class JSTailCall : public JSCallBase {
 public:
     JSTailCall(
-#if FTL_USES_B3
-        B3::PatchpointValue*,
-#else // FTL_USES_B3
         unsigned stackmapID,
-#endif // FTL_USES_B3
         DFG::Node*, const Vector<ExitValue>& arguments);
 
     void emit(JITCode&, CCallHelpers&);
 
-#if FTL_USES_B3
-    B3::PatchpointValue* patchpoint() const { return m_patchpoint; }
-#else // FTL_USES_B3
     unsigned stackmapID() const { return m_stackmapID; }
-#endif // FTL_USES_B3
 
     unsigned estimatedSize() const { return m_estimatedSize; }
 
@@ -70,11 +64,7 @@ public:
     }
     
 private:
-#if FTL_USES_B3
-    B3::PatchpointValue* m_patchpoint;
-#else // FTL_USES_B3
     unsigned m_stackmapID;
-#endif // FTL_USES_B3
     Vector<ExitValue> m_arguments;
     unsigned m_estimatedSize;
 
@@ -84,7 +74,7 @@ public:
 
 } } // namespace JSC::FTL
 
-#endif // ENABLE(FTL_JIT)
+#endif // ENABLE(FTL_JIT) && !FTL_USES_B3
 
 #endif // FTLJSTailCall_h
 
index 1ee0218..ba78ed6 100644 (file)
@@ -30,6 +30,7 @@
 
 #include "AirGenerationContext.h"
 #include "AllowMacroScratchRegisterUsage.h"
+#include "B3StackmapGenerationParams.h"
 #include "CodeBlockWithJITType.h"
 #include "DFGAbstractInterpreterInlines.h"
 #include "DFGDominators.h"
@@ -4828,14 +4829,105 @@ private:
 
     void compileCallOrConstruct()
     {
+        Node* node = m_node;
+        unsigned numArgs = node->numChildren() - 1;
+
+        LValue jsCallee = lowJSValue(m_graph.varArgChild(node, 0));
+
 #if FTL_USES_B3
-        if (verboseCompilationEnabled() || !verboseCompilationEnabled())
-            CRASH();
-#else
-        int numArgs = m_node->numChildren() - 1;
+        unsigned frameSize = JSStack::CallFrameHeaderSize + numArgs;
+        unsigned alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
+
+        // JS->JS calling convention requires that the caller allows this much space on top of stack to
+        // get trashed by the callee, even if not all of that space is used to pass arguments. We tell
+        // B3 this explicitly for two reasons:
+        //
+        // - We will only pass frameSize worth of stuff.
+        // - The trashed stack guarantee is logically separate from the act of passing arguments, so we
+        //   shouldn't rely on Air to infer the trashed stack property based on the arguments it ends
+        //   up seeing.
+        m_proc.requestCallArgAreaSize(alignedFrameSize);
+
+        // Collect the arguments, since this can generate code and we want to generate it before we emit
+        // the call.
+        Vector<ConstrainedValue> arguments;
+
+        // Make sure that the callee goes into GPR0 because that's where the slow path thunks expect the
+        // callee to be.
+        arguments.append(ConstrainedValue(jsCallee, ValueRep::reg(GPRInfo::regT0)));
+
+        auto addArgument = [&] (LValue value, VirtualRegister reg, int offset) {
+            intptr_t offsetFromSP =
+                (reg.offset() - JSStack::CallerFrameAndPCSize) * sizeof(EncodedJSValue) + offset;
+            arguments.append(ConstrainedValue(value, ValueRep::stackArgument(offsetFromSP)));
+        };
+
+        addArgument(jsCallee, VirtualRegister(JSStack::Callee), 0);
+        addArgument(m_out.constInt32(numArgs), VirtualRegister(JSStack::ArgumentCount), PayloadOffset);
+        for (unsigned i = 0; i < numArgs; ++i)
+            addArgument(lowJSValue(m_graph.varArgChild(node, 1 + i)), virtualRegisterForArgument(i), 0);
+
+        PatchpointValue* patchpoint = m_out.patchpoint(Int64);
+        patchpoint->appendVector(arguments);
+        patchpoint->clobber(RegisterSet::macroScratchRegisters());
+        patchpoint->clobberLate(RegisterSet::volatileRegistersForJSCall());
+        patchpoint->resultConstraint = ValueRep::reg(GPRInfo::returnValueGPR);
+
+        CodeOrigin codeOrigin = codeOriginDescriptionOfCallSite();
+        State* state = &m_ftlState;
+        patchpoint->setGenerator(
+            [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
+                AllowMacroScratchRegisterUsage allowScratch(jit);
+                CallSiteIndex callSiteIndex = state->jitCode->common.addUniqueCallSiteIndex(codeOrigin);
+
+                // FIXME: If we were handling exceptions, then at this point we would ask our descriptor
+                // to prepare and then we would modify the OSRExit data structure inside the
+                // OSRExitHandle to link it up to this call.
+                // https://bugs.webkit.org/show_bug.cgi?id=151686
+
+                jit.store32(
+                    CCallHelpers::TrustedImm32(callSiteIndex.bits()),
+                    CCallHelpers::tagFor(VirtualRegister(JSStack::ArgumentCount)));
+
+                CallLinkInfo* callLinkInfo = jit.codeBlock()->addCallLinkInfo();
 
-        LValue jsCallee = lowJSValue(m_graph.varArgChild(m_node, 0));
+                CCallHelpers::DataLabelPtr targetToCheck;
+                CCallHelpers::Jump slowPath = jit.branchPtrWithPatch(
+                    CCallHelpers::NotEqual, GPRInfo::regT0, targetToCheck,
+                    CCallHelpers::TrustedImmPtr(0));
 
+                CCallHelpers::Call fastCall = jit.nearCall();
+                CCallHelpers::Jump done = jit.jump();
+
+                slowPath.link(&jit);
+
+                jit.move(CCallHelpers::TrustedImmPtr(callLinkInfo), GPRInfo::regT2);
+                CCallHelpers::Call slowCall = jit.nearCall();
+                done.link(&jit);
+
+                callLinkInfo->setUpCall(
+                    node->op() == Construct ? CallLinkInfo::Construct : CallLinkInfo::Call,
+                    node->origin.semantic, GPRInfo::regT0);
+
+                jit.addPtr(
+                    CCallHelpers::TrustedImm32(-params.proc().frameSize()),
+                    GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
+
+                jit.addLinkTask(
+                    [=] (LinkBuffer& linkBuffer) {
+                        MacroAssemblerCodePtr linkCall =
+                            linkBuffer.vm().getCTIStub(linkCallThunkGenerator).code();
+                        linkBuffer.link(slowCall, FunctionPtr(linkCall.executableAddress()));
+
+                        callLinkInfo->setCallLocations(
+                            linkBuffer.locationOfNearCall(slowCall),
+                            linkBuffer.locationOf(targetToCheck),
+                            linkBuffer.locationOfNearCall(fastCall));
+                    });
+            });
+
+        setJSValue(patchpoint);
+#else
         unsigned stackmapID = m_stackmapIDs++;
 
         unsigned frameSize = JSStack::CallFrameHeaderSize + numArgs;
@@ -4848,8 +4940,8 @@ private:
         arguments.append(getUndef(m_out.int64)); // code block
         arguments.append(jsCallee); // callee -> stack
         arguments.append(m_out.constInt64(numArgs)); // argument count and zeros for the tag
-        for (int i = 0; i < numArgs; ++i)
-            arguments.append(lowJSValue(m_graph.varArgChild(m_node, 1 + i)));
+        for (unsigned i = 0; i < numArgs; ++i)
+            arguments.append(lowJSValue(m_graph.varArgChild(node, 1 + i)));
         for (unsigned i = 0; i < padding; ++i)
             arguments.append(getUndef(m_out.int64));
 
@@ -4863,7 +4955,7 @@ private:
         LValue call = m_out.call(m_out.int64, m_out.patchpointInt64Intrinsic(), arguments);
         setInstructionCallingConvention(call, LLVMWebKitJSCallConv);
         
-        m_ftlState.jsCalls.append(JSCall(stackmapID, m_node, codeOriginDescriptionOfCallSite()));
+        m_ftlState.jsCalls.append(JSCall(stackmapID, node, codeOriginDescriptionOfCallSite()));
         
         setJSValue(call);
 #endif
@@ -5322,18 +5414,63 @@ private:
     
     void compileInvalidationPoint()
     {
-#if FTL_USES_B3
-        UNREACHABLE_FOR_PLATFORM();
-#else // FTL_USES_B3
         if (verboseCompilationEnabled())
             dataLog("    Invalidation point with availability: ", availabilityMap(), "\n");
 
         DFG_ASSERT(m_graph, m_node, m_origin.exitOK);
         
+#if FTL_USES_B3
+        B3::PatchpointValue* patchpoint = m_out.patchpoint(Void);
+        OSRExitDescriptor* descriptor = appendOSRExitDescriptor(noValue(), nullptr);
+        NodeOrigin origin = m_origin;
+        patchpoint->appendColdAnys(buildExitArguments(descriptor, origin.forExit, noValue()));
+        
+        State* state = &m_ftlState;
+
+        patchpoint->setGenerator(
+            [=] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
+                // The MacroAssembler knows more about this than B3 does. The watchpointLabel() method
+                // will ensure that this is followed by a nop shadow but only when this is actually
+                // necessary.
+                CCallHelpers::Label label = jit.watchpointLabel();
+
+                RefPtr<OSRExitHandle> handle = descriptor->emitOSRExitLater(
+                    *state, UncountableInvalidation, origin, params);
+
+                RefPtr<JITCode> jitCode = state->jitCode.get();
+
+                jit.addLinkTask(
+                    [=] (LinkBuffer& linkBuffer) {
+                        JumpReplacement jumpReplacement(
+                            linkBuffer.locationOf(label),
+                            linkBuffer.locationOf(handle->label));
+                        jitCode->common.jumpReplacements.append(jumpReplacement);
+                    });
+            });
+
+        // Set some obvious things.
+        patchpoint->effects.terminal = false;
+        patchpoint->effects.writesSSAState = false;
+        patchpoint->effects.readsSSAState = false;
+        
+        // This is how we tell B3 about the possibility of jump replacement.
+        patchpoint->effects.exitsSideways = true;
+        
+        // It's not possible for some prior branch to determine the safety of this operation. It's always
+        // fine to execute this on some path that wouldn't have originally executed it before
+        // optimization.
+        patchpoint->effects.controlDependent = false;
+
+        // If this falls through then it won't write anything.
+        patchpoint->effects.writes = HeapRange();
+
+        // When this abruptly terminates, it could read any heap location.
+        patchpoint->effects.reads = HeapRange::top();
+#else // FTL_USES_B3
 
         OSRExitDescriptor* exitDescriptor = appendOSRExitDescriptor(UncountableInvalidation, ExceptionType::None, noValue(), nullptr, m_origin);
         
-        StackmapArgumentList arguments = buildExitArguments(exitDescriptor, m_ftlState.osrExitDescriptorImpls.last(), FormattedValue());
+        StackmapArgumentList arguments = buildExitArguments(exitDescriptor, m_ftlState.osrExitDescriptorImpls.last().m_codeOrigin, FormattedValue());
         callStackmap(exitDescriptor, arguments);
         
         exitDescriptor->m_isInvalidationPoint = true;
@@ -7901,7 +8038,7 @@ private:
         result->setGenerator(
             [=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
                 Vector<Location> locations;
-                for (const B3::ValueRep& rep : params.reps)
+                for (const B3::ValueRep& rep : params)
                     locations.append(Location::forValueRep(rep));
 
                 RefPtr<LazySlowPath::Generator> generator = functor(locations);
@@ -7909,56 +8046,55 @@ private:
                 CCallHelpers::PatchableJump patchableJump = jit.patchableJump();
                 CCallHelpers::Label done = jit.label();
 
-                RegisterSet usedRegisters = params.usedRegisters;
+                RegisterSet usedRegisters = params.usedRegisters();
 
                 // FIXME: As part of handling exceptions, we need to create a concrete OSRExit here.
                 // Doing so should automagically register late paths that emit exit thunks.
-                
-                params.context->latePaths.append(
-                    createSharedTask<Air::GenerationContext::LatePathFunction>(
-                        [=] (CCallHelpers& jit, Air::GenerationContext&) {
-                            AllowMacroScratchRegisterUsage allowScratch(jit);
-                            patchableJump.m_jump.link(&jit);
-                            unsigned index = state->jitCode->lazySlowPaths.size();
-                            state->jitCode->lazySlowPaths.append(nullptr);
-                            jit.pushToSaveImmediateWithoutTouchingRegisters(
-                                CCallHelpers::TrustedImm32(index));
-                            CCallHelpers::Jump generatorJump = jit.jump();
-
-                            // Note that so long as we're here, we don't really know if our late path
-                            // runs before or after any other late paths that we might depend on, like
-                            // the exception thunk.
-
-                            RefPtr<JITCode> jitCode = state->jitCode;
-                            VM* vm = &state->graph.m_vm;
-
-                            jit.addLinkTask(
-                                [=] (LinkBuffer& linkBuffer) {
-                                    linkBuffer.link(
-                                        generatorJump, CodeLocationLabel(
-                                            vm->getCTIStub(
-                                                lazySlowPathGenerationThunkGenerator).code()));
+
+                params.addLatePath(
+                    [=] (CCallHelpers& jit) {
+                        AllowMacroScratchRegisterUsage allowScratch(jit);
+                        patchableJump.m_jump.link(&jit);
+                        unsigned index = state->jitCode->lazySlowPaths.size();
+                        state->jitCode->lazySlowPaths.append(nullptr);
+                        jit.pushToSaveImmediateWithoutTouchingRegisters(
+                            CCallHelpers::TrustedImm32(index));
+                        CCallHelpers::Jump generatorJump = jit.jump();
+
+                        // Note that so long as we're here, we don't really know if our late path
+                        // runs before or after any other late paths that we might depend on, like
+                        // the exception thunk.
+
+                        RefPtr<JITCode> jitCode = state->jitCode;
+                        VM* vm = &state->graph.m_vm;
+
+                        jit.addLinkTask(
+                            [=] (LinkBuffer& linkBuffer) {
+                                linkBuffer.link(
+                                    generatorJump, CodeLocationLabel(
+                                        vm->getCTIStub(
+                                            lazySlowPathGenerationThunkGenerator).code()));
                                     
-                                    CodeLocationJump linkedPatchableJump = CodeLocationJump(
-                                        linkBuffer.locationOf(patchableJump));
-                                    CodeLocationLabel linkedDone = linkBuffer.locationOf(done);
-
-                                    // FIXME: Need a story for exceptions in FTL-B3. That basically means
-                                    // doing a lookup of the exception entrypoint here. We will have an
-                                    // OSR exit data structure of some sort.
-                                    // https://bugs.webkit.org/show_bug.cgi?id=151686
-                                    CodeLocationLabel exceptionTarget;
-                                    CallSiteIndex callSiteIndex =
-                                        jitCode->common.addUniqueCallSiteIndex(origin);
+                                CodeLocationJump linkedPatchableJump = CodeLocationJump(
+                                    linkBuffer.locationOf(patchableJump));
+                                CodeLocationLabel linkedDone = linkBuffer.locationOf(done);
+
+                                // FIXME: Need a story for exceptions in FTL-B3. That basically means
+                                // doing a lookup of the exception entrypoint here. We will have an
+                                // OSR exit data structure of some sort.
+                                // https://bugs.webkit.org/show_bug.cgi?id=151686
+                                CodeLocationLabel exceptionTarget;
+                                CallSiteIndex callSiteIndex =
+                                    jitCode->common.addUniqueCallSiteIndex(origin);
                                     
-                                    std::unique_ptr<LazySlowPath> lazySlowPath =
-                                        std::make_unique<LazySlowPath>(
-                                            linkedPatchableJump, linkedDone, exceptionTarget,
-                                            usedRegisters, callSiteIndex, generator);
+                                std::unique_ptr<LazySlowPath> lazySlowPath =
+                                    std::make_unique<LazySlowPath>(
+                                        linkedPatchableJump, linkedDone, exceptionTarget,
+                                        usedRegisters, callSiteIndex, generator);
                                     
-                                    jitCode->lazySlowPaths[index] = WTF::move(lazySlowPath);
-                                });
-                        }));
+                                jitCode->lazySlowPaths[index] = WTF::move(lazySlowPath);
+                            });
+                    });
             });
         return result;
 #else
@@ -9213,6 +9349,7 @@ private:
         m_out.appendTo(continuation);
     }
 
+#if !FTL_USES_B3
     void appendOSRExitArgumentsForPatchpointIfWillCatchException(StackmapArgumentList& arguments, ExceptionType exceptionType, unsigned offsetOfExitArguments)
     {
         CodeOrigin opCatchOrigin;
@@ -9230,9 +9367,10 @@ private:
         exitDescriptorImpl.m_baselineExceptionHandler = *exceptionHandler;
 
         StackmapArgumentList freshList =
-            buildExitArguments(exitDescriptor, exitDescriptorImpl, noValue(), offsetOfExitArguments);
+            buildExitArguments(exitDescriptor, exitDescriptorImpl.m_codeOrigin, noValue(), offsetOfExitArguments);
         arguments.appendVector(freshList);
     }
+#endif // !FTL_USES_B3
 
     bool emitBranchToOSRExitIfWillCatchException(LValue hadException)
     {
@@ -9251,6 +9389,16 @@ private:
         return m_blocks.get(block);
     }
 
+
+#if FTL_USES_B3
+    OSRExitDescriptor* appendOSRExitDescriptor(FormattedValue lowValue, Node* highValue)
+    {
+        return &m_ftlState.jitCode->osrExitDescriptors.alloc(
+            lowValue.format(), m_graph.methodOfGettingAValueProfileFor(highValue),
+            availabilityMap().m_locals.numberOfArguments(),
+            availabilityMap().m_locals.numberOfLocals());
+    }
+#else // FTL_USES_B3
     OSRExitDescriptor* appendOSRExitDescriptor(ExitKind kind, ExceptionType exceptionType, FormattedValue lowValue, Node* highValue, NodeOrigin origin)
     {
         OSRExitDescriptor& result = m_ftlState.jitCode->osrExitDescriptors.alloc(
@@ -9261,6 +9409,7 @@ private:
             kind, origin.forExit, origin.semantic, exceptionType);
         return &result;
     }
+#endif // FTL_USES_B3
     
     void appendOSRExit(
         ExitKind kind, FormattedValue lowValue, Node* highValue, LValue failCondition, 
@@ -9328,18 +9477,16 @@ private:
 #if FTL_USES_B3
     void blessSpeculation(B3::CheckValue* value, ExitKind kind, FormattedValue lowValue, Node* highValue, NodeOrigin origin, bool isExceptionHandler = false)
     {
-        OSRExitDescriptor* exitDescriptor = appendOSRExitDescriptor(
-            kind, isExceptionHandler ? ExceptionType::CCallException : ExceptionType::None, lowValue,
-            highValue, origin);
-        OSRExitDescriptorImpl* exitDescriptorImpl = &m_ftlState.osrExitDescriptorImpls.last();
+        OSRExitDescriptor* exitDescriptor = appendOSRExitDescriptor(lowValue, highValue);
         
         unsigned offset = value->numChildren();
-        value->appendColdAnys(buildExitArguments(exitDescriptor, m_ftlState.osrExitDescriptorImpls.last(), lowValue));
+        value->appendColdAnys(buildExitArguments(exitDescriptor, origin.forExit, lowValue));
 
         State* state = &m_ftlState;
         value->setGenerator(
             [=] (CCallHelpers& jit, const B3::StackmapGenerationParams& params) {
-                exitDescriptor->emitOSRExit(*state, exitDescriptorImpl, jit, params, offset);
+                exitDescriptor->emitOSRExit(
+                    *state, kind, origin, jit, params, offset, isExceptionHandler);
             });
     }
 #endif
@@ -9347,29 +9494,29 @@ private:
 #if !FTL_USES_B3
     void emitOSRExitCall(OSRExitDescriptor* exitDescriptor, FormattedValue lowValue)
     {
-        callStackmap(exitDescriptor, buildExitArguments(exitDescriptor, m_ftlState.osrExitDescriptorImpls.last(), lowValue));
+        callStackmap(exitDescriptor, buildExitArguments(exitDescriptor, m_ftlState.osrExitDescriptorImpls.last().m_codeOrigin, lowValue));
     }
 #endif
 
     StackmapArgumentList buildExitArguments(
-        OSRExitDescriptor* exitDescriptor, OSRExitDescriptorImpl& exitDescriptorImpl, FormattedValue lowValue,
+        OSRExitDescriptor* exitDescriptor, CodeOrigin exitOrigin, FormattedValue lowValue,
         unsigned offsetOfExitArgumentsInStackmapLocations = 0)
     {
         StackmapArgumentList result;
         buildExitArguments(
-            exitDescriptor, exitDescriptorImpl, result, lowValue, offsetOfExitArgumentsInStackmapLocations);
+            exitDescriptor, exitOrigin, result, lowValue, offsetOfExitArgumentsInStackmapLocations);
         return result;
     }
     
     void buildExitArguments(
-        OSRExitDescriptor* exitDescriptor, OSRExitDescriptorImpl& exitDescriptorImpl, StackmapArgumentList& arguments, FormattedValue lowValue,
+        OSRExitDescriptor* exitDescriptor, CodeOrigin exitOrigin, StackmapArgumentList& arguments, FormattedValue lowValue,
         unsigned offsetOfExitArgumentsInStackmapLocations = 0)
     {
         if (!!lowValue)
             arguments.append(lowValue.value());
         
         AvailabilityMap availabilityMap = this->availabilityMap();
-        availabilityMap.pruneByLiveness(m_graph, exitDescriptorImpl.m_codeOrigin);
+        availabilityMap.pruneByLiveness(m_graph, exitOrigin);
         
         HashMap<Node*, ExitTimeObjectMaterialization*> map;
         availabilityMap.forEachAvailability(
@@ -9396,7 +9543,7 @@ private:
             if (Options::validateFTLOSRExitLiveness()) {
                 DFG_ASSERT(
                     m_graph, m_node,
-                    (!(availability.isDead() && m_graph.isLiveInBytecode(VirtualRegister(operand), exitDescriptorImpl.m_codeOrigin))) || m_graph.m_plan.mode == FTLForOSREntryMode);
+                    (!(availability.isDead() && m_graph.isLiveInBytecode(VirtualRegister(operand), exitOrigin))) || m_graph.m_plan.mode == FTLForOSREntryMode);
             }
             ExitValue exitValue = exitValueForAvailability(arguments, map, availability);
             if (exitValue.hasIndexInStackmapLocations())
@@ -9475,6 +9622,10 @@ private:
         StackmapArgumentList& arguments, const HashMap<Node*, ExitTimeObjectMaterialization*>& map,
         Node* node)
     {
+        // NOTE: In FTL->B3, we cannot generate code here, because m_output is positioned after the
+        // stackmap value. Like all values, the stackmap value cannot use a child that is defined after
+        // it.
+        
         ASSERT(node->shouldGenerate());
         ASSERT(node->hasResult());
 
@@ -9522,10 +9673,15 @@ private:
             return exitArgument(arguments, DataFormatStrictInt52, value.value());
         
         value = m_booleanValues.get(node);
+#if FTL_USES_B3
+        if (isValid(value))
+            return exitArgument(arguments, DataFormatBoolean, value.value());
+#else // FTL_USES_B3
         if (isValid(value)) {
             LValue valueToPass = m_out.zeroExt(value.value(), m_out.int32);
             return exitArgument(arguments, DataFormatBoolean, valueToPass);
         }
+#endif // FTL_USES_B3
         
         value = m_jsValueValues.get(node);
         if (isValid(value))
index 0284d8c..9327f86 100644 (file)
@@ -29,6 +29,7 @@
 #if ENABLE(FTL_JIT)
 
 #include "AirGenerationContext.h"
+#include "B3StackmapGenerationParams.h"
 #include "B3StackmapValue.h"
 #include "CodeBlock.h"
 #include "DFGBasicBlock.h"
@@ -50,7 +51,9 @@ OSRExitDescriptor::OSRExitDescriptor(
     : m_profileDataFormat(profileDataFormat)
     , m_valueProfile(valueProfile)
     , m_values(numberOfArguments, numberOfLocals)
+#if !FTL_USES_B3
     , m_isInvalidationPoint(false)
+#endif // !FTL_USES_B3
 {
 }
 
@@ -65,53 +68,65 @@ void OSRExitDescriptor::validateReferences(const TrackedReferences& trackedRefer
 
 #if FTL_USES_B3
 RefPtr<OSRExitHandle> OSRExitDescriptor::emitOSRExit(
-    State& state, OSRExitDescriptorImpl* exitDescriptorImpl, CCallHelpers& jit, const StackmapGenerationParams& params, unsigned offset)
+    State& state, ExitKind exitKind, const NodeOrigin& nodeOrigin, CCallHelpers& jit,
+    const StackmapGenerationParams& params, unsigned offset, bool isExceptionHandler)
 {
-    RefPtr<OSRExitHandle> handle = prepareOSRExitHandle(state, exitDescriptorImpl, params, offset);
+    RefPtr<OSRExitHandle> handle =
+        prepareOSRExitHandle(state, exitKind, nodeOrigin, params, offset, isExceptionHandler);
     handle->emitExitThunk(jit);
     return handle;
 }
 
 RefPtr<OSRExitHandle> OSRExitDescriptor::emitOSRExitLater(
-    State& state, OSRExitDescriptorImpl* exitDescriptorImpl, const StackmapGenerationParams& params, unsigned offset)
+    State& state, ExitKind exitKind, const NodeOrigin& nodeOrigin,
+    const StackmapGenerationParams& params, unsigned offset, bool isExceptionHandler)
 {
-    RefPtr<OSRExitHandle> handle = prepareOSRExitHandle(state, exitDescriptorImpl, params, offset);
-    params.context->latePaths.append(
-        createSharedTask<Air::GenerationContext::LatePathFunction>(
-            [handle] (CCallHelpers& jit, Air::GenerationContext&) {
-                handle->emitExitThunk(jit);
-            }));
+    RefPtr<OSRExitHandle> handle =
+        prepareOSRExitHandle(state, exitKind, nodeOrigin, params, offset, isExceptionHandler);
+    params.addLatePath(
+        [handle] (CCallHelpers& jit) {
+            handle->emitExitThunk(jit);
+        });
     return handle;
 }
 
 RefPtr<OSRExitHandle> OSRExitDescriptor::prepareOSRExitHandle(
-    State& state, OSRExitDescriptorImpl* exitDescriptorImpl, const StackmapGenerationParams& params, unsigned offset)
+    State& state, ExitKind exitKind, const NodeOrigin& nodeOrigin,
+    const StackmapGenerationParams& params, unsigned offset, bool isExceptionHandler)
 {
     unsigned index = state.jitCode->osrExit.size();
-    RefPtr<OSRExitHandle> handle = adoptRef(
-        new OSRExitHandle(index, state.jitCode->osrExit.alloc(this, *exitDescriptorImpl)));
-    for (unsigned i = offset; i < params.reps.size(); ++i)
-        handle->exit.m_valueReps.append(params.reps[i]);
-    handle->exit.m_valueReps.shrinkToFit();
+    OSRExit& exit = state.jitCode->osrExit.alloc(
+        this, exitKind, nodeOrigin.forExit, nodeOrigin.semantic, isExceptionHandler);
+    RefPtr<OSRExitHandle> handle = adoptRef(new OSRExitHandle(index, exit));
+    for (unsigned i = offset; i < params.size(); ++i)
+        exit.m_valueReps.append(params[i]);
+    exit.m_valueReps.shrinkToFit();
     return handle;
 }
 #endif // FTL_USES_B3
 
+#if FTL_USES_B3
 OSRExit::OSRExit(
-    OSRExitDescriptor* descriptor, OSRExitDescriptorImpl& exitDescriptorImpl
-#if !FTL_USES_B3
-    , uint32_t stackmapRecordIndex
-#endif // !FTL_USES_B3
-    )
+    OSRExitDescriptor* descriptor,
+    ExitKind exitKind, CodeOrigin codeOrigin, CodeOrigin codeOriginForExitProfile,
+    bool isExceptionHandler)
+    : OSRExitBase(exitKind, codeOrigin, codeOriginForExitProfile)
+    , m_descriptor(descriptor)
+{
+    m_isExceptionHandler = isExceptionHandler;
+}
+#else // FTL_USES_B3
+OSRExit::OSRExit(
+    OSRExitDescriptor* descriptor, OSRExitDescriptorImpl& exitDescriptorImpl,
+    uint32_t stackmapRecordIndex)
     : OSRExitBase(exitDescriptorImpl.m_kind, exitDescriptorImpl.m_codeOrigin, exitDescriptorImpl.m_codeOriginForExitProfile)
     , m_descriptor(descriptor)
-#if !FTL_USES_B3
     , m_stackmapRecordIndex(stackmapRecordIndex)
-#endif // !FTL_USES_B3
     , m_exceptionType(exitDescriptorImpl.m_exceptionType)
 {
     m_isExceptionHandler = exitDescriptorImpl.m_exceptionType != ExceptionType::None;
 }
+#endif // FTL_USES_B3
 
 CodeLocationJump OSRExit::codeLocationForRepatch(CodeBlock* ftlCodeBlock) const
 {
@@ -196,7 +211,6 @@ void OSRExit::recoverRegistersFromSpillSlot(CCallHelpers& jit, int32_t stackSpil
         }
     }
 }
-#endif // !FTL_USES_B3
 
 bool OSRExit::willArriveAtExitFromIndirectExceptionCheck() const
 {
@@ -260,6 +274,7 @@ bool OSRExit::needsRegisterRecoveryOnGenericUnwindOSRExitPath() const
     // recover the spilled registers.
     return m_exceptionType == ExceptionType::JSCall;
 }
+#endif // !FTL_USES_B3
 
 } } // namespace JSC::FTL
 
index 800a78e..b45aa0c 100644 (file)
@@ -31,6 +31,7 @@
 #include "B3ValueRep.h"
 #include "CodeOrigin.h"
 #include "DFGExitProfile.h"
+#include "DFGNodeOrigin.h"
 #include "DFGOSRExitBase.h"
 #include "FTLAbbreviatedTypes.h"
 #include "FTLExitTimeObjectMaterialization.h"
@@ -51,17 +52,23 @@ namespace JSC {
 class TrackedReferences;
 
 namespace B3 {
-struct StackmapGenerationParams;
+class StackmapGenerationParams;
 namespace Air {
 struct GenerationContext;
 } // namespace Air
 } // namespace B3
 
+namespace DFG {
+struct NodeOrigin;
+} // namespace DFG
+
 namespace FTL {
 
 class State;
 struct OSRExitDescriptorImpl;
+struct OSRExitHandle;
 
+#if !FTL_USES_B3
 enum class ExceptionType : uint8_t {
     None,
     CCallException,
@@ -75,6 +82,7 @@ enum class ExceptionType : uint8_t {
 };
 
 bool exceptionTypeWillArriveAtOSRExitFromGenericUnwind(ExceptionType);
+#endif // !FTL_USES_B3
 
 struct OSRExitDescriptor {
     OSRExitDescriptor(
@@ -91,9 +99,11 @@ struct OSRExitDescriptor {
     
     Operands<ExitValue> m_values;
     Bag<ExitTimeObjectMaterialization> m_materializations;
-    
+
+#if !FTL_USES_B3
     uint32_t m_stackmapID;
     bool m_isInvalidationPoint;
+#endif // !FTL_USES_B3
     
     void validateReferences(const TrackedReferences&);
 
@@ -105,7 +115,8 @@ struct OSRExitDescriptor {
     // on the ground. It contains information that is mostly not useful if you use this API, since after
     // this call, the OSRExit is simply ready to go.
     RefPtr<OSRExitHandle> emitOSRExit(
-        State&, OSRExitDescriptorImpl*, CCallHelpers&, const B3::StackmapGenerationParams&, unsigned offset);
+        State&, ExitKind, const DFG::NodeOrigin&, CCallHelpers&, const B3::StackmapGenerationParams&,
+        unsigned offset = 0, bool isExceptionHandler = false);
 
     // In some cases you want an OSRExit to come into existence, but you don't want to emit it right now.
     // This will emit the OSR exit in a late path. You can't be sure exactly when that will happen, but
@@ -116,16 +127,19 @@ struct OSRExitDescriptor {
     // have a place to jump to for OSR exit. It doesn't care where that OSR exit is emitted so long as it
     // eventually gets access to its label.
     RefPtr<OSRExitHandle> emitOSRExitLater(
-        State&, OSRExitDescriptorImpl*, const B3::StackmapGenerationParams&, unsigned offset);
+        State&, ExitKind, const DFG::NodeOrigin&, const B3::StackmapGenerationParams&,
+        unsigned offset = 0, bool isExceptionHandler = false);
 
     // This is the low-level interface. It will create a handle representing the desire to emit code for
     // an OSR exit. You can call OSRExitHandle::emitExitThunk() once you have a place to emit it. Note
     // that the above two APIs are written in terms of this and OSRExitHandle::emitExitThunk().
     RefPtr<OSRExitHandle> prepareOSRExitHandle(
-        State&, OSRExitDescriptorImpl*, const B3::StackmapGenerationParams&, unsigned offset);
+        State&, ExitKind, const DFG::NodeOrigin&, const B3::StackmapGenerationParams&,
+        unsigned offset = 0, bool isExceptionHandler = false);
 #endif // FTL_USES_B3
 };
 
+#if !FTL_USES_B3
 struct OSRExitDescriptorImpl {
     OSRExitDescriptorImpl(ExitKind kind, CodeOrigin exitOrigin, CodeOrigin forExitProfile, ExceptionType exceptionType)
         : m_kind(kind)
@@ -142,13 +156,16 @@ struct OSRExitDescriptorImpl {
     CodeOrigin m_semanticCodeOriginForCallFrameHeader;
     HandlerInfo m_baselineExceptionHandler;
 };
+#endif // !FTL_USES_B3
 
 struct OSRExit : public DFG::OSRExitBase {
     OSRExit(
-        OSRExitDescriptor*, OSRExitDescriptorImpl&
-#if !FTL_USES_B3
-        , uint32_t stackmapRecordIndex
-#endif // !FTL_USES_B3
+        OSRExitDescriptor*,
+#if FTL_USES_B3
+        ExitKind, CodeOrigin, CodeOrigin codeOriginForExitProfile, bool isExceptionHandler
+#else // FTL_USES_B3
+        OSRExitDescriptorImpl&, uint32_t stackmapRecordIndex
+#endif // FTL_USES_B3
         );
 
     OSRExitDescriptor* m_descriptor;
@@ -157,15 +174,18 @@ struct OSRExit : public DFG::OSRExitBase {
     // This tells us where to place a jump.
     CodeLocationJump m_patchableJump;
     Vector<B3::ValueRep> m_valueReps;
+    // True if this exit is used as an exception handler for unwinding. This happens to only be set when
+    // isExceptionHandler is true, but all this actually means is that the OSR exit will assume that the
+    // machine state is as it would be coming out of genericUnwind.
+    bool m_isUnwindHandler { false };
 #else // FTL_USES_B3
     // Offset within the exit stubs of the stub for this exit.
     unsigned m_patchableCodeOffset;
     // Offset within Stackmap::records
     uint32_t m_stackmapRecordIndex;
-#endif // FTL_USES_B3
     ExceptionType m_exceptionType;
-
     RegisterSet registersToPreserveForCallThatMightThrow;
+#endif // FTL_USES_B3
 
     CodeLocationJump codeLocationForRepatch(CodeBlock* ftlCodeBlock) const;
     void considerAddingAsFrequentExitSite(CodeBlock* profiledCodeBlock)
@@ -177,12 +197,12 @@ struct OSRExit : public DFG::OSRExitBase {
     void gatherRegistersToSpillForCallIfException(StackMaps&, StackMaps::Record&);
     void spillRegistersToSpillSlot(CCallHelpers&, int32_t stackSpillSlot);
     void recoverRegistersFromSpillSlot(CCallHelpers& jit, int32_t stackSpillSlot);
-#endif // !FTL_USES_B3
 
     bool willArriveAtOSRExitFromGenericUnwind() const;
     bool willArriveAtExitFromIndirectExceptionCheck() const;
     bool willArriveAtOSRExitFromCallOperation() const;
     bool needsRegisterRecoveryOnGenericUnwindOSRExitPath() const;
+#endif // !FTL_USES_B3
 };
 
 
index ac5cf0e..242aad4 100644 (file)
@@ -207,23 +207,34 @@ static void compileStub(
     CCallHelpers jit(vm, codeBlock);
 
     // The first thing we need to do is restablish our frame in the case of an exception.
-    if (exit.willArriveAtOSRExitFromGenericUnwind()) {
+    if (
+#if FTL_USES_B3
+        exit.m_isUnwindHandler
+#else // FTL_USES_B3
+        exit.willArriveAtOSRExitFromGenericUnwind()
+#endif // FTL_USES_B3
+        ) {
         RELEASE_ASSERT(vm->callFrameForCatch); // The first time we hit this exit, like at all other times, this field should be non-null.
         jit.restoreCalleeSavesFromVMCalleeSavesBuffer();
         jit.loadPtr(vm->addressOfCallFrameForCatch(), MacroAssembler::framePointerRegister);
         jit.addPtr(CCallHelpers::TrustedImm32(codeBlock->stackPointerOffset() * sizeof(Register)),
             MacroAssembler::framePointerRegister, CCallHelpers::stackPointerRegister);
 
+#if !FTL_USES_B3
         if (exit.needsRegisterRecoveryOnGenericUnwindOSRExitPath())
             exit.recoverRegistersFromSpillSlot(jit, jitCode->osrExitFromGenericUnwindStackSpillSlot);
+#endif // !FTL_USES_B3
 
         // Do a pushToSave because that's what the exit compiler below expects the stack
         // to look like because that's the last thing the ExitThunkGenerator does. The code
         // below doesn't actually use the value that was pushed, but it does rely on the
         // general shape of the stack being as it is in the non-exception OSR case.
         jit.pushToSaveImmediateWithoutTouchingRegisters(CCallHelpers::TrustedImm32(0xbadbeef));
-    } else if (exit.willArriveAtOSRExitFromCallOperation())
+    }
+#if !FTL_USES_B3
+    if (exit.willArriveAtOSRExitFromCallOperation())
         exit.recoverRegistersFromSpillSlot(jit, jitCode->osrExitFromGenericUnwindStackSpillSlot);
+#endif // !FTL_USES_B3
     
 
     // We need scratch space to save all registers, to build up the JS stack, to deal with unwind
@@ -603,11 +614,17 @@ extern "C" void* compileFTLOSRExit(ExecState* exec, unsigned exitID)
         dataLog("    Origin: ", exit.m_codeOrigin, "\n");
         if (exit.m_codeOriginForExitProfile != exit.m_codeOrigin)
             dataLog("    Origin for exit profile: ", exit.m_codeOriginForExitProfile, "\n");
+#if !FTL_USES_B3
         dataLog("    Exit stackmap ID: ", exit.m_descriptor->m_stackmapID, "\n");
+#endif // !FTL_USES_B3
         dataLog("    Current call site index: ", exec->callSiteIndex().bits(), "\n");
-        dataLog("    Exit is exception handler: ", exit.m_isExceptionHandler,
-            " will arrive at exit from genericUnwind(): ", exit.willArriveAtOSRExitFromGenericUnwind(), 
-            " will arrive at exit from lazy slow path: ", exit.m_exceptionType == ExceptionType::LazySlowPath, "\n");
+        dataLog("    Exit is exception handler: ", exit.m_isExceptionHandler, "\n");
+#if FTL_USES_B3
+        dataLog("    Is unwind handler: ", exit.m_isUnwindHandler, "\n");
+#else // FTL_USES_B3
+        dataLog("    Will arrive at exit from genericUnwind(): ", exit.willArriveAtOSRExitFromGenericUnwind(), "\n");
+        dataLog("    Will arrive at exit from lazy slow path: ", exit.m_exceptionType == ExceptionType::LazySlowPath, "\n");
+#endif // FTL_USES_B3
         dataLog("    Exit values: ", exit.m_descriptor->m_values, "\n");
         if (!exit.m_descriptor->m_materializations.isEmpty()) {
             dataLog("    Materializations:\n");
index 92bfcb6..b23c925 100644 (file)
@@ -96,12 +96,14 @@ public:
 #if ENABLE(MASM_PROBE)
     SegmentedVector<ProbeDescriptor> probes;
 #endif
+#if !FTL_USES_B3
     Vector<JSCall> jsCalls;
     Vector<JSCallVarargs> jsCallVarargses;
     Vector<JSTailCall> jsTailCalls;
     Vector<CString> codeSectionNames;
     Vector<CString> dataSectionNames;
     SegmentedVector<OSRExitDescriptorImpl> osrExitDescriptorImpls;
+#endif // !FTL_USES_B3
     void* unwindDataSection;
     size_t unwindDataSectionSize;
     RefPtr<DataSection> stackmapsSection;