dfg/DFGPlan.cpp
dfg/DFGPredictionInjectionPhase.cpp
dfg/DFGPredictionPropagationPhase.cpp
+ dfg/DFGRepatch.cpp
dfg/DFGSSAConversionPhase.cpp
dfg/DFGSpeculativeJIT.cpp
dfg/DFGSpeculativeJIT32_64.cpp
jit/JITExceptions.cpp
jit/JITOpcodes.cpp
jit/JITOpcodes32_64.cpp
- jit/JITOperations.cpp
jit/JITPropertyAccess.cpp
jit/JITPropertyAccess32_64.cpp
jit/JITStubRoutine.cpp
jit/JITThunks.cpp
jit/JITToDFGDeferredCompilationCallback.cpp
jit/JumpReplacementWatchpoint.cpp
- jit/Repatch.cpp
jit/ThunkGenerators.cpp
parser/Lexer.cpp
+2013-09-21 Filip Pizlo <fpizlo@apple.com>
+
+ Unreviewed, revert http://trac.webkit.org/changeset/156235. It won't work on Windows.
+
+ * CMakeLists.txt:
+ * GNUmakefile.list.am:
+ * JavaScriptCore.vcxproj/JavaScriptCore.vcxproj:
+ * JavaScriptCore.xcodeproj/project.pbxproj:
+ * Target.pri:
+ * bytecode/CallLinkInfo.cpp:
+ (JSC::CallLinkInfo::unlink):
+ * bytecode/CodeBlock.cpp:
+ (JSC::CodeBlock::resetStubInternal):
+ * bytecode/StructureStubInfo.h:
+ * dfg/DFGCallArrayAllocatorSlowPathGenerator.h:
+ (JSC::DFG::CallArrayAllocatorSlowPathGenerator::CallArrayAllocatorSlowPathGenerator):
+ (JSC::DFG::CallArrayAllocatorWithVariableSizeSlowPathGenerator::CallArrayAllocatorWithVariableSizeSlowPathGenerator):
+ * dfg/DFGJITCompiler.h:
+ * dfg/DFGOSRExitCompiler.h:
+ * dfg/DFGOperations.cpp:
+ (JSC::DFG::operationPutByValInternal):
+ * dfg/DFGOperations.h:
+ (JSC::DFG::operationNewTypedArrayWithSizeForType):
+ (JSC::DFG::operationNewTypedArrayWithOneArgumentForType):
+ * dfg/DFGRegisterSet.h: Added.
+ (JSC::DFG::RegisterSet::RegisterSet):
+ (JSC::DFG::RegisterSet::asPOD):
+ (JSC::DFG::RegisterSet::copyInfo):
+ (JSC::DFG::RegisterSet::set):
+ (JSC::DFG::RegisterSet::setGPRByIndex):
+ (JSC::DFG::RegisterSet::clear):
+ (JSC::DFG::RegisterSet::get):
+ (JSC::DFG::RegisterSet::getGPRByIndex):
+ (JSC::DFG::RegisterSet::getFreeGPR):
+ (JSC::DFG::RegisterSet::setFPRByIndex):
+ (JSC::DFG::RegisterSet::getFPRByIndex):
+ (JSC::DFG::RegisterSet::setByIndex):
+ (JSC::DFG::RegisterSet::getByIndex):
+ (JSC::DFG::RegisterSet::numberOfSetGPRs):
+ (JSC::DFG::RegisterSet::numberOfSetFPRs):
+ (JSC::DFG::RegisterSet::numberOfSetRegisters):
+ (JSC::DFG::RegisterSet::setBit):
+ (JSC::DFG::RegisterSet::clearBit):
+ (JSC::DFG::RegisterSet::getBit):
+ * dfg/DFGRepatch.cpp: Added.
+ (JSC::DFG::repatchCall):
+ (JSC::DFG::repatchByIdSelfAccess):
+ (JSC::DFG::addStructureTransitionCheck):
+ (JSC::DFG::replaceWithJump):
+ (JSC::DFG::emitRestoreScratch):
+ (JSC::DFG::linkRestoreScratch):
+ (JSC::DFG::generateProtoChainAccessStub):
+ (JSC::DFG::tryCacheGetByID):
+ (JSC::DFG::repatchGetByID):
+ (JSC::DFG::getPolymorphicStructureList):
+ (JSC::DFG::patchJumpToGetByIdStub):
+ (JSC::DFG::tryBuildGetByIDList):
+ (JSC::DFG::buildGetByIDList):
+ (JSC::DFG::appropriateGenericPutByIdFunction):
+ (JSC::DFG::appropriateListBuildingPutByIdFunction):
+ (JSC::DFG::emitPutReplaceStub):
+ (JSC::DFG::emitPutTransitionStub):
+ (JSC::DFG::tryCachePutByID):
+ (JSC::DFG::repatchPutByID):
+ (JSC::DFG::tryBuildPutByIdList):
+ (JSC::DFG::buildPutByIdList):
+ (JSC::DFG::tryRepatchIn):
+ (JSC::DFG::repatchIn):
+ (JSC::DFG::linkSlowFor):
+ (JSC::DFG::linkFor):
+ (JSC::DFG::linkClosureCall):
+ (JSC::DFG::resetGetByID):
+ (JSC::DFG::resetPutByID):
+ (JSC::DFG::resetIn):
+ * dfg/DFGRepatch.h: Added.
+ (JSC::DFG::resetGetByID):
+ (JSC::DFG::resetPutByID):
+ (JSC::DFG::resetIn):
+ * dfg/DFGScratchRegisterAllocator.h: Added.
+ (JSC::DFG::ScratchRegisterAllocator::ScratchRegisterAllocator):
+ (JSC::DFG::ScratchRegisterAllocator::lock):
+ (JSC::DFG::ScratchRegisterAllocator::allocateScratch):
+ (JSC::DFG::ScratchRegisterAllocator::allocateScratchGPR):
+ (JSC::DFG::ScratchRegisterAllocator::allocateScratchFPR):
+ (JSC::DFG::ScratchRegisterAllocator::didReuseRegisters):
+ (JSC::DFG::ScratchRegisterAllocator::preserveReusedRegistersByPushing):
+ (JSC::DFG::ScratchRegisterAllocator::restoreReusedRegistersByPopping):
+ (JSC::DFG::ScratchRegisterAllocator::desiredScratchBufferSize):
+ (JSC::DFG::ScratchRegisterAllocator::preserveUsedRegistersToScratchBuffer):
+ (JSC::DFG::ScratchRegisterAllocator::restoreUsedRegistersFromScratchBuffer):
+ * dfg/DFGSpeculativeJIT.cpp:
+ (JSC::DFG::SpeculativeJIT::writeBarrier):
+ (JSC::DFG::SpeculativeJIT::nonSpeculativeCompare):
+ (JSC::DFG::SpeculativeJIT::compilePeepHoleBranch):
+ (JSC::DFG::SpeculativeJIT::compare):
+ * dfg/DFGSpeculativeJIT.h:
+ (JSC::DFG::SpeculativeJIT::callOperation):
+ * dfg/DFGSpeculativeJIT32_64.cpp:
+ (JSC::DFG::SpeculativeJIT::cachedPutById):
+ (JSC::DFG::SpeculativeJIT::nonSpeculativePeepholeBranch):
+ (JSC::DFG::CompareAndBoxBooleanSlowPathGenerator::CompareAndBoxBooleanSlowPathGenerator):
+ (JSC::DFG::SpeculativeJIT::nonSpeculativeNonPeepholeCompare):
+ (JSC::DFG::SpeculativeJIT::compile):
+ * dfg/DFGSpeculativeJIT64.cpp:
+ (JSC::DFG::SpeculativeJIT::cachedPutById):
+ (JSC::DFG::SpeculativeJIT::nonSpeculativePeepholeBranch):
+ (JSC::DFG::CompareAndBoxBooleanSlowPathGenerator::CompareAndBoxBooleanSlowPathGenerator):
+ (JSC::DFG::SpeculativeJIT::nonSpeculativeNonPeepholeCompare):
+ (JSC::DFG::SpeculativeJIT::compile):
+ * dfg/DFGThunks.cpp:
+ (JSC::DFG::emitPointerValidation):
+ (JSC::DFG::throwExceptionFromCallSlowPathGenerator):
+ (JSC::DFG::slowPathFor):
+ (JSC::DFG::linkForThunkGenerator):
+ (JSC::DFG::linkCallThunkGenerator):
+ (JSC::DFG::linkConstructThunkGenerator):
+ (JSC::DFG::linkClosureCallThunkGenerator):
+ (JSC::DFG::virtualForThunkGenerator):
+ (JSC::DFG::virtualCallThunkGenerator):
+ (JSC::DFG::virtualConstructThunkGenerator):
+ * dfg/DFGThunks.h:
+ * ftl/FTLIntrinsicRepository.h:
+ * ftl/FTLLowerDFGToLLVM.cpp:
+ (JSC::FTL::LowerDFGToLLVM::compileCallOrConstruct):
+ * ftl/FTLOSRExitCompiler.h:
+ * jit/AssemblyHelpers.h:
+ * jit/JIT.cpp:
+ (JSC::JIT::linkFor):
+ (JSC::JIT::linkSlowCall):
+ * jit/JITCall.cpp:
+ (JSC::JIT::compileCallEvalSlowCase):
+ (JSC::JIT::compileOpCallSlowCase):
+ (JSC::JIT::privateCompileClosureCall):
+ * jit/JITCall32_64.cpp:
+ (JSC::JIT::compileCallEvalSlowCase):
+ (JSC::JIT::compileOpCallSlowCase):
+ (JSC::JIT::privateCompileClosureCall):
+ * jit/JITOperationWrappers.h: Removed.
+ * jit/JITOperations.cpp: Removed.
+ * jit/JITOperations.h: Removed.
+ * jit/RegisterSet.h: Removed.
+ * jit/Repatch.cpp: Removed.
+ * jit/Repatch.h: Removed.
+ * jit/ScratchRegisterAllocator.h: Removed.
+ * jit/ThunkGenerators.cpp:
+ (JSC::generateSlowCaseFor):
+ (JSC::linkForGenerator):
+ (JSC::linkCallGenerator):
+ (JSC::linkConstructGenerator):
+ (JSC::linkClosureCallGenerator):
+ (JSC::virtualForGenerator):
+ (JSC::virtualCallGenerator):
+ (JSC::virtualConstructGenerator):
+ * jit/ThunkGenerators.h:
+
2013-09-21 Filip Pizlo <fpizlo@apple.com>
Move DFG inline caching logic into jit/
Source/JavaScriptCore/dfg/DFGPredictionPropagationPhase.cpp \
Source/JavaScriptCore/dfg/DFGPredictionPropagationPhase.h \
Source/JavaScriptCore/dfg/DFGRegisterBank.h \
+ Source/JavaScriptCore/dfg/DFGRegisterSet.h \
+ Source/JavaScriptCore/dfg/DFGRepatch.cpp \
+ Source/JavaScriptCore/dfg/DFGRepatch.h \
Source/JavaScriptCore/dfg/DFGSafeToExecute.h \
Source/JavaScriptCore/dfg/DFGSaneStringGetByValSlowPathGenerator.h \
Source/JavaScriptCore/dfg/DFGScoreBoard.h \
+ Source/JavaScriptCore/dfg/DFGScratchRegisterAllocator.h \
Source/JavaScriptCore/dfg/DFGSilentRegisterSavePlan.h \
Source/JavaScriptCore/dfg/DFGSlowPathGenerator.h \
Source/JavaScriptCore/dfg/DFGSpeculativeJIT32_64.cpp \
Source/JavaScriptCore/jit/JITInlines.h \
Source/JavaScriptCore/jit/JITOpcodes32_64.cpp \
Source/JavaScriptCore/jit/JITOpcodes.cpp \
- Source/JavaScriptCore/jit/JITOperationWrappers.h \
- Source/JavaScriptCore/jit/JITOperations.cpp \
- Source/JavaScriptCore/jit/JITOperations.h \
Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp \
Source/JavaScriptCore/jit/JITPropertyAccess.cpp \
Source/JavaScriptCore/jit/JITStubCall.h \
Source/JavaScriptCore/jit/JSInterfaceJIT.h \
Source/JavaScriptCore/jit/JumpReplacementWatchpoint.cpp \
Source/JavaScriptCore/jit/JumpReplacementWatchpoint.h \
- Source/JavaScriptCore/jit/Repatch.cpp \
- Source/JavaScriptCore/jit/Repatch.h \
- Source/JavaScriptCore/jit/ScratchRegisterAllocator.h \
Source/JavaScriptCore/jit/SlowPathCall.h \
Source/JavaScriptCore/jit/SpecializedThunkJIT.h \
Source/JavaScriptCore/jit/ThunkGenerator.h \
<ClCompile Include="..\jit\JITExceptions.cpp" />\r
<ClCompile Include="..\jit\JITOpcodes.cpp" />\r
<ClCompile Include="..\jit\JITOpcodes32_64.cpp" />\r
- <ClCompile Include="..\jit\JITOperations.cpp" />\r
<ClCompile Include="..\jit\JITPropertyAccess.cpp" />\r
<ClCompile Include="..\jit\JITPropertyAccess32_64.cpp" />\r
<ClCompile Include="..\jit\JITStubRoutine.cpp" />\r
<ClCompile Include="..\jit\JITThunks.cpp" />\r
<ClCompile Include="..\jit\JITToDFGDeferredCompilationCallback.cpp" />\r
<ClCompile Include="..\jit\JumpReplacementWatchpoint.cpp" />\r
- <ClCompile Include="..\jit\Repatch.cpp" />\r
<ClCompile Include="..\jit\ThunkGenerators.cpp" />\r
<ClCompile Include="..\llint\LLIntCLoop.cpp" />\r
<ClCompile Include="..\llint\LLIntData.cpp" />\r
<ClInclude Include="..\jit\JITDisassembler.h" />\r
<ClInclude Include="..\jit\JITExceptions.h" />\r
<ClInclude Include="..\jit\JITInlines.h" />\r
- <ClInclude Include="..\jit\JITOperationWrappers.h" />\r
- <ClInclude Include="..\jit\JITOperations.h" />\r
<ClInclude Include="..\jit\JITStubCall.h" />\r
<ClInclude Include="..\jit\JITStubRoutine.h" />\r
<ClInclude Include="..\jit\JITStubs.h" />\r
<ClInclude Include="..\jit\JITWriteBarrier.h" />\r
<ClInclude Include="..\jit\JSInterfaceJIT.h" />\r
<ClInclude Include="..\jit\JumpReplacementWatchpoint.h" />\r
- <ClInclude Include="..\jit\RegisterSet.h" />\r
- <ClInclude Include="..\jit\Repatch.h" />\r
- <ClInclude Include="..\jit\ScratchRegisterAllocator.h" />\r
<ClInclude Include="..\jit\SpecializedThunkJIT.h" />\r
<ClInclude Include="..\jit\ThunkGenerator.h" />\r
<ClInclude Include="..\jit\ThunkGenerators.h" />\r
0F24E54217EA9F5900ABB217 /* CCallHelpers.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F24E53D17EA9F5900ABB217 /* CCallHelpers.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F24E54317EA9F5900ABB217 /* FPRInfo.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F24E53E17EA9F5900ABB217 /* FPRInfo.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F24E54417EA9F5900ABB217 /* GPRInfo.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F24E53F17EA9F5900ABB217 /* GPRInfo.h */; settings = {ATTRIBUTES = (Private, ); }; };
- 0F24E54C17EE274900ABB217 /* JITOperations.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F24E54517EE274900ABB217 /* JITOperations.cpp */; };
- 0F24E54D17EE274900ABB217 /* JITOperations.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F24E54617EE274900ABB217 /* JITOperations.h */; };
- 0F24E54E17EE274900ABB217 /* JITOperationWrappers.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F24E54717EE274900ABB217 /* JITOperationWrappers.h */; };
- 0F24E54F17EE274900ABB217 /* RegisterSet.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F24E54817EE274900ABB217 /* RegisterSet.h */; };
- 0F24E55017EE274900ABB217 /* Repatch.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F24E54917EE274900ABB217 /* Repatch.cpp */; };
- 0F24E55117EE274900ABB217 /* Repatch.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F24E54A17EE274900ABB217 /* Repatch.h */; };
- 0F24E55217EE274900ABB217 /* ScratchRegisterAllocator.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F24E54B17EE274900ABB217 /* ScratchRegisterAllocator.h */; };
0F256C361627B0AD007F2783 /* DFGCallArrayAllocatorSlowPathGenerator.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F256C341627B0AA007F2783 /* DFGCallArrayAllocatorSlowPathGenerator.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F2B66AC17B6B53F00A7AE3F /* GCIncomingRefCounted.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2B66A817B6B53D00A7AE3F /* GCIncomingRefCounted.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F2B66AD17B6B54500A7AE3F /* GCIncomingRefCountedInlines.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F2B66A917B6B53D00A7AE3F /* GCIncomingRefCountedInlines.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F766D3515AE253B008F363E /* JumpReplacementWatchpoint.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F766D3315AE2535008F363E /* JumpReplacementWatchpoint.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F766D3815AE4A1C008F363E /* StructureStubClearingWatchpoint.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F766D3615AE4A1A008F363E /* StructureStubClearingWatchpoint.cpp */; };
0F766D3915AE4A1F008F363E /* StructureStubClearingWatchpoint.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F766D3715AE4A1A008F363E /* StructureStubClearingWatchpoint.h */; settings = {ATTRIBUTES = (Private, ); }; };
+ 0F766D4415B2A3C0008F363E /* DFGRegisterSet.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F766D4215B2A3BD008F363E /* DFGRegisterSet.h */; settings = {ATTRIBUTES = (Private, ); }; };
+ 0F766D4615B3701F008F363E /* DFGScratchRegisterAllocator.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F766D4515B3701D008F363E /* DFGScratchRegisterAllocator.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F7700921402FF3C0078EB39 /* SamplingCounter.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 0F7700911402FF280078EB39 /* SamplingCounter.cpp */; };
0F7B294B14C3CD2F007C3DB1 /* DFGCapabilities.h in Headers */ = {isa = PBXBuildFile; fileRef = 0FD82E1F14172C2F00179C94 /* DFGCapabilities.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F7B294D14C3CD4C007C3DB1 /* DFGCommon.h in Headers */ = {isa = PBXBuildFile; fileRef = 0FC0977E1469EBC400CF2442 /* DFGCommon.h */; settings = {ATTRIBUTES = (Private, ); }; };
86AE64AA135E5E1C00963012 /* SH4Assembler.h in Headers */ = {isa = PBXBuildFile; fileRef = 86AE64A7135E5E1C00963012 /* SH4Assembler.h */; settings = {ATTRIBUTES = (Private, ); }; };
86B5826714D2796C00A9C306 /* CodeProfile.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 86B5822E14D2373B00A9C306 /* CodeProfile.cpp */; };
86B5826914D2797000A9C306 /* CodeProfiling.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 8603CEF214C7546400AE59E3 /* CodeProfiling.cpp */; };
+ 86BB09C0138E381B0056702F /* DFGRepatch.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 86BB09BE138E381B0056702F /* DFGRepatch.cpp */; };
+ 86BB09C1138E381B0056702F /* DFGRepatch.h in Headers */ = {isa = PBXBuildFile; fileRef = 86BB09BF138E381B0056702F /* DFGRepatch.h */; settings = {ATTRIBUTES = (Private, ); }; };
86C36EEA0EE1289D00B3DF59 /* MacroAssembler.h in Headers */ = {isa = PBXBuildFile; fileRef = 86C36EE90EE1289D00B3DF59 /* MacroAssembler.h */; settings = {ATTRIBUTES = (Private, ); }; };
86C568E011A213EE0007F7F0 /* MacroAssemblerARM.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 86C568DD11A213EE0007F7F0 /* MacroAssemblerARM.cpp */; };
86C568E111A213EE0007F7F0 /* MacroAssemblerMIPS.h in Headers */ = {isa = PBXBuildFile; fileRef = 86C568DE11A213EE0007F7F0 /* MacroAssemblerMIPS.h */; settings = {ATTRIBUTES = (Private, ); }; };
0F24E53D17EA9F5900ABB217 /* CCallHelpers.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CCallHelpers.h; sourceTree = "<group>"; };
0F24E53E17EA9F5900ABB217 /* FPRInfo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FPRInfo.h; sourceTree = "<group>"; };
0F24E53F17EA9F5900ABB217 /* GPRInfo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = GPRInfo.h; sourceTree = "<group>"; };
- 0F24E54517EE274900ABB217 /* JITOperations.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = JITOperations.cpp; sourceTree = "<group>"; };
- 0F24E54617EE274900ABB217 /* JITOperations.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = JITOperations.h; sourceTree = "<group>"; };
- 0F24E54717EE274900ABB217 /* JITOperationWrappers.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = JITOperationWrappers.h; sourceTree = "<group>"; };
- 0F24E54817EE274900ABB217 /* RegisterSet.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RegisterSet.h; sourceTree = "<group>"; };
- 0F24E54917EE274900ABB217 /* Repatch.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = Repatch.cpp; sourceTree = "<group>"; };
- 0F24E54A17EE274900ABB217 /* Repatch.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Repatch.h; sourceTree = "<group>"; };
- 0F24E54B17EE274900ABB217 /* ScratchRegisterAllocator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ScratchRegisterAllocator.h; sourceTree = "<group>"; };
0F256C341627B0AA007F2783 /* DFGCallArrayAllocatorSlowPathGenerator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGCallArrayAllocatorSlowPathGenerator.h; path = dfg/DFGCallArrayAllocatorSlowPathGenerator.h; sourceTree = "<group>"; };
0F2B66A817B6B53D00A7AE3F /* GCIncomingRefCounted.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GCIncomingRefCounted.h; sourceTree = "<group>"; };
0F2B66A917B6B53D00A7AE3F /* GCIncomingRefCountedInlines.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GCIncomingRefCountedInlines.h; sourceTree = "<group>"; };
0F766D3315AE2535008F363E /* JumpReplacementWatchpoint.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = JumpReplacementWatchpoint.h; sourceTree = "<group>"; };
0F766D3615AE4A1A008F363E /* StructureStubClearingWatchpoint.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = StructureStubClearingWatchpoint.cpp; sourceTree = "<group>"; };
0F766D3715AE4A1A008F363E /* StructureStubClearingWatchpoint.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = StructureStubClearingWatchpoint.h; sourceTree = "<group>"; };
+ 0F766D4215B2A3BD008F363E /* DFGRegisterSet.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGRegisterSet.h; path = dfg/DFGRegisterSet.h; sourceTree = "<group>"; };
+ 0F766D4515B3701D008F363E /* DFGScratchRegisterAllocator.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGScratchRegisterAllocator.h; path = dfg/DFGScratchRegisterAllocator.h; sourceTree = "<group>"; };
0F77008E1402FDD60078EB39 /* SamplingCounter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SamplingCounter.h; sourceTree = "<group>"; };
0F7700911402FF280078EB39 /* SamplingCounter.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = SamplingCounter.cpp; sourceTree = "<group>"; };
0F8023E91613832300A0BA45 /* ByValInfo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ByValInfo.h; sourceTree = "<group>"; };
86B5822E14D2373B00A9C306 /* CodeProfile.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = CodeProfile.cpp; sourceTree = "<group>"; };
86B5822F14D2373B00A9C306 /* CodeProfile.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CodeProfile.h; sourceTree = "<group>"; };
86B5826A14D35D5100A9C306 /* TieredMMapArray.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TieredMMapArray.h; sourceTree = "<group>"; };
+ 86BB09BE138E381B0056702F /* DFGRepatch.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = DFGRepatch.cpp; path = dfg/DFGRepatch.cpp; sourceTree = "<group>"; };
+ 86BB09BF138E381B0056702F /* DFGRepatch.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = DFGRepatch.h; path = dfg/DFGRepatch.h; sourceTree = "<group>"; };
86BF642A148DB2B5004DE36A /* Intrinsic.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Intrinsic.h; sourceTree = "<group>"; };
86C36EE90EE1289D00B3DF59 /* MacroAssembler.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MacroAssembler.h; sourceTree = "<group>"; };
86C568DD11A213EE0007F7F0 /* MacroAssemblerARM.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = MacroAssemblerARM.cpp; sourceTree = "<group>"; };
0F24E53B17EA9F5900ABB217 /* AssemblyHelpers.cpp */,
0F24E53C17EA9F5900ABB217 /* AssemblyHelpers.h */,
0F24E53D17EA9F5900ABB217 /* CCallHelpers.h */,
+ 0F24E53E17EA9F5900ABB217 /* FPRInfo.h */,
+ 0F24E53F17EA9F5900ABB217 /* GPRInfo.h */,
0F73D7AB165A142A00ACAB71 /* ClosureCallStubRoutine.cpp */,
0F73D7AC165A142A00ACAB71 /* ClosureCallStubRoutine.h */,
0FD82E37141AB14200179C94 /* CompactJITCodeMap.h */,
A7B48DB60EE74CFC00DCBDB6 /* ExecutableAllocator.cpp */,
A7B48DB50EE74CFC00DCBDB6 /* ExecutableAllocator.h */,
86DB64630F95C6FC00D7D921 /* ExecutableAllocatorFixedVMPool.cpp */,
- 0F24E53E17EA9F5900ABB217 /* FPRInfo.h */,
0F766D2D15A8DCDD008F363E /* GCAwareJITStubRoutine.cpp */,
0F766D2E15A8DCDD008F363E /* GCAwareJITStubRoutine.h */,
- 0F24E53F17EA9F5900ABB217 /* GPRInfo.h */,
0F4680D014BBC5F800BFE272 /* HostCallReturnValue.cpp */,
0F4680D114BBC5F800BFE272 /* HostCallReturnValue.h */,
1429D92D0ED22D7000B89619 /* JIT.cpp */,
86CC85A00EE79A4700288682 /* JITInlines.h */,
BCDD51E90FB8DF74004A8BDC /* JITOpcodes.cpp */,
A71236E41195F33C00BD2174 /* JITOpcodes32_64.cpp */,
- 0F24E54517EE274900ABB217 /* JITOperations.cpp */,
- 0F24E54617EE274900ABB217 /* JITOperations.h */,
- 0F24E54717EE274900ABB217 /* JITOperationWrappers.h */,
86CC85C30EE7A89400288682 /* JITPropertyAccess.cpp */,
A7C1E8C8112E701C00A37F98 /* JITPropertyAccess32_64.cpp */,
960626950FB8EC02009798AB /* JITStubCall.h */,
A76C51741182748D00715B05 /* JSInterfaceJIT.h */,
0F766D3215AE2535008F363E /* JumpReplacementWatchpoint.cpp */,
0F766D3315AE2535008F363E /* JumpReplacementWatchpoint.h */,
- 0F24E54817EE274900ABB217 /* RegisterSet.h */,
- 0F24E54917EE274900ABB217 /* Repatch.cpp */,
- 0F24E54A17EE274900ABB217 /* Repatch.h */,
- 0F24E54B17EE274900ABB217 /* ScratchRegisterAllocator.h */,
A709F2EF17A0AC0400512E98 /* SlowPathCall.h */,
A7386551118697B400540279 /* SpecializedThunkJIT.h */,
0F572D4D16879FDB00E57FBD /* ThunkGenerator.h */,
0FFFC95114EF909500C72532 /* DFGPredictionPropagationPhase.cpp */,
0FFFC95214EF909500C72532 /* DFGPredictionPropagationPhase.h */,
86EC9DC11328DF82002B2AD7 /* DFGRegisterBank.h */,
+ 0F766D4215B2A3BD008F363E /* DFGRegisterSet.h */,
+ 86BB09BE138E381B0056702F /* DFGRepatch.cpp */,
+ 86BB09BF138E381B0056702F /* DFGRepatch.h */,
A77A423C17A0BBFD00A8DB81 /* DFGSafeToExecute.h */,
A741017E179DAF80002EB8BA /* DFGSaneStringGetByValSlowPathGenerator.h */,
86ECA3F9132DF25A002B2AD7 /* DFGScoreBoard.h */,
+ 0F766D4515B3701D008F363E /* DFGScratchRegisterAllocator.h */,
0F1E3A65153A21DF000F9456 /* DFGSilentRegisterSavePlan.h */,
0F1E3A501537C2CB000F9456 /* DFGSlowPathGenerator.h */,
86EC9DC21328DF82002B2AD7 /* DFGSpeculativeJIT.cpp */,
0F66E16B14DF3F1600B7B2E4 /* DFGAdjacencyList.h in Headers */,
0FFB921816D02EB20055A5DB /* DFGAllocator.h in Headers */,
A737810C1799EA2E00817533 /* DFGAnalysis.h in Headers */,
- 0F24E54E17EE274900ABB217 /* JITOperationWrappers.h in Headers */,
0F1E3A461534CBAF000F9456 /* DFGArgumentPosition.h in Headers */,
0F16015E156198C900C2587C /* DFGArgumentsSimplificationPhase.h in Headers */,
0F05C3B41683CF9200BAF45B /* DFGArrayifySlowPathGenerator.h in Headers */,
0FFB921A16D02EC50055A5DB /* DFGBasicBlockInlines.h in Headers */,
A70B083317A0B79B00DAF14B /* DFGBinarySwitch.h in Headers */,
A7D89CF417A0B8CC00773AD8 /* DFGBlockInsertionSet.h in Headers */,
- 0F24E55217EE274900ABB217 /* ScratchRegisterAllocator.h in Headers */,
0F8364B7164B0C110053329A /* DFGBranchDirection.h in Headers */,
86EC9DC51328DF82002B2AD7 /* DFGByteCodeParser.h in Headers */,
0F256C361627B0AD007F2783 /* DFGCallArrayAllocatorSlowPathGenerator.h in Headers */,
0FBE0F7516C1DB0B0082C5E8 /* DFGPredictionInjectionPhase.h in Headers */,
0FFFC95E14EF90B700C72532 /* DFGPredictionPropagationPhase.h in Headers */,
86EC9DD11328DF82002B2AD7 /* DFGRegisterBank.h in Headers */,
+ 0F766D4415B2A3C0008F363E /* DFGRegisterSet.h in Headers */,
+ 86BB09C1138E381B0056702F /* DFGRepatch.h in Headers */,
2AD8932B17E3868F00668276 /* HeapIterationScope.h in Headers */,
A77A424317A0BBFD00A8DB81 /* DFGSafeToExecute.h in Headers */,
A741017F179DAF80002EB8BA /* DFGSaneStringGetByValSlowPathGenerator.h in Headers */,
86ECA3FA132DF25A002B2AD7 /* DFGScoreBoard.h in Headers */,
0FD8A31417D4326C00CA2C40 /* CodeBlockSet.h in Headers */,
+ 0F766D4615B3701F008F363E /* DFGScratchRegisterAllocator.h in Headers */,
0F1E3A67153A21E2000F9456 /* DFGSilentRegisterSavePlan.h in Headers */,
0FFB921D16D02F300055A5DB /* DFGSlowPathGenerator.h in Headers */,
86EC9DD31328DF82002B2AD7 /* DFGSpeculativeJIT.h in Headers */,
A7A4AE1017973B4D005612B1 /* JITStubsX86Common.h in Headers */,
0F5EF91F16878F7D003E5C25 /* JITThunks.h in Headers */,
0FC712E317CD8793008CC93C /* JITToDFGDeferredCompilationCallback.h in Headers */,
- 0F24E54D17EE274900ABB217 /* JITOperations.h in Headers */,
A76F54A313B28AAB00EF2BCE /* JITWriteBarrier.h in Headers */,
BC18C4160E16F5CD00B34460 /* JSActivation.h in Headers */,
840480131021A1D9008E7F01 /* JSAPIValueWrapper.h in Headers */,
0FF42742158EBE91004CB9FF /* udis86_extern.h in Headers */,
0FF42744158EBE91004CB9FF /* udis86_input.h in Headers */,
0FF42748158EBE91004CB9FF /* udis86_syn.h in Headers */,
- 0F24E55117EE274900ABB217 /* Repatch.h in Headers */,
- 0F24E54F17EE274900ABB217 /* RegisterSet.h in Headers */,
0FF42749158EBE91004CB9FF /* udis86_types.h in Headers */,
A7E5AB391799E4B200D2833D /* UDis86Disassembler.h in Headers */,
A7A8AF4117ADB5F3005AB174 /* Uint16Array.h in Headers */,
0F2BDC15151C5D4D00CD8910 /* DFGFixupPhase.cpp in Sources */,
A7D89CF717A0B8CC00773AD8 /* DFGFlushFormat.cpp in Sources */,
A7D89CF917A0B8CC00773AD8 /* DFGFlushLivenessAnalysisPhase.cpp in Sources */,
- 0F24E54C17EE274900ABB217 /* JITOperations.cpp in Sources */,
86EC9DC71328DF82002B2AD7 /* DFGGraph.cpp in Sources */,
A704D90517A0BAA8006BA554 /* DFGInPlaceAbstractState.cpp in Sources */,
0FEA0A33170D40BF00BB722C /* DFGJITCode.cpp in Sources */,
A78A977A179738B8009DF744 /* DFGPlan.cpp in Sources */,
0FBE0F7416C1DB090082C5E8 /* DFGPredictionInjectionPhase.cpp in Sources */,
0FFFC95D14EF90B300C72532 /* DFGPredictionPropagationPhase.cpp in Sources */,
+ 86BB09C0138E381B0056702F /* DFGRepatch.cpp in Sources */,
86EC9DD21328DF82002B2AD7 /* DFGSpeculativeJIT.cpp in Sources */,
86880F1F14328BB900B08D42 /* DFGSpeculativeJIT32_64.cpp in Sources */,
86880F4D14353B2100B08D42 /* DFGSpeculativeJIT64.cpp in Sources */,
9335F24D12E6765B002B5553 /* StringRecursionChecker.cpp in Sources */,
BCDE3B430E6C832D001453A7 /* Structure.cpp in Sources */,
7E4EE70F0EBB7A5B005934AA /* StructureChain.cpp in Sources */,
- 0F24E55017EE274900ABB217 /* Repatch.cpp in Sources */,
C2F0F2D116BAEEE900187C19 /* StructureRareData.cpp in Sources */,
0F766D3815AE4A1C008F363E /* StructureStubClearingWatchpoint.cpp in Sources */,
BCCF0D0C0EF0B8A500413C8F /* StructureStubInfo.cpp in Sources */,
dfg/DFGPlan.cpp \
dfg/DFGPredictionPropagationPhase.cpp \
dfg/DFGPredictionInjectionPhase.cpp \
+ dfg/DFGRepatch.cpp \
dfg/DFGSSAConversionPhase.cpp \
dfg/DFGSpeculativeJIT.cpp \
dfg/DFGSpeculativeJIT32_64.cpp \
jit/JITExceptions.cpp \
jit/JITOpcodes.cpp \
jit/JITOpcodes32_64.cpp \
- jit/JITOperations.cpp \
jit/JITPropertyAccess.cpp \
jit/JITPropertyAccess32_64.cpp \
jit/JITStubRoutine.cpp \
jit/JITThunks.cpp \
jit/JITToDFGDeferredCompilationCallback.cpp \
jit/JumpReplacementWatchpoint.cpp \
- jit/Repatch.cpp \
jit/ThunkGenerators.cpp \
llint/LLIntCLoop.cpp \
llint/LLIntData.cpp \
repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(RepatchBuffer::startOfBranchPtrWithPatchOnRegister(hotPathBegin), static_cast<MacroAssembler::RegisterID>(calleeGPR), 0);
if (isDFG) {
#if ENABLE(DFG_JIT)
- repatchBuffer.relink(callReturnLocation, (callType == Construct ? vm.getCTIStub(linkConstructThunkGenerator) : vm.getCTIStub(linkCallThunkGenerator)).code());
+ repatchBuffer.relink(callReturnLocation, (callType == Construct ? vm.getCTIStub(DFG::linkConstructThunkGenerator) : vm.getCTIStub(DFG::linkCallThunkGenerator)).code());
#else
RELEASE_ASSERT_NOT_REACHED();
#endif
} else
- repatchBuffer.relink(callReturnLocation, callType == Construct ? vm.getCTIStub(oldStyleLinkConstructGenerator).code() : vm.getCTIStub(oldStyleLinkCallGenerator).code());
+ repatchBuffer.relink(callReturnLocation, callType == Construct ? vm.getCTIStub(linkConstructGenerator).code() : vm.getCTIStub(linkCallGenerator).code());
hasSeenShouldRepatch = false;
callee.clear();
stub.clear();
#include "DFGCommon.h"
#include "DFGDriver.h"
#include "DFGNode.h"
+#include "DFGRepatch.h"
#include "DFGWorklist.h"
#include "Debugger.h"
#include "Interpreter.h"
#include "PolymorphicPutByIdList.h"
#include "ReduceWhitespace.h"
#include "RepatchBuffer.h"
-#include "Repatch.h"
#include "SlotVisitorInlines.h"
#include <stdio.h>
#include <wtf/CommaPrinter.h>
break;
case JITCode::DFGJIT:
if (isGetByIdAccess(accessType))
- resetGetByID(repatchBuffer, stubInfo);
+ DFG::resetGetByID(repatchBuffer, stubInfo);
else if (isPutByIdAccess(accessType))
- resetPutByID(repatchBuffer, stubInfo);
+ DFG::resetPutByID(repatchBuffer, stubInfo);
else {
RELEASE_ASSERT(isInAccess(accessType));
- resetIn(repatchBuffer, stubInfo);
+ DFG::resetIn(repatchBuffer, stubInfo);
}
break;
default:
#if ENABLE(JIT)
#include "CodeOrigin.h"
+#include "DFGRegisterSet.h"
#include "Instruction.h"
#include "JITStubRoutine.h"
#include "MacroAssembler.h"
#include "Opcode.h"
#include "PolymorphicAccessStructureList.h"
-#include "RegisterSet.h"
#include "Structure.h"
#include "StructureStubClearingWatchpoint.h"
#include <wtf/OwnPtr.h>
int8_t valueTagGPR;
#endif
int8_t valueGPR;
- RegisterSetPOD usedRegisters;
+ DFG::RegisterSetPOD usedRegisters;
int32_t deltaCallToDone;
int32_t deltaCallToStorageLoad;
int32_t deltaCallToStructCheck;
class CallArrayAllocatorSlowPathGenerator : public JumpingSlowPathGenerator<MacroAssembler::JumpList> {
public:
CallArrayAllocatorSlowPathGenerator(
- MacroAssembler::JumpList from, SpeculativeJIT* jit, P_JITOperation_EStZ function,
+ MacroAssembler::JumpList from, SpeculativeJIT* jit, P_DFGOperation_EStZ function,
GPRReg resultGPR, GPRReg storageGPR, Structure* structure, size_t size)
: JumpingSlowPathGenerator<MacroAssembler::JumpList>(from, jit)
, m_function(function)
}
private:
- P_JITOperation_EStZ m_function;
+ P_DFGOperation_EStZ m_function;
GPRReg m_resultGPR;
GPRReg m_storageGPR;
Structure* m_structure;
class CallArrayAllocatorWithVariableSizeSlowPathGenerator : public JumpingSlowPathGenerator<MacroAssembler::JumpList> {
public:
CallArrayAllocatorWithVariableSizeSlowPathGenerator(
- MacroAssembler::JumpList from, SpeculativeJIT* jit, P_JITOperation_EStZ function,
+ MacroAssembler::JumpList from, SpeculativeJIT* jit, P_DFGOperation_EStZ function,
GPRReg resultGPR, Structure* contiguousStructure, Structure* arrayStorageStructure, GPRReg sizeGPR)
: JumpingSlowPathGenerator<MacroAssembler::JumpList>(from, jit)
, m_function(function)
}
private:
- P_JITOperation_EStZ m_function;
+ P_DFGOperation_EStZ m_function;
GPRReg m_resultGPR;
Structure* m_contiguousStructure;
Structure* m_arrayStorageStructure;
#include "DFGJITCode.h"
#include "DFGOSRExitCompilationInfo.h"
#include "DFGRegisterBank.h"
+#include "DFGRegisterSet.h"
#include "FPRInfo.h"
#include "GPRInfo.h"
#include "JITCode.h"
#include "LinkBuffer.h"
#include "MacroAssembler.h"
-#include "RegisterSet.h"
namespace JSC {
};
extern "C" {
-void JIT_OPERATION compileOSRExit(ExecState*) WTF_INTERNAL;
+void DFG_OPERATION compileOSRExit(ExecState*) WTF_INTERNAL;
}
} } // namespace JSC::DFG
#include "CopiedSpaceInlines.h"
#include "DFGDriver.h"
#include "DFGOSRExit.h"
+#include "DFGRepatch.h"
#include "DFGThunks.h"
#include "DFGToFTLDeferredCompilationCallback.h"
#include "DFGToFTLForOSREntryDeferredCompilationCallback.h"
#include "Interpreter.h"
#include "JIT.h"
#include "JITExceptions.h"
-#include "JITOperationWrappers.h"
#include "JSActivation.h"
#include "VM.h"
#include "JSNameScope.h"
#include "NameInstance.h"
#include "ObjectConstructor.h"
#include "Operations.h"
-#include "Repatch.h"
#include "StringConstructor.h"
#include "TypedArrayInlines.h"
#include <wtf/InlineASM.h>
#if ENABLE(JIT)
+
+#if CPU(MIPS)
+#if WTF_MIPS_PIC
+#define LOAD_FUNCTION_TO_T9(function) \
+ ".set noreorder" "\n" \
+ ".cpload $25" "\n" \
+ ".set reorder" "\n" \
+ "la $t9, " LOCAL_REFERENCE(function) "\n"
+#else
+#define LOAD_FUNCTION_TO_T9(function) "" "\n"
+#endif
+#endif
+
#if ENABLE(DFG_JIT)
+#if COMPILER(GCC) && CPU(X86_64)
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, register) \
+ asm( \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ "mov (%rsp), %" STRINGIZE(register) "\n" \
+ "jmp " LOCAL_REFERENCE(function##WithReturnAddress) "\n" \
+ );
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, rsi)
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, rcx)
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, rcx)
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, r8)
+
+#elif COMPILER(GCC) && CPU(X86)
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, offset) \
+ asm( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ "mov (%esp), %eax\n" \
+ "mov %eax, " STRINGIZE(offset) "(%esp)\n" \
+ "jmp " LOCAL_REFERENCE(function##WithReturnAddress) "\n" \
+ );
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 8)
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 16)
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 20)
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 24)
+
+#elif COMPILER(GCC) && CPU(ARM_THUMB2)
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) \
+ asm ( \
+ ".text" "\n" \
+ ".align 2" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ ".thumb" "\n" \
+ ".thumb_func " THUMB_FUNC_PARAM(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ "mov a2, lr" "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) \
+ asm ( \
+ ".text" "\n" \
+ ".align 2" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ ".thumb" "\n" \
+ ".thumb_func " THUMB_FUNC_PARAM(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ "mov a4, lr" "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+// EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When compiled for ARM EABI, it must be aligned to an even-numbered register pair (r0, r2) or an 8-byte-aligned stack slot ([sp]).
+// As a result, the return address is stored 4 bytes further along the stack in the following cases.
+#if COMPILER_SUPPORTS(EABI) && CPU(ARM)
+#define INSTRUCTION_STORE_RETURN_ADDRESS_EJI "str lr, [sp, #4]"
+#define INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "str lr, [sp, #8]"
+#else
+#define INSTRUCTION_STORE_RETURN_ADDRESS_EJI "str lr, [sp, #0]"
+#define INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "str lr, [sp, #4]"
+#endif
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) \
+ asm ( \
+ ".text" "\n" \
+ ".align 2" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ ".thumb" "\n" \
+ ".thumb_func " THUMB_FUNC_PARAM(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ INSTRUCTION_STORE_RETURN_ADDRESS_EJI "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) \
+ asm ( \
+ ".text" "\n" \
+ ".align 2" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ ".thumb" "\n" \
+ ".thumb_func " THUMB_FUNC_PARAM(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) \
+ asm ( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ INLINE_ARM_FUNCTION(function) \
+ SYMBOL_STRING(function) ":" "\n" \
+ "mov a2, lr" "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) \
+ asm ( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ INLINE_ARM_FUNCTION(function) \
+ SYMBOL_STRING(function) ":" "\n" \
+ "mov a4, lr" "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+// EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When compiled for ARM EABI, it must be aligned to an even-numbered register pair (r0, r2) or an 8-byte-aligned stack slot ([sp]).
+// As a result, the return address is stored 4 bytes further along the stack in the following cases.
+#if COMPILER_SUPPORTS(EABI) && CPU(ARM)
+#define INSTRUCTION_STORE_RETURN_ADDRESS_EJI "str lr, [sp, #4]"
+#define INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "str lr, [sp, #8]"
+#else
+#define INSTRUCTION_STORE_RETURN_ADDRESS_EJI "str lr, [sp, #0]"
+#define INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "str lr, [sp, #4]"
+#endif
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) \
+ asm ( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ INLINE_ARM_FUNCTION(function) \
+ SYMBOL_STRING(function) ":" "\n" \
+ INSTRUCTION_STORE_RETURN_ADDRESS_EJI "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) \
+ asm ( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ INLINE_ARM_FUNCTION(function) \
+ SYMBOL_STRING(function) ":" "\n" \
+ INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+#elif COMPILER(GCC) && CPU(MIPS)
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) \
+ asm( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ LOAD_FUNCTION_TO_T9(function##WithReturnAddress) \
+ "move $a1, $ra" "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) \
+ asm( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ LOAD_FUNCTION_TO_T9(function##WithReturnAddress) \
+ "move $a3, $ra" "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) \
+ asm( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ LOAD_FUNCTION_TO_T9(function##WithReturnAddress) \
+ "sw $ra, 20($sp)" "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) \
+ asm( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ LOAD_FUNCTION_TO_T9(function##WithReturnAddress) \
+ "sw $ra, 24($sp)" "\n" \
+ "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ );
+
+#elif COMPILER(GCC) && CPU(SH4)
+
+#define SH4_SCRATCH_REGISTER "r11"
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) \
+ asm( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ "sts pr, r5" "\n" \
+ "bra " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
+ "nop" "\n" \
+ );
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) \
+ asm( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ "sts pr, r7" "\n" \
+ "mov.l 2f, " SH4_SCRATCH_REGISTER "\n" \
+ "braf " SH4_SCRATCH_REGISTER "\n" \
+ "nop" "\n" \
+ "1: .balign 4" "\n" \
+ "2: .long " LOCAL_REFERENCE(function) "WithReturnAddress-1b" "\n" \
+ );
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, offset, scratch) \
+ asm( \
+ ".text" "\n" \
+ ".globl " SYMBOL_STRING(function) "\n" \
+ HIDE_SYMBOL(function) "\n" \
+ SYMBOL_STRING(function) ":" "\n" \
+ "sts pr, " scratch "\n" \
+ "mov.l " scratch ", @(" STRINGIZE(offset) ", r15)" "\n" \
+ "mov.l 2f, " scratch "\n" \
+ "braf " scratch "\n" \
+ "nop" "\n" \
+ "1: .balign 4" "\n" \
+ "2: .long " LOCAL_REFERENCE(function) "WithReturnAddress-1b" "\n" \
+ );
+
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 0, SH4_SCRATCH_REGISTER)
+#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 4, SH4_SCRATCH_REGISTER)
+
+#endif
+
+#define P_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) \
+void* DFG_OPERATION function##WithReturnAddress(ExecState*, ReturnAddressPtr) REFERENCED_FROM_ASM WTF_INTERNAL; \
+FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function)
+
+#define J_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) \
+EncodedJSValue DFG_OPERATION function##WithReturnAddress(ExecState*, JSCell*, StringImpl*, ReturnAddressPtr) REFERENCED_FROM_ASM WTF_INTERNAL; \
+FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function)
+
+#define J_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) \
+EncodedJSValue DFG_OPERATION function##WithReturnAddress(ExecState*, EncodedJSValue, StringImpl*, ReturnAddressPtr) REFERENCED_FROM_ASM WTF_INTERNAL; \
+FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function)
+
+#define V_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) \
+void DFG_OPERATION function##WithReturnAddress(ExecState*, EncodedJSValue, JSCell*, StringImpl*, ReturnAddressPtr) REFERENCED_FROM_ASM WTF_INTERNAL; \
+FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function)
+
namespace JSC { namespace DFG {
template<bool strict>
}
template<bool strict>
-ALWAYS_INLINE static void JIT_OPERATION operationPutByValInternal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedProperty, EncodedJSValue encodedValue)
+ALWAYS_INLINE static void DFG_OPERATION operationPutByValInternal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedProperty, EncodedJSValue encodedValue)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
extern "C" {
-EncodedJSValue JIT_OPERATION operationToThis(ExecState* exec, EncodedJSValue encodedOp)
+EncodedJSValue DFG_OPERATION operationToThis(ExecState* exec, EncodedJSValue encodedOp)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::encode(JSValue::decode(encodedOp).toThis(exec, NotStrictMode));
}
-EncodedJSValue JIT_OPERATION operationToThisStrict(ExecState* exec, EncodedJSValue encodedOp)
+EncodedJSValue DFG_OPERATION operationToThisStrict(ExecState* exec, EncodedJSValue encodedOp)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::encode(JSValue::decode(encodedOp).toThis(exec, StrictMode));
}
-JSCell* JIT_OPERATION operationCreateThis(ExecState* exec, JSObject* constructor, int32_t inlineCapacity)
+JSCell* DFG_OPERATION operationCreateThis(ExecState* exec, JSObject* constructor, int32_t inlineCapacity)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return constructEmptyObject(exec, jsCast<JSFunction*>(constructor)->allocationProfile(exec, inlineCapacity)->structure());
}
-JSCell* JIT_OPERATION operationNewObject(ExecState* exec, Structure* structure)
+JSCell* DFG_OPERATION operationNewObject(ExecState* exec, Structure* structure)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return constructEmptyObject(exec, structure);
}
-EncodedJSValue JIT_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
+EncodedJSValue DFG_OPERATION operationValueAdd(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::encode(jsAdd(exec, op1, op2));
}
-EncodedJSValue JIT_OPERATION operationValueAddNotNumber(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
+EncodedJSValue DFG_OPERATION operationValueAddNotNumber(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::encode(JSValue(base).get(exec, index));
}
-EncodedJSValue JIT_OPERATION operationGetByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedProperty)
+EncodedJSValue DFG_OPERATION operationGetByVal(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedProperty)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::encode(baseValue.get(exec, ident));
}
-EncodedJSValue JIT_OPERATION operationGetByValCell(ExecState* exec, JSCell* base, EncodedJSValue encodedProperty)
+EncodedJSValue DFG_OPERATION operationGetByValCell(ExecState* exec, JSCell* base, EncodedJSValue encodedProperty)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::encode(JSValue(base).get(exec, index));
}
-EncodedJSValue JIT_OPERATION operationGetByValArrayInt(ExecState* exec, JSArray* base, int32_t index)
+EncodedJSValue DFG_OPERATION operationGetByValArrayInt(ExecState* exec, JSArray* base, int32_t index)
{
return getByValCellInt(exec, base, index);
}
-EncodedJSValue JIT_OPERATION operationGetByValStringInt(ExecState* exec, JSString* base, int32_t index)
+EncodedJSValue DFG_OPERATION operationGetByValStringInt(ExecState* exec, JSString* base, int32_t index)
{
return getByValCellInt(exec, base, index);
}
-void JIT_OPERATION operationPutByValStrict(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedProperty, EncodedJSValue encodedValue)
+EncodedJSValue DFG_OPERATION operationGetById(ExecState* exec, EncodedJSValue base, StringImpl* uid)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ JSValue baseValue = JSValue::decode(base);
+ PropertySlot slot(baseValue);
+ Identifier ident(vm, uid);
+ return JSValue::encode(baseValue.get(exec, ident, slot));
+}
+
+J_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(operationGetByIdBuildList);
+EncodedJSValue DFG_OPERATION operationGetByIdBuildListWithReturnAddress(ExecState* exec, EncodedJSValue base, StringImpl* uid, ReturnAddressPtr returnAddress)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
+ AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+ JSValue baseValue = JSValue::decode(base);
+ PropertySlot slot(baseValue);
+ JSValue result = baseValue.get(exec, ident, slot);
+
+ if (accessType == static_cast<AccessType>(stubInfo.accessType))
+ buildGetByIDList(exec, baseValue, ident, slot, stubInfo);
+
+ return JSValue::encode(result);
+}
+
+J_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(operationGetByIdOptimize);
+EncodedJSValue DFG_OPERATION operationGetByIdOptimizeWithReturnAddress(ExecState* exec, EncodedJSValue base, StringImpl* uid, ReturnAddressPtr returnAddress)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
+ AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+ JSValue baseValue = JSValue::decode(base);
+ PropertySlot slot(baseValue);
+ JSValue result = baseValue.get(exec, ident, slot);
+
+ if (accessType == static_cast<AccessType>(stubInfo.accessType)) {
+ if (stubInfo.seen)
+ repatchGetByID(exec, baseValue, ident, slot, stubInfo);
+ else
+ stubInfo.seen = true;
+ }
+
+ return JSValue::encode(result);
+}
+
+J_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(operationInOptimize);
+EncodedJSValue DFG_OPERATION operationInOptimizeWithReturnAddress(ExecState* exec, JSCell* base, StringImpl* key, ReturnAddressPtr returnAddress)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ if (!base->isObject()) {
+ vm->throwException(exec, createInvalidParameterError(exec, "in", base));
+ return JSValue::encode(jsUndefined());
+ }
+
+ StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
+ AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+ Identifier ident(vm, key);
+ PropertySlot slot(base);
+ bool result = asObject(base)->getPropertySlot(exec, ident, slot);
+
+ RELEASE_ASSERT(accessType == stubInfo.accessType);
+
+ if (stubInfo.seen)
+ repatchIn(exec, base, ident, result, slot, stubInfo);
+ else
+ stubInfo.seen = true;
+
+ return JSValue::encode(jsBoolean(result));
+}
+
+EncodedJSValue DFG_OPERATION operationIn(ExecState* exec, JSCell* base, StringImpl* key)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ if (!base->isObject()) {
+ vm->throwException(exec, createInvalidParameterError(exec, "in", base));
+ return JSValue::encode(jsUndefined());
+ }
+
+ Identifier ident(vm, key);
+ return JSValue::encode(jsBoolean(asObject(base)->hasProperty(exec, ident)));
+}
+
+EncodedJSValue DFG_OPERATION operationGenericIn(ExecState* exec, JSCell* base, EncodedJSValue key)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ return JSValue::encode(jsBoolean(CommonSlowPaths::opIn(exec, JSValue::decode(key), base)));
+}
+
+EncodedJSValue DFG_OPERATION operationCallCustomGetter(ExecState* exec, JSCell* base, PropertySlot::GetValueFunc function, StringImpl* uid)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+
+ return JSValue::encode(function(exec, asObject(base), ident));
+}
+
+EncodedJSValue DFG_OPERATION operationCallGetter(ExecState* exec, JSCell* base, JSCell* getterSetter)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ return JSValue::encode(callGetter(exec, base, getterSetter));
+}
+
+void DFG_OPERATION operationPutByValStrict(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedProperty, EncodedJSValue encodedValue)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
operationPutByValInternal<true>(exec, encodedBase, encodedProperty, encodedValue);
}
-void JIT_OPERATION operationPutByValNonStrict(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedProperty, EncodedJSValue encodedValue)
+void DFG_OPERATION operationPutByValNonStrict(ExecState* exec, EncodedJSValue encodedBase, EncodedJSValue encodedProperty, EncodedJSValue encodedValue)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
operationPutByValInternal<false>(exec, encodedBase, encodedProperty, encodedValue);
}
-void JIT_OPERATION operationPutByValCellStrict(ExecState* exec, JSCell* cell, EncodedJSValue encodedProperty, EncodedJSValue encodedValue)
+void DFG_OPERATION operationPutByValCellStrict(ExecState* exec, JSCell* cell, EncodedJSValue encodedProperty, EncodedJSValue encodedValue)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
operationPutByValInternal<true>(exec, JSValue::encode(cell), encodedProperty, encodedValue);
}
-void JIT_OPERATION operationPutByValCellNonStrict(ExecState* exec, JSCell* cell, EncodedJSValue encodedProperty, EncodedJSValue encodedValue)
+void DFG_OPERATION operationPutByValCellNonStrict(ExecState* exec, JSCell* cell, EncodedJSValue encodedProperty, EncodedJSValue encodedValue)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
operationPutByValInternal<false>(exec, JSValue::encode(cell), encodedProperty, encodedValue);
}
-void JIT_OPERATION operationPutByValBeyondArrayBoundsStrict(ExecState* exec, JSObject* array, int32_t index, EncodedJSValue encodedValue)
+void DFG_OPERATION operationPutByValBeyondArrayBoundsStrict(ExecState* exec, JSObject* array, int32_t index, EncodedJSValue encodedValue)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
array, exec, Identifier::from(exec, index), JSValue::decode(encodedValue), slot);
}
-void JIT_OPERATION operationPutByValBeyondArrayBoundsNonStrict(ExecState* exec, JSObject* array, int32_t index, EncodedJSValue encodedValue)
+void DFG_OPERATION operationPutByValBeyondArrayBoundsNonStrict(ExecState* exec, JSObject* array, int32_t index, EncodedJSValue encodedValue)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
array, exec, Identifier::from(exec, index), JSValue::decode(encodedValue), slot);
}
-void JIT_OPERATION operationPutDoubleByValBeyondArrayBoundsStrict(ExecState* exec, JSObject* array, int32_t index, double value)
+void DFG_OPERATION operationPutDoubleByValBeyondArrayBoundsStrict(ExecState* exec, JSObject* array, int32_t index, double value)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
array, exec, Identifier::from(exec, index), jsValue, slot);
}
-void JIT_OPERATION operationPutDoubleByValBeyondArrayBoundsNonStrict(ExecState* exec, JSObject* array, int32_t index, double value)
+void DFG_OPERATION operationPutDoubleByValBeyondArrayBoundsNonStrict(ExecState* exec, JSObject* array, int32_t index, double value)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
array, exec, Identifier::from(exec, index), jsValue, slot);
}
-EncodedJSValue JIT_OPERATION operationArrayPush(ExecState* exec, EncodedJSValue encodedValue, JSArray* array)
+EncodedJSValue DFG_OPERATION operationArrayPush(ExecState* exec, EncodedJSValue encodedValue, JSArray* array)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::encode(jsNumber(array->length()));
}
-EncodedJSValue JIT_OPERATION operationArrayPushDouble(ExecState* exec, double value, JSArray* array)
+EncodedJSValue DFG_OPERATION operationArrayPushDouble(ExecState* exec, double value, JSArray* array)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::encode(jsNumber(array->length()));
}
-EncodedJSValue JIT_OPERATION operationArrayPop(ExecState* exec, JSArray* array)
+EncodedJSValue DFG_OPERATION operationArrayPop(ExecState* exec, JSArray* array)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::encode(array->pop(exec));
}
-EncodedJSValue JIT_OPERATION operationArrayPopAndRecoverLength(ExecState* exec, JSArray* array)
+EncodedJSValue DFG_OPERATION operationArrayPopAndRecoverLength(ExecState* exec, JSArray* array)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::encode(array->pop(exec));
}
-EncodedJSValue JIT_OPERATION operationRegExpExec(ExecState* exec, JSCell* base, JSCell* argument)
+EncodedJSValue DFG_OPERATION operationRegExpExec(ExecState* exec, JSCell* base, JSCell* argument)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return JSValue::encode(asRegExpObject(base)->exec(exec, input));
}
-size_t JIT_OPERATION operationRegExpTest(ExecState* exec, JSCell* base, JSCell* argument)
+size_t DFG_OPERATION operationRegExpTest(ExecState* exec, JSCell* base, JSCell* argument)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return asRegExpObject(base)->test(exec, input);
}
-size_t JIT_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
+void DFG_OPERATION operationPutByIdStrict(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ PutPropertySlot slot(true, exec->codeBlock()->putByIdContext());
+ base->methodTable()->put(base, exec, ident, JSValue::decode(encodedValue), slot);
+}
+
+void DFG_OPERATION operationPutByIdNonStrict(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ PutPropertySlot slot(false, exec->codeBlock()->putByIdContext());
+ base->methodTable()->put(base, exec, ident, JSValue::decode(encodedValue), slot);
+}
+
+void DFG_OPERATION operationPutByIdDirectStrict(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ PutPropertySlot slot(true, exec->codeBlock()->putByIdContext());
+ ASSERT(base->isObject());
+ asObject(base)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
+}
+
+void DFG_OPERATION operationPutByIdDirectNonStrict(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ PutPropertySlot slot(false, exec->codeBlock()->putByIdContext());
+ ASSERT(base->isObject());
+ asObject(base)->putDirect(exec->vm(), ident, JSValue::decode(encodedValue), slot);
+}
+
+V_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(operationPutByIdStrictOptimize);
+void DFG_OPERATION operationPutByIdStrictOptimizeWithReturnAddress(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid, ReturnAddressPtr returnAddress)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
+ AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+ JSValue value = JSValue::decode(encodedValue);
+ JSValue baseValue(base);
+ PutPropertySlot slot(true, exec->codeBlock()->putByIdContext());
+
+ baseValue.put(exec, ident, value, slot);
+
+ if (accessType != static_cast<AccessType>(stubInfo.accessType))
+ return;
+
+ if (stubInfo.seen)
+ repatchPutByID(exec, baseValue, ident, slot, stubInfo, NotDirect);
+ else
+ stubInfo.seen = true;
+}
+
+V_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(operationPutByIdNonStrictOptimize);
+void DFG_OPERATION operationPutByIdNonStrictOptimizeWithReturnAddress(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid, ReturnAddressPtr returnAddress)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
+ AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+ JSValue value = JSValue::decode(encodedValue);
+ JSValue baseValue(base);
+ PutPropertySlot slot(false, exec->codeBlock()->putByIdContext());
+
+ baseValue.put(exec, ident, value, slot);
+
+ if (accessType != static_cast<AccessType>(stubInfo.accessType))
+ return;
+
+ if (stubInfo.seen)
+ repatchPutByID(exec, baseValue, ident, slot, stubInfo, NotDirect);
+ else
+ stubInfo.seen = true;
+}
+
+V_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(operationPutByIdDirectStrictOptimize);
+void DFG_OPERATION operationPutByIdDirectStrictOptimizeWithReturnAddress(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid, ReturnAddressPtr returnAddress)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
+ AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+ JSValue value = JSValue::decode(encodedValue);
+ PutPropertySlot slot(true, exec->codeBlock()->putByIdContext());
+
+ ASSERT(base->isObject());
+ asObject(base)->putDirect(exec->vm(), ident, value, slot);
+
+ if (accessType != static_cast<AccessType>(stubInfo.accessType))
+ return;
+
+ if (stubInfo.seen)
+ repatchPutByID(exec, base, ident, slot, stubInfo, Direct);
+ else
+ stubInfo.seen = true;
+}
+
+V_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(operationPutByIdDirectNonStrictOptimize);
+void DFG_OPERATION operationPutByIdDirectNonStrictOptimizeWithReturnAddress(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid, ReturnAddressPtr returnAddress)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
+ AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+ JSValue value = JSValue::decode(encodedValue);
+ PutPropertySlot slot(false, exec->codeBlock()->putByIdContext());
+
+ ASSERT(base->isObject());
+ asObject(base)->putDirect(exec->vm(), ident, value, slot);
+
+ if (accessType != static_cast<AccessType>(stubInfo.accessType))
+ return;
+
+ if (stubInfo.seen)
+ repatchPutByID(exec, base, ident, slot, stubInfo, Direct);
+ else
+ stubInfo.seen = true;
+}
+
+V_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(operationPutByIdStrictBuildList);
+void DFG_OPERATION operationPutByIdStrictBuildListWithReturnAddress(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid, ReturnAddressPtr returnAddress)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
+ AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+ JSValue value = JSValue::decode(encodedValue);
+ JSValue baseValue(base);
+ PutPropertySlot slot(true, exec->codeBlock()->putByIdContext());
+
+ baseValue.put(exec, ident, value, slot);
+
+ if (accessType != static_cast<AccessType>(stubInfo.accessType))
+ return;
+
+ buildPutByIdList(exec, baseValue, ident, slot, stubInfo, NotDirect);
+}
+
+V_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(operationPutByIdNonStrictBuildList);
+void DFG_OPERATION operationPutByIdNonStrictBuildListWithReturnAddress(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid, ReturnAddressPtr returnAddress)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
+ AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+ JSValue value = JSValue::decode(encodedValue);
+ JSValue baseValue(base);
+ PutPropertySlot slot(false, exec->codeBlock()->putByIdContext());
+
+ baseValue.put(exec, ident, value, slot);
+
+ if (accessType != static_cast<AccessType>(stubInfo.accessType))
+ return;
+
+ buildPutByIdList(exec, baseValue, ident, slot, stubInfo, NotDirect);
+}
+
+V_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(operationPutByIdDirectStrictBuildList);
+void DFG_OPERATION operationPutByIdDirectStrictBuildListWithReturnAddress(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid, ReturnAddressPtr returnAddress)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
+ AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+ JSValue value = JSValue::decode(encodedValue);
+ PutPropertySlot slot(true, exec->codeBlock()->putByIdContext());
+
+ ASSERT(base->isObject());
+ asObject(base)->putDirect(exec->vm(), ident, value, slot);
+
+ if (accessType != static_cast<AccessType>(stubInfo.accessType))
+ return;
+
+ buildPutByIdList(exec, base, ident, slot, stubInfo, Direct);
+}
+
+V_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(operationPutByIdDirectNonStrictBuildList);
+void DFG_OPERATION operationPutByIdDirectNonStrictBuildListWithReturnAddress(ExecState* exec, EncodedJSValue encodedValue, JSCell* base, StringImpl* uid, ReturnAddressPtr returnAddress)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ Identifier ident(vm, uid);
+ StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
+ AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+
+ JSValue value = JSValue::decode(encodedValue);
+ PutPropertySlot slot(false, exec->codeBlock()->putByIdContext());
+
+ ASSERT(base->isObject());
+ asObject(base)->putDirect(exec->vm(), ident, value, slot);
+
+ if (accessType != static_cast<AccessType>(stubInfo.accessType))
+ return;
+
+ buildPutByIdList(exec, base, ident, slot, stubInfo, Direct);
+}
+
+size_t DFG_OPERATION operationCompareLess(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return jsLess<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}
-size_t JIT_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
+size_t DFG_OPERATION operationCompareLessEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return jsLessEq<true>(exec, JSValue::decode(encodedOp1), JSValue::decode(encodedOp2));
}
-size_t JIT_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
+size_t DFG_OPERATION operationCompareGreater(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return jsLess<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}
-size_t JIT_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
+size_t DFG_OPERATION operationCompareGreaterEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return jsLessEq<false>(exec, JSValue::decode(encodedOp2), JSValue::decode(encodedOp1));
}
-size_t JIT_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
+size_t DFG_OPERATION operationCompareEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
}
#if USE(JSVALUE64)
-EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
+EncodedJSValue DFG_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#else
-size_t JIT_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
+size_t DFG_OPERATION operationCompareStringEq(ExecState* exec, JSCell* left, JSCell* right)
#endif
{
VM* vm = &exec->vm();
#endif
}
-size_t JIT_OPERATION operationCompareStrictEqCell(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
+size_t DFG_OPERATION operationCompareStrictEqCell(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::strictEqualSlowCaseInline(exec, op1, op2);
}
-size_t JIT_OPERATION operationCompareStrictEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
+size_t DFG_OPERATION operationCompareStrictEq(ExecState* exec, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::strictEqual(exec, src1, src2);
}
-EncodedJSValue JIT_OPERATION operationToPrimitive(ExecState* exec, EncodedJSValue value)
+static void* handleHostCall(ExecState* execCallee, JSValue callee, CodeSpecializationKind kind)
+{
+ ExecState* exec = execCallee->callerFrame();
+ VM* vm = &exec->vm();
+
+ execCallee->setScope(exec->scope());
+ execCallee->setCodeBlock(0);
+
+ if (kind == CodeForCall) {
+ CallData callData;
+ CallType callType = getCallData(callee, callData);
+
+ ASSERT(callType != CallTypeJS);
+
+ if (callType == CallTypeHost) {
+ NativeCallFrameTracer tracer(vm, execCallee);
+ execCallee->setCallee(asObject(callee));
+ vm->hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
+ if (vm->exception())
+ return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
+
+ return reinterpret_cast<void*>(getHostCallReturnValue);
+ }
+
+ ASSERT(callType == CallTypeNone);
+ exec->vm().throwException(exec, createNotAFunctionError(exec, callee));
+ return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
+ }
+
+ ASSERT(kind == CodeForConstruct);
+
+ ConstructData constructData;
+ ConstructType constructType = getConstructData(callee, constructData);
+
+ ASSERT(constructType != ConstructTypeJS);
+
+ if (constructType == ConstructTypeHost) {
+ NativeCallFrameTracer tracer(vm, execCallee);
+ execCallee->setCallee(asObject(callee));
+ vm->hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
+ if (vm->exception())
+ return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
+
+ return reinterpret_cast<void*>(getHostCallReturnValue);
+ }
+
+ ASSERT(constructType == ConstructTypeNone);
+ exec->vm().throwException(exec, createNotAConstructorError(exec, callee));
+ return vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress();
+}
+
+inline char* linkFor(ExecState* execCallee, CodeSpecializationKind kind)
+{
+ ExecState* exec = execCallee->callerFrame();
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ JSValue calleeAsValue = execCallee->calleeAsValue();
+ JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
+ if (!calleeAsFunctionCell)
+ return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
+
+ JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
+ execCallee->setScope(callee->scopeUnchecked());
+ ExecutableBase* executable = callee->executable();
+
+ MacroAssemblerCodePtr codePtr;
+ CodeBlock* codeBlock = 0;
+ if (executable->isHostFunction())
+ codePtr = executable->generatedJITCodeFor(kind)->addressForCall();
+ else {
+ FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
+ JSObject* error = functionExecutable->prepareForExecution(execCallee, callee->scope(), kind);
+ if (error) {
+ vm->throwException(exec, createStackOverflowError(exec));
+ return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
+ }
+ codeBlock = functionExecutable->codeBlockFor(kind);
+ if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
+ codePtr = functionExecutable->generatedJITCodeWithArityCheckFor(kind);
+ else
+ codePtr = functionExecutable->generatedJITCodeFor(kind)->addressForCall();
+ }
+ CallLinkInfo& callLinkInfo = exec->codeBlock()->getCallLinkInfo(execCallee->returnPC());
+ if (!callLinkInfo.seenOnce())
+ callLinkInfo.setSeen();
+ else
+ linkFor(execCallee, callLinkInfo, codeBlock, callee, codePtr, kind);
+ return reinterpret_cast<char*>(codePtr.executableAddress());
+}
+
+char* DFG_OPERATION operationLinkCall(ExecState* execCallee)
+{
+ return linkFor(execCallee, CodeForCall);
+}
+
+char* DFG_OPERATION operationLinkConstruct(ExecState* execCallee)
+{
+ return linkFor(execCallee, CodeForConstruct);
+}
+
+inline char* virtualForWithFunction(ExecState* execCallee, CodeSpecializationKind kind, JSCell*& calleeAsFunctionCell)
+{
+ ExecState* exec = execCallee->callerFrame();
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ JSValue calleeAsValue = execCallee->calleeAsValue();
+ calleeAsFunctionCell = getJSFunction(calleeAsValue);
+ if (UNLIKELY(!calleeAsFunctionCell))
+ return reinterpret_cast<char*>(handleHostCall(execCallee, calleeAsValue, kind));
+
+ JSFunction* function = jsCast<JSFunction*>(calleeAsFunctionCell);
+ execCallee->setScope(function->scopeUnchecked());
+ ExecutableBase* executable = function->executable();
+ if (UNLIKELY(!executable->hasJITCodeFor(kind))) {
+ FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
+ JSObject* error = functionExecutable->prepareForExecution(execCallee, function->scope(), kind);
+ if (error) {
+ exec->vm().throwException(execCallee, error);
+ return reinterpret_cast<char*>(vm->getCTIStub(throwExceptionFromCallSlowPathGenerator).code().executableAddress());
+ }
+ }
+ return reinterpret_cast<char*>(executable->generatedJITCodeWithArityCheckFor(kind).executableAddress());
+}
+
+inline char* virtualFor(ExecState* execCallee, CodeSpecializationKind kind)
+{
+ JSCell* calleeAsFunctionCellIgnored;
+ return virtualForWithFunction(execCallee, kind, calleeAsFunctionCellIgnored);
+}
+
+static bool attemptToOptimizeClosureCall(ExecState* execCallee, JSCell* calleeAsFunctionCell, CallLinkInfo& callLinkInfo)
+{
+ if (!calleeAsFunctionCell)
+ return false;
+
+ JSFunction* callee = jsCast<JSFunction*>(calleeAsFunctionCell);
+ JSFunction* oldCallee = callLinkInfo.callee.get();
+
+ if (!oldCallee
+ || oldCallee->structure() != callee->structure()
+ || oldCallee->executable() != callee->executable())
+ return false;
+
+ ASSERT(callee->executable()->hasJITCodeForCall());
+ MacroAssemblerCodePtr codePtr = callee->executable()->generatedJITCodeForCall()->addressForCall();
+
+ CodeBlock* codeBlock;
+ if (callee->executable()->isHostFunction())
+ codeBlock = 0;
+ else {
+ codeBlock = jsCast<FunctionExecutable*>(callee->executable())->codeBlockForCall();
+ if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
+ return false;
+ }
+
+ linkClosureCall(
+ execCallee, callLinkInfo, codeBlock,
+ callee->structure(), callee->executable(), codePtr);
+
+ return true;
+}
+
+char* DFG_OPERATION operationLinkClosureCall(ExecState* execCallee)
+{
+ JSCell* calleeAsFunctionCell;
+ char* result = virtualForWithFunction(execCallee, CodeForCall, calleeAsFunctionCell);
+ CallLinkInfo& callLinkInfo = execCallee->callerFrame()->codeBlock()->getCallLinkInfo(execCallee->returnPC());
+
+ if (!attemptToOptimizeClosureCall(execCallee, calleeAsFunctionCell, callLinkInfo))
+ linkSlowFor(execCallee, callLinkInfo, CodeForCall);
+
+ return result;
+}
+
+char* DFG_OPERATION operationVirtualCall(ExecState* execCallee)
+{
+ return virtualFor(execCallee, CodeForCall);
+}
+
+char* DFG_OPERATION operationVirtualConstruct(ExecState* execCallee)
+{
+ return virtualFor(execCallee, CodeForConstruct);
+}
+
+EncodedJSValue DFG_OPERATION operationToPrimitive(ExecState* exec, EncodedJSValue value)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::encode(JSValue::decode(value).toPrimitive(exec));
}
-char* JIT_OPERATION operationNewArray(ExecState* exec, Structure* arrayStructure, void* buffer, size_t size)
+char* DFG_OPERATION operationNewArray(ExecState* exec, Structure* arrayStructure, void* buffer, size_t size)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return bitwise_cast<char*>(constructArrayNegativeIndexed(exec, arrayStructure, static_cast<JSValue*>(buffer), size));
}
-char* JIT_OPERATION operationNewEmptyArray(ExecState* exec, Structure* arrayStructure)
+char* DFG_OPERATION operationNewEmptyArray(ExecState* exec, Structure* arrayStructure)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return bitwise_cast<char*>(JSArray::create(*vm, arrayStructure));
}
-char* JIT_OPERATION operationNewArrayWithSize(ExecState* exec, Structure* arrayStructure, int32_t size)
+char* DFG_OPERATION operationNewArrayWithSize(ExecState* exec, Structure* arrayStructure, int32_t size)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return bitwise_cast<char*>(JSArray::create(*vm, arrayStructure, size));
}
-char* JIT_OPERATION operationNewArrayBuffer(ExecState* exec, Structure* arrayStructure, size_t start, size_t size)
+char* DFG_OPERATION operationNewArrayBuffer(ExecState* exec, Structure* arrayStructure, size_t start, size_t size)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return bitwise_cast<char*>(constructArrayNegativeIndexed(exec, arrayStructure, exec->codeBlock()->constantBuffer(start), size));
}
-char* JIT_OPERATION operationNewInt8ArrayWithSize(
+char* DFG_OPERATION operationNewInt8ArrayWithSize(
ExecState* exec, Structure* structure, int32_t length)
{
return newTypedArrayWithSize<JSInt8Array>(exec, structure, length);
}
-char* JIT_OPERATION operationNewInt8ArrayWithOneArgument(
+char* DFG_OPERATION operationNewInt8ArrayWithOneArgument(
ExecState* exec, Structure* structure, EncodedJSValue encodedValue)
{
return newTypedArrayWithOneArgument<JSInt8Array>(exec, structure, encodedValue);
}
-char* JIT_OPERATION operationNewInt16ArrayWithSize(
+char* DFG_OPERATION operationNewInt16ArrayWithSize(
ExecState* exec, Structure* structure, int32_t length)
{
return newTypedArrayWithSize<JSInt16Array>(exec, structure, length);
}
-char* JIT_OPERATION operationNewInt16ArrayWithOneArgument(
+char* DFG_OPERATION operationNewInt16ArrayWithOneArgument(
ExecState* exec, Structure* structure, EncodedJSValue encodedValue)
{
return newTypedArrayWithOneArgument<JSInt16Array>(exec, structure, encodedValue);
}
-char* JIT_OPERATION operationNewInt32ArrayWithSize(
+char* DFG_OPERATION operationNewInt32ArrayWithSize(
ExecState* exec, Structure* structure, int32_t length)
{
return newTypedArrayWithSize<JSInt32Array>(exec, structure, length);
}
-char* JIT_OPERATION operationNewInt32ArrayWithOneArgument(
+char* DFG_OPERATION operationNewInt32ArrayWithOneArgument(
ExecState* exec, Structure* structure, EncodedJSValue encodedValue)
{
return newTypedArrayWithOneArgument<JSInt32Array>(exec, structure, encodedValue);
}
-char* JIT_OPERATION operationNewUint8ArrayWithSize(
+char* DFG_OPERATION operationNewUint8ArrayWithSize(
ExecState* exec, Structure* structure, int32_t length)
{
return newTypedArrayWithSize<JSUint8Array>(exec, structure, length);
}
-char* JIT_OPERATION operationNewUint8ArrayWithOneArgument(
+char* DFG_OPERATION operationNewUint8ArrayWithOneArgument(
ExecState* exec, Structure* structure, EncodedJSValue encodedValue)
{
return newTypedArrayWithOneArgument<JSUint8Array>(exec, structure, encodedValue);
}
-char* JIT_OPERATION operationNewUint8ClampedArrayWithSize(
+char* DFG_OPERATION operationNewUint8ClampedArrayWithSize(
ExecState* exec, Structure* structure, int32_t length)
{
return newTypedArrayWithSize<JSUint8ClampedArray>(exec, structure, length);
}
-char* JIT_OPERATION operationNewUint8ClampedArrayWithOneArgument(
+char* DFG_OPERATION operationNewUint8ClampedArrayWithOneArgument(
ExecState* exec, Structure* structure, EncodedJSValue encodedValue)
{
return newTypedArrayWithOneArgument<JSUint8ClampedArray>(exec, structure, encodedValue);
}
-char* JIT_OPERATION operationNewUint16ArrayWithSize(
+char* DFG_OPERATION operationNewUint16ArrayWithSize(
ExecState* exec, Structure* structure, int32_t length)
{
return newTypedArrayWithSize<JSUint16Array>(exec, structure, length);
}
-char* JIT_OPERATION operationNewUint16ArrayWithOneArgument(
+char* DFG_OPERATION operationNewUint16ArrayWithOneArgument(
ExecState* exec, Structure* structure, EncodedJSValue encodedValue)
{
return newTypedArrayWithOneArgument<JSUint16Array>(exec, structure, encodedValue);
}
-char* JIT_OPERATION operationNewUint32ArrayWithSize(
+char* DFG_OPERATION operationNewUint32ArrayWithSize(
ExecState* exec, Structure* structure, int32_t length)
{
return newTypedArrayWithSize<JSUint32Array>(exec, structure, length);
}
-char* JIT_OPERATION operationNewUint32ArrayWithOneArgument(
+char* DFG_OPERATION operationNewUint32ArrayWithOneArgument(
ExecState* exec, Structure* structure, EncodedJSValue encodedValue)
{
return newTypedArrayWithOneArgument<JSUint32Array>(exec, structure, encodedValue);
}
-char* JIT_OPERATION operationNewFloat32ArrayWithSize(
+char* DFG_OPERATION operationNewFloat32ArrayWithSize(
ExecState* exec, Structure* structure, int32_t length)
{
return newTypedArrayWithSize<JSFloat32Array>(exec, structure, length);
}
-char* JIT_OPERATION operationNewFloat32ArrayWithOneArgument(
+char* DFG_OPERATION operationNewFloat32ArrayWithOneArgument(
ExecState* exec, Structure* structure, EncodedJSValue encodedValue)
{
return newTypedArrayWithOneArgument<JSFloat32Array>(exec, structure, encodedValue);
}
-char* JIT_OPERATION operationNewFloat64ArrayWithSize(
+char* DFG_OPERATION operationNewFloat64ArrayWithSize(
ExecState* exec, Structure* structure, int32_t length)
{
return newTypedArrayWithSize<JSFloat64Array>(exec, structure, length);
}
-char* JIT_OPERATION operationNewFloat64ArrayWithOneArgument(
+char* DFG_OPERATION operationNewFloat64ArrayWithOneArgument(
ExecState* exec, Structure* structure, EncodedJSValue encodedValue)
{
return newTypedArrayWithOneArgument<JSFloat64Array>(exec, structure, encodedValue);
}
-EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
+EncodedJSValue DFG_OPERATION operationNewRegexp(ExecState* exec, void* regexpPtr)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return JSValue::encode(RegExpObject::create(exec->vm(), exec->lexicalGlobalObject(), exec->lexicalGlobalObject()->regExpStructure(), regexp));
}
-JSCell* JIT_OPERATION operationCreateActivation(ExecState* exec)
+JSCell* DFG_OPERATION operationCreateActivation(ExecState* exec)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return activation;
}
-JSCell* JIT_OPERATION operationCreateArguments(ExecState* exec)
+JSCell* DFG_OPERATION operationCreateArguments(ExecState* exec)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return result;
}
-JSCell* JIT_OPERATION operationCreateInlinedArguments(
+JSCell* DFG_OPERATION operationCreateInlinedArguments(
ExecState* exec, InlineCallFrame* inlineCallFrame)
{
VM& vm = exec->vm();
return result;
}
-void JIT_OPERATION operationTearOffArguments(ExecState* exec, JSCell* argumentsCell, JSCell* activationCell)
+void DFG_OPERATION operationTearOffArguments(ExecState* exec, JSCell* argumentsCell, JSCell* activationCell)
{
ASSERT(exec->codeBlock()->usesArguments());
if (activationCell) {
jsCast<Arguments*>(argumentsCell)->tearOff(exec);
}
-void JIT_OPERATION operationTearOffInlinedArguments(
+void DFG_OPERATION operationTearOffInlinedArguments(
ExecState* exec, JSCell* argumentsCell, JSCell* activationCell, InlineCallFrame* inlineCallFrame)
{
ASSERT_UNUSED(activationCell, !activationCell); // Currently, we don't inline functions with activations.
jsCast<Arguments*>(argumentsCell)->tearOff(exec, inlineCallFrame);
}
-EncodedJSValue JIT_OPERATION operationGetArgumentsLength(ExecState* exec, int32_t argumentsRegister)
+EncodedJSValue DFG_OPERATION operationGetArgumentsLength(ExecState* exec, int32_t argumentsRegister)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return JSValue::encode(baseValue.get(exec, ident, slot));
}
-EncodedJSValue JIT_OPERATION operationGetArgumentByVal(ExecState* exec, int32_t argumentsRegister, int32_t index)
+EncodedJSValue DFG_OPERATION operationGetArgumentByVal(ExecState* exec, int32_t argumentsRegister, int32_t index)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return JSValue::encode(argumentsValue.get(exec, index));
}
-EncodedJSValue JIT_OPERATION operationGetInlinedArgumentByVal(
+EncodedJSValue DFG_OPERATION operationGetInlinedArgumentByVal(
ExecState* exec, int32_t argumentsRegister, InlineCallFrame* inlineCallFrame, int32_t index)
{
VM& vm = exec->vm();
return JSValue::encode(argumentsValue.get(exec, index));
}
-JSCell* JIT_OPERATION operationNewFunctionNoCheck(ExecState* exec, JSCell* functionExecutable)
+JSCell* DFG_OPERATION operationNewFunctionNoCheck(ExecState* exec, JSCell* functionExecutable)
{
ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
VM& vm = exec->vm();
return JSFunction::create(exec, static_cast<FunctionExecutable*>(functionExecutable), exec->scope());
}
-EncodedJSValue JIT_OPERATION operationNewFunction(ExecState* exec, JSCell* functionExecutable)
+EncodedJSValue DFG_OPERATION operationNewFunction(ExecState* exec, JSCell* functionExecutable)
{
ASSERT(functionExecutable->inherits(FunctionExecutable::info()));
VM& vm = exec->vm();
return JSValue::encode(JSFunction::create(exec, static_cast<FunctionExecutable*>(functionExecutable), exec->scope()));
}
-JSCell* JIT_OPERATION operationNewFunctionExpression(ExecState* exec, JSCell* functionExecutableAsCell)
+JSCell* DFG_OPERATION operationNewFunctionExpression(ExecState* exec, JSCell* functionExecutableAsCell)
{
ASSERT(functionExecutableAsCell->inherits(FunctionExecutable::info()));
return JSFunction::create(exec, functionExecutable, exec->scope());
}
-size_t JIT_OPERATION operationIsObject(ExecState* exec, EncodedJSValue value)
+size_t DFG_OPERATION operationIsObject(ExecState* exec, EncodedJSValue value)
{
return jsIsObjectType(exec, JSValue::decode(value));
}
-size_t JIT_OPERATION operationIsFunction(EncodedJSValue value)
+size_t DFG_OPERATION operationIsFunction(EncodedJSValue value)
{
return jsIsFunctionType(JSValue::decode(value));
}
-JSCell* JIT_OPERATION operationTypeOf(ExecState* exec, JSCell* value)
+JSCell* DFG_OPERATION operationTypeOf(ExecState* exec, JSCell* value)
{
return jsTypeStringForValue(exec, JSValue(value)).asCell();
}
-char* JIT_OPERATION operationAllocatePropertyStorageWithInitialCapacity(ExecState* exec)
+void DFG_OPERATION operationReallocateStorageAndFinishPut(ExecState* exec, JSObject* base, Structure* structure, PropertyOffset offset, EncodedJSValue value)
+{
+ VM& vm = exec->vm();
+ NativeCallFrameTracer tracer(&vm, exec);
+
+ ASSERT(structure->outOfLineCapacity() > base->structure()->outOfLineCapacity());
+ ASSERT(!vm.heap.storageAllocator().fastPathShouldSucceed(structure->outOfLineCapacity() * sizeof(JSValue)));
+ base->setStructureAndReallocateStorageIfNecessary(vm, structure);
+ base->putDirect(vm, offset, JSValue::decode(value));
+}
+
+char* DFG_OPERATION operationAllocatePropertyStorageWithInitialCapacity(ExecState* exec)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
Butterfly::createUninitialized(vm, 0, 0, initialOutOfLineCapacity, false, 0));
}
-char* JIT_OPERATION operationAllocatePropertyStorage(ExecState* exec, size_t newSize)
+char* DFG_OPERATION operationAllocatePropertyStorage(ExecState* exec, size_t newSize)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
Butterfly::createUninitialized(vm, 0, 0, newSize, false, 0));
}
-char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
+char* DFG_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState* exec, JSObject* object)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return reinterpret_cast<char*>(result);
}
-char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
+char* DFG_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState* exec, JSObject* object, size_t newSize)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return reinterpret_cast<char*>(result);
}
-char* JIT_OPERATION operationEnsureInt32(ExecState* exec, JSCell* cell)
+char* DFG_OPERATION operationEnsureInt32(ExecState* exec, JSCell* cell)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return reinterpret_cast<char*>(asObject(cell)->ensureInt32(vm).data());
}
-char* JIT_OPERATION operationEnsureDouble(ExecState* exec, JSCell* cell)
+char* DFG_OPERATION operationEnsureDouble(ExecState* exec, JSCell* cell)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return reinterpret_cast<char*>(asObject(cell)->ensureDouble(vm).data());
}
-char* JIT_OPERATION operationEnsureContiguous(ExecState* exec, JSCell* cell)
+char* DFG_OPERATION operationEnsureContiguous(ExecState* exec, JSCell* cell)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return reinterpret_cast<char*>(asObject(cell)->ensureContiguous(vm).data());
}
-char* JIT_OPERATION operationRageEnsureContiguous(ExecState* exec, JSCell* cell)
+char* DFG_OPERATION operationRageEnsureContiguous(ExecState* exec, JSCell* cell)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return reinterpret_cast<char*>(asObject(cell)->rageEnsureContiguous(vm).data());
}
-char* JIT_OPERATION operationEnsureArrayStorage(ExecState* exec, JSCell* cell)
+char* DFG_OPERATION operationEnsureArrayStorage(ExecState* exec, JSCell* cell)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return reinterpret_cast<char*>(asObject(cell)->ensureArrayStorage(vm));
}
-StringImpl* JIT_OPERATION operationResolveRope(ExecState* exec, JSString* string)
+StringImpl* DFG_OPERATION operationResolveRope(ExecState* exec, JSString* string)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return string->value(exec).impl();
}
-JSString* JIT_OPERATION operationSingleCharacterString(ExecState* exec, int32_t character)
+JSString* DFG_OPERATION operationSingleCharacterString(ExecState* exec, int32_t character)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return jsSingleCharacterString(exec, static_cast<UChar>(character));
}
-JSCell* JIT_OPERATION operationNewStringObject(ExecState* exec, JSString* string, Structure* structure)
+JSCell* DFG_OPERATION operationNewStringObject(ExecState* exec, JSString* string, Structure* structure)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return StringObject::create(exec, structure, string);
}
-JSCell* JIT_OPERATION operationToStringOnCell(ExecState* exec, JSCell* cell)
+JSCell* DFG_OPERATION operationToStringOnCell(ExecState* exec, JSCell* cell)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return JSValue(cell).toString(exec);
}
-JSCell* JIT_OPERATION operationToString(ExecState* exec, EncodedJSValue value)
+JSCell* DFG_OPERATION operationToString(ExecState* exec, EncodedJSValue value)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return JSValue::decode(value).toString(exec);
}
-JSCell* JIT_OPERATION operationMakeRope2(ExecState* exec, JSString* left, JSString* right)
+JSCell* DFG_OPERATION operationMakeRope2(ExecState* exec, JSString* left, JSString* right)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return JSRopeString::create(vm, left, right);
}
-JSCell* JIT_OPERATION operationMakeRope3(ExecState* exec, JSString* a, JSString* b, JSString* c)
+JSCell* DFG_OPERATION operationMakeRope3(ExecState* exec, JSString* a, JSString* b, JSString* c)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return JSRopeString::create(vm, a, b, c);
}
-char* JIT_OPERATION operationFindSwitchImmTargetForDouble(
+char* DFG_OPERATION operationFindSwitchImmTargetForDouble(
ExecState* exec, EncodedJSValue encodedValue, size_t tableIndex)
{
CodeBlock* codeBlock = exec->codeBlock();
return static_cast<char*>(table.ctiDefault.executableAddress());
}
-char* JIT_OPERATION operationSwitchString(ExecState* exec, size_t tableIndex, JSString* string)
+char* DFG_OPERATION operationSwitchString(ExecState* exec, size_t tableIndex, JSString* string)
{
VM& vm = exec->vm();
NativeCallFrameTracer tracer(&vm, exec);
return static_cast<char*>(exec->codeBlock()->stringSwitchJumpTable(tableIndex).ctiForValue(string->value(exec).impl()).executableAddress());
}
-double JIT_OPERATION operationFModOnInts(int32_t a, int32_t b)
+double DFG_OPERATION operationFModOnInts(int32_t a, int32_t b)
{
return fmod(a, b);
}
-JSCell* JIT_OPERATION operationStringFromCharCode(ExecState* exec, int32_t op1)
+JSCell* DFG_OPERATION operationStringFromCharCode(ExecState* exec, int32_t op1)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSC::stringFromCharCode(exec, op1);
}
-size_t JIT_OPERATION dfgConvertJSValueToInt32(ExecState* exec, EncodedJSValue value)
+DFGHandlerEncoded DFG_OPERATION lookupExceptionHandler(ExecState* exec, uint32_t callIndex)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ JSValue exceptionValue = exec->exception();
+ ASSERT(exceptionValue);
+
+ unsigned vPCIndex = exec->codeBlock()->bytecodeOffsetForCallAtIndex(callIndex);
+ ExceptionHandler handler = genericUnwind(vm, exec, exceptionValue, vPCIndex);
+ ASSERT(handler.catchRoutine);
+ return dfgHandlerEncoded(handler.callFrame, handler.catchRoutine);
+}
+
+DFGHandlerEncoded DFG_OPERATION lookupExceptionHandlerInStub(ExecState* exec, StructureStubInfo* stubInfo)
+{
+ VM* vm = &exec->vm();
+ NativeCallFrameTracer tracer(vm, exec);
+
+ JSValue exceptionValue = exec->exception();
+ ASSERT(exceptionValue);
+
+ CodeOrigin codeOrigin = stubInfo->codeOrigin;
+ while (codeOrigin.inlineCallFrame)
+ codeOrigin = codeOrigin.inlineCallFrame->caller;
+
+ ExceptionHandler handler = genericUnwind(vm, exec, exceptionValue, codeOrigin.bytecodeIndex);
+ ASSERT(handler.catchRoutine);
+ return dfgHandlerEncoded(handler.callFrame, handler.catchRoutine);
+}
+
+size_t DFG_OPERATION dfgConvertJSValueToInt32(ExecState* exec, EncodedJSValue value)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::decode(value).toUInt32(exec);
}
-size_t JIT_OPERATION dfgConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
+size_t DFG_OPERATION dfgConvertJSValueToBoolean(ExecState* exec, EncodedJSValue encodedOp)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
return JSValue::decode(encodedOp).toBoolean(exec);
}
-void JIT_OPERATION debugOperationPrintSpeculationFailure(ExecState* exec, void* debugInfoRaw, void* scratch)
+void DFG_OPERATION debugOperationPrintSpeculationFailure(ExecState* exec, void* debugInfoRaw, void* scratch)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
dataLog("\n");
}
-extern "C" void JIT_OPERATION triggerReoptimizationNow(CodeBlock* codeBlock)
+extern "C" void DFG_OPERATION triggerReoptimizationNow(CodeBlock* codeBlock)
{
// It's sort of preferable that we don't GC while in here. Anyways, doing so wouldn't
// really be profitable.
}
#if ENABLE(FTL_JIT)
-void JIT_OPERATION triggerTierUpNow(ExecState* exec)
+void DFG_OPERATION triggerTierUpNow(ExecState* exec)
{
VM* vm = &exec->vm();
NativeCallFrameTracer tracer(vm, exec);
ToFTLDeferredCompilationCallback::create(codeBlock), vm->ensureWorklist());
}
-char* JIT_OPERATION triggerOSREntryNow(
+char* DFG_OPERATION triggerOSREntryNow(
ExecState* exec, int32_t bytecodeIndex, int32_t streamIndex)
{
VM* vm = &exec->vm();
// FIXME: Make calls work well. Currently they're a pure regression.
// https://bugs.webkit.org/show_bug.cgi?id=113621
-EncodedJSValue JIT_OPERATION operationFTLCall(ExecState* exec)
+EncodedJSValue DFG_OPERATION operationFTLCall(ExecState* exec)
{
ExecState* callerExec = exec->callerFrame();
// FIXME: Make calls work well. Currently they're a pure regression.
// https://bugs.webkit.org/show_bug.cgi?id=113621
-EncodedJSValue JIT_OPERATION operationFTLConstruct(ExecState* exec)
+EncodedJSValue DFG_OPERATION operationFTLConstruct(ExecState* exec)
{
ExecState* callerExec = exec->callerFrame();
#if ENABLE(DFG_JIT)
#include "DFGJITCompiler.h"
-#include "JITOperations.h"
#include "PutKind.h"
namespace JSC {
extern "C" {
-JSCell* JIT_OPERATION operationStringFromCharCode(ExecState*, int32_t) WTF_INTERNAL;
+#if CALLING_CONVENTION_IS_STDCALL
+#define DFG_OPERATION CDECL
+#else
+#define DFG_OPERATION
+#endif
+
+// These typedefs provide typechecking when generating calls out to helper routines;
+// this helps prevent calling a helper routine with the wrong arguments!
+/*
+ Key:
+ V: void
+ J: JSValue
+ P: pointer (void*)
+ C: JSCell*
+ A: JSArray*
+ S: size_t
+ Z: int32_t
+ D: double
+ I: StringImpl*
+*/
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_E)(ExecState*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EA)(ExecState*, JSArray*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EAZ)(ExecState*, JSArray*, int32_t);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EC)(ExecState*, JSCell*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_ECC)(ExecState*, JSCell*, JSCell*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_ECI)(ExecState*, JSCell*, StringImpl*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_ECJ)(ExecState*, JSCell*, EncodedJSValue);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EDA)(ExecState*, double, JSArray*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EI)(ExecState*, StringImpl*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EJ)(ExecState*, EncodedJSValue);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EJA)(ExecState*, EncodedJSValue, JSArray*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EJI)(ExecState*, EncodedJSValue, StringImpl*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EJJ)(ExecState*, EncodedJSValue, EncodedJSValue);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EJssZ)(ExecState*, JSString*, int32_t);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EJP)(ExecState*, EncodedJSValue, void*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EP)(ExecState*, void*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EPP)(ExecState*, void*, void*);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EPS)(ExecState*, void*, size_t);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_ESS)(ExecState*, size_t, size_t);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EZ)(ExecState*, int32_t);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EZIcfZ)(ExecState*, int32_t, InlineCallFrame*, int32_t);
+typedef EncodedJSValue DFG_OPERATION (*J_DFGOperation_EZZ)(ExecState*, int32_t, int32_t);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_E)(ExecState*);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_EZ)(ExecState*, int32_t);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_EC)(ExecState*, JSCell*);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_ECC)(ExecState*, JSCell*, JSCell*);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_EIcf)(ExecState*, InlineCallFrame*);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_EJ)(ExecState*, EncodedJSValue);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_EJssSt)(ExecState*, JSString*, Structure*);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_EJssJss)(ExecState*, JSString*, JSString*);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_EJssJssJss)(ExecState*, JSString*, JSString*, JSString*);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_EOZ)(ExecState*, JSObject*, int32_t);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_ESt)(ExecState*, Structure*);
+typedef JSCell* DFG_OPERATION (*C_DFGOperation_EZ)(ExecState*, int32_t);
+typedef double DFG_OPERATION (*D_DFGOperation_DD)(double, double);
+typedef double DFG_OPERATION (*D_DFGOperation_ZZ)(int32_t, int32_t);
+typedef double DFG_OPERATION (*D_DFGOperation_EJ)(ExecState*, EncodedJSValue);
+typedef int32_t DFG_OPERATION (*Z_DFGOperation_D)(double);
+typedef size_t DFG_OPERATION (*S_DFGOperation_ECC)(ExecState*, JSCell*, JSCell*);
+typedef size_t DFG_OPERATION (*S_DFGOperation_EJ)(ExecState*, EncodedJSValue);
+typedef size_t DFG_OPERATION (*S_DFGOperation_EJJ)(ExecState*, EncodedJSValue, EncodedJSValue);
+typedef size_t DFG_OPERATION (*S_DFGOperation_J)(EncodedJSValue);
+typedef void DFG_OPERATION (*V_DFGOperation_E)(ExecState*);
+typedef void DFG_OPERATION (*V_DFGOperation_EOZD)(ExecState*, JSObject*, int32_t, double);
+typedef void DFG_OPERATION (*V_DFGOperation_EOZJ)(ExecState*, JSObject*, int32_t, EncodedJSValue);
+typedef void DFG_OPERATION (*V_DFGOperation_EC)(ExecState*, JSCell*);
+typedef void DFG_OPERATION (*V_DFGOperation_ECIcf)(ExecState*, JSCell*, InlineCallFrame*);
+typedef void DFG_OPERATION (*V_DFGOperation_ECCIcf)(ExecState*, JSCell*, JSCell*, InlineCallFrame*);
+typedef void DFG_OPERATION (*V_DFGOperation_ECJJ)(ExecState*, JSCell*, EncodedJSValue, EncodedJSValue);
+typedef void DFG_OPERATION (*V_DFGOperation_ECZ)(ExecState*, JSCell*, int32_t);
+typedef void DFG_OPERATION (*V_DFGOperation_ECC)(ExecState*, JSCell*, JSCell*);
+typedef void DFG_OPERATION (*V_DFGOperation_EJCI)(ExecState*, EncodedJSValue, JSCell*, StringImpl*);
+typedef void DFG_OPERATION (*V_DFGOperation_EJJJ)(ExecState*, EncodedJSValue, EncodedJSValue, EncodedJSValue);
+typedef void DFG_OPERATION (*V_DFGOperation_EJPP)(ExecState*, EncodedJSValue, void*, void*);
+typedef void DFG_OPERATION (*V_DFGOperation_EPZJ)(ExecState*, void*, int32_t, EncodedJSValue);
+typedef void DFG_OPERATION (*V_DFGOperation_W)(WatchpointSet*);
+typedef char* DFG_OPERATION (*P_DFGOperation_E)(ExecState*);
+typedef char* DFG_OPERATION (*P_DFGOperation_EC)(ExecState*, JSCell*);
+typedef char* DFG_OPERATION (*P_DFGOperation_EJS)(ExecState*, EncodedJSValue, size_t);
+typedef char* DFG_OPERATION (*P_DFGOperation_EO)(ExecState*, JSObject*);
+typedef char* DFG_OPERATION (*P_DFGOperation_EOS)(ExecState*, JSObject*, size_t);
+typedef char* DFG_OPERATION (*P_DFGOperation_EOZ)(ExecState*, JSObject*, int32_t);
+typedef char* DFG_OPERATION (*P_DFGOperation_EPS)(ExecState*, void*, size_t);
+typedef char* DFG_OPERATION (*P_DFGOperation_ES)(ExecState*, size_t);
+typedef char* DFG_OPERATION (*P_DFGOperation_ESJss)(ExecState*, size_t, JSString*);
+typedef char* DFG_OPERATION (*P_DFGOperation_ESt)(ExecState*, Structure*);
+typedef char* DFG_OPERATION (*P_DFGOperation_EStJ)(ExecState*, Structure*, EncodedJSValue);
+typedef char* DFG_OPERATION (*P_DFGOperation_EStPS)(ExecState*, Structure*, void*, size_t);
+typedef char* DFG_OPERATION (*P_DFGOperation_EStSS)(ExecState*, Structure*, size_t, size_t);
+typedef char* DFG_OPERATION (*P_DFGOperation_EStZ)(ExecState*, Structure*, int32_t);
+typedef char* DFG_OPERATION (*P_DFGOperation_EZZ)(ExecState*, int32_t, int32_t);
+typedef StringImpl* DFG_OPERATION (*I_DFGOperation_EJss)(ExecState*, JSString*);
+typedef JSString* DFG_OPERATION (*Jss_DFGOperation_EZ)(ExecState*, int32_t);
+JSCell* DFG_OPERATION operationStringFromCharCode(ExecState*, int32_t) WTF_INTERNAL;
// These routines are provide callbacks out to C++ implementations of operations too complex to JIT.
-JSCell* JIT_OPERATION operationNewObject(ExecState*, Structure*) WTF_INTERNAL;
-JSCell* JIT_OPERATION operationCreateThis(ExecState*, JSObject* constructor, int32_t inlineCapacity) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationToThis(ExecState*, EncodedJSValue encodedOp1) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationToThisStrict(ExecState*, EncodedJSValue encodedOp1) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationValueAdd(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationValueAddNotNumber(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationGetByVal(ExecState*, EncodedJSValue encodedBase, EncodedJSValue encodedProperty) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationGetByValCell(ExecState*, JSCell*, EncodedJSValue encodedProperty) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationGetByValArrayInt(ExecState*, JSArray*, int32_t) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationGetByValStringInt(ExecState*, JSString*, int32_t) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationToPrimitive(ExecState*, EncodedJSValue) WTF_INTERNAL;
-char* JIT_OPERATION operationNewArray(ExecState*, Structure*, void*, size_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewArrayBuffer(ExecState*, Structure*, size_t, size_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewEmptyArray(ExecState*, Structure*) WTF_INTERNAL;
-char* JIT_OPERATION operationNewArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewInt8ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewInt8ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
-char* JIT_OPERATION operationNewInt16ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewInt16ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
-char* JIT_OPERATION operationNewInt32ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewInt32ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
-char* JIT_OPERATION operationNewUint8ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewUint8ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
-char* JIT_OPERATION operationNewUint8ClampedArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewUint8ClampedArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
-char* JIT_OPERATION operationNewUint16ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewUint16ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
-char* JIT_OPERATION operationNewUint32ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewUint32ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
-char* JIT_OPERATION operationNewFloat32ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewFloat32ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
-char* JIT_OPERATION operationNewFloat64ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
-char* JIT_OPERATION operationNewFloat64ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationNewRegexp(ExecState*, void*) WTF_INTERNAL;
-void JIT_OPERATION operationPutByValStrict(ExecState*, EncodedJSValue encodedBase, EncodedJSValue encodedProperty, EncodedJSValue encodedValue) WTF_INTERNAL;
-void JIT_OPERATION operationPutByValNonStrict(ExecState*, EncodedJSValue encodedBase, EncodedJSValue encodedProperty, EncodedJSValue encodedValue) WTF_INTERNAL;
-void JIT_OPERATION operationPutByValCellStrict(ExecState*, JSCell*, EncodedJSValue encodedProperty, EncodedJSValue encodedValue) WTF_INTERNAL;
-void JIT_OPERATION operationPutByValCellNonStrict(ExecState*, JSCell*, EncodedJSValue encodedProperty, EncodedJSValue encodedValue) WTF_INTERNAL;
-void JIT_OPERATION operationPutByValBeyondArrayBoundsStrict(ExecState*, JSObject*, int32_t index, EncodedJSValue encodedValue) WTF_INTERNAL;
-void JIT_OPERATION operationPutByValBeyondArrayBoundsNonStrict(ExecState*, JSObject*, int32_t index, EncodedJSValue encodedValue) WTF_INTERNAL;
-void JIT_OPERATION operationPutDoubleByValBeyondArrayBoundsStrict(ExecState*, JSObject*, int32_t index, double value) WTF_INTERNAL;
-void JIT_OPERATION operationPutDoubleByValBeyondArrayBoundsNonStrict(ExecState*, JSObject*, int32_t index, double value) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationArrayPush(ExecState*, EncodedJSValue encodedValue, JSArray*) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationArrayPushDouble(ExecState*, double value, JSArray*) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationArrayPop(ExecState*, JSArray*) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationArrayPopAndRecoverLength(ExecState*, JSArray*) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationRegExpExec(ExecState*, JSCell*, JSCell*) WTF_INTERNAL;
+JSCell* DFG_OPERATION operationNewObject(ExecState*, Structure*) WTF_INTERNAL;
+JSCell* DFG_OPERATION operationCreateThis(ExecState*, JSObject* constructor, int32_t inlineCapacity) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationToThis(ExecState*, EncodedJSValue encodedOp1) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationToThisStrict(ExecState*, EncodedJSValue encodedOp1) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationValueAdd(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationValueAddNotNumber(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationGetByVal(ExecState*, EncodedJSValue encodedBase, EncodedJSValue encodedProperty) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationGetByValCell(ExecState*, JSCell*, EncodedJSValue encodedProperty) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationGetByValArrayInt(ExecState*, JSArray*, int32_t) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationGetByValStringInt(ExecState*, JSString*, int32_t) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationGetById(ExecState*, EncodedJSValue, StringImpl*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationGetByIdBuildList(ExecState*, EncodedJSValue, StringImpl*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationGetByIdOptimize(ExecState*, EncodedJSValue, StringImpl*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationInOptimize(ExecState*, JSCell*, StringImpl*);
+EncodedJSValue DFG_OPERATION operationIn(ExecState*, JSCell*, StringImpl*);
+EncodedJSValue DFG_OPERATION operationGenericIn(ExecState*, JSCell*, EncodedJSValue);
+EncodedJSValue DFG_OPERATION operationCallCustomGetter(ExecState*, JSCell*, PropertySlot::GetValueFunc, StringImpl*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationCallGetter(ExecState*, JSCell*, JSCell*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationToPrimitive(ExecState*, EncodedJSValue) WTF_INTERNAL;
+char* DFG_OPERATION operationNewArray(ExecState*, Structure*, void*, size_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewArrayBuffer(ExecState*, Structure*, size_t, size_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewEmptyArray(ExecState*, Structure*) WTF_INTERNAL;
+char* DFG_OPERATION operationNewArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewInt8ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewInt8ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
+char* DFG_OPERATION operationNewInt16ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewInt16ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
+char* DFG_OPERATION operationNewInt32ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewInt32ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
+char* DFG_OPERATION operationNewUint8ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewUint8ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
+char* DFG_OPERATION operationNewUint8ClampedArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewUint8ClampedArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
+char* DFG_OPERATION operationNewUint16ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewUint16ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
+char* DFG_OPERATION operationNewUint32ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewUint32ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
+char* DFG_OPERATION operationNewFloat32ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewFloat32ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
+char* DFG_OPERATION operationNewFloat64ArrayWithSize(ExecState*, Structure*, int32_t) WTF_INTERNAL;
+char* DFG_OPERATION operationNewFloat64ArrayWithOneArgument(ExecState*, Structure*, EncodedJSValue) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationNewRegexp(ExecState*, void*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByValStrict(ExecState*, EncodedJSValue encodedBase, EncodedJSValue encodedProperty, EncodedJSValue encodedValue) WTF_INTERNAL;
+void DFG_OPERATION operationPutByValNonStrict(ExecState*, EncodedJSValue encodedBase, EncodedJSValue encodedProperty, EncodedJSValue encodedValue) WTF_INTERNAL;
+void DFG_OPERATION operationPutByValCellStrict(ExecState*, JSCell*, EncodedJSValue encodedProperty, EncodedJSValue encodedValue) WTF_INTERNAL;
+void DFG_OPERATION operationPutByValCellNonStrict(ExecState*, JSCell*, EncodedJSValue encodedProperty, EncodedJSValue encodedValue) WTF_INTERNAL;
+void DFG_OPERATION operationPutByValBeyondArrayBoundsStrict(ExecState*, JSObject*, int32_t index, EncodedJSValue encodedValue) WTF_INTERNAL;
+void DFG_OPERATION operationPutByValBeyondArrayBoundsNonStrict(ExecState*, JSObject*, int32_t index, EncodedJSValue encodedValue) WTF_INTERNAL;
+void DFG_OPERATION operationPutDoubleByValBeyondArrayBoundsStrict(ExecState*, JSObject*, int32_t index, double value) WTF_INTERNAL;
+void DFG_OPERATION operationPutDoubleByValBeyondArrayBoundsNonStrict(ExecState*, JSObject*, int32_t index, double value) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationArrayPush(ExecState*, EncodedJSValue encodedValue, JSArray*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationArrayPushDouble(ExecState*, double value, JSArray*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationArrayPop(ExecState*, JSArray*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationArrayPopAndRecoverLength(ExecState*, JSArray*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationRegExpExec(ExecState*, JSCell*, JSCell*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdStrict(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdNonStrict(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdDirectStrict(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdDirectNonStrict(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdStrictOptimize(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdNonStrictOptimize(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdDirectStrictOptimize(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdDirectNonStrictOptimize(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdStrictBuildList(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdNonStrictBuildList(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdDirectStrictBuildList(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
+void DFG_OPERATION operationPutByIdDirectNonStrictBuildList(ExecState*, EncodedJSValue encodedValue, JSCell* base, StringImpl*) WTF_INTERNAL;
// These comparisons return a boolean within a size_t such that the value is zero extended to fill the register.
-size_t JIT_OPERATION operationRegExpTest(ExecState*, JSCell*, JSCell*) WTF_INTERNAL;
-size_t JIT_OPERATION operationCompareLess(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
-size_t JIT_OPERATION operationCompareLessEq(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
-size_t JIT_OPERATION operationCompareGreater(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
-size_t JIT_OPERATION operationCompareGreaterEq(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
-size_t JIT_OPERATION operationCompareEq(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
+size_t DFG_OPERATION operationRegExpTest(ExecState*, JSCell*, JSCell*) WTF_INTERNAL;
+size_t DFG_OPERATION operationCompareLess(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
+size_t DFG_OPERATION operationCompareLessEq(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
+size_t DFG_OPERATION operationCompareGreater(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
+size_t DFG_OPERATION operationCompareGreaterEq(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
+size_t DFG_OPERATION operationCompareEq(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
#if USE(JSVALUE64)
-EncodedJSValue JIT_OPERATION operationCompareStringEq(ExecState*, JSCell* left, JSCell* right) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationCompareStringEq(ExecState*, JSCell* left, JSCell* right) WTF_INTERNAL;
#else
-size_t JIT_OPERATION operationCompareStringEq(ExecState*, JSCell* left, JSCell* right) WTF_INTERNAL;
+size_t DFG_OPERATION operationCompareStringEq(ExecState*, JSCell* left, JSCell* right) WTF_INTERNAL;
#endif
-size_t JIT_OPERATION operationCompareStrictEqCell(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
-size_t JIT_OPERATION operationCompareStrictEq(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
-JSCell* JIT_OPERATION operationCreateActivation(ExecState*) WTF_INTERNAL;
-JSCell* JIT_OPERATION operationCreateArguments(ExecState*) WTF_INTERNAL;
-JSCell* JIT_OPERATION operationCreateInlinedArguments(ExecState*, InlineCallFrame*) WTF_INTERNAL;
-void JIT_OPERATION operationTearOffArguments(ExecState*, JSCell*, JSCell*) WTF_INTERNAL;
-void JIT_OPERATION operationTearOffInlinedArguments(ExecState*, JSCell*, JSCell*, InlineCallFrame*) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationGetArgumentsLength(ExecState*, int32_t) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationGetInlinedArgumentByVal(ExecState*, int32_t, InlineCallFrame*, int32_t) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationGetArgumentByVal(ExecState*, int32_t, int32_t) WTF_INTERNAL;
-JSCell* JIT_OPERATION operationNewFunctionNoCheck(ExecState*, JSCell*) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationNewFunction(ExecState*, JSCell*) WTF_INTERNAL;
-JSCell* JIT_OPERATION operationNewFunctionExpression(ExecState*, JSCell*) WTF_INTERNAL;
-double JIT_OPERATION operationFModOnInts(int32_t, int32_t) WTF_INTERNAL;
-size_t JIT_OPERATION operationIsObject(ExecState*, EncodedJSValue) WTF_INTERNAL;
-size_t JIT_OPERATION operationIsFunction(EncodedJSValue) WTF_INTERNAL;
-JSCell* JIT_OPERATION operationTypeOf(ExecState*, JSCell*) WTF_INTERNAL;
-char* JIT_OPERATION operationAllocatePropertyStorageWithInitialCapacity(ExecState*) WTF_INTERNAL;
-char* JIT_OPERATION operationAllocatePropertyStorage(ExecState*, size_t newSize) WTF_INTERNAL;
-char* JIT_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState*, JSObject*) WTF_INTERNAL;
-char* JIT_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState*, JSObject*, size_t newSize) WTF_INTERNAL;
-char* JIT_OPERATION operationEnsureInt32(ExecState*, JSCell*);
-char* JIT_OPERATION operationEnsureDouble(ExecState*, JSCell*);
-char* JIT_OPERATION operationEnsureContiguous(ExecState*, JSCell*);
-char* JIT_OPERATION operationRageEnsureContiguous(ExecState*, JSCell*);
-char* JIT_OPERATION operationEnsureArrayStorage(ExecState*, JSCell*);
-StringImpl* JIT_OPERATION operationResolveRope(ExecState*, JSString*);
-JSString* JIT_OPERATION operationSingleCharacterString(ExecState*, int32_t);
-
-JSCell* JIT_OPERATION operationNewStringObject(ExecState*, JSString*, Structure*);
-JSCell* JIT_OPERATION operationToStringOnCell(ExecState*, JSCell*);
-JSCell* JIT_OPERATION operationToString(ExecState*, EncodedJSValue);
-JSCell* JIT_OPERATION operationMakeRope2(ExecState*, JSString*, JSString*);
-JSCell* JIT_OPERATION operationMakeRope3(ExecState*, JSString*, JSString*, JSString*);
-char* JIT_OPERATION operationFindSwitchImmTargetForDouble(ExecState*, EncodedJSValue, size_t tableIndex);
-char* JIT_OPERATION operationSwitchString(ExecState*, size_t tableIndex, JSString*);
+size_t DFG_OPERATION operationCompareStrictEqCell(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
+size_t DFG_OPERATION operationCompareStrictEq(ExecState*, EncodedJSValue encodedOp1, EncodedJSValue encodedOp2) WTF_INTERNAL;
+char* DFG_OPERATION operationVirtualCall(ExecState*) WTF_INTERNAL;
+char* DFG_OPERATION operationLinkCall(ExecState*) WTF_INTERNAL;
+char* DFG_OPERATION operationLinkClosureCall(ExecState*) WTF_INTERNAL;
+char* DFG_OPERATION operationVirtualConstruct(ExecState*) WTF_INTERNAL;
+char* DFG_OPERATION operationLinkConstruct(ExecState*) WTF_INTERNAL;
+JSCell* DFG_OPERATION operationCreateActivation(ExecState*) WTF_INTERNAL;
+JSCell* DFG_OPERATION operationCreateArguments(ExecState*) WTF_INTERNAL;
+JSCell* DFG_OPERATION operationCreateInlinedArguments(ExecState*, InlineCallFrame*) WTF_INTERNAL;
+void DFG_OPERATION operationTearOffArguments(ExecState*, JSCell*, JSCell*) WTF_INTERNAL;
+void DFG_OPERATION operationTearOffInlinedArguments(ExecState*, JSCell*, JSCell*, InlineCallFrame*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationGetArgumentsLength(ExecState*, int32_t) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationGetInlinedArgumentByVal(ExecState*, int32_t, InlineCallFrame*, int32_t) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationGetArgumentByVal(ExecState*, int32_t, int32_t) WTF_INTERNAL;
+JSCell* DFG_OPERATION operationNewFunctionNoCheck(ExecState*, JSCell*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationNewFunction(ExecState*, JSCell*) WTF_INTERNAL;
+JSCell* DFG_OPERATION operationNewFunctionExpression(ExecState*, JSCell*) WTF_INTERNAL;
+double DFG_OPERATION operationFModOnInts(int32_t, int32_t) WTF_INTERNAL;
+size_t DFG_OPERATION operationIsObject(ExecState*, EncodedJSValue) WTF_INTERNAL;
+size_t DFG_OPERATION operationIsFunction(EncodedJSValue) WTF_INTERNAL;
+JSCell* DFG_OPERATION operationTypeOf(ExecState*, JSCell*) WTF_INTERNAL;
+void DFG_OPERATION operationReallocateStorageAndFinishPut(ExecState*, JSObject*, Structure*, PropertyOffset, EncodedJSValue) WTF_INTERNAL;
+char* DFG_OPERATION operationAllocatePropertyStorageWithInitialCapacity(ExecState*) WTF_INTERNAL;
+char* DFG_OPERATION operationAllocatePropertyStorage(ExecState*, size_t newSize) WTF_INTERNAL;
+char* DFG_OPERATION operationReallocateButterflyToHavePropertyStorageWithInitialCapacity(ExecState*, JSObject*) WTF_INTERNAL;
+char* DFG_OPERATION operationReallocateButterflyToGrowPropertyStorage(ExecState*, JSObject*, size_t newSize) WTF_INTERNAL;
+char* DFG_OPERATION operationEnsureInt32(ExecState*, JSCell*);
+char* DFG_OPERATION operationEnsureDouble(ExecState*, JSCell*);
+char* DFG_OPERATION operationEnsureContiguous(ExecState*, JSCell*);
+char* DFG_OPERATION operationRageEnsureContiguous(ExecState*, JSCell*);
+char* DFG_OPERATION operationEnsureArrayStorage(ExecState*, JSCell*);
+StringImpl* DFG_OPERATION operationResolveRope(ExecState*, JSString*);
+JSString* DFG_OPERATION operationSingleCharacterString(ExecState*, int32_t);
+
+JSCell* DFG_OPERATION operationNewStringObject(ExecState*, JSString*, Structure*);
+JSCell* DFG_OPERATION operationToStringOnCell(ExecState*, JSCell*);
+JSCell* DFG_OPERATION operationToString(ExecState*, EncodedJSValue);
+JSCell* DFG_OPERATION operationMakeRope2(ExecState*, JSString*, JSString*);
+JSCell* DFG_OPERATION operationMakeRope3(ExecState*, JSString*, JSString*, JSString*);
+char* DFG_OPERATION operationFindSwitchImmTargetForDouble(ExecState*, EncodedJSValue, size_t tableIndex);
+char* DFG_OPERATION operationSwitchString(ExecState*, size_t tableIndex, JSString*);
#if ENABLE(FTL_JIT)
// FIXME: Make calls work well. Currently they're a pure regression.
// https://bugs.webkit.org/show_bug.cgi?id=113621
-EncodedJSValue JIT_OPERATION operationFTLCall(ExecState*) WTF_INTERNAL;
-EncodedJSValue JIT_OPERATION operationFTLConstruct(ExecState*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationFTLCall(ExecState*) WTF_INTERNAL;
+EncodedJSValue DFG_OPERATION operationFTLConstruct(ExecState*) WTF_INTERNAL;
#endif // ENABLE(FTL_JIT)
+// This method is used to look up an exception handler, keyed by faultLocation, which is
+// the return location from one of the calls out to one of the helper operations above.
+
+// According to C++ rules, a type used for the return signature of function with C linkage (i.e.
+// 'extern "C"') needs to be POD; hence putting any constructors into it could cause either compiler
+// warnings, or worse, a change in the ABI used to return these types.
+struct DFGHandler {
+ union Union {
+ struct Struct {
+ ExecState* exec;
+ void* handler;
+ } s;
+ uint64_t encoded;
+ } u;
+};
+
+inline DFGHandler createDFGHandler(ExecState* exec, void* handler)
+{
+ DFGHandler result;
+ result.u.s.exec = exec;
+ result.u.s.handler = handler;
+ return result;
+}
+
+#if CPU(X86_64)
+typedef DFGHandler DFGHandlerEncoded;
+inline DFGHandlerEncoded dfgHandlerEncoded(ExecState* exec, void* handler)
+{
+ return createDFGHandler(exec, handler);
+}
+#else
+typedef uint64_t DFGHandlerEncoded;
+inline DFGHandlerEncoded dfgHandlerEncoded(ExecState* exec, void* handler)
+{
+ COMPILE_ASSERT(sizeof(DFGHandler::Union) == sizeof(uint64_t), DFGHandler_Union_is_64bit);
+ return createDFGHandler(exec, handler).u.encoded;
+}
+#endif
+DFGHandlerEncoded DFG_OPERATION lookupExceptionHandler(ExecState*, uint32_t) WTF_INTERNAL;
+DFGHandlerEncoded DFG_OPERATION lookupExceptionHandlerInStub(ExecState*, StructureStubInfo*) WTF_INTERNAL;
+
// These operations implement the implicitly called ToInt32 and ToBoolean conversions from ES5.
// This conversion returns an int32_t within a size_t such that the value is zero extended to fill the register.
-size_t JIT_OPERATION dfgConvertJSValueToInt32(ExecState*, EncodedJSValue) WTF_INTERNAL;
-size_t JIT_OPERATION dfgConvertJSValueToBoolean(ExecState*, EncodedJSValue) WTF_INTERNAL;
+size_t DFG_OPERATION dfgConvertJSValueToInt32(ExecState*, EncodedJSValue) WTF_INTERNAL;
+size_t DFG_OPERATION dfgConvertJSValueToBoolean(ExecState*, EncodedJSValue) WTF_INTERNAL;
-void JIT_OPERATION debugOperationPrintSpeculationFailure(ExecState*, void*, void*) WTF_INTERNAL;
+void DFG_OPERATION debugOperationPrintSpeculationFailure(ExecState*, void*, void*) WTF_INTERNAL;
-void JIT_OPERATION triggerReoptimizationNow(CodeBlock*) WTF_INTERNAL;
+void DFG_OPERATION triggerReoptimizationNow(CodeBlock*) WTF_INTERNAL;
#if ENABLE(FTL_JIT)
-void JIT_OPERATION triggerTierUpNow(ExecState*) WTF_INTERNAL;
-char* JIT_OPERATION triggerOSREntryNow(ExecState*, int32_t bytecodeIndex, int32_t streamIndex) WTF_INTERNAL;
+void DFG_OPERATION triggerTierUpNow(ExecState*) WTF_INTERNAL;
+char* DFG_OPERATION triggerOSREntryNow(ExecState*, int32_t bytecodeIndex, int32_t streamIndex) WTF_INTERNAL;
#endif // ENABLE(FTL_JIT)
} // extern "C"
-inline P_JITOperation_EStZ operationNewTypedArrayWithSizeForType(TypedArrayType type)
+inline P_DFGOperation_EStZ operationNewTypedArrayWithSizeForType(TypedArrayType type)
{
switch (type) {
case TypeInt8:
return 0;
}
-inline P_JITOperation_EStJ operationNewTypedArrayWithOneArgumentForType(TypedArrayType type)
+inline P_DFGOperation_EStJ operationNewTypedArrayWithOneArgumentForType(TypedArrayType type)
{
switch (type) {
case TypeInt8:
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef RegisterSet_h
-#define RegisterSet_h
+#ifndef DFGRegisterSet_h
+#define DFGRegisterSet_h
#include <wtf/Platform.h>
-#if ENABLE(JIT)
+#if ENABLE(DFG_JIT)
#include "FPRInfo.h"
#include "GPRInfo.h"
#include <wtf/Bitmap.h>
-namespace JSC {
+namespace JSC { namespace DFG {
static const unsigned totalNumberOfRegisters =
GPRInfo::numberOfRegisters + FPRInfo::numberOfRegisters;
RegisterSetPOD m_set;
};
-} // namespace JSC
+} } // namespace JSC::DFG
-#else // ENABLE(JIT) -> so if JIT is disabled
+#else // ENABLE(DFG_JIT) -> so if DFG is disabled
-namespace JSC {
+namespace JSC { namespace DFG {
// Define RegisterSetPOD to something that is a POD, but is otherwise useless,
// to make it easier to refer to this type in code that may be compiled when
struct RegisterSetPOD { };
-} // namespace JSC
+} } // namespace JSC::DFG
-#endif // ENABLE(JIT)
+#endif // ENABLE(DFG_JIT)
-#endif // RegisterSet_h
+#endif // DFGRegisterSet_h
--- /dev/null
+/*
+ * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "DFGRepatch.h"
+
+#if ENABLE(DFG_JIT)
+
+#include "CCallHelpers.h"
+#include "CallFrameInlines.h"
+#include "DFGScratchRegisterAllocator.h"
+#include "DFGSpeculativeJIT.h"
+#include "DFGThunks.h"
+#include "GCAwareJITStubRoutine.h"
+#include "LinkBuffer.h"
+#include "Operations.h"
+#include "PolymorphicPutByIdList.h"
+#include "RepatchBuffer.h"
+#include "StructureRareDataInlines.h"
+#include <wtf/StringPrintStream.h>
+
+namespace JSC { namespace DFG {
+
+static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
+{
+ RepatchBuffer repatchBuffer(codeblock);
+ repatchBuffer.relink(call, newCalleeFunction);
+}
+
+static void repatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset, const FunctionPtr &slowPathFunction, bool compact)
+{
+ RepatchBuffer repatchBuffer(codeBlock);
+
+ // Only optimize once!
+ repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);
+
+ // Patch the structure check & the offset of the load.
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
+ repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.dfg.deltaCallToStorageLoad), isOutOfLineOffset(offset));
+#if USE(JSVALUE64)
+ if (compact)
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
+ else
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
+#elif USE(JSVALUE32_64)
+ if (compact) {
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+ } else {
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+ }
+#endif
+}
+
+static void addStructureTransitionCheck(
+ JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
+ MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
+{
+ if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
+ structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
+#if DFG_ENABLE(JIT_ASSERT)
+ // If we execute this code, the object must have the structure we expect. Assert
+ // this in debug modes.
+ jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
+ MacroAssembler::Jump ok = jit.branchPtr(
+ MacroAssembler::Equal,
+ MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
+ MacroAssembler::TrustedImmPtr(structure));
+ jit.breakpoint();
+ ok.link(&jit);
+#endif
+ return;
+ }
+
+ jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
+ failureCases.append(
+ jit.branchPtr(
+ MacroAssembler::NotEqual,
+ MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
+ MacroAssembler::TrustedImmPtr(structure)));
+}
+
+static void addStructureTransitionCheck(
+ JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
+ MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
+{
+ if (prototype.isNull())
+ return;
+
+ ASSERT(prototype.isCell());
+
+ addStructureTransitionCheck(
+ prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
+ failureCases, scratchGPR);
+}
+
+static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
+{
+ if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
+ repatchBuffer.replaceWithJump(
+ RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(
+ stubInfo.callReturnLocation.dataLabelPtrAtOffset(
+ -(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall)),
+ CodeLocationLabel(target));
+ return;
+ }
+
+ repatchBuffer.relink(
+ stubInfo.callReturnLocation.jumpAtOffset(
+ stubInfo.patch.dfg.deltaCallToStructCheck),
+ CodeLocationLabel(target));
+}
+
+static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
+{
+ if (needToRestoreScratch) {
+ stubJit.pop(scratchGPR);
+
+ success = stubJit.jump();
+
+ // link failure cases here, so we can pop scratchGPR, and then jump back.
+ failureCases.link(&stubJit);
+
+ stubJit.pop(scratchGPR);
+
+ fail = stubJit.jump();
+ return;
+ }
+
+ success = stubJit.jump();
+}
+
+static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
+{
+ patchBuffer.link(success, successLabel);
+
+ if (needToRestoreScratch) {
+ patchBuffer.link(fail, slowCaseBegin);
+ return;
+ }
+
+ // link failure cases directly back to normal path
+ patchBuffer.link(failureCases, slowCaseBegin);
+}
+
+static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
+{
+ linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+}
+
+static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
+{
+ VM* vm = &exec->vm();
+
+ MacroAssembler stubJit;
+
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
+#endif
+ GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+ GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
+ bool needToRestoreScratch = false;
+
+ if (scratchGPR == InvalidGPRReg) {
+#if USE(JSVALUE64)
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
+#else
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
+#endif
+ stubJit.push(scratchGPR);
+ needToRestoreScratch = true;
+ }
+
+ MacroAssembler::JumpList failureCases;
+
+ failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure)));
+
+ Structure* currStructure = structure;
+ WriteBarrier<Structure>* it = chain->head();
+ JSObject* protoObject = 0;
+ for (unsigned i = 0; i < count; ++i, ++it) {
+ protoObject = asObject(currStructure->prototypeForLookup(exec));
+ addStructureTransitionCheck(
+ protoObject, protoObject->structure(), exec->codeBlock(), stubInfo, stubJit,
+ failureCases, scratchGPR);
+ currStructure = it->get();
+ }
+
+ if (isInlineOffset(offset)) {
+#if USE(JSVALUE64)
+ stubJit.load64(protoObject->locationForOffset(offset), resultGPR);
+#elif USE(JSVALUE32_64)
+ stubJit.move(MacroAssembler::TrustedImmPtr(protoObject->locationForOffset(offset)), resultGPR);
+ stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+ stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+#endif
+ } else {
+ stubJit.loadPtr(protoObject->butterflyAddress(), resultGPR);
+#if USE(JSVALUE64)
+ stubJit.load64(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>)), resultGPR);
+#elif USE(JSVALUE32_64)
+ stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+ stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+#endif
+ }
+
+ MacroAssembler::Jump success, fail;
+
+ emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
+
+ LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
+
+ linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
+
+ stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
+ patchBuffer,
+ ("DFG prototype chain access stub for %s, return point %p",
+ toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
+}
+
+static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ // FIXME: Write a test that proves we need to check for recursion here just
+ // like the interpreter does, then add a check for recursion.
+
+ CodeBlock* codeBlock = exec->codeBlock();
+ VM* vm = &exec->vm();
+
+ if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
+#endif
+ GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+ GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
+ bool needToRestoreScratch = false;
+
+ MacroAssembler stubJit;
+
+ if (scratchGPR == InvalidGPRReg) {
+#if USE(JSVALUE64)
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
+#else
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
+#endif
+ stubJit.push(scratchGPR);
+ needToRestoreScratch = true;
+ }
+
+ MacroAssembler::JumpList failureCases;
+
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSCell::structureOffset()), scratchGPR);
+ stubJit.load8(MacroAssembler::Address(scratchGPR, Structure::indexingTypeOffset()), scratchGPR);
+ failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
+ failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
+
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+ stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
+ failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
+
+#if USE(JSVALUE64)
+ stubJit.or64(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
+#elif USE(JSVALUE32_64)
+ stubJit.move(scratchGPR, resultGPR);
+ stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
+#endif
+
+ MacroAssembler::Jump success, fail;
+
+ emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
+
+ LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
+
+ linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
+
+ stubInfo.stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
+ patchBuffer,
+ ("DFG GetById array length stub for %s, return point %p",
+ toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+ stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
+
+ RepatchBuffer repatchBuffer(codeBlock);
+ replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
+ repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);
+
+ return true;
+ }
+
+ // FIXME: should support length access for String.
+
+ // FIXME: Cache property access for immediates.
+ if (!baseValue.isCell())
+ return false;
+ JSCell* baseCell = baseValue.asCell();
+ Structure* structure = baseCell->structure();
+ if (!slot.isCacheable())
+ return false;
+ if (!structure->propertyAccessesAreCacheable())
+ return false;
+
+ // Optimize self access.
+ if (slot.slotBase() == baseValue) {
+ if (!slot.isCacheableValue()
+ || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
+ repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
+ return true;
+ }
+
+ repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
+ stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
+ return true;
+ }
+
+ if (structure->isDictionary())
+ return false;
+
+ // FIXME: optimize getters and setters
+ if (!slot.isCacheableValue())
+ return false;
+
+ PropertyOffset offset = slot.cachedOffset();
+ size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
+ if (count == InvalidPrototypeChain)
+ return false;
+
+ StructureChain* prototypeChain = structure->prototypeChain(exec);
+
+ generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);
+
+ RepatchBuffer repatchBuffer(codeBlock);
+ replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
+ repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdBuildList);
+
+ stubInfo.initGetByIdChain(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, count, true);
+ return true;
+}
+
+void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
+ if (!cached)
+ repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
+}
+
+static bool getPolymorphicStructureList(
+ VM* vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
+ PolymorphicAccessStructureList*& polymorphicStructureList, int& listIndex,
+ CodeLocationLabel& slowCase)
+{
+ slowCase = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
+
+ if (stubInfo.accessType == access_unset) {
+ RELEASE_ASSERT(!stubInfo.stubRoutine);
+ polymorphicStructureList = new PolymorphicAccessStructureList();
+ stubInfo.initGetByIdSelfList(polymorphicStructureList, 0, false);
+ listIndex = 0;
+ } else if (stubInfo.accessType == access_get_by_id_self) {
+ RELEASE_ASSERT(!stubInfo.stubRoutine);
+ polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), JITStubRoutine::createSelfManagedRoutine(slowCase), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
+ stubInfo.initGetByIdSelfList(polymorphicStructureList, 1, true);
+ listIndex = 1;
+ } else if (stubInfo.accessType == access_get_by_id_chain) {
+ RELEASE_ASSERT(!!stubInfo.stubRoutine);
+ slowCase = CodeLocationLabel(stubInfo.stubRoutine->code().code());
+ polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
+ stubInfo.stubRoutine.clear();
+ stubInfo.initGetByIdSelfList(polymorphicStructureList, 1, false);
+ listIndex = 1;
+ } else {
+ RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_self_list);
+ polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
+ listIndex = stubInfo.u.getByIdSelfList.listSize;
+ slowCase = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
+ }
+
+ if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
+ return false;
+
+ RELEASE_ASSERT(listIndex < POLYMORPHIC_LIST_CACHE_SIZE);
+ return true;
+}
+
+static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
+{
+ RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_self_list);
+ RepatchBuffer repatchBuffer(codeBlock);
+ if (stubInfo.u.getByIdSelfList.didSelfPatching) {
+ repatchBuffer.relink(
+ stubInfo.callReturnLocation.jumpAtOffset(
+ stubInfo.patch.dfg.deltaCallToStructCheck),
+ CodeLocationLabel(stubRoutine->code().code()));
+ return;
+ }
+
+ replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
+}
+
+static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ if (!baseValue.isCell()
+ || !slot.isCacheable()
+ || !baseValue.asCell()->structure()->propertyAccessesAreCacheable())
+ return false;
+
+ CodeBlock* codeBlock = exec->codeBlock();
+ VM* vm = &exec->vm();
+ JSCell* baseCell = baseValue.asCell();
+ Structure* structure = baseCell->structure();
+
+ if (slot.slotBase() == baseValue) {
+ if (!stubInfo.patch.dfg.registersFlushed) {
+ // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
+ // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
+ // if registers were not flushed, don't do non-Value caching.
+ if (!slot.isCacheableValue())
+ return false;
+ }
+
+ PolymorphicAccessStructureList* polymorphicStructureList;
+ int listIndex;
+ CodeLocationLabel slowCase;
+
+ if (!getPolymorphicStructureList(vm, codeBlock, stubInfo, polymorphicStructureList, listIndex, slowCase))
+ return false;
+
+ stubInfo.u.getByIdSelfList.listSize++;
+
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
+#endif
+ GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+ GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
+
+ CCallHelpers stubJit(vm, codeBlock);
+
+ MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));
+
+ // The strategy we use for stubs is as follows:
+ // 1) Call DFG helper that calls the getter.
+ // 2) Check if there was an exception, and if there was, call yet another
+ // helper.
+
+ bool isDirect = false;
+ MacroAssembler::Call operationCall;
+ MacroAssembler::Call handlerCall;
+ FunctionPtr operationFunction;
+ MacroAssembler::Jump success;
+
+ if (slot.isCacheableGetter() || slot.isCacheableCustom()) {
+ if (slot.isCacheableGetter()) {
+ ASSERT(scratchGPR != InvalidGPRReg);
+ ASSERT(baseGPR != scratchGPR);
+ if (isInlineOffset(slot.cachedOffset())) {
+#if USE(JSVALUE64)
+ stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
+#else
+ stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
+#endif
+ } else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+#if USE(JSVALUE64)
+ stubJit.load64(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
+#else
+ stubJit.load32(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
+#endif
+ }
+ stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
+ operationFunction = operationCallGetter;
+ } else {
+ stubJit.setupArgumentsWithExecState(
+ baseGPR,
+ MacroAssembler::TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()),
+ MacroAssembler::TrustedImmPtr(ident.impl()));
+ operationFunction = operationCallCustomGetter;
+ }
+
+ // Need to make sure that whenever this call is made in the future, we remember the
+ // place that we made it from. It just so happens to be the place that we are at
+ // right now!
+ stubJit.store32(
+ MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
+ CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
+
+ operationCall = stubJit.call();
+#if USE(JSVALUE64)
+ stubJit.move(GPRInfo::returnValueGPR, resultGPR);
+#else
+ stubJit.setupResults(resultGPR, resultTagGPR);
+#endif
+ success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
+
+ stubJit.setupArgumentsWithExecState(
+ MacroAssembler::TrustedImmPtr(&stubInfo));
+ handlerCall = stubJit.call();
+ stubJit.jump(GPRInfo::returnValueGPR2);
+ } else {
+ if (isInlineOffset(slot.cachedOffset())) {
+#if USE(JSVALUE64)
+ stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
+#else
+ if (baseGPR == resultTagGPR) {
+ stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+ stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+ } else {
+ stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+ stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+ }
+#endif
+ } else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
+#if USE(JSVALUE64)
+ stubJit.load64(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
+#else
+ stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+ stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+#endif
+ }
+ success = stubJit.jump();
+ isDirect = true;
+ }
+
+ LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
+
+ patchBuffer.link(wrongStruct, slowCase);
+ patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
+ if (!isDirect) {
+ patchBuffer.link(operationCall, operationFunction);
+ patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
+ }
+
+ RefPtr<JITStubRoutine> stubRoutine =
+ createJITStubRoutine(
+ FINALIZE_DFG_CODE(
+ patchBuffer,
+ ("DFG GetById polymorphic list access for %s, return point %p",
+ toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+ stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
+ *vm,
+ codeBlock->ownerExecutable(),
+ slot.isCacheableGetter() || slot.isCacheableCustom());
+
+ polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
+
+ patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
+ return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
+ }
+
+ if (baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching() || !slot.isCacheableValue())
+ return false;
+
+ PropertyOffset offset = slot.cachedOffset();
+ size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), ident, offset);
+ if (count == InvalidPrototypeChain)
+ return false;
+
+ StructureChain* prototypeChain = structure->prototypeChain(exec);
+
+ PolymorphicAccessStructureList* polymorphicStructureList;
+ int listIndex;
+ CodeLocationLabel slowCase;
+ if (!getPolymorphicStructureList(vm, codeBlock, stubInfo, polymorphicStructureList, listIndex, slowCase))
+ return false;
+
+ stubInfo.u.getByIdProtoList.listSize++;
+
+ RefPtr<JITStubRoutine> stubRoutine;
+
+ generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), slowCase, stubRoutine);
+
+ polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
+
+ patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
+
+ return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
+}
+
+void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
+ if (!dontChangeCall)
+ repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
+}
+
+static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
+{
+ if (slot.isStrictMode()) {
+ if (putKind == Direct)
+ return operationPutByIdDirectStrict;
+ return operationPutByIdStrict;
+ }
+ if (putKind == Direct)
+ return operationPutByIdDirectNonStrict;
+ return operationPutByIdNonStrict;
+}
+
+static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
+{
+ if (slot.isStrictMode()) {
+ if (putKind == Direct)
+ return operationPutByIdDirectStrictBuildList;
+ return operationPutByIdStrictBuildList;
+ }
+ if (putKind == Direct)
+ return operationPutByIdDirectNonStrictBuildList;
+ return operationPutByIdNonStrictBuildList;
+}
+
+static void emitPutReplaceStub(
+ ExecState* exec,
+ JSValue,
+ const Identifier&,
+ const PutPropertySlot& slot,
+ StructureStubInfo& stubInfo,
+ PutKind,
+ Structure* structure,
+ CodeLocationLabel failureLabel,
+ RefPtr<JITStubRoutine>& stubRoutine)
+{
+ VM* vm = &exec->vm();
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
+#endif
+ GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+ GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
+ bool needToRestoreScratch = false;
+#if ENABLE(WRITE_BARRIER_PROFILING)
+ GPRReg scratchGPR2;
+ const bool writeBarrierNeeded = true;
+#else
+ const bool writeBarrierNeeded = false;
+#endif
+
+ MacroAssembler stubJit;
+
+ if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || isOutOfLineOffset(slot.cachedOffset()))) {
+#if USE(JSVALUE64)
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
+#else
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR);
+#endif
+ needToRestoreScratch = true;
+ stubJit.push(scratchGPR);
+ }
+
+ MacroAssembler::Jump badStructure = stubJit.branchPtr(
+ MacroAssembler::NotEqual,
+ MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
+ MacroAssembler::TrustedImmPtr(structure));
+
+#if ENABLE(WRITE_BARRIER_PROFILING)
+#if USE(JSVALUE64)
+ scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
+#else
+ scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR, scratchGPR);
+#endif
+ stubJit.push(scratchGPR2);
+ SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
+ stubJit.pop(scratchGPR2);
+#endif
+
+#if USE(JSVALUE64)
+ if (isInlineOffset(slot.cachedOffset()))
+ stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
+ else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+ stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
+ }
+#elif USE(JSVALUE32_64)
+ if (isInlineOffset(slot.cachedOffset())) {
+ stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ } else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+ stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ }
+#endif
+
+ MacroAssembler::Jump success;
+ MacroAssembler::Jump failure;
+
+ if (needToRestoreScratch) {
+ stubJit.pop(scratchGPR);
+ success = stubJit.jump();
+
+ badStructure.link(&stubJit);
+ stubJit.pop(scratchGPR);
+ failure = stubJit.jump();
+ } else {
+ success = stubJit.jump();
+ failure = badStructure;
+ }
+
+ LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
+ patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
+ patchBuffer.link(failure, failureLabel);
+
+ stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
+ patchBuffer,
+ ("DFG PutById replace stub for %s, return point %p",
+ toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+ stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
+}
+
+static void emitPutTransitionStub(
+ ExecState* exec,
+ JSValue,
+ const Identifier&,
+ const PutPropertySlot& slot,
+ StructureStubInfo& stubInfo,
+ PutKind putKind,
+ Structure* structure,
+ Structure* oldStructure,
+ StructureChain* prototypeChain,
+ CodeLocationLabel failureLabel,
+ RefPtr<JITStubRoutine>& stubRoutine)
+{
+ VM* vm = &exec->vm();
+
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
+#endif
+ GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+
+ ScratchRegisterAllocator allocator(stubInfo.patch.dfg.usedRegisters);
+ allocator.lock(baseGPR);
+#if USE(JSVALUE32_64)
+ allocator.lock(valueTagGPR);
+#endif
+ allocator.lock(valueGPR);
+
+ CCallHelpers stubJit(vm);
+
+ GPRReg scratchGPR1 = allocator.allocateScratchGPR();
+ ASSERT(scratchGPR1 != baseGPR);
+ ASSERT(scratchGPR1 != valueGPR);
+
+ bool needSecondScratch = false;
+ bool needThirdScratch = false;
+#if ENABLE(WRITE_BARRIER_PROFILING)
+ needSecondScratch = true;
+#endif
+ if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
+ && oldStructure->outOfLineCapacity()) {
+ needSecondScratch = true;
+ needThirdScratch = true;
+ }
+
+ GPRReg scratchGPR2;
+ if (needSecondScratch) {
+ scratchGPR2 = allocator.allocateScratchGPR();
+ ASSERT(scratchGPR2 != baseGPR);
+ ASSERT(scratchGPR2 != valueGPR);
+ ASSERT(scratchGPR2 != scratchGPR1);
+ } else
+ scratchGPR2 = InvalidGPRReg;
+ GPRReg scratchGPR3;
+ if (needThirdScratch) {
+ scratchGPR3 = allocator.allocateScratchGPR();
+ ASSERT(scratchGPR3 != baseGPR);
+ ASSERT(scratchGPR3 != valueGPR);
+ ASSERT(scratchGPR3 != scratchGPR1);
+ ASSERT(scratchGPR3 != scratchGPR2);
+ } else
+ scratchGPR3 = InvalidGPRReg;
+
+ allocator.preserveReusedRegistersByPushing(stubJit);
+
+ MacroAssembler::JumpList failureCases;
+
+ ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
+
+ failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));
+
+ addStructureTransitionCheck(
+ oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
+ scratchGPR1);
+
+ if (putKind == NotDirect) {
+ for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
+ addStructureTransitionCheck(
+ (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
+ scratchGPR1);
+ }
+ }
+
+#if ENABLE(WRITE_BARRIER_PROFILING)
+ ASSERT(needSecondScratch);
+ ASSERT(scratchGPR2 != InvalidGPRReg);
+ // Must always emit this write barrier as the structure transition itself requires it
+ SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR1, scratchGPR2, WriteBarrierForPropertyAccess);
+#endif
+
+ MacroAssembler::JumpList slowPath;
+
+ bool scratchGPR1HasStorage = false;
+
+ if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
+ size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
+ CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
+
+ if (!oldStructure->outOfLineCapacity()) {
+ stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
+ slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
+ stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
+ stubJit.negPtr(scratchGPR1);
+ stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
+ stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
+ } else {
+ size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
+ ASSERT(newSize > oldSize);
+
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
+ stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
+ slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
+ stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
+ stubJit.negPtr(scratchGPR1);
+ stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
+ stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
+ // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
+ for (ptrdiff_t offset = 0; offset < static_cast<ptrdiff_t>(oldSize); offset += sizeof(void*)) {
+ stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
+ stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -(offset + sizeof(JSValue) + sizeof(void*))));
+ }
+ }
+
+ stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
+ scratchGPR1HasStorage = true;
+ }
+
+ stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
+#if USE(JSVALUE64)
+ if (isInlineOffset(slot.cachedOffset()))
+ stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
+ else {
+ if (!scratchGPR1HasStorage)
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
+ stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
+ }
+#elif USE(JSVALUE32_64)
+ if (isInlineOffset(slot.cachedOffset())) {
+ stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ } else {
+ if (!scratchGPR1HasStorage)
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
+ stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ }
+#endif
+
+ MacroAssembler::Jump success;
+ MacroAssembler::Jump failure;
+
+ if (allocator.didReuseRegisters()) {
+ allocator.restoreReusedRegistersByPopping(stubJit);
+ success = stubJit.jump();
+
+ failureCases.link(&stubJit);
+ allocator.restoreReusedRegistersByPopping(stubJit);
+ failure = stubJit.jump();
+ } else
+ success = stubJit.jump();
+
+ MacroAssembler::Call operationCall;
+ MacroAssembler::Jump successInSlowPath;
+
+ if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
+ slowPath.link(&stubJit);
+
+ allocator.restoreReusedRegistersByPopping(stubJit);
+ ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSize());
+ allocator.preserveUsedRegistersToScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
+#if USE(JSVALUE64)
+ stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
+#else
+ stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
+#endif
+ operationCall = stubJit.call();
+ allocator.restoreUsedRegistersFromScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
+ successInSlowPath = stubJit.jump();
+ }
+
+ LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
+ patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
+ if (allocator.didReuseRegisters())
+ patchBuffer.link(failure, failureLabel);
+ else
+ patchBuffer.link(failureCases, failureLabel);
+ if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
+ patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
+ patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
+ }
+
+ stubRoutine =
+ createJITStubRoutine(
+ FINALIZE_DFG_CODE(
+ patchBuffer,
+ ("DFG PutById %stransition stub (%p -> %p) for %s, return point %p",
+ structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
+ oldStructure, structure,
+ toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+ stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
+ *vm,
+ exec->codeBlock()->ownerExecutable(),
+ structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
+ structure);
+}
+
+static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
+{
+ CodeBlock* codeBlock = exec->codeBlock();
+ VM* vm = &exec->vm();
+
+ if (!baseValue.isCell())
+ return false;
+ JSCell* baseCell = baseValue.asCell();
+ Structure* structure = baseCell->structure();
+ Structure* oldStructure = structure->previousID();
+
+ if (!slot.isCacheable())
+ return false;
+ if (structure->isUncacheableDictionary())
+ return false;
+
+ // Optimize self access.
+ if (slot.base() == baseValue) {
+ if (slot.type() == PutPropertySlot::NewProperty) {
+ if (structure->isDictionary())
+ return false;
+
+ // Skip optimizing the case where we need a realloc, if we don't have
+ // enough registers to make it happen.
+ if (GPRInfo::numberOfRegisters < 6
+ && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
+ && oldStructure->outOfLineCapacity())
+ return false;
+
+ // Skip optimizing the case where we need realloc, and the structure has
+ // indexing storage.
+ if (oldStructure->couldHaveIndexingHeader())
+ return false;
+
+ if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
+ return false;
+
+ StructureChain* prototypeChain = structure->prototypeChain(exec);
+
+ emitPutTransitionStub(
+ exec, baseValue, ident, slot, stubInfo, putKind,
+ structure, oldStructure, prototypeChain,
+ stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase),
+ stubInfo.stubRoutine);
+
+ RepatchBuffer repatchBuffer(codeBlock);
+ repatchBuffer.relink(
+ stubInfo.callReturnLocation.jumpAtOffset(
+ stubInfo.patch.dfg.deltaCallToStructCheck),
+ CodeLocationLabel(stubInfo.stubRoutine->code().code()));
+ repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
+
+ stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
+
+ return true;
+ }
+
+ repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
+ stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
+ return true;
+ }
+
+ return false;
+}
+
+void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
+{
+ bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
+ if (!cached)
+ repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
+}
+
+static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
+{
+ CodeBlock* codeBlock = exec->codeBlock();
+ VM* vm = &exec->vm();
+
+ if (!baseValue.isCell())
+ return false;
+ JSCell* baseCell = baseValue.asCell();
+ Structure* structure = baseCell->structure();
+ Structure* oldStructure = structure->previousID();
+
+ if (!slot.isCacheable())
+ return false;
+ if (structure->isUncacheableDictionary())
+ return false;
+
+ // Optimize self access.
+ if (slot.base() == baseValue) {
+ PolymorphicPutByIdList* list;
+ RefPtr<JITStubRoutine> stubRoutine;
+
+ if (slot.type() == PutPropertySlot::NewProperty) {
+ if (structure->isDictionary())
+ return false;
+
+ // Skip optimizing the case where we need a realloc, if we don't have
+ // enough registers to make it happen.
+ if (GPRInfo::numberOfRegisters < 6
+ && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
+ && oldStructure->outOfLineCapacity())
+ return false;
+
+ // Skip optimizing the case where we need realloc, and the structure has
+ // indexing storage.
+ if (oldStructure->couldHaveIndexingHeader())
+ return false;
+
+ if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
+ return false;
+
+ StructureChain* prototypeChain = structure->prototypeChain(exec);
+
+ // We're now committed to creating the stub. Mogrify the meta-data accordingly.
+ list = PolymorphicPutByIdList::from(
+ putKind, stubInfo,
+ stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+
+ emitPutTransitionStub(
+ exec, baseValue, propertyName, slot, stubInfo, putKind,
+ structure, oldStructure, prototypeChain,
+ CodeLocationLabel(list->currentSlowPathTarget()),
+ stubRoutine);
+
+ list->addAccess(
+ PutByIdAccess::transition(
+ *vm, codeBlock->ownerExecutable(),
+ oldStructure, structure, prototypeChain,
+ stubRoutine));
+ } else {
+ // We're now committed to creating the stub. Mogrify the meta-data accordingly.
+ list = PolymorphicPutByIdList::from(
+ putKind, stubInfo,
+ stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+
+ emitPutReplaceStub(
+ exec, baseValue, propertyName, slot, stubInfo, putKind,
+ structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
+
+ list->addAccess(
+ PutByIdAccess::replace(
+ *vm, codeBlock->ownerExecutable(),
+ structure, stubRoutine));
+ }
+
+ RepatchBuffer repatchBuffer(codeBlock);
+ repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine->code().code()));
+
+ if (list->isFull())
+ repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
+
+ return true;
+ }
+
+ return false;
+}
+
+void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
+{
+ bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
+ if (!cached)
+ repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
+}
+
+static bool tryRepatchIn(
+ ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
+ const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ if (!base->structure()->propertyAccessesAreCacheable())
+ return false;
+
+ if (wasFound) {
+ if (!slot.isCacheable())
+ return false;
+ }
+
+ CodeBlock* codeBlock = exec->codeBlock();
+ VM* vm = &exec->vm();
+ Structure* structure = base->structure();
+
+ PropertyOffset offsetIgnored;
+ size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
+ if (count == InvalidPrototypeChain)
+ return false;
+
+ PolymorphicAccessStructureList* polymorphicStructureList;
+ int listIndex;
+
+ CodeLocationLabel successLabel = stubInfo.hotPathBegin;
+ CodeLocationLabel slowCaseLabel;
+
+ if (stubInfo.accessType == access_unset) {
+ polymorphicStructureList = new PolymorphicAccessStructureList();
+ stubInfo.initInList(polymorphicStructureList, 0);
+ slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
+ stubInfo.patch.dfg.deltaCallToSlowCase);
+ listIndex = 0;
+ } else {
+ RELEASE_ASSERT(stubInfo.accessType == access_in_list);
+ polymorphicStructureList = stubInfo.u.inList.structureList;
+ listIndex = stubInfo.u.inList.listSize;
+ slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
+
+ if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
+ return false;
+ }
+
+ StructureChain* chain = structure->prototypeChain(exec);
+ RefPtr<JITStubRoutine> stubRoutine;
+
+ {
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+ GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+ GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
+
+ CCallHelpers stubJit(vm);
+
+ bool needToRestoreScratch;
+ if (scratchGPR == InvalidGPRReg) {
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
+ stubJit.push(scratchGPR);
+ needToRestoreScratch = true;
+ } else
+ needToRestoreScratch = false;
+
+ MacroAssembler::JumpList failureCases;
+ failureCases.append(stubJit.branchPtr(
+ MacroAssembler::NotEqual,
+ MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
+ MacroAssembler::TrustedImmPtr(structure)));
+
+ Structure* currStructure = structure;
+ WriteBarrier<Structure>* it = chain->head();
+ for (unsigned i = 0; i < count; ++i, ++it) {
+ JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
+ addStructureTransitionCheck(
+ prototype, prototype->structure(), exec->codeBlock(), stubInfo, stubJit,
+ failureCases, scratchGPR);
+ currStructure = it->get();
+ }
+
+#if USE(JSVALUE64)
+ stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
+#else
+ stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
+#endif
+
+ MacroAssembler::Jump success, fail;
+
+ emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
+
+ LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
+
+ linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
+
+ stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
+ patchBuffer,
+ ("DFG In (found = %s) stub for %s, return point %p",
+ wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
+ successLabel.executableAddress()));
+ }
+
+ polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
+ stubInfo.u.inList.listSize++;
+
+ RepatchBuffer repatchBuffer(codeBlock);
+ repatchBuffer.relink(stubInfo.hotPathBegin.jumpAtOffset(0), CodeLocationLabel(stubRoutine->code().code()));
+
+ return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
+}
+
+void repatchIn(
+ ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
+ const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo))
+ return;
+ repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
+}
+
+static void linkSlowFor(RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
+{
+ if (kind == CodeForCall) {
+ repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualCallThunkGenerator).code());
+ return;
+ }
+ ASSERT(kind == CodeForConstruct);
+ repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualConstructThunkGenerator).code());
+}
+
+void linkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
+{
+ ASSERT(!callLinkInfo.stub);
+
+ // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
+ if (calleeCodeBlock)
+ calleeCodeBlock->m_shouldAlwaysBeInlined = false;
+
+ CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
+ VM* vm = callerCodeBlock->vm();
+
+ RepatchBuffer repatchBuffer(callerCodeBlock);
+
+ ASSERT(!callLinkInfo.isLinked());
+ callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
+ callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
+ repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
+
+ if (calleeCodeBlock)
+ calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
+
+ if (kind == CodeForCall) {
+ repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGenerator).code());
+ return;
+ }
+
+ ASSERT(kind == CodeForConstruct);
+ linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct);
+}
+
+void linkSlowFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
+{
+ CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
+ VM* vm = callerCodeBlock->vm();
+
+ RepatchBuffer repatchBuffer(callerCodeBlock);
+
+ linkSlowFor(repatchBuffer, vm, callLinkInfo, kind);
+}
+
+void linkClosureCall(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr)
+{
+ ASSERT(!callLinkInfo.stub);
+
+ CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
+ VM* vm = callerCodeBlock->vm();
+
+ GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
+
+ CCallHelpers stubJit(vm, callerCodeBlock);
+
+ CCallHelpers::JumpList slowPath;
+
+#if USE(JSVALUE64)
+ slowPath.append(
+ stubJit.branchTest64(
+ CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
+#else
+ // We would have already checked that the callee is a cell.
+#endif
+
+ slowPath.append(
+ stubJit.branchPtr(
+ CCallHelpers::NotEqual,
+ CCallHelpers::Address(calleeGPR, JSCell::structureOffset()),
+ CCallHelpers::TrustedImmPtr(structure)));
+
+ slowPath.append(
+ stubJit.branchPtr(
+ CCallHelpers::NotEqual,
+ CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
+ CCallHelpers::TrustedImmPtr(executable)));
+
+ stubJit.loadPtr(
+ CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
+ GPRInfo::returnValueGPR);
+
+#if USE(JSVALUE64)
+ stubJit.store64(
+ GPRInfo::returnValueGPR,
+ CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain)));
+#else
+ stubJit.storePtr(
+ GPRInfo::returnValueGPR,
+ CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(
+ CCallHelpers::TrustedImm32(JSValue::CellTag),
+ CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+#endif
+
+ JITCompiler::Call call = stubJit.nearCall();
+ JITCompiler::Jump done = stubJit.jump();
+
+ slowPath.link(&stubJit);
+ stubJit.move(calleeGPR, GPRInfo::nonArgGPR0);
+#if USE(JSVALUE32_64)
+ stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::nonArgGPR1);
+#endif
+ stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::nonArgGPR2);
+ stubJit.restoreReturnAddressBeforeReturn(GPRInfo::nonArgGPR2);
+ JITCompiler::Jump slow = stubJit.jump();
+
+ LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
+
+ patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
+ patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
+ patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualCallThunkGenerator).code()));
+
+ RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
+ FINALIZE_DFG_CODE(
+ patchBuffer,
+ ("DFG closure call stub for %s, return point %p, target %p (%s)",
+ toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
+ codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
+ *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
+
+ RepatchBuffer repatchBuffer(callerCodeBlock);
+
+ repatchBuffer.replaceWithJump(
+ RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
+ CodeLocationLabel(stubRoutine->code().code()));
+ linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall);
+
+ callLinkInfo.stub = stubRoutine.release();
+
+ ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
+}
+
+void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
+{
+ repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
+ CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
+ if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
+ repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
+ RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
+ MacroAssembler::Address(
+ static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
+ JSCell::structureOffset()),
+ reinterpret_cast<void*>(unusedPointer));
+ }
+ repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
+#if USE(JSVALUE64)
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
+#else
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
+#endif
+ repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+}
+
+void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
+{
+ V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
+ V_DFGOperation_EJCI optimizedFunction;
+ if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
+ optimizedFunction = operationPutByIdStrictOptimize;
+ else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
+ optimizedFunction = operationPutByIdNonStrictOptimize;
+ else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
+ optimizedFunction = operationPutByIdDirectStrictOptimize;
+ else {
+ ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
+ optimizedFunction = operationPutByIdDirectNonStrictOptimize;
+ }
+ repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
+ CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
+ if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
+ repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
+ RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
+ MacroAssembler::Address(
+ static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
+ JSCell::structureOffset()),
+ reinterpret_cast<void*>(unusedPointer));
+ }
+ repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
+#if USE(JSVALUE64)
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
+#else
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
+#endif
+ repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+}
+
+void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
+{
+ repatchBuffer.relink(stubInfo.hotPathBegin.jumpAtOffset(0), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+}
+
+} } // namespace JSC::DFG
+
+#endif
+/*
+ * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "DFGRepatch.h"
+
+#if ENABLE(DFG_JIT)
+
+#include "CCallHelpers.h"
+#include "CallFrameInlines.h"
+#include "ScratchRegisterAllocator.h"
+#include "DFGSpeculativeJIT.h"
+#include "DFGThunks.h"
+#include "GCAwareJITStubRoutine.h"
+#include "LinkBuffer.h"
+#include "Operations.h"
+#include "PolymorphicPutByIdList.h"
+#include "RepatchBuffer.h"
+#include "StructureRareDataInlines.h"
+#include <wtf/StringPrintStream.h>
+
+namespace JSC { namespace DFG {
+
+static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
+{
+ RepatchBuffer repatchBuffer(codeblock);
+ repatchBuffer.relink(call, newCalleeFunction);
+}
+
+static void repatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset, const FunctionPtr &slowPathFunction, bool compact)
+{
+ RepatchBuffer repatchBuffer(codeBlock);
+
+ // Only optimize once!
+ repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);
+
+ // Patch the structure check & the offset of the load.
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
+ repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.dfg.deltaCallToStorageLoad), isOutOfLineOffset(offset));
+#if USE(JSVALUE64)
+ if (compact)
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
+ else
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
+#elif USE(JSVALUE32_64)
+ if (compact) {
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+ } else {
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+ }
+#endif
+}
+
+// Emits (or elides) a guard that 'object' still has the given Structure.
+// If the object currently has that structure and the structure's transition
+// watchpoint set is still valid, no runtime check is emitted: instead a
+// watchpoint is registered on the stub via stubInfo, so the stub is torn down
+// if the structure ever transitions. Otherwise an explicit structure compare
+// is emitted and the failing branch appended to 'failureCases'.
+// 'scratchGPR' is clobbered in both paths that emit code.
+static void addStructureTransitionCheck(
+ JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
+ MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
+{
+ if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
+ // Watchpoint path: the check is enforced by invalidation, not by code.
+ structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
+#if DFG_ENABLE(JIT_ASSERT)
+ // If we execute this code, the object must have the structure we expect. Assert
+ // this in debug modes.
+ jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
+ MacroAssembler::Jump ok = jit.branchPtr(
+ MacroAssembler::Equal,
+ MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
+ MacroAssembler::TrustedImmPtr(structure));
+ jit.breakpoint();
+ ok.link(&jit);
+#endif
+ return;
+ }
+
+ // Watchpoint-based elision was not possible; emit the dynamic check.
+ jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
+ failureCases.append(
+ jit.branchPtr(
+ MacroAssembler::NotEqual,
+ MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
+ MacroAssembler::TrustedImmPtr(structure)));
+}
+
+// Convenience overload taking a prototype as a JSValue. A null prototype
+// marks the end of the prototype chain and needs no check; otherwise the
+// value must be a cell and we guard on its current structure.
+static void addStructureTransitionCheck(
+ JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
+ MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
+{
+ if (prototype.isNull())
+ return;
+
+ ASSERT(prototype.isCell());
+
+ addStructureTransitionCheck(
+ prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
+ failureCases, scratchGPR);
+}
+
+// Redirects the access site's inline structure check to 'target'.
+// On architectures where the patchable branchPtr can be overwritten in place
+// with a jump, we do that (faster: skips the inline compare entirely).
+// Otherwise we fall back to relinking the structure-check jump recorded in
+// the stub info's deltaCallToStructCheck offset.
+static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
+{
+ if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
+ repatchBuffer.replaceWithJump(
+ RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(
+ stubInfo.callReturnLocation.dataLabelPtrAtOffset(
+ -(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall)),
+ CodeLocationLabel(target));
+ return;
+ }
+
+ repatchBuffer.relink(
+ stubInfo.callReturnLocation.jumpAtOffset(
+ stubInfo.patch.dfg.deltaCallToStructCheck),
+ CodeLocationLabel(target));
+}
+
+// Emits the stub epilogue. If a scratch register was pushed on entry
+// (needToRestoreScratch), it must be popped on both the success and the
+// failure path: the failure cases are linked here so they flow through a
+// pop before jumping out via 'fail'. If no scratch was pushed, only the
+// success jump is emitted and 'failureCases' is left for the caller to link.
+static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
+{
+ if (needToRestoreScratch) {
+ stubJit.pop(scratchGPR);
+
+ success = stubJit.jump();
+
+ // link failure cases here, so we can pop scratchGPR, and then jump back.
+ failureCases.link(&stubJit);
+
+ stubJit.pop(scratchGPR);
+
+ fail = stubJit.jump();
+ return;
+ }
+
+ success = stubJit.jump();
+}
+
+// Links the jumps produced by emitRestoreScratch. When a scratch register was
+// pushed, the failure cases were already linked (inside emitRestoreScratch) to
+// the pop-then-fail path, so only 'fail' needs a target here; otherwise the
+// raw failure cases are linked directly to the slow case.
+static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
+{
+ patchBuffer.link(success, successLabel);
+
+ if (needToRestoreScratch) {
+ patchBuffer.link(fail, slowCaseBegin);
+ return;
+ }
+
+ // link failure cases directly back to normal path
+ patchBuffer.link(failureCases, slowCaseBegin);
+}
+
+// Convenience overload: derives the success and slow-case labels from the
+// stub info's recorded deltas relative to the call return location.
+static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
+{
+ linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+}
+
+// Generates a GetById stub for a property found on the prototype chain:
+// guard the base object's structure, guard each of 'count' prototypes along
+// 'chain' (via watchpoints where possible), then load the cached property at
+// 'offset' from the final prototype object. On success jumps to successLabel,
+// on any guard failure to slowCaseLabel. The finalized code is stored into
+// 'stubRoutine'.
+static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
+{
+ VM* vm = &exec->vm();
+
+ MacroAssembler stubJit;
+
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
+#endif
+ GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+ GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
+ bool needToRestoreScratch = false;
+
+ if (scratchGPR == InvalidGPRReg) {
+ // No free register at this site: borrow one (distinct from the operands)
+ // and preserve it on the stack for the duration of the stub.
+#if USE(JSVALUE64)
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
+#else
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
+#endif
+ stubJit.push(scratchGPR);
+ needToRestoreScratch = true;
+ }
+
+ MacroAssembler::JumpList failureCases;
+
+ // Guard the base object's structure.
+ failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure)));
+
+ // Walk the prototype chain, guarding each prototype's structure.
+ // After the loop, protoObject is the object holding the property.
+ Structure* currStructure = structure;
+ WriteBarrier<Structure>* it = chain->head();
+ JSObject* protoObject = 0;
+ for (unsigned i = 0; i < count; ++i, ++it) {
+ protoObject = asObject(currStructure->prototypeForLookup(exec));
+ addStructureTransitionCheck(
+ protoObject, protoObject->structure(), exec->codeBlock(), stubInfo, stubJit,
+ failureCases, scratchGPR);
+ currStructure = it->get();
+ }
+
+ if (isInlineOffset(offset)) {
+ // Property lives in the prototype's inline storage; load from its
+ // absolute address since protoObject is a known constant here.
+#if USE(JSVALUE64)
+ stubJit.load64(protoObject->locationForOffset(offset), resultGPR);
+#elif USE(JSVALUE32_64)
+ stubJit.move(MacroAssembler::TrustedImmPtr(protoObject->locationForOffset(offset)), resultGPR);
+ stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+ stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+#endif
+ } else {
+ // Out-of-line property: load the butterfly, then the slot within it.
+ stubJit.loadPtr(protoObject->butterflyAddress(), resultGPR);
+#if USE(JSVALUE64)
+ stubJit.load64(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>)), resultGPR);
+#elif USE(JSVALUE32_64)
+ stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+ stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+#endif
+ }
+
+ MacroAssembler::Jump success, fail;
+
+ emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
+
+ LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
+
+ linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
+
+ stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
+ patchBuffer,
+ ("DFG prototype chain access stub for %s, return point %p",
+ toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
+}
+
+// Attempts to install a monomorphic GetById cache at this access site.
+// Returns true when the site has been put into a useful cached state (which
+// includes deliberately redirecting to the list-building slow path); false
+// tells the caller (repatchGetByID) to relink to the generic operation.
+static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ // FIXME: Write a test that proves we need to check for recursion here just
+ // like the interpreter does, then add a check for recursion.
+
+ CodeBlock* codeBlock = exec->codeBlock();
+ VM* vm = &exec->vm();
+
+ // Special case: array.length. Emit a custom stub that checks the indexing
+ // type and reads the butterfly's public length directly.
+ if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
+#endif
+ GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+ GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
+ bool needToRestoreScratch = false;
+
+ MacroAssembler stubJit;
+
+ if (scratchGPR == InvalidGPRReg) {
+ // No free register: borrow one and preserve it on the stack.
+#if USE(JSVALUE64)
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
+#else
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
+#endif
+ stubJit.push(scratchGPR);
+ needToRestoreScratch = true;
+ }
+
+ MacroAssembler::JumpList failureCases;
+
+ // Guard that the cell is still an array with a real indexing shape.
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSCell::structureOffset()), scratchGPR);
+ stubJit.load8(MacroAssembler::Address(scratchGPR, Structure::indexingTypeOffset()), scratchGPR);
+ failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
+ failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));
+
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+ stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
+ // The result is boxed as an int32, so lengths >= 2^31 (negative when
+ // read as a signed 32-bit value) must take the slow path.
+ failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
+
+#if USE(JSVALUE64)
+ stubJit.or64(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
+#elif USE(JSVALUE32_64)
+ stubJit.move(scratchGPR, resultGPR);
+ stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
+#endif
+
+ MacroAssembler::Jump success, fail;
+
+ emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
+
+ LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
+
+ linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);
+
+ stubInfo.stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
+ patchBuffer,
+ ("DFG GetById array length stub for %s, return point %p",
+ toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+ stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
+
+ // Route the inline structure check to the stub and keep the slow call
+ // on the generic operation (this case never becomes polymorphic).
+ RepatchBuffer repatchBuffer(codeBlock);
+ replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
+ repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);
+
+ return true;
+ }
+
+ // FIXME: should support length access for String.
+
+ // FIXME: Cache property access for immediates.
+ if (!baseValue.isCell())
+ return false;
+ JSCell* baseCell = baseValue.asCell();
+ Structure* structure = baseCell->structure();
+ if (!slot.isCacheable())
+ return false;
+ if (!structure->propertyAccessesAreCacheable())
+ return false;
+
+ // Optimize self access.
+ if (slot.slotBase() == baseValue) {
+ if (!slot.isCacheableValue()
+ || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
+ // Can't patch the inline fast path (non-value slot, or offset does
+ // not fit the compact load); go straight to list building.
+ repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
+ return true;
+ }
+
+ // Patch the inline access in place to use this structure and offset.
+ repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
+ stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
+ return true;
+ }
+
+ if (structure->isDictionary())
+ return false;
+
+ // FIXME: optimize getters and setters
+ if (!slot.isCacheableValue())
+ return false;
+
+ // Property lives on the prototype chain: build a chain-walking stub.
+ PropertyOffset offset = slot.cachedOffset();
+ size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
+ if (count == InvalidPrototypeChain)
+ return false;
+
+ StructureChain* prototypeChain = structure->prototypeChain(exec);
+
+ generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);
+
+ RepatchBuffer repatchBuffer(codeBlock);
+ replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
+ repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdBuildList);
+
+ stubInfo.initGetByIdChain(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, count, true);
+ return true;
+}
+
+// Slow-path entry: try to cache this GetById; if caching is not possible,
+// permanently relink the site's call to the generic operationGetById.
+void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
+ if (!cached)
+ repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
+}
+
+// Transitions the stub info into the polymorphic (self-list) state, whatever
+// state it is currently in, and reports where the next case should go:
+// - polymorphicStructureList: the list to append to (created here if needed,
+// seeding it with the existing monomorphic case when there is one);
+// - listIndex: the slot the new case will occupy;
+// - slowCase: the label a failing new case should jump to (the previous
+// stub, so cases chain together, or the site's slow path if none).
+// Returns false when the list is already full and no new case may be added.
+static bool getPolymorphicStructureList(
+ VM* vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
+ PolymorphicAccessStructureList*& polymorphicStructureList, int& listIndex,
+ CodeLocationLabel& slowCase)
+{
+ slowCase = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
+
+ if (stubInfo.accessType == access_unset) {
+ RELEASE_ASSERT(!stubInfo.stubRoutine);
+ polymorphicStructureList = new PolymorphicAccessStructureList();
+ stubInfo.initGetByIdSelfList(polymorphicStructureList, 0, false);
+ listIndex = 0;
+ } else if (stubInfo.accessType == access_get_by_id_self) {
+ // Fold the existing self-patched case into the list as entry 0.
+ RELEASE_ASSERT(!stubInfo.stubRoutine);
+ polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), JITStubRoutine::createSelfManagedRoutine(slowCase), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
+ stubInfo.initGetByIdSelfList(polymorphicStructureList, 1, true);
+ listIndex = 1;
+ } else if (stubInfo.accessType == access_get_by_id_chain) {
+ // Fold the existing chain stub into the list; new cases fall back to it.
+ RELEASE_ASSERT(!!stubInfo.stubRoutine);
+ slowCase = CodeLocationLabel(stubInfo.stubRoutine->code().code());
+ polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
+ stubInfo.stubRoutine.clear();
+ stubInfo.initGetByIdSelfList(polymorphicStructureList, 1, false);
+ listIndex = 1;
+ } else {
+ // Already polymorphic: append, chaining failure to the newest stub.
+ RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_self_list);
+ polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
+ listIndex = stubInfo.u.getByIdSelfList.listSize;
+ slowCase = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
+ }
+
+ if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
+ return false;
+
+ RELEASE_ASSERT(listIndex < POLYMORPHIC_LIST_CACHE_SIZE);
+ return true;
+}
+
+// Points the access site at a freshly generated list stub. If the inline fast
+// path was previously self-patched (didSelfPatching), the structure-check
+// jump is still in branch form and is relinked; otherwise the patchable
+// check is replaced with (or relinked to) a jump via replaceWithJump.
+static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
+{
+ RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_self_list);
+ RepatchBuffer repatchBuffer(codeBlock);
+ if (stubInfo.u.getByIdSelfList.didSelfPatching) {
+ repatchBuffer.relink(
+ stubInfo.callReturnLocation.jumpAtOffset(
+ stubInfo.patch.dfg.deltaCallToStructCheck),
+ CodeLocationLabel(stubRoutine->code().code()));
+ return;
+ }
+
+ replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
+}
+
+// Appends a case to the polymorphic GetById list for this access site.
+// Handles both self accesses (plain value loads, getters, and custom
+// accessors) and prototype-chain accesses. Returns true while the list still
+// has room for more cases ("don't change the call yet"); false tells the
+// caller (buildGetByIDList) to give up and relink to the generic operation.
+static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ if (!baseValue.isCell()
+ || !slot.isCacheable()
+ || !baseValue.asCell()->structure()->propertyAccessesAreCacheable())
+ return false;
+
+ CodeBlock* codeBlock = exec->codeBlock();
+ VM* vm = &exec->vm();
+ JSCell* baseCell = baseValue.asCell();
+ Structure* structure = baseCell->structure();
+
+ if (slot.slotBase() == baseValue) {
+ if (!stubInfo.patch.dfg.registersFlushed) {
+ // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
+ // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
+ // if registers were not flushed, don't do non-Value caching.
+ if (!slot.isCacheableValue())
+ return false;
+ }
+
+ PolymorphicAccessStructureList* polymorphicStructureList;
+ int listIndex;
+ CodeLocationLabel slowCase;
+
+ if (!getPolymorphicStructureList(vm, codeBlock, stubInfo, polymorphicStructureList, listIndex, slowCase))
+ return false;
+
+ // Claim the slot up front; the stub for it is generated below.
+ stubInfo.u.getByIdSelfList.listSize++;
+
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
+#endif
+ GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+ GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
+
+ CCallHelpers stubJit(vm, codeBlock);
+
+ // Each case guards its own structure; on mismatch fall through to the
+ // previous stub (or the slow path) via slowCase.
+ MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));
+
+ // The strategy we use for stubs is as follows:
+ // 1) Call DFG helper that calls the getter.
+ // 2) Check if there was an exception, and if there was, call yet another
+ // helper.
+
+ bool isDirect = false;
+ MacroAssembler::Call operationCall;
+ MacroAssembler::Call handlerCall;
+ FunctionPtr operationFunction;
+ MacroAssembler::Jump success;
+
+ if (slot.isCacheableGetter() || slot.isCacheableCustom()) {
+ if (slot.isCacheableGetter()) {
+ // Load the GetterSetter cell from the slot, then call a helper
+ // that invokes the getter. (Payload alone suffices on 32-bit:
+ // the argument is a known cell.)
+ ASSERT(scratchGPR != InvalidGPRReg);
+ ASSERT(baseGPR != scratchGPR);
+ if (isInlineOffset(slot.cachedOffset())) {
+#if USE(JSVALUE64)
+ stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
+#else
+ stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
+#endif
+ } else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+#if USE(JSVALUE64)
+ stubJit.load64(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
+#else
+ stubJit.load32(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
+#endif
+ }
+ stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
+ operationFunction = operationCallGetter;
+ } else {
+ // Custom accessor: pass the base, the C function, and the name.
+ stubJit.setupArgumentsWithExecState(
+ baseGPR,
+ MacroAssembler::TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()),
+ MacroAssembler::TrustedImmPtr(ident.impl()));
+ operationFunction = operationCallCustomGetter;
+ }
+
+ // Need to make sure that whenever this call is made in the future, we remember the
+ // place that we made it from. It just so happens to be the place that we are at
+ // right now!
+ stubJit.store32(
+ MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
+ CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
+
+ operationCall = stubJit.call();
+#if USE(JSVALUE64)
+ stubJit.move(GPRInfo::returnValueGPR, resultGPR);
+#else
+ stubJit.setupResults(resultGPR, resultTagGPR);
+#endif
+ // No exception: jump back to the done label. Otherwise call the
+ // handler-lookup helper and jump to the handler it returns.
+ success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
+
+ stubJit.setupArgumentsWithExecState(
+ MacroAssembler::TrustedImmPtr(&stubInfo));
+ handlerCall = stubJit.call();
+ stubJit.jump(GPRInfo::returnValueGPR2);
+ } else {
+ // Plain cached value: load it directly, no call needed.
+ if (isInlineOffset(slot.cachedOffset())) {
+#if USE(JSVALUE64)
+ stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
+#else
+ if (baseGPR == resultTagGPR) {
+ // Order the two loads so the base is not clobbered before
+ // its second use.
+ stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+ stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+ } else {
+ stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+ stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+ }
+#endif
+ } else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
+#if USE(JSVALUE64)
+ stubJit.load64(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
+#else
+ stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
+ stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
+#endif
+ }
+ success = stubJit.jump();
+ isDirect = true;
+ }
+
+ LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);
+
+ patchBuffer.link(wrongStruct, slowCase);
+ patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
+ if (!isDirect) {
+ patchBuffer.link(operationCall, operationFunction);
+ patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
+ }
+
+ RefPtr<JITStubRoutine> stubRoutine =
+ createJITStubRoutine(
+ FINALIZE_DFG_CODE(
+ patchBuffer,
+ ("DFG GetById polymorphic list access for %s, return point %p",
+ toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+ stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
+ *vm,
+ codeBlock->ownerExecutable(),
+ slot.isCacheableGetter() || slot.isCacheableCustom());
+
+ polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);
+
+ patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
+ return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
+ }
+
+ // Prototype-chain case below; only plain values are supported here.
+ if (baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching() || !slot.isCacheableValue())
+ return false;
+
+ PropertyOffset offset = slot.cachedOffset();
+ size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), ident, offset);
+ if (count == InvalidPrototypeChain)
+ return false;
+
+ StructureChain* prototypeChain = structure->prototypeChain(exec);
+
+ PolymorphicAccessStructureList* polymorphicStructureList;
+ int listIndex;
+ CodeLocationLabel slowCase;
+ if (!getPolymorphicStructureList(vm, codeBlock, stubInfo, polymorphicStructureList, listIndex, slowCase))
+ return false;
+
+ // NOTE(review): incremented through the getByIdProtoList member of the
+ // union while the list was initialized as a self list — presumably these
+ // union members alias the same listSize field; verify in StructureStubInfo.
+ stubInfo.u.getByIdProtoList.listSize++;
+
+ RefPtr<JITStubRoutine> stubRoutine;
+
+ generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), slowCase, stubRoutine);
+
+ polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
+
+ patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());
+
+ return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
+}
+
+// Slow-path entry for a site already in list-building mode: try to add a
+// case; once the list is full (or the case is uncacheable), permanently
+// relink the call to the generic operationGetById.
+void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
+ if (!dontChangeCall)
+ repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
+}
+
+// Selects the generic (non-caching) PutById slow-path operation matching the
+// access's strictness and direct/non-direct kind.
+static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
+{
+ if (slot.isStrictMode()) {
+ if (putKind == Direct)
+ return operationPutByIdDirectStrict;
+ return operationPutByIdStrict;
+ }
+ if (putKind == Direct)
+ return operationPutByIdDirectNonStrict;
+ return operationPutByIdNonStrict;
+}
+
+// Selects the list-building PutById slow-path operation (the variant that
+// keeps trying to add cache cases) matching strictness and put kind.
+static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
+{
+ if (slot.isStrictMode()) {
+ if (putKind == Direct)
+ return operationPutByIdDirectStrictBuildList;
+ return operationPutByIdStrictBuildList;
+ }
+ if (putKind == Direct)
+ return operationPutByIdDirectNonStrictBuildList;
+ return operationPutByIdNonStrictBuildList;
+}
+
+// Generates a PutById "replace" stub: guard the base's structure, then store
+// the value into an existing property slot (inline or out-of-line), emitting
+// a write barrier when profiling is enabled. Success returns to the done
+// label; a structure mismatch jumps to failureLabel. The finalized code is
+// stored into 'stubRoutine'.
+static void emitPutReplaceStub(
+ ExecState* exec,
+ JSValue,
+ const Identifier&,
+ const PutPropertySlot& slot,
+ StructureStubInfo& stubInfo,
+ PutKind,
+ Structure* structure,
+ CodeLocationLabel failureLabel,
+ RefPtr<JITStubRoutine>& stubRoutine)
+{
+ VM* vm = &exec->vm();
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
+#endif
+ GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+ GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
+ bool needToRestoreScratch = false;
+#if ENABLE(WRITE_BARRIER_PROFILING)
+ GPRReg scratchGPR2;
+ const bool writeBarrierNeeded = true;
+#else
+ const bool writeBarrierNeeded = false;
+#endif
+
+ MacroAssembler stubJit;
+
+ // A scratch register is only required for the write barrier or an
+ // out-of-line (butterfly) store; inline stores without barriers need none.
+ if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || isOutOfLineOffset(slot.cachedOffset()))) {
+#if USE(JSVALUE64)
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
+#else
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR);
+#endif
+ needToRestoreScratch = true;
+ stubJit.push(scratchGPR);
+ }
+
+ MacroAssembler::Jump badStructure = stubJit.branchPtr(
+ MacroAssembler::NotEqual,
+ MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
+ MacroAssembler::TrustedImmPtr(structure));
+
+#if ENABLE(WRITE_BARRIER_PROFILING)
+ // The barrier helper needs a second scratch; preserve it around the call.
+#if USE(JSVALUE64)
+ scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
+#else
+ scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR, scratchGPR);
+#endif
+ stubJit.push(scratchGPR2);
+ SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
+ stubJit.pop(scratchGPR2);
+#endif
+
+#if USE(JSVALUE64)
+ if (isInlineOffset(slot.cachedOffset()))
+ stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
+ else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+ stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
+ }
+#elif USE(JSVALUE32_64)
+ if (isInlineOffset(slot.cachedOffset())) {
+ stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ } else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
+ stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ }
+#endif
+
+ MacroAssembler::Jump success;
+ MacroAssembler::Jump failure;
+
+ // Epilogue: pop the scratch (if pushed) on both success and failure paths.
+ if (needToRestoreScratch) {
+ stubJit.pop(scratchGPR);
+ success = stubJit.jump();
+
+ badStructure.link(&stubJit);
+ stubJit.pop(scratchGPR);
+ failure = stubJit.jump();
+ } else {
+ success = stubJit.jump();
+ failure = badStructure;
+ }
+
+ LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
+ patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
+ patchBuffer.link(failure, failureLabel);
+
+ stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
+ patchBuffer,
+ ("DFG PutById replace stub for %s, return point %p",
+ toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+ stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
+}
+
+static void emitPutTransitionStub(
+ ExecState* exec,
+ JSValue,
+ const Identifier&,
+ const PutPropertySlot& slot,
+ StructureStubInfo& stubInfo,
+ PutKind putKind,
+ Structure* structure,
+ Structure* oldStructure,
+ StructureChain* prototypeChain,
+ CodeLocationLabel failureLabel,
+ RefPtr<JITStubRoutine>& stubRoutine)
+{
+ VM* vm = &exec->vm();
+
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
+#endif
+ GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+
+ ScratchRegisterAllocator allocator(stubInfo.patch.dfg.usedRegisters);
+ allocator.lock(baseGPR);
+#if USE(JSVALUE32_64)
+ allocator.lock(valueTagGPR);
+#endif
+ allocator.lock(valueGPR);
+
+ CCallHelpers stubJit(vm);
+
+ GPRReg scratchGPR1 = allocator.allocateScratchGPR();
+ ASSERT(scratchGPR1 != baseGPR);
+ ASSERT(scratchGPR1 != valueGPR);
+
+ bool needSecondScratch = false;
+ bool needThirdScratch = false;
+#if ENABLE(WRITE_BARRIER_PROFILING)
+ needSecondScratch = true;
+#endif
+ if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
+ && oldStructure->outOfLineCapacity()) {
+ needSecondScratch = true;
+ needThirdScratch = true;
+ }
+
+ GPRReg scratchGPR2;
+ if (needSecondScratch) {
+ scratchGPR2 = allocator.allocateScratchGPR();
+ ASSERT(scratchGPR2 != baseGPR);
+ ASSERT(scratchGPR2 != valueGPR);
+ ASSERT(scratchGPR2 != scratchGPR1);
+ } else
+ scratchGPR2 = InvalidGPRReg;
+ GPRReg scratchGPR3;
+ if (needThirdScratch) {
+ scratchGPR3 = allocator.allocateScratchGPR();
+ ASSERT(scratchGPR3 != baseGPR);
+ ASSERT(scratchGPR3 != valueGPR);
+ ASSERT(scratchGPR3 != scratchGPR1);
+ ASSERT(scratchGPR3 != scratchGPR2);
+ } else
+ scratchGPR3 = InvalidGPRReg;
+
+ allocator.preserveReusedRegistersByPushing(stubJit);
+
+ MacroAssembler::JumpList failureCases;
+
+ ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());
+
+ failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));
+
+ addStructureTransitionCheck(
+ oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
+ scratchGPR1);
+
+ if (putKind == NotDirect) {
+ for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
+ addStructureTransitionCheck(
+ (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
+ scratchGPR1);
+ }
+ }
+
+#if ENABLE(WRITE_BARRIER_PROFILING)
+ ASSERT(needSecondScratch);
+ ASSERT(scratchGPR2 != InvalidGPRReg);
+ // Must always emit this write barrier as the structure transition itself requires it
+ SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR1, scratchGPR2, WriteBarrierForPropertyAccess);
+#endif
+
+ MacroAssembler::JumpList slowPath;
+
+ bool scratchGPR1HasStorage = false;
+
+ if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
+ size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
+ CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();
+
+ if (!oldStructure->outOfLineCapacity()) {
+ stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
+ slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
+ stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
+ stubJit.negPtr(scratchGPR1);
+ stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
+ stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
+ } else {
+ size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
+ ASSERT(newSize > oldSize);
+
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
+ stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
+ slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
+ stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
+ stubJit.negPtr(scratchGPR1);
+ stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
+ stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
+ // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
+ for (ptrdiff_t offset = 0; offset < static_cast<ptrdiff_t>(oldSize); offset += sizeof(void*)) {
+ stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
+ stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -(offset + sizeof(JSValue) + sizeof(void*))));
+ }
+ }
+
+ stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
+ scratchGPR1HasStorage = true;
+ }
+
+ stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
+#if USE(JSVALUE64)
+ if (isInlineOffset(slot.cachedOffset()))
+ stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
+ else {
+ if (!scratchGPR1HasStorage)
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
+ stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
+ }
+#elif USE(JSVALUE32_64)
+ if (isInlineOffset(slot.cachedOffset())) {
+ stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ } else {
+ if (!scratchGPR1HasStorage)
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
+ stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ }
+#endif
+
+ MacroAssembler::Jump success;
+ MacroAssembler::Jump failure;
+
+ if (allocator.didReuseRegisters()) {
+ allocator.restoreReusedRegistersByPopping(stubJit);
+ success = stubJit.jump();
+
+ failureCases.link(&stubJit);
+ allocator.restoreReusedRegistersByPopping(stubJit);
+ failure = stubJit.jump();
+ } else
+ success = stubJit.jump();
+
+ MacroAssembler::Call operationCall;
+ MacroAssembler::Jump successInSlowPath;
+
+ if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
+ slowPath.link(&stubJit);
+
+ allocator.restoreReusedRegistersByPopping(stubJit);
+ ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSize());
+ allocator.preserveUsedRegistersToScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
+#if USE(JSVALUE64)
+ stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
+#else
+ stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
+#endif
+ operationCall = stubJit.call();
+ allocator.restoreUsedRegistersFromScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
+ successInSlowPath = stubJit.jump();
+ }
+
+ LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
+ patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
+ if (allocator.didReuseRegisters())
+ patchBuffer.link(failure, failureLabel);
+ else
+ patchBuffer.link(failureCases, failureLabel);
+ if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
+ patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
+ patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
+ }
+
+ stubRoutine =
+ createJITStubRoutine(
+ FINALIZE_DFG_CODE(
+ patchBuffer,
+ ("DFG PutById %stransition stub (%p -> %p) for %s, return point %p",
+ structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
+ oldStructure, structure,
+ toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
+ stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
+ *vm,
+ exec->codeBlock()->ownerExecutable(),
+ structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
+ structure);
+}
+
+static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
+{
+ CodeBlock* codeBlock = exec->codeBlock();
+ VM* vm = &exec->vm();
+
+ if (!baseValue.isCell())
+ return false;
+ JSCell* baseCell = baseValue.asCell();
+ Structure* structure = baseCell->structure();
+ Structure* oldStructure = structure->previousID();
+
+ if (!slot.isCacheable())
+ return false;
+ if (structure->isUncacheableDictionary())
+ return false;
+
+ // Optimize self access.
+ if (slot.base() == baseValue) {
+ if (slot.type() == PutPropertySlot::NewProperty) {
+ if (structure->isDictionary())
+ return false;
+
+ // Skip optimizing the case where we need a realloc, if we don't have
+ // enough registers to make it happen.
+ if (GPRInfo::numberOfRegisters < 6
+ && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
+ && oldStructure->outOfLineCapacity())
+ return false;
+
+ // Skip optimizing the case where we need realloc, and the structure has
+ // indexing storage.
+ if (oldStructure->couldHaveIndexingHeader())
+ return false;
+
+ if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
+ return false;
+
+ StructureChain* prototypeChain = structure->prototypeChain(exec);
+
+ emitPutTransitionStub(
+ exec, baseValue, ident, slot, stubInfo, putKind,
+ structure, oldStructure, prototypeChain,
+ stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase),
+ stubInfo.stubRoutine);
+
+ RepatchBuffer repatchBuffer(codeBlock);
+ repatchBuffer.relink(
+ stubInfo.callReturnLocation.jumpAtOffset(
+ stubInfo.patch.dfg.deltaCallToStructCheck),
+ CodeLocationLabel(stubInfo.stubRoutine->code().code()));
+ repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
+
+ stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
+
+ return true;
+ }
+
+ repatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
+ stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
+ return true;
+ }
+
+ return false;
+}
+
+void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
+{
+ bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
+ if (!cached)
+ repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
+}
+
+static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
+{
+ CodeBlock* codeBlock = exec->codeBlock();
+ VM* vm = &exec->vm();
+
+ if (!baseValue.isCell())
+ return false;
+ JSCell* baseCell = baseValue.asCell();
+ Structure* structure = baseCell->structure();
+ Structure* oldStructure = structure->previousID();
+
+ if (!slot.isCacheable())
+ return false;
+ if (structure->isUncacheableDictionary())
+ return false;
+
+ // Optimize self access.
+ if (slot.base() == baseValue) {
+ PolymorphicPutByIdList* list;
+ RefPtr<JITStubRoutine> stubRoutine;
+
+ if (slot.type() == PutPropertySlot::NewProperty) {
+ if (structure->isDictionary())
+ return false;
+
+ // Skip optimizing the case where we need a realloc, if we don't have
+ // enough registers to make it happen.
+ if (GPRInfo::numberOfRegisters < 6
+ && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
+ && oldStructure->outOfLineCapacity())
+ return false;
+
+ // Skip optimizing the case where we need realloc, and the structure has
+ // indexing storage.
+ if (oldStructure->couldHaveIndexingHeader())
+ return false;
+
+ if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
+ return false;
+
+ StructureChain* prototypeChain = structure->prototypeChain(exec);
+
+ // We're now committed to creating the stub. Mogrify the meta-data accordingly.
+ list = PolymorphicPutByIdList::from(
+ putKind, stubInfo,
+ stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+
+ emitPutTransitionStub(
+ exec, baseValue, propertyName, slot, stubInfo, putKind,
+ structure, oldStructure, prototypeChain,
+ CodeLocationLabel(list->currentSlowPathTarget()),
+ stubRoutine);
+
+ list->addAccess(
+ PutByIdAccess::transition(
+ *vm, codeBlock->ownerExecutable(),
+ oldStructure, structure, prototypeChain,
+ stubRoutine));
+ } else {
+ // We're now committed to creating the stub. Mogrify the meta-data accordingly.
+ list = PolymorphicPutByIdList::from(
+ putKind, stubInfo,
+ stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+
+ emitPutReplaceStub(
+ exec, baseValue, propertyName, slot, stubInfo, putKind,
+ structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
+
+ list->addAccess(
+ PutByIdAccess::replace(
+ *vm, codeBlock->ownerExecutable(),
+ structure, stubRoutine));
+ }
+
+ RepatchBuffer repatchBuffer(codeBlock);
+ repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine->code().code()));
+
+ if (list->isFull())
+ repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
+
+ return true;
+ }
+
+ return false;
+}
+
+void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
+{
+ bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
+ if (!cached)
+ repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
+}
+
+static bool tryRepatchIn(
+ ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
+ const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ if (!base->structure()->propertyAccessesAreCacheable())
+ return false;
+
+ if (wasFound) {
+ if (!slot.isCacheable())
+ return false;
+ }
+
+ CodeBlock* codeBlock = exec->codeBlock();
+ VM* vm = &exec->vm();
+ Structure* structure = base->structure();
+
+ PropertyOffset offsetIgnored;
+ size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
+ if (count == InvalidPrototypeChain)
+ return false;
+
+ PolymorphicAccessStructureList* polymorphicStructureList;
+ int listIndex;
+
+ CodeLocationLabel successLabel = stubInfo.hotPathBegin;
+ CodeLocationLabel slowCaseLabel;
+
+ if (stubInfo.accessType == access_unset) {
+ polymorphicStructureList = new PolymorphicAccessStructureList();
+ stubInfo.initInList(polymorphicStructureList, 0);
+ slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
+ stubInfo.patch.dfg.deltaCallToSlowCase);
+ listIndex = 0;
+ } else {
+ RELEASE_ASSERT(stubInfo.accessType == access_in_list);
+ polymorphicStructureList = stubInfo.u.inList.structureList;
+ listIndex = stubInfo.u.inList.listSize;
+ slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
+
+ if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
+ return false;
+ }
+
+ StructureChain* chain = structure->prototypeChain(exec);
+ RefPtr<JITStubRoutine> stubRoutine;
+
+ {
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
+ GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
+ GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
+
+ CCallHelpers stubJit(vm);
+
+ bool needToRestoreScratch;
+ if (scratchGPR == InvalidGPRReg) {
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
+ stubJit.push(scratchGPR);
+ needToRestoreScratch = true;
+ } else
+ needToRestoreScratch = false;
+
+ MacroAssembler::JumpList failureCases;
+ failureCases.append(stubJit.branchPtr(
+ MacroAssembler::NotEqual,
+ MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
+ MacroAssembler::TrustedImmPtr(structure)));
+
+ Structure* currStructure = structure;
+ WriteBarrier<Structure>* it = chain->head();
+ for (unsigned i = 0; i < count; ++i, ++it) {
+ JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
+ addStructureTransitionCheck(
+ prototype, prototype->structure(), exec->codeBlock(), stubInfo, stubJit,
+ failureCases, scratchGPR);
+ currStructure = it->get();
+ }
+
+#if USE(JSVALUE64)
+ stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
+#else
+ stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
+#endif
+
+ MacroAssembler::Jump success, fail;
+
+ emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
+
+ LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
+
+ linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
+
+ stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
+ patchBuffer,
+ ("DFG In (found = %s) stub for %s, return point %p",
+ wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
+ successLabel.executableAddress()));
+ }
+
+ polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
+ stubInfo.u.inList.listSize++;
+
+ RepatchBuffer repatchBuffer(codeBlock);
+ repatchBuffer.relink(stubInfo.hotPathBegin.jumpAtOffset(0), CodeLocationLabel(stubRoutine->code().code()));
+
+ return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1);
+}
+
+void repatchIn(
+ ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
+ const PropertySlot& slot, StructureStubInfo& stubInfo)
+{
+ if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo))
+ return;
+ repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
+}
+
+static void linkSlowFor(RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
+{
+ if (kind == CodeForCall) {
+ repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualCallThunkGenerator).code());
+ return;
+ }
+ ASSERT(kind == CodeForConstruct);
+ repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualConstructThunkGenerator).code());
+}
+
+void linkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
+{
+ ASSERT(!callLinkInfo.stub);
+
+ // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
+ if (calleeCodeBlock)
+ calleeCodeBlock->m_shouldAlwaysBeInlined = false;
+
+ CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
+ VM* vm = callerCodeBlock->vm();
+
+ RepatchBuffer repatchBuffer(callerCodeBlock);
+
+ ASSERT(!callLinkInfo.isLinked());
+ callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
+ callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
+ repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
+
+ if (calleeCodeBlock)
+ calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
+
+ if (kind == CodeForCall) {
+ repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGenerator).code());
+ return;
+ }
+
+ ASSERT(kind == CodeForConstruct);
+ linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct);
+}
+
+void linkSlowFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
+{
+ CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
+ VM* vm = callerCodeBlock->vm();
+
+ RepatchBuffer repatchBuffer(callerCodeBlock);
+
+ linkSlowFor(repatchBuffer, vm, callLinkInfo, kind);
+}
+
+void linkClosureCall(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr)
+{
+ ASSERT(!callLinkInfo.stub);
+
+ CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
+ VM* vm = callerCodeBlock->vm();
+
+ GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
+
+ CCallHelpers stubJit(vm, callerCodeBlock);
+
+ CCallHelpers::JumpList slowPath;
+
+#if USE(JSVALUE64)
+ slowPath.append(
+ stubJit.branchTest64(
+ CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
+#else
+ // We would have already checked that the callee is a cell.
+#endif
+
+ slowPath.append(
+ stubJit.branchPtr(
+ CCallHelpers::NotEqual,
+ CCallHelpers::Address(calleeGPR, JSCell::structureOffset()),
+ CCallHelpers::TrustedImmPtr(structure)));
+
+ slowPath.append(
+ stubJit.branchPtr(
+ CCallHelpers::NotEqual,
+ CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
+ CCallHelpers::TrustedImmPtr(executable)));
+
+ stubJit.loadPtr(
+ CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
+ GPRInfo::returnValueGPR);
+
+#if USE(JSVALUE64)
+ stubJit.store64(
+ GPRInfo::returnValueGPR,
+ CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain)));
+#else
+ stubJit.storePtr(
+ GPRInfo::returnValueGPR,
+ CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(
+ CCallHelpers::TrustedImm32(JSValue::CellTag),
+ CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+#endif
+
+ JITCompiler::Call call = stubJit.nearCall();
+ JITCompiler::Jump done = stubJit.jump();
+
+ slowPath.link(&stubJit);
+ stubJit.move(calleeGPR, GPRInfo::nonArgGPR0);
+#if USE(JSVALUE32_64)
+ stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::nonArgGPR1);
+#endif
+ stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::nonArgGPR2);
+ stubJit.restoreReturnAddressBeforeReturn(GPRInfo::nonArgGPR2);
+ JITCompiler::Jump slow = stubJit.jump();
+
+ LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
+
+ patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
+ patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
+ patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualCallThunkGenerator).code()));
+
+ RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
+ FINALIZE_DFG_CODE(
+ patchBuffer,
+ ("DFG closure call stub for %s, return point %p, target %p (%s)",
+ toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
+ codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
+ *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
+
+ RepatchBuffer repatchBuffer(callerCodeBlock);
+
+ repatchBuffer.replaceWithJump(
+ RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
+ CodeLocationLabel(stubRoutine->code().code()));
+ linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall);
+
+ callLinkInfo.stub = stubRoutine.release();
+
+ ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
+}
+
+void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
+{
+ repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
+ CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
+ if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
+ repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
+ RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
+ MacroAssembler::Address(
+ static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
+ JSCell::structureOffset()),
+ reinterpret_cast<void*>(unusedPointer));
+ }
+ repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
+#if USE(JSVALUE64)
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
+#else
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
+#endif
+ repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+}
+
+void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
+{
+ V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
+ V_DFGOperation_EJCI optimizedFunction;
+ if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
+ optimizedFunction = operationPutByIdStrictOptimize;
+ else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
+ optimizedFunction = operationPutByIdNonStrictOptimize;
+ else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
+ optimizedFunction = operationPutByIdDirectStrictOptimize;
+ else {
+ ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
+ optimizedFunction = operationPutByIdDirectNonStrictOptimize;
+ }
+ repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
+ CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
+ if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
+ repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
+ RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
+ MacroAssembler::Address(
+ static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
+ JSCell::structureOffset()),
+ reinterpret_cast<void*>(unusedPointer));
+ }
+ repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
+#if USE(JSVALUE64)
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
+#else
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
+ repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
+#endif
+ repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+}
+
+void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
+{
+ repatchBuffer.relink(stubInfo.hotPathBegin.jumpAtOffset(0), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
+}
+
+} } // namespace JSC::DFG
+
+#endif
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef Repatch_h
-#define Repatch_h
+#ifndef DFGRepatch_h
+#define DFGRepatch_h
#include <wtf/Platform.h>
-#if ENABLE(JIT)
+#if ENABLE(DFG_JIT)
-#include "CCallHelpers.h"
-#include "JITOperations.h"
+#include "DFGJITCompiler.h"
+#include "DFGOperations.h"
-namespace JSC {
+namespace JSC { namespace DFG {
void repatchGetByID(ExecState*, JSValue, const Identifier&, const PropertySlot&, StructureStubInfo&);
void buildGetByIDList(ExecState*, JSValue, const Identifier&, const PropertySlot&, StructureStubInfo&);
void resetPutByID(RepatchBuffer&, StructureStubInfo&);
void resetIn(RepatchBuffer&, StructureStubInfo&);
-} // namespace JSC
+} } // namespace JSC::DFG
-#else // ENABLE(JIT)
+#else // ENABLE(DFG_JIT)
#include <wtf/Assertions.h>
class RepatchBuffer;
struct StructureStubInfo;
+namespace DFG {
+
inline NO_RETURN_DUE_TO_CRASH void resetGetByID(RepatchBuffer&, StructureStubInfo&) { RELEASE_ASSERT_NOT_REACHED(); }
inline NO_RETURN_DUE_TO_CRASH void resetPutByID(RepatchBuffer&, StructureStubInfo&) { RELEASE_ASSERT_NOT_REACHED(); }
inline NO_RETURN void resetIn(RepatchBuffer&, StructureStubInfo&) { RELEASE_ASSERT_NOT_REACHED(); }
-} // namespace JSC
+} } // namespace JSC::DFG
-#endif // ENABLE(JIT)
-#endif // Repatch_h
+#endif // ENABLE(DFG_JIT)
+#endif // DFGRepatch_h
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef ScratchRegisterAllocator_h
-#define ScratchRegisterAllocator_h
+#ifndef DFGScratchRegisterAllocator_h
+#define DFGScratchRegisterAllocator_h
#include <wtf/Platform.h>
-#if ENABLE(JIT)
+#if ENABLE(DFG_JIT)
+#include "DFGRegisterSet.h"
#include "MacroAssembler.h"
-#include "RegisterSet.h"
-namespace JSC {
+namespace JSC { namespace DFG {
// This class provides a low-level register allocator for use in stubs.
bool m_didReuseRegisters;
};
-} // namespace JSC
+} } // namespace JSC::DFG
-#endif // ENABLE(JIT)
+#endif // ENABLE(DFG_JIT)
-#endif // ScratchRegisterAllocator_h
+#endif // DFGScratchRegisterAllocator_h
}
}
+void SpeculativeJIT::writeBarrier(MacroAssembler& jit, GPRReg owner, GPRReg scratch1, GPRReg scratch2, WriteBarrierUseKind useKind)
+{
+ UNUSED_PARAM(jit);
+ UNUSED_PARAM(owner);
+ UNUSED_PARAM(scratch1);
+ UNUSED_PARAM(scratch2);
+ UNUSED_PARAM(useKind);
+ ASSERT(owner != scratch1);
+ ASSERT(owner != scratch2);
+ ASSERT(scratch1 != scratch2);
+
+#if ENABLE(WRITE_BARRIER_PROFILING)
+ JITCompiler::emitCount(jit, WriteBarrierCounters::jitCounterFor(useKind));
+#endif
+}
+
void SpeculativeJIT::writeBarrier(GPRReg ownerGPR, GPRReg valueGPR, Edge valueUse, WriteBarrierUseKind useKind, GPRReg scratch1, GPRReg scratch2)
{
UNUSED_PARAM(ownerGPR);
#endif
}
-bool SpeculativeJIT::nonSpeculativeCompare(Node* node, MacroAssembler::RelationalCondition cond, S_JITOperation_EJJ helperFunction)
+bool SpeculativeJIT::nonSpeculativeCompare(Node* node, MacroAssembler::RelationalCondition cond, S_DFGOperation_EJJ helperFunction)
{
unsigned branchIndexInBlock = detectPeepHoleBranch();
if (branchIndexInBlock != UINT_MAX) {
}
// Returns true if the compare is fused with a subsequent branch.
-bool SpeculativeJIT::compilePeepHoleBranch(Node* node, MacroAssembler::RelationalCondition condition, MacroAssembler::DoubleCondition doubleCondition, S_JITOperation_EJJ operation)
+bool SpeculativeJIT::compilePeepHoleBranch(Node* node, MacroAssembler::RelationalCondition condition, MacroAssembler::DoubleCondition doubleCondition, S_DFGOperation_EJJ operation)
{
// Fused compare & branch.
unsigned branchIndexInBlock = detectPeepHoleBranch();
}
// Returns true if the compare is fused with a subsequent branch.
-bool SpeculativeJIT::compare(Node* node, MacroAssembler::RelationalCondition condition, MacroAssembler::DoubleCondition doubleCondition, S_JITOperation_EJJ operation)
+bool SpeculativeJIT::compare(Node* node, MacroAssembler::RelationalCondition condition, MacroAssembler::DoubleCondition doubleCondition, S_DFGOperation_EJJ operation)
{
if (compilePeepHoleBranch(node, condition, doubleCondition, operation))
return true;
#include "DFGJITCompiler.h"
#include "DFGOSRExit.h"
#include "DFGOSRExitJumpPlaceholder.h"
+#include "DFGOperations.h"
#include "DFGSilentRegisterSavePlan.h"
#include "DFGValueSource.h"
-#include "JITOperations.h"
#include "MarkedAllocator.h"
-#include "PutKind.h"
#include "ValueRecovery.h"
namespace JSC { namespace DFG {
speculationWatchpointForMasqueradesAsUndefined(m_currentNode->codeOrigin);
}
+ static void writeBarrier(MacroAssembler&, GPRReg ownerGPR, GPRReg scratchGPR1, GPRReg scratchGPR2, WriteBarrierUseKind);
+
void writeBarrier(GPRReg ownerGPR, GPRReg valueGPR, Edge valueUse, WriteBarrierUseKind, GPRReg scratchGPR1 = InvalidGPRReg, GPRReg scratchGPR2 = InvalidGPRReg);
void writeBarrier(GPRReg ownerGPR, JSCell* value, WriteBarrierUseKind, GPRReg scratchGPR1 = InvalidGPRReg, GPRReg scratchGPR2 = InvalidGPRReg);
void writeBarrier(JSCell* owner, GPRReg valueGPR, Edge valueUse, WriteBarrierUseKind, GPRReg scratchGPR1 = InvalidGPRReg);
void nonSpeculativePeepholeBranchNull(Edge operand, Node* branchNode, bool invert = false);
bool nonSpeculativeCompareNull(Node*, Edge operand, bool invert = false);
- void nonSpeculativePeepholeBranch(Node*, Node* branchNode, MacroAssembler::RelationalCondition, S_JITOperation_EJJ helperFunction);
- void nonSpeculativeNonPeepholeCompare(Node*, MacroAssembler::RelationalCondition, S_JITOperation_EJJ helperFunction);
- bool nonSpeculativeCompare(Node*, MacroAssembler::RelationalCondition, S_JITOperation_EJJ helperFunction);
+ void nonSpeculativePeepholeBranch(Node*, Node* branchNode, MacroAssembler::RelationalCondition, S_DFGOperation_EJJ helperFunction);
+ void nonSpeculativeNonPeepholeCompare(Node*, MacroAssembler::RelationalCondition, S_DFGOperation_EJJ helperFunction);
+ bool nonSpeculativeCompare(Node*, MacroAssembler::RelationalCondition, S_DFGOperation_EJJ helperFunction);
void nonSpeculativePeepholeStrictEq(Node*, Node* branchNode, bool invert = false);
void nonSpeculativeNonPeepholeStrictEq(Node*, bool invert = false);
// machine registers, and delegate the calling convention specific
// decision as to how to fill the regsiters to setupArguments* methods.
- JITCompiler::Call callOperation(P_JITOperation_E operation, GPRReg result)
+ JITCompiler::Call callOperation(P_DFGOperation_E operation, GPRReg result)
{
m_jit.setupArgumentsExecState();
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EC operation, GPRReg result, GPRReg cell)
+ JITCompiler::Call callOperation(P_DFGOperation_EC operation, GPRReg result, GPRReg cell)
{
m_jit.setupArgumentsWithExecState(cell);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EO operation, GPRReg result, GPRReg object)
+ JITCompiler::Call callOperation(P_DFGOperation_EO operation, GPRReg result, GPRReg object)
{
m_jit.setupArgumentsWithExecState(object);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EOS operation, GPRReg result, GPRReg object, size_t size)
+ JITCompiler::Call callOperation(P_DFGOperation_EOS operation, GPRReg result, GPRReg object, size_t size)
{
m_jit.setupArgumentsWithExecState(object, TrustedImmPtr(size));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EOZ operation, GPRReg result, GPRReg object, int32_t size)
+ JITCompiler::Call callOperation(P_DFGOperation_EOZ operation, GPRReg result, GPRReg object, int32_t size)
{
m_jit.setupArgumentsWithExecState(object, TrustedImmPtr(size));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_EOZ operation, GPRReg result, GPRReg object, int32_t size)
+ JITCompiler::Call callOperation(C_DFGOperation_EOZ operation, GPRReg result, GPRReg object, int32_t size)
{
m_jit.setupArgumentsWithExecState(object, TrustedImmPtr(static_cast<size_t>(size)));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EPS operation, GPRReg result, GPRReg old, size_t size)
+ JITCompiler::Call callOperation(P_DFGOperation_EPS operation, GPRReg result, GPRReg old, size_t size)
{
m_jit.setupArgumentsWithExecState(old, TrustedImmPtr(size));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_ES operation, GPRReg result, size_t size)
+ JITCompiler::Call callOperation(P_DFGOperation_ES operation, GPRReg result, size_t size)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(size));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_ESJss operation, GPRReg result, size_t index, GPRReg arg1)
+ JITCompiler::Call callOperation(P_DFGOperation_ESJss operation, GPRReg result, size_t index, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(index), arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_ESt operation, GPRReg result, Structure* structure)
+ JITCompiler::Call callOperation(P_DFGOperation_ESt operation, GPRReg result, Structure* structure)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EStZ operation, GPRReg result, Structure* structure, GPRReg arg2)
+ JITCompiler::Call callOperation(P_DFGOperation_EStZ operation, GPRReg result, Structure* structure, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EStZ operation, GPRReg result, Structure* structure, size_t arg2)
+ JITCompiler::Call callOperation(P_DFGOperation_EStZ operation, GPRReg result, Structure* structure, size_t arg2)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), TrustedImm32(arg2));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EStZ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(P_DFGOperation_EStZ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EStPS operation, GPRReg result, Structure* structure, void* pointer, size_t size)
+ JITCompiler::Call callOperation(P_DFGOperation_EStPS operation, GPRReg result, Structure* structure, void* pointer, size_t size)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), TrustedImmPtr(pointer), TrustedImmPtr(size));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EStSS operation, GPRReg result, Structure* structure, size_t index, size_t size)
+ JITCompiler::Call callOperation(P_DFGOperation_EStSS operation, GPRReg result, Structure* structure, size_t index, size_t size)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), TrustedImmPtr(index), TrustedImmPtr(size));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_E operation, GPRReg result)
+ JITCompiler::Call callOperation(C_DFGOperation_E operation, GPRReg result)
{
m_jit.setupArgumentsExecState();
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_EC operation, GPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(C_DFGOperation_EC operation, GPRReg result, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_EC operation, GPRReg result, JSCell* cell)
+ JITCompiler::Call callOperation(C_DFGOperation_EC operation, GPRReg result, JSCell* cell)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(cell));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_ECC operation, GPRReg result, GPRReg arg1, JSCell* cell)
+ JITCompiler::Call callOperation(C_DFGOperation_ECC operation, GPRReg result, GPRReg arg1, JSCell* cell)
{
m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(cell));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_EIcf operation, GPRReg result, InlineCallFrame* inlineCallFrame)
+ JITCompiler::Call callOperation(C_DFGOperation_EIcf operation, GPRReg result, InlineCallFrame* inlineCallFrame)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(inlineCallFrame));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_ESt operation, GPRReg result, Structure* structure)
+ JITCompiler::Call callOperation(C_DFGOperation_ESt operation, GPRReg result, Structure* structure)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_EJssSt operation, GPRReg result, GPRReg arg1, Structure* structure)
+ JITCompiler::Call callOperation(C_DFGOperation_EJssSt operation, GPRReg result, GPRReg arg1, Structure* structure)
{
m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(structure));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_EJssJss operation, GPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(C_DFGOperation_EJssJss operation, GPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_EJssJssJss operation, GPRReg result, GPRReg arg1, GPRReg arg2, GPRReg arg3)
+ JITCompiler::Call callOperation(C_DFGOperation_EJssJssJss operation, GPRReg result, GPRReg arg1, GPRReg arg2, GPRReg arg3)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(S_JITOperation_ECC operation, GPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(S_DFGOperation_ECC operation, GPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(Jss_JITOperation_EZ operation, GPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(Jss_DFGOperation_EZ operation, GPRReg result, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(V_JITOperation_EC operation, GPRReg arg1)
+ JITCompiler::Call callOperation(V_DFGOperation_EC operation, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_ECIcf operation, GPRReg arg1, InlineCallFrame* inlineCallFrame)
+ JITCompiler::Call callOperation(V_DFGOperation_ECIcf operation, GPRReg arg1, InlineCallFrame* inlineCallFrame)
{
m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(inlineCallFrame));
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_ECCIcf operation, GPRReg arg1, GPRReg arg2, InlineCallFrame* inlineCallFrame)
+ JITCompiler::Call callOperation(V_DFGOperation_ECCIcf operation, GPRReg arg1, GPRReg arg2, InlineCallFrame* inlineCallFrame)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, TrustedImmPtr(inlineCallFrame));
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_ECZ operation, GPRReg arg1, int arg2)
+ JITCompiler::Call callOperation(V_DFGOperation_ECZ operation, GPRReg arg1, int arg2)
{
m_jit.setupArgumentsWithExecState(arg1, TrustedImm32(arg2));
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_ECC operation, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(V_DFGOperation_ECC operation, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_W operation, WatchpointSet* watchpointSet)
+ JITCompiler::Call callOperation(V_DFGOperation_W operation, WatchpointSet* watchpointSet)
{
m_jit.setupArguments(TrustedImmPtr(watchpointSet));
return appendCall(operation);
return callOperation(operation, arg1, arg2, arg3, arg4, arg5);
}
- JITCompiler::Call callOperation(D_JITOperation_ZZ operation, FPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(D_DFGOperation_ZZ operation, FPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArguments(arg1, arg2);
return appendCallSetResult(operation, result);
}
- JITCompiler::Call callOperation(D_JITOperation_DD operation, FPRReg result, FPRReg arg1, FPRReg arg2)
+ JITCompiler::Call callOperation(D_DFGOperation_DD operation, FPRReg result, FPRReg arg1, FPRReg arg2)
{
m_jit.setupArguments(arg1, arg2);
return appendCallSetResult(operation, result);
}
- JITCompiler::Call callOperation(I_JITOperation_EJss operation, GPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(I_DFGOperation_EJss operation, GPRReg result, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_EZ operation, GPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(C_DFGOperation_EZ operation, GPRReg result, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
}
#if USE(JSVALUE64)
- JITCompiler::Call callOperation(J_JITOperation_E operation, GPRReg result)
+ JITCompiler::Call callOperation(J_DFGOperation_E operation, GPRReg result)
{
m_jit.setupArgumentsExecState();
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EP operation, GPRReg result, void* pointer)
+ JITCompiler::Call callOperation(J_DFGOperation_EP operation, GPRReg result, void* pointer)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(pointer));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(Z_JITOperation_D operation, GPRReg result, FPRReg arg1)
+ JITCompiler::Call callOperation(Z_DFGOperation_D operation, GPRReg result, FPRReg arg1)
{
m_jit.setupArguments(arg1);
JITCompiler::Call call = m_jit.appendCall(operation);
m_jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, result);
return call;
}
- JITCompiler::Call callOperation(J_JITOperation_EI operation, GPRReg result, StringImpl* uid)
+ JITCompiler::Call callOperation(J_DFGOperation_EI operation, GPRReg result, StringImpl* uid)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(uid));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EA operation, GPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(J_DFGOperation_EA operation, GPRReg result, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EAZ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EAZ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EJssZ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EJssZ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EPS operation, GPRReg result, void* pointer, size_t size)
+ JITCompiler::Call callOperation(J_DFGOperation_EPS operation, GPRReg result, void* pointer, size_t size)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(pointer), TrustedImmPtr(size));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_ESS operation, GPRReg result, int startConstant, int numConstants)
+ JITCompiler::Call callOperation(J_DFGOperation_ESS operation, GPRReg result, int startConstant, int numConstants)
{
m_jit.setupArgumentsWithExecState(TrustedImm32(startConstant), TrustedImm32(numConstants));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EPP operation, GPRReg result, GPRReg arg1, void* pointer)
+ JITCompiler::Call callOperation(J_DFGOperation_EPP operation, GPRReg result, GPRReg arg1, void* pointer)
{
m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(pointer));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EC operation, GPRReg result, JSCell* cell)
+ JITCompiler::Call callOperation(J_DFGOperation_EC operation, GPRReg result, JSCell* cell)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(cell));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_ECI operation, GPRReg result, GPRReg arg1, const StringImpl* uid)
+ JITCompiler::Call callOperation(J_DFGOperation_ECI operation, GPRReg result, GPRReg arg1, const StringImpl* uid)
{
m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(uid));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EJI operation, GPRReg result, GPRReg arg1, StringImpl* uid)
+ JITCompiler::Call callOperation(J_DFGOperation_EJI operation, GPRReg result, GPRReg arg1, StringImpl* uid)
{
m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(uid));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EDA operation, GPRReg result, FPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EDA operation, GPRReg result, FPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EJA operation, GPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EJA operation, GPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EP operation, GPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(J_DFGOperation_EP operation, GPRReg result, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EZ operation, GPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(J_DFGOperation_EZ operation, GPRReg result, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EZ operation, GPRReg result, int32_t arg1)
+ JITCompiler::Call callOperation(J_DFGOperation_EZ operation, GPRReg result, int32_t arg1)
{
m_jit.setupArgumentsWithExecState(TrustedImm32(arg1));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EZZ operation, GPRReg result, int32_t arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EZZ operation, GPRReg result, int32_t arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(TrustedImm32(arg1), arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EZIcfZ operation, GPRReg result, int32_t arg1, InlineCallFrame* inlineCallFrame, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EZIcfZ operation, GPRReg result, int32_t arg1, InlineCallFrame* inlineCallFrame, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(TrustedImm32(arg1), TrustedImmPtr(inlineCallFrame), arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EJS operation, GPRReg result, GPRReg value, size_t index)
+ JITCompiler::Call callOperation(P_DFGOperation_EJS operation, GPRReg result, GPRReg value, size_t index)
{
m_jit.setupArgumentsWithExecState(value, TrustedImmPtr(index));
return appendCallSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EStJ operation, GPRReg result, Structure* structure, GPRReg arg2)
+ JITCompiler::Call callOperation(P_DFGOperation_EStJ operation, GPRReg result, Structure* structure, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_EJ operation, GPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(C_DFGOperation_EJ operation, GPRReg result, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(S_JITOperation_J operation, GPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(S_DFGOperation_J operation, GPRReg result, GPRReg arg1)
{
m_jit.setupArguments(arg1);
return appendCallSetResult(operation, result);
}
- JITCompiler::Call callOperation(S_JITOperation_EJ operation, GPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(S_DFGOperation_EJ operation, GPRReg result, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EJ operation, GPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(J_DFGOperation_EJ operation, GPRReg result, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(S_JITOperation_EJJ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(S_DFGOperation_EJJ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EPP operation, GPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EPP operation, GPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EJJ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg result, GPRReg arg1, MacroAssembler::TrustedImm32 imm)
+ JITCompiler::Call callOperation(J_DFGOperation_EJJ operation, GPRReg result, GPRReg arg1, MacroAssembler::TrustedImm32 imm)
{
m_jit.setupArgumentsWithExecState(arg1, MacroAssembler::TrustedImm64(JSValue::encode(jsNumber(imm.m_value))));
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg result, MacroAssembler::TrustedImm32 imm, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EJJ operation, GPRReg result, MacroAssembler::TrustedImm32 imm, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(MacroAssembler::TrustedImm64(JSValue::encode(jsNumber(imm.m_value))), arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_ECC operation, GPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_ECC operation, GPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_ECJ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_ECJ operation, GPRReg result, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_ECJ operation, GPRReg result, GPRReg arg1, JSValueRegs arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_ECJ operation, GPRReg result, GPRReg arg1, JSValueRegs arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2.gpr());
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(V_JITOperation_EOZD operation, GPRReg arg1, GPRReg arg2, FPRReg arg3)
+ JITCompiler::Call callOperation(V_DFGOperation_EOZD operation, GPRReg arg1, GPRReg arg2, FPRReg arg3)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_EJPP operation, GPRReg arg1, GPRReg arg2, void* pointer)
+ JITCompiler::Call callOperation(V_DFGOperation_EJPP operation, GPRReg arg1, GPRReg arg2, void* pointer)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, TrustedImmPtr(pointer));
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_EJCI operation, GPRReg arg1, GPRReg arg2, StringImpl* uid)
+ JITCompiler::Call callOperation(V_DFGOperation_EJCI operation, GPRReg arg1, GPRReg arg2, StringImpl* uid)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, TrustedImmPtr(uid));
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_EJJJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
+ JITCompiler::Call callOperation(V_DFGOperation_EJJJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_EPZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
+ JITCompiler::Call callOperation(V_DFGOperation_EPZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_EOZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
+ JITCompiler::Call callOperation(V_DFGOperation_EOZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_ECJJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
+ JITCompiler::Call callOperation(V_DFGOperation_ECJJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, arg3);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(D_JITOperation_EJ operation, FPRReg result, GPRReg arg1)
+ JITCompiler::Call callOperation(D_DFGOperation_EJ operation, FPRReg result, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, result);
#define SH4_32BIT_DUMMY_ARG
#endif
- JITCompiler::Call callOperation(Z_JITOperation_D operation, GPRReg result, FPRReg arg1)
+ JITCompiler::Call callOperation(Z_DFGOperation_D operation, GPRReg result, FPRReg arg1)
{
prepareForExternalCall();
m_jit.setupArguments(arg1);
m_jit.zeroExtend32ToPtr(GPRInfo::returnValueGPR, result);
return call;
}
- JITCompiler::Call callOperation(J_JITOperation_E operation, GPRReg resultTag, GPRReg resultPayload)
+ JITCompiler::Call callOperation(J_DFGOperation_E operation, GPRReg resultTag, GPRReg resultPayload)
{
m_jit.setupArgumentsExecState();
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EP operation, GPRReg resultTag, GPRReg resultPayload, void* pointer)
+ JITCompiler::Call callOperation(J_DFGOperation_EP operation, GPRReg resultTag, GPRReg resultPayload, void* pointer)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(pointer));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EPP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, void* pointer)
+ JITCompiler::Call callOperation(J_DFGOperation_EPP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, void* pointer)
{
m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(pointer));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1)
+ JITCompiler::Call callOperation(J_DFGOperation_EP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EI operation, GPRReg resultTag, GPRReg resultPayload, StringImpl* uid)
+ JITCompiler::Call callOperation(J_DFGOperation_EI operation, GPRReg resultTag, GPRReg resultPayload, StringImpl* uid)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(uid));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EA operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1)
+ JITCompiler::Call callOperation(J_DFGOperation_EA operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EAZ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EAZ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EJssZ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EJssZ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EPS operation, GPRReg resultTag, GPRReg resultPayload, void* pointer, size_t size)
+ JITCompiler::Call callOperation(J_DFGOperation_EPS operation, GPRReg resultTag, GPRReg resultPayload, void* pointer, size_t size)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(pointer), TrustedImmPtr(size));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_ESS operation, GPRReg resultTag, GPRReg resultPayload, int startConstant, int numConstants)
+ JITCompiler::Call callOperation(J_DFGOperation_ESS operation, GPRReg resultTag, GPRReg resultPayload, int startConstant, int numConstants)
{
m_jit.setupArgumentsWithExecState(TrustedImm32(startConstant), TrustedImm32(numConstants));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EJP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, void* pointer)
+ JITCompiler::Call callOperation(J_DFGOperation_EJP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, void* pointer)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(pointer));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EJP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EJP operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, arg2);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EC operation, GPRReg resultTag, GPRReg resultPayload, JSCell* cell)
+ JITCompiler::Call callOperation(J_DFGOperation_EC operation, GPRReg resultTag, GPRReg resultPayload, JSCell* cell)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(cell));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_ECI operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, const StringImpl* uid)
+ JITCompiler::Call callOperation(J_DFGOperation_ECI operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, const StringImpl* uid)
{
m_jit.setupArgumentsWithExecState(arg1, TrustedImmPtr(uid));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EJI operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, StringImpl* uid)
+ JITCompiler::Call callOperation(J_DFGOperation_EJI operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, StringImpl* uid)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(uid));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EJI operation, GPRReg resultTag, GPRReg resultPayload, int32_t arg1Tag, GPRReg arg1Payload, StringImpl* uid)
+ JITCompiler::Call callOperation(J_DFGOperation_EJI operation, GPRReg resultTag, GPRReg resultPayload, int32_t arg1Tag, GPRReg arg1Payload, StringImpl* uid)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, TrustedImm32(arg1Tag), TrustedImmPtr(uid));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EDA operation, GPRReg resultTag, GPRReg resultPayload, FPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EDA operation, GPRReg resultTag, GPRReg resultPayload, FPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EJA operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EJA operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, arg2);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EJA operation, GPRReg resultTag, GPRReg resultPayload, TrustedImm32 arg1Tag, GPRReg arg1Payload, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EJA operation, GPRReg resultTag, GPRReg resultPayload, TrustedImm32 arg1Tag, GPRReg arg1Payload, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, arg2);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload)
+ JITCompiler::Call callOperation(J_DFGOperation_EJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EZ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1)
+ JITCompiler::Call callOperation(J_DFGOperation_EZ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1)
{
m_jit.setupArgumentsWithExecState(arg1);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EZ operation, GPRReg resultTag, GPRReg resultPayload, int32_t arg1)
+ JITCompiler::Call callOperation(J_DFGOperation_EZ operation, GPRReg resultTag, GPRReg resultPayload, int32_t arg1)
{
m_jit.setupArgumentsWithExecState(TrustedImm32(arg1));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EZIcfZ operation, GPRReg resultTag, GPRReg resultPayload, int32_t arg1, InlineCallFrame* inlineCallFrame, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EZIcfZ operation, GPRReg resultTag, GPRReg resultPayload, int32_t arg1, InlineCallFrame* inlineCallFrame, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(TrustedImm32(arg1), TrustedImmPtr(inlineCallFrame), arg2);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EZZ operation, GPRReg resultTag, GPRReg resultPayload, int32_t arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_EZZ operation, GPRReg resultTag, GPRReg resultPayload, int32_t arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(TrustedImm32(arg1), arg2);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(P_JITOperation_EJS operation, GPRReg result, JSValueRegs value, size_t index)
+ JITCompiler::Call callOperation(P_DFGOperation_EJS operation, GPRReg result, JSValueRegs value, size_t index)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG value.payloadGPR(), value.tagGPR(), TrustedImmPtr(index));
return appendCallSetResult(operation, result);
}
- JITCompiler::Call callOperation(P_JITOperation_EStJ operation, GPRReg result, Structure* structure, GPRReg arg2Tag, GPRReg arg2Payload)
+ JITCompiler::Call callOperation(P_DFGOperation_EStJ operation, GPRReg result, Structure* structure, GPRReg arg2Tag, GPRReg arg2Payload)
{
m_jit.setupArgumentsWithExecState(TrustedImmPtr(structure), arg2Payload, arg2Tag);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(C_JITOperation_EJ operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
+ JITCompiler::Call callOperation(C_DFGOperation_EJ operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(S_JITOperation_J operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
+ JITCompiler::Call callOperation(S_DFGOperation_J operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
{
m_jit.setupArguments(arg1Payload, arg1Tag);
return appendCallSetResult(operation, result);
}
- JITCompiler::Call callOperation(S_JITOperation_EJ operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
+ JITCompiler::Call callOperation(S_DFGOperation_EJ operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(S_JITOperation_EJJ operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
+ JITCompiler::Call callOperation(S_DFGOperation_EJJ operation, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
return appendCallWithExceptionCheckSetResult(operation, result);
}
- JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
+ JITCompiler::Call callOperation(J_DFGOperation_EJJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, MacroAssembler::TrustedImm32 imm)
+ JITCompiler::Call callOperation(J_DFGOperation_EJJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1Tag, GPRReg arg1Payload, MacroAssembler::TrustedImm32 imm)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG imm, TrustedImm32(JSValue::Int32Tag));
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_EJJ operation, GPRReg resultTag, GPRReg resultPayload, MacroAssembler::TrustedImm32 imm, GPRReg arg2Tag, GPRReg arg2Payload)
+ JITCompiler::Call callOperation(J_DFGOperation_EJJ operation, GPRReg resultTag, GPRReg resultPayload, MacroAssembler::TrustedImm32 imm, GPRReg arg2Tag, GPRReg arg2Payload)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG imm, TrustedImm32(JSValue::Int32Tag), SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_ECJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload)
+ JITCompiler::Call callOperation(J_DFGOperation_ECJ operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload)
{
m_jit.setupArgumentsWithExecState(arg1, arg2Payload, arg2Tag);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(J_JITOperation_ECJ operation, JSValueRegs result, GPRReg arg1, JSValueRegs arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_ECJ operation, JSValueRegs result, GPRReg arg1, JSValueRegs arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2.payloadGPR(), arg2.tagGPR());
return appendCallWithExceptionCheckSetResult(operation, result.payloadGPR(), result.tagGPR());
}
- JITCompiler::Call callOperation(J_JITOperation_ECC operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2)
+ JITCompiler::Call callOperation(J_DFGOperation_ECC operation, GPRReg resultTag, GPRReg resultPayload, GPRReg arg1, GPRReg arg2)
{
m_jit.setupArgumentsWithExecState(arg1, arg2);
return appendCallWithExceptionCheckSetResult(operation, resultPayload, resultTag);
}
- JITCompiler::Call callOperation(V_JITOperation_EOZD operation, GPRReg arg1, GPRReg arg2, FPRReg arg3)
+ JITCompiler::Call callOperation(V_DFGOperation_EOZD operation, GPRReg arg1, GPRReg arg2, FPRReg arg3)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, EABI_32BIT_DUMMY_ARG arg3);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_EJPP operation, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2, void* pointer)
+ JITCompiler::Call callOperation(V_DFGOperation_EJPP operation, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2, void* pointer)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, arg2, TrustedImmPtr(pointer));
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_EJCI operation, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2, StringImpl* uid)
+ JITCompiler::Call callOperation(V_DFGOperation_EJCI operation, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2, StringImpl* uid)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, arg2, TrustedImmPtr(uid));
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_ECJJ operation, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload, GPRReg arg3Tag, GPRReg arg3Payload)
+ JITCompiler::Call callOperation(V_DFGOperation_ECJJ operation, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload, GPRReg arg3Tag, GPRReg arg3Payload)
{
m_jit.setupArgumentsWithExecState(arg1, arg2Payload, arg2Tag, arg3Payload, arg3Tag);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_EPZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3Tag, GPRReg arg3Payload)
+ JITCompiler::Call callOperation(V_DFGOperation_EPZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3Tag, GPRReg arg3Payload)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, EABI_32BIT_DUMMY_ARG SH4_32BIT_DUMMY_ARG arg3Payload, arg3Tag);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_EOZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3Tag, GPRReg arg3Payload)
+ JITCompiler::Call callOperation(V_DFGOperation_EOZJ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3Tag, GPRReg arg3Payload)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, EABI_32BIT_DUMMY_ARG SH4_32BIT_DUMMY_ARG arg3Payload, arg3Tag);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(V_JITOperation_EOZJ operation, GPRReg arg1, GPRReg arg2, TrustedImm32 arg3Tag, GPRReg arg3Payload)
+ JITCompiler::Call callOperation(V_DFGOperation_EOZJ operation, GPRReg arg1, GPRReg arg2, TrustedImm32 arg3Tag, GPRReg arg3Payload)
{
m_jit.setupArgumentsWithExecState(arg1, arg2, EABI_32BIT_DUMMY_ARG SH4_32BIT_DUMMY_ARG arg3Payload, arg3Tag);
return appendCallWithExceptionCheck(operation);
}
- JITCompiler::Call callOperation(D_JITOperation_EJ operation, FPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
+ JITCompiler::Call callOperation(D_DFGOperation_EJ operation, FPRReg result, GPRReg arg1Tag, GPRReg arg1Payload)
{
m_jit.setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
return appendCallWithExceptionCheckSetResult(operation, result);
return betterUseStrictInt52(edge.node());
}
- bool compare(Node*, MacroAssembler::RelationalCondition, MacroAssembler::DoubleCondition, S_JITOperation_EJJ);
- bool compilePeepHoleBranch(Node*, MacroAssembler::RelationalCondition, MacroAssembler::DoubleCondition, S_JITOperation_EJJ);
+ bool compare(Node*, MacroAssembler::RelationalCondition, MacroAssembler::DoubleCondition, S_DFGOperation_EJJ);
+ bool compilePeepHoleBranch(Node*, MacroAssembler::RelationalCondition, MacroAssembler::DoubleCondition, S_DFGOperation_EJJ);
void compilePeepHoleInt32Branch(Node*, Node* branchNode, JITCompiler::RelationalCondition);
void compilePeepHoleInt52Branch(Node*, Node* branchNode, JITCompiler::RelationalCondition);
void compilePeepHoleBooleanBranch(Node*, Node* branchNode, JITCompiler::RelationalCondition);
#include "ArrayPrototype.h"
#include "DFGAbstractInterpreterInlines.h"
#include "DFGCallArrayAllocatorSlowPathGenerator.h"
-#include "DFGOperations.h"
#include "DFGSlowPathGenerator.h"
#include "JSActivation.h"
#include "ObjectPrototype.h"
JITCompiler::DataLabel32 payloadStoreWithPatch = m_jit.store32WithAddressOffsetPatch(valuePayloadGPR, JITCompiler::Address(scratchGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
JITCompiler::Label doneLabel = m_jit.label();
- V_JITOperation_EJCI optimizedCall;
+ V_DFGOperation_EJCI optimizedCall;
if (m_jit.strictModeFor(m_currentNode->codeOrigin)) {
if (putKind == Direct)
optimizedCall = operationPutByIdDirectStrictOptimize;
return false;
}
-void SpeculativeJIT::nonSpeculativePeepholeBranch(Node* node, Node* branchNode, MacroAssembler::RelationalCondition cond, S_JITOperation_EJJ helperFunction)
+void SpeculativeJIT::nonSpeculativePeepholeBranch(Node* node, Node* branchNode, MacroAssembler::RelationalCondition cond, S_DFGOperation_EJJ helperFunction)
{
BasicBlock* taken = branchNode->takenBlock();
BasicBlock* notTaken = branchNode->notTakenBlock();
template<typename JumpType>
class CompareAndBoxBooleanSlowPathGenerator
- : public CallSlowPathGenerator<JumpType, S_JITOperation_EJJ, GPRReg> {
+ : public CallSlowPathGenerator<JumpType, S_DFGOperation_EJJ, GPRReg> {
public:
CompareAndBoxBooleanSlowPathGenerator(
JumpType from, SpeculativeJIT* jit,
- S_JITOperation_EJJ function, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload,
+ S_DFGOperation_EJJ function, GPRReg result, GPRReg arg1Tag, GPRReg arg1Payload,
GPRReg arg2Tag, GPRReg arg2Payload)
- : CallSlowPathGenerator<JumpType, S_JITOperation_EJJ, GPRReg>(
+ : CallSlowPathGenerator<JumpType, S_DFGOperation_EJJ, GPRReg>(
from, jit, function, NeedToSpill, result)
, m_arg1Tag(arg1Tag)
, m_arg1Payload(arg1Payload)
GPRReg m_arg2Payload;
};
-void SpeculativeJIT::nonSpeculativeNonPeepholeCompare(Node* node, MacroAssembler::RelationalCondition cond, S_JITOperation_EJJ helperFunction)
+void SpeculativeJIT::nonSpeculativeNonPeepholeCompare(Node* node, MacroAssembler::RelationalCondition cond, S_DFGOperation_EJJ helperFunction)
{
JSValueOperand arg1(this, node->child1());
JSValueOperand arg2(this, node->child2());
TrustedImm32(FinalObjectType)));
m_jit.move(thisValuePayloadGPR, tempGPR);
m_jit.move(thisValueTagGPR, tempTagGPR);
- J_JITOperation_EJ function;
+ J_DFGOperation_EJ function;
if (m_jit.graph().executableFor(node->codeOrigin)->isStrictMode())
function = operationToThisStrict;
else
#include "ArrayPrototype.h"
#include "DFGAbstractInterpreterInlines.h"
#include "DFGCallArrayAllocatorSlowPathGenerator.h"
-#include "DFGOperations.h"
#include "DFGSlowPathGenerator.h"
#include "JSCJSValueInlines.h"
#include "ObjectPrototype.h"
JITCompiler::Label doneLabel = m_jit.label();
- V_JITOperation_EJCI optimizedCall;
+ V_DFGOperation_EJCI optimizedCall;
if (m_jit.strictModeFor(m_currentNode->codeOrigin)) {
if (putKind == Direct)
optimizedCall = operationPutByIdDirectStrictOptimize;
return false;
}
-void SpeculativeJIT::nonSpeculativePeepholeBranch(Node* node, Node* branchNode, MacroAssembler::RelationalCondition cond, S_JITOperation_EJJ helperFunction)
+void SpeculativeJIT::nonSpeculativePeepholeBranch(Node* node, Node* branchNode, MacroAssembler::RelationalCondition cond, S_DFGOperation_EJJ helperFunction)
{
BasicBlock* taken = branchNode->takenBlock();
BasicBlock* notTaken = branchNode->notTakenBlock();
template<typename JumpType>
class CompareAndBoxBooleanSlowPathGenerator
- : public CallSlowPathGenerator<JumpType, S_JITOperation_EJJ, GPRReg> {
+ : public CallSlowPathGenerator<JumpType, S_DFGOperation_EJJ, GPRReg> {
public:
CompareAndBoxBooleanSlowPathGenerator(
JumpType from, SpeculativeJIT* jit,
- S_JITOperation_EJJ function, GPRReg result, GPRReg arg1, GPRReg arg2)
- : CallSlowPathGenerator<JumpType, S_JITOperation_EJJ, GPRReg>(
+ S_DFGOperation_EJJ function, GPRReg result, GPRReg arg1, GPRReg arg2)
+ : CallSlowPathGenerator<JumpType, S_DFGOperation_EJJ, GPRReg>(
from, jit, function, NeedToSpill, result)
, m_arg1(arg1)
, m_arg2(arg2)
GPRReg m_arg2;
};
-void SpeculativeJIT::nonSpeculativeNonPeepholeCompare(Node* node, MacroAssembler::RelationalCondition cond, S_JITOperation_EJJ helperFunction)
+void SpeculativeJIT::nonSpeculativeNonPeepholeCompare(Node* node, MacroAssembler::RelationalCondition cond, S_DFGOperation_EJJ helperFunction)
{
JSValueOperand arg1(this, node->child1());
JSValueOperand arg2(this, node->child2());
MacroAssembler::Address(tempGPR, Structure::typeInfoTypeOffset()),
TrustedImm32(FinalObjectType)));
m_jit.move(thisValueGPR, tempGPR);
- J_JITOperation_EJ function;
+ J_DFGOperation_EJ function;
if (m_jit.graph().executableFor(node->codeOrigin)->isStrictMode())
function = operationToThisStrict;
else
return FINALIZE_CODE(patchBuffer, ("DFG OSR exit generation thunk"));
}
+inline void emitPointerValidation(CCallHelpers& jit, GPRReg pointerGPR)
+{
+#if !ASSERT_DISABLED
+ CCallHelpers::Jump isNonZero = jit.branchTestPtr(CCallHelpers::NonZero, pointerGPR);
+ jit.breakpoint();
+ isNonZero.link(&jit);
+ jit.push(pointerGPR);
+ jit.load8(pointerGPR, pointerGPR);
+ jit.pop(pointerGPR);
+#else
+ UNUSED_PARAM(jit);
+ UNUSED_PARAM(pointerGPR);
+#endif
+}
+
+MacroAssemblerCodeRef throwExceptionFromCallSlowPathGenerator(VM* vm)
+{
+ CCallHelpers jit(vm);
+
+ // We will jump to here if the JIT code thinks it's making a call, but the
+ // linking helper (C++ code) decided to throw an exception instead. We will
+ // have saved the callReturnIndex in the first arguments of JITStackFrame.
+ // Note that the return address will be on the stack at this point, so we
+ // need to remove it and drop it on the floor, since we don't care about it.
+ // Finally note that the call frame register points at the callee frame, so
+ // we need to pop it.
+ jit.preserveReturnAddressAfterCall(GPRInfo::nonPreservedNonReturnGPR);
+ jit.loadPtr(
+ CCallHelpers::Address(
+ GPRInfo::callFrameRegister,
+ static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::CallerFrame),
+ GPRInfo::callFrameRegister);
+#if USE(JSVALUE64)
+ jit.peek64(GPRInfo::nonPreservedNonReturnGPR, JITSTACKFRAME_ARGS_INDEX);
+#else
+ jit.peek(GPRInfo::nonPreservedNonReturnGPR, JITSTACKFRAME_ARGS_INDEX);
+#endif
+ jit.setupArgumentsWithExecState(GPRInfo::nonPreservedNonReturnGPR);
+ jit.move(CCallHelpers::TrustedImmPtr(bitwise_cast<void*>(lookupExceptionHandler)), GPRInfo::nonArgGPR0);
+ emitPointerValidation(jit, GPRInfo::nonArgGPR0);
+ jit.call(GPRInfo::nonArgGPR0);
+ emitPointerValidation(jit, GPRInfo::returnValueGPR2);
+ jit.jump(GPRInfo::returnValueGPR2);
+
+ LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
+ return FINALIZE_CODE(patchBuffer, ("DFG throw exception from call slow path thunk"));
+}
+
+static void slowPathFor(
+ CCallHelpers& jit, VM* vm, P_DFGOperation_E slowPathFunction)
+{
+ jit.preserveReturnAddressAfterCall(GPRInfo::nonArgGPR2);
+ emitPointerValidation(jit, GPRInfo::nonArgGPR2);
+ jit.storePtr(
+ GPRInfo::nonArgGPR2,
+ CCallHelpers::Address(
+ GPRInfo::callFrameRegister,
+ static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ReturnPC));
+ jit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);
+#if USE(JSVALUE64)
+ jit.poke64(GPRInfo::nonPreservedNonReturnGPR, JITSTACKFRAME_ARGS_INDEX);
+#else
+ jit.poke(GPRInfo::nonPreservedNonReturnGPR, JITSTACKFRAME_ARGS_INDEX);
+#endif
+ jit.setupArgumentsExecState();
+ jit.move(CCallHelpers::TrustedImmPtr(bitwise_cast<void*>(slowPathFunction)), GPRInfo::nonArgGPR0);
+ emitPointerValidation(jit, GPRInfo::nonArgGPR0);
+ jit.call(GPRInfo::nonArgGPR0);
+
+ // This slow call will return the address of one of the following:
+ // 1) Exception throwing thunk.
+ // 2) Host call return value returner thingy.
+ // 3) The function to call.
+ jit.loadPtr(
+ CCallHelpers::Address(
+ GPRInfo::callFrameRegister,
+ static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ReturnPC),
+ GPRInfo::nonPreservedNonReturnGPR);
+ jit.storePtr(
+ CCallHelpers::TrustedImmPtr(0),
+ CCallHelpers::Address(
+ GPRInfo::callFrameRegister,
+ static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ReturnPC));
+ emitPointerValidation(jit, GPRInfo::nonPreservedNonReturnGPR);
+ jit.restoreReturnAddressBeforeReturn(GPRInfo::nonPreservedNonReturnGPR);
+ emitPointerValidation(jit, GPRInfo::returnValueGPR);
+ jit.jump(GPRInfo::returnValueGPR);
+}
+
+static MacroAssemblerCodeRef linkForThunkGenerator(
+ VM* vm, CodeSpecializationKind kind)
+{
+ // The return address is on the stack or in the link register. We will hence
+ // save the return address to the call frame while we make a C++ function call
+ // to perform linking and lazy compilation if necessary. We expect the callee
+ // to be in nonArgGPR0/nonArgGPR1 (payload/tag), the call frame to have already
+ // been adjusted, nonPreservedNonReturnGPR holds the exception handler index,
+ // and all other registers to be available for use. We use JITStackFrame::args
+ // to save important information across calls.
+
+ CCallHelpers jit(vm);
+
+ slowPathFor(jit, vm, kind == CodeForCall ? operationLinkCall : operationLinkConstruct);
+
+ LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
+ return FINALIZE_CODE(
+ patchBuffer,
+ ("DFG link %s slow path thunk", kind == CodeForCall ? "call" : "construct"));
+}
+
+MacroAssemblerCodeRef linkCallThunkGenerator(VM* vm)
+{
+ return linkForThunkGenerator(vm, CodeForCall);
+}
+
+MacroAssemblerCodeRef linkConstructThunkGenerator(VM* vm)
+{
+ return linkForThunkGenerator(vm, CodeForConstruct);
+}
+
+// For closure optimizations, we only include calls, since if you're using closures for
+// object construction then you're going to lose big time anyway.
+MacroAssemblerCodeRef linkClosureCallThunkGenerator(VM* vm)
+{
+ CCallHelpers jit(vm);
+
+ slowPathFor(jit, vm, operationLinkClosureCall);
+
+ LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
+ return FINALIZE_CODE(patchBuffer, ("DFG link closure call slow path thunk"));
+}
+
+static MacroAssemblerCodeRef virtualForThunkGenerator(
+ VM* vm, CodeSpecializationKind kind)
+{
+ // The return address is on the stack, or in the link register. We will hence
+ // jump to the callee, or save the return address to the call frame while we
+ // make a C++ function call to the appropriate DFG operation.
+
+ CCallHelpers jit(vm);
+
+ CCallHelpers::JumpList slowCase;
+
+ // FIXME: we should have a story for eliminating these checks. In many cases,
+ // the DFG knows that the value is definitely a cell, or definitely a function.
+
+#if USE(JSVALUE64)
+ slowCase.append(
+ jit.branchTest64(
+ CCallHelpers::NonZero, GPRInfo::nonArgGPR0, GPRInfo::tagMaskRegister));
+#else
+ slowCase.append(
+ jit.branch32(
+ CCallHelpers::NotEqual, GPRInfo::nonArgGPR1,
+ CCallHelpers::TrustedImm32(JSValue::CellTag)));
+#endif
+ jit.loadPtr(CCallHelpers::Address(GPRInfo::nonArgGPR0, JSCell::structureOffset()), GPRInfo::nonArgGPR2);
+ slowCase.append(
+ jit.branchPtr(
+ CCallHelpers::NotEqual,
+ CCallHelpers::Address(GPRInfo::nonArgGPR2, Structure::classInfoOffset()),
+ CCallHelpers::TrustedImmPtr(JSFunction::info())));
+
+ // Now we know we have a JSFunction.
+
+ jit.loadPtr(
+ CCallHelpers::Address(GPRInfo::nonArgGPR0, JSFunction::offsetOfExecutable()),
+ GPRInfo::nonArgGPR2);
+ slowCase.append(
+ jit.branch32(
+ CCallHelpers::LessThan,
+ CCallHelpers::Address(
+ GPRInfo::nonArgGPR2, ExecutableBase::offsetOfNumParametersFor(kind)),
+ CCallHelpers::TrustedImm32(0)));
+
+ // Now we know that we have a CodeBlock, and we're committed to making a fast
+ // call.
+
+ jit.loadPtr(
+ CCallHelpers::Address(GPRInfo::nonArgGPR0, JSFunction::offsetOfScopeChain()),
+ GPRInfo::nonArgGPR1);
+#if USE(JSVALUE64)
+ jit.store64(
+ GPRInfo::nonArgGPR1,
+ CCallHelpers::Address(
+ GPRInfo::callFrameRegister,
+ static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ScopeChain));
+#else
+ jit.storePtr(
+ GPRInfo::nonArgGPR1,
+ CCallHelpers::Address(
+ GPRInfo::callFrameRegister,
+ static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ScopeChain +
+ OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ jit.store32(
+ CCallHelpers::TrustedImm32(JSValue::CellTag),
+ CCallHelpers::Address(
+ GPRInfo::callFrameRegister,
+ static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ScopeChain +
+ OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+#endif
+
+ jit.loadPtr(
+ CCallHelpers::Address(GPRInfo::nonArgGPR2, ExecutableBase::offsetOfJITCodeWithArityCheckFor(kind)),
+ GPRInfo::regT0);
+
+ // Make a tail call. This will return back to DFG code.
+ emitPointerValidation(jit, GPRInfo::regT0);
+ jit.jump(GPRInfo::regT0);
+
+ slowCase.link(&jit);
+
+ // Here we don't know anything, so revert to the full slow path.
+
+ slowPathFor(jit, vm, kind == CodeForCall ? operationVirtualCall : operationVirtualConstruct);
+
+ LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
+ return FINALIZE_CODE(
+ patchBuffer,
+ ("DFG virtual %s slow path thunk", kind == CodeForCall ? "call" : "construct"));
+}
+
+MacroAssemblerCodeRef virtualCallThunkGenerator(VM* vm)
+{
+ return virtualForThunkGenerator(vm, CodeForCall);
+}
+
+MacroAssemblerCodeRef virtualConstructThunkGenerator(VM* vm)
+{
+ return virtualForThunkGenerator(vm, CodeForConstruct);
+}
+
} } // namespace JSC::DFG
#endif // ENABLE(DFG_JIT)
MacroAssemblerCodeRef osrExitGenerationThunkGenerator(VM*);
+MacroAssemblerCodeRef throwExceptionFromCallSlowPathGenerator(VM*);
+
+MacroAssemblerCodeRef linkCallThunkGenerator(VM*);
+MacroAssemblerCodeRef linkConstructThunkGenerator(VM*);
+
+MacroAssemblerCodeRef linkClosureCallThunkGenerator(VM*);
+
+MacroAssemblerCodeRef virtualCallThunkGenerator(VM*);
+MacroAssemblerCodeRef virtualConstructThunkGenerator(VM*);
+
} } // namespace JSC::DFG
#endif // ENABLE(DFG_JIT)
macro(osrExit, "webkit_osr_exit", functionType(voidType, boolean, int32, Variadic))
#define FOR_EACH_FUNCTION_TYPE(macro) \
- macro(I_JITOperation_EJss, functionType(intPtr, intPtr, intPtr)) \
- macro(J_JITOperation_E, functionType(int64, intPtr)) \
- macro(P_JITOperation_EC, functionType(intPtr, intPtr, intPtr)) \
- macro(V_JITOperation_EOZD, functionType(voidType, intPtr, intPtr, int32, doubleType)) \
- macro(V_JITOperation_EOZJ, functionType(voidType, intPtr, intPtr, int32, int64)) \
- macro(Z_JITOperation_D, functionType(int32, doubleType))
+ macro(I_DFGOperation_EJss, functionType(intPtr, intPtr, intPtr)) \
+ macro(J_DFGOperation_E, functionType(int64, intPtr)) \
+ macro(P_DFGOperation_EC, functionType(intPtr, intPtr, intPtr)) \
+ macro(V_DFGOperation_EOZD, functionType(voidType, intPtr, intPtr, int32, doubleType)) \
+ macro(V_DFGOperation_EOZJ, functionType(voidType, intPtr, intPtr, int32, int64)) \
+ macro(Z_DFGOperation_D, functionType(int32, doubleType))
class IntrinsicRepository : public CommonValues {
public:
#undef FUNCTION_TYPE_GETTER
#define FUNCTION_TYPE_RESOLVER(typeName, type) \
- LType operationType(JSC::typeName) \
+ LType operationType(DFG::typeName) \
{ \
return typeName(); \
}
// FIXME: This is unacceptably slow.
// https://bugs.webkit.org/show_bug.cgi?id=113621
- J_JITOperation_E function =
+ J_DFGOperation_E function =
m_node->op() == Call ? operationFTLCall : operationFTLConstruct;
int dummyThisArgument = m_node->op() == Call ? 0 : 1;
// Takes the call frame and the index of the exit, and returns the address to
// jump to.
extern "C" {
-void* JIT_OPERATION compileFTLOSRExit(ExecState*, unsigned exitID) WTF_INTERNAL;
+void* DFG_OPERATION compileFTLOSRExit(ExecState*, unsigned exitID) WTF_INTERNAL;
}
} } // namespace JSC::FTL
return (codeOrigin.inlineCallFrame->stackOffset + CallFrame::argumentOffsetIncludingThis(0)) * sizeof(Register);
}
- void writeBarrier(GPRReg owner, GPRReg scratch1, GPRReg scratch2, WriteBarrierUseKind useKind)
- {
- UNUSED_PARAM(owner);
- UNUSED_PARAM(scratch1);
- UNUSED_PARAM(scratch2);
- UNUSED_PARAM(useKind);
- ASSERT(owner != scratch1);
- ASSERT(owner != scratch2);
- ASSERT(scratch1 != scratch2);
-
-#if ENABLE(WRITE_BARRIER_PROFILING)
- emitCount(WriteBarrierCounters::jitCounterFor(useKind));
-#endif
- }
-
Vector<BytecodeAndMachineOffset>& decodedCodeMapFor(CodeBlock*);
protected:
ASSERT(callLinkInfo->callType == CallLinkInfo::Call
|| callLinkInfo->callType == CallLinkInfo::CallVarargs);
if (callLinkInfo->callType == CallLinkInfo::Call) {
- repatchBuffer.relink(callLinkInfo->callReturnLocation, vm->getCTIStub(oldStyleLinkClosureCallGenerator).code());
+ repatchBuffer.relink(callLinkInfo->callReturnLocation, vm->getCTIStub(linkClosureCallGenerator).code());
return;
}
- repatchBuffer.relink(callLinkInfo->callReturnLocation, vm->getCTIStub(oldStyleVirtualCallGenerator).code());
+ repatchBuffer.relink(callLinkInfo->callReturnLocation, vm->getCTIStub(virtualCallGenerator).code());
return;
}
ASSERT(kind == CodeForConstruct);
- repatchBuffer.relink(callLinkInfo->callReturnLocation, vm->getCTIStub(oldStyleVirtualConstructGenerator).code());
+ repatchBuffer.relink(callLinkInfo->callReturnLocation, vm->getCTIStub(virtualConstructGenerator).code());
}
void JIT::linkSlowCall(CodeBlock* callerCodeBlock, CallLinkInfo* callLinkInfo)
{
RepatchBuffer repatchBuffer(callerCodeBlock);
- repatchBuffer.relink(callLinkInfo->callReturnLocation, callerCodeBlock->vm()->getCTIStub(oldStyleVirtualCallGenerator).code());
+ repatchBuffer.relink(callLinkInfo->callReturnLocation, callerCodeBlock->vm()->getCTIStub(virtualCallGenerator).code());
}
} // namespace JSC
linkSlowCase(iter);
emitGetFromCallFrameHeader64(JSStack::Callee, regT0);
- emitNakedCall(m_vm->getCTIStub(oldStyleVirtualCallGenerator).code());
+ emitNakedCall(m_vm->getCTIStub(virtualCallGenerator).code());
sampleCodeBlock(m_codeBlock);
linkSlowCase(iter);
- m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_vm->getCTIStub(oldStyleLinkConstructGenerator).code() : m_vm->getCTIStub(oldStyleLinkCallGenerator).code());
+ m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_vm->getCTIStub(linkConstructGenerator).code() : m_vm->getCTIStub(linkCallGenerator).code());
sampleCodeBlock(m_codeBlock);
patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
- patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(oldStyleVirtualCallGenerator).code()));
+ patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(virtualCallGenerator).code()));
RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
FINALIZE_CODE(
repatchBuffer.replaceWithJump(
RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo->hotPathBegin),
CodeLocationLabel(stubRoutine->code().code()));
- repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(oldStyleVirtualCallGenerator).code());
+ repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(virtualCallGenerator).code());
callLinkInfo->stub = stubRoutine.release();
}
linkSlowCase(iter);
emitLoad(JSStack::Callee, regT1, regT0);
- emitNakedCall(m_vm->getCTIStub(oldStyleVirtualCallGenerator).code());
+ emitNakedCall(m_vm->getCTIStub(virtualCallGenerator).code());
sampleCodeBlock(m_codeBlock);
linkSlowCase(iter);
linkSlowCase(iter);
- m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_vm->getCTIStub(oldStyleLinkConstructGenerator).code() : m_vm->getCTIStub(oldStyleLinkCallGenerator).code());
+ m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_vm->getCTIStub(linkConstructGenerator).code() : m_vm->getCTIStub(linkCallGenerator).code());
sampleCodeBlock(m_codeBlock);
emitPutCallResult(instruction);
patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
- patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(oldStyleVirtualCallGenerator).code()));
+ patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(virtualCallGenerator).code()));
RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
FINALIZE_CODE(
repatchBuffer.replaceWithJump(
RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo->hotPathBegin),
CodeLocationLabel(stubRoutine->code().code()));
- repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(oldStyleVirtualCallGenerator).code());
+ repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(virtualCallGenerator).code());
callLinkInfo->stub = stubRoutine.release();
}
+++ /dev/null
-/*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef JITOperationWrappers_h
-#define JITOperationWrappers_h
-
-#include <wtf/Compiler.h>
-#include <wtf/InlineASM.h>
-
-namespace JSC {
-
-#if CPU(MIPS)
-#if WTF_MIPS_PIC
-#define LOAD_FUNCTION_TO_T9(function) \
- ".set noreorder" "\n" \
- ".cpload $25" "\n" \
- ".set reorder" "\n" \
- "la $t9, " LOCAL_REFERENCE(function) "\n"
-#else
-#define LOAD_FUNCTION_TO_T9(function) "" "\n"
-#endif
-#endif
-
-#if COMPILER(GCC) && CPU(X86_64)
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, register) \
- asm( \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- "mov (%rsp), %" STRINGIZE(register) "\n" \
- "jmp " LOCAL_REFERENCE(function##WithReturnAddress) "\n" \
- );
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, rsi)
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, rcx)
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, rcx)
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, r8)
-
-#elif COMPILER(GCC) && CPU(X86)
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, offset) \
- asm( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- "mov (%esp), %eax\n" \
- "mov %eax, " STRINGIZE(offset) "(%esp)\n" \
- "jmp " LOCAL_REFERENCE(function##WithReturnAddress) "\n" \
- );
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 8)
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 16)
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 20)
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 24)
-
-#elif COMPILER(GCC) && CPU(ARM_THUMB2)
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) \
- asm ( \
- ".text" "\n" \
- ".align 2" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- ".thumb" "\n" \
- ".thumb_func " THUMB_FUNC_PARAM(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- "mov a2, lr" "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) \
- asm ( \
- ".text" "\n" \
- ".align 2" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- ".thumb" "\n" \
- ".thumb_func " THUMB_FUNC_PARAM(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- "mov a4, lr" "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-// EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When being compiled in ARM EABI, it must be aligned even-numbered register (r0, r2 or [sp]).
-// As a result, return address will be at a 4-byte further location in the following cases.
-#if COMPILER_SUPPORTS(EABI) && CPU(ARM)
-#define INSTRUCTION_STORE_RETURN_ADDRESS_EJI "str lr, [sp, #4]"
-#define INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "str lr, [sp, #8]"
-#else
-#define INSTRUCTION_STORE_RETURN_ADDRESS_EJI "str lr, [sp, #0]"
-#define INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "str lr, [sp, #4]"
-#endif
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) \
- asm ( \
- ".text" "\n" \
- ".align 2" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- ".thumb" "\n" \
- ".thumb_func " THUMB_FUNC_PARAM(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- INSTRUCTION_STORE_RETURN_ADDRESS_EJI "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) \
- asm ( \
- ".text" "\n" \
- ".align 2" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- ".thumb" "\n" \
- ".thumb_func " THUMB_FUNC_PARAM(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) \
- asm ( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- INLINE_ARM_FUNCTION(function) \
- SYMBOL_STRING(function) ":" "\n" \
- "mov a2, lr" "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) \
- asm ( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- INLINE_ARM_FUNCTION(function) \
- SYMBOL_STRING(function) ":" "\n" \
- "mov a4, lr" "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-// EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When being compiled in ARM EABI, it must be aligned even-numbered register (r0, r2 or [sp]).
-// As a result, return address will be at a 4-byte further location in the following cases.
-#if COMPILER_SUPPORTS(EABI) && CPU(ARM)
-#define INSTRUCTION_STORE_RETURN_ADDRESS_EJI "str lr, [sp, #4]"
-#define INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "str lr, [sp, #8]"
-#else
-#define INSTRUCTION_STORE_RETURN_ADDRESS_EJI "str lr, [sp, #0]"
-#define INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "str lr, [sp, #4]"
-#endif
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) \
- asm ( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- INLINE_ARM_FUNCTION(function) \
- SYMBOL_STRING(function) ":" "\n" \
- INSTRUCTION_STORE_RETURN_ADDRESS_EJI "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) \
- asm ( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- INLINE_ARM_FUNCTION(function) \
- SYMBOL_STRING(function) ":" "\n" \
- INSTRUCTION_STORE_RETURN_ADDRESS_EJCI "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-#elif COMPILER(GCC) && CPU(MIPS)
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) \
- asm( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- LOAD_FUNCTION_TO_T9(function##WithReturnAddress) \
- "move $a1, $ra" "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) \
- asm( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- LOAD_FUNCTION_TO_T9(function##WithReturnAddress) \
- "move $a3, $ra" "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) \
- asm( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- LOAD_FUNCTION_TO_T9(function##WithReturnAddress) \
- "sw $ra, 20($sp)" "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) \
- asm( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- LOAD_FUNCTION_TO_T9(function##WithReturnAddress) \
- "sw $ra, 24($sp)" "\n" \
- "b " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- );
-
-#elif COMPILER(GCC) && CPU(SH4)
-
-#define SH4_SCRATCH_REGISTER "r11"
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) \
- asm( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- "sts pr, r5" "\n" \
- "bra " LOCAL_REFERENCE(function) "WithReturnAddress" "\n" \
- "nop" "\n" \
- );
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) \
- asm( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- "sts pr, r7" "\n" \
- "mov.l 2f, " SH4_SCRATCH_REGISTER "\n" \
- "braf " SH4_SCRATCH_REGISTER "\n" \
- "nop" "\n" \
- "1: .balign 4" "\n" \
- "2: .long " LOCAL_REFERENCE(function) "WithReturnAddress-1b" "\n" \
- );
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, offset, scratch) \
- asm( \
- ".text" "\n" \
- ".globl " SYMBOL_STRING(function) "\n" \
- HIDE_SYMBOL(function) "\n" \
- SYMBOL_STRING(function) ":" "\n" \
- "sts pr, " scratch "\n" \
- "mov.l " scratch ", @(" STRINGIZE(offset) ", r15)" "\n" \
- "mov.l 2f, " scratch "\n" \
- "braf " scratch "\n" \
- "nop" "\n" \
- "1: .balign 4" "\n" \
- "2: .long " LOCAL_REFERENCE(function) "WithReturnAddress-1b" "\n" \
- );
-
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 0, SH4_SCRATCH_REGISTER)
-#define FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) FUNCTION_WRAPPER_WITH_RETURN_ADDRESS(function, 4, SH4_SCRATCH_REGISTER)
-
-#endif
-
-#define P_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) \
-void* JIT_OPERATION function##WithReturnAddress(ExecState*, ReturnAddressPtr) REFERENCED_FROM_ASM WTF_INTERNAL; \
-FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function)
-
-#define J_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function) \
-EncodedJSValue JIT_OPERATION function##WithReturnAddress(ExecState*, ReturnAddressPtr) REFERENCED_FROM_ASM WTF_INTERNAL; \
-FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_E(function)
-
-#define J_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function) \
-EncodedJSValue JIT_OPERATION function##WithReturnAddress(ExecState*, JSCell*, StringImpl*, ReturnAddressPtr) REFERENCED_FROM_ASM WTF_INTERNAL; \
-FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_ECI(function)
-
-#define J_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function) \
-EncodedJSValue JIT_OPERATION function##WithReturnAddress(ExecState*, EncodedJSValue, StringImpl*, ReturnAddressPtr) REFERENCED_FROM_ASM WTF_INTERNAL; \
-FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJI(function)
-
-#define V_FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function) \
-void JIT_OPERATION function##WithReturnAddress(ExecState*, EncodedJSValue, JSCell*, StringImpl*, ReturnAddressPtr) REFERENCED_FROM_ASM WTF_INTERNAL; \
-FUNCTION_WRAPPER_WITH_RETURN_ADDRESS_EJCI(function)
-
-} // namespace JSC
-
-#endif // JITOperationWrappers_h
-
+++ /dev/null
-/*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "JITOperations.h"
-
-#include "CommonSlowPaths.h"
-#include "GetterSetter.h"
-#include "HostCallReturnValue.h"
-#include "JITOperationWrappers.h"
-#include "Operations.h"
-#include "Repatch.h"
-
-namespace JSC {
-
-extern "C" {
-
-EncodedJSValue JIT_OPERATION operationGetById(ExecSt