We should be able to inline getter/setter calls inside an inline cache even when the SpillRegistersMode is NeedsToSpill
author     sbarati@apple.com <sbarati@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
           Thu, 8 Oct 2015 19:37:28 +0000 (19:37 +0000)
committer  sbarati@apple.com <sbarati@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
           Thu, 8 Oct 2015 19:37:28 +0000 (19:37 +0000)
https://bugs.webkit.org/show_bug.cgi?id=149601

Reviewed by Filip Pizlo.

Source/JavaScriptCore:

Before, if we had a PolymorphicAccess and a StructureStubInfo
with a NeedToSpill spillMode, we wouldn't generate getter/setter
calls. This patch changes that: we now generate the
getter/setter call and do the necessary register spilling/filling
around the getter/setter call to preserve any "usedRegisters".
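
As an illustrative sketch (not part of this patch; all names and iteration
counts below are made up), this is the kind of code that now benefits: a hot
property access that sometimes resolves to a JS getter while other values
are live across the call:

    var plain = { f: 42 };
    var accessor = {};
    Object.defineProperty(accessor, "f", { get: function() { return 42; } });

    function foo(o) {
        var sum = 0;
        for (var i = 0; i < 10; i++)
            sum += o.f + i; // "sum" and "i" are live across the getter call,
                            // so the IC must spill/fill them around it.
        return sum;
    }

    // Train the get_by_id inline cache with both shapes.
    for (var j = 0; j < 100000; j++)
        foo(j % 2 ? plain : accessor);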

This has an interesting interaction with exception handling
inside the DFG. Because the GetById variants are considered a throwing call
site, we must make sure that we properly restore the registers spilled to the stack
in case an exception is thrown inside the getter/setter call. We do
this by having the inline cache register itself as a new exception handling
call site. When the inline cache "catches" the exception (i.e., genericUnwind
will jump to this code), it restores the registers it spilled that are
live inside the original catch handler, and then jumps to the original catch
handler. We make sure to only generate this makeshift catch handler when we
actually need to do cleanup. If we determine that we don't need to restore
any registers, we don't bother generating this makeshift catch handler.
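
A minimal sketch of the throwing case, in the spirit of the stress tests
added below (names and counts are illustrative, not taken from the tests):

    var o = { g: 10 };
    Object.defineProperty(o, "f", {
        get: function() {
            if (this.shouldThrow)
                throw new Error("thrown from getter");
            return 20;
        }
    });

    function foo(o) {
        var x = o.g;
        try {
            x += o.f; // getter call through the IC; may throw
        } catch (e) {
            return x; // x must still hold its pre-throw value here
        }
        return x + o.f;
    }

    for (var i = 0; i < 100000; i++)
        foo(o); // train the IC so it inlines the getter call
    o.shouldThrow = true;
    if (foo(o) !== 10)
        throw new Error("live value was not restored in the catch handler");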

* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::~CodeBlock):
(JSC::CodeBlock::handlerForIndex):
(JSC::CodeBlock::newExceptionHandlingCallSiteIndex):
(JSC::CodeBlock::removeExceptionHandlerForCallSite):
(JSC::CodeBlock::lineNumberForBytecodeOffset):
* bytecode/CodeBlock.h:
(JSC::CodeBlock::appendExceptionHandler):
* bytecode/PolymorphicAccess.cpp:
(JSC::AccessGenerationState::AccessGenerationState):
(JSC::AccessGenerationState::restoreScratch):
(JSC::AccessGenerationState::succeed):
(JSC::AccessGenerationState::calculateLiveRegistersForCallAndExceptionHandling):
(JSC::AccessGenerationState::preserveLiveRegistersToStackForCall):
(JSC::AccessGenerationState::restoreLiveRegistersFromStackForCall):
(JSC::AccessGenerationState::restoreLiveRegistersFromStackForCallWithThrownException):
(JSC::AccessGenerationState::liveRegistersForCall):
(JSC::AccessGenerationState::callSiteIndexForExceptionHandlingOrOriginal):
(JSC::AccessGenerationState::callSiteIndexForExceptionHandling):
(JSC::AccessGenerationState::originalExceptionHandler):
(JSC::AccessGenerationState::numberOfStackBytesUsedForRegisterPreservation):
(JSC::AccessGenerationState::needsToRestoreRegistersIfException):
(JSC::AccessGenerationState::originalCallSiteIndex):
(JSC::AccessGenerationState::liveRegistersToPreserveAtExceptionHandlingCallSite):
(JSC::AccessCase::AccessCase):
(JSC::AccessCase::generate):
(JSC::PolymorphicAccess::regenerateWithCases):
(JSC::PolymorphicAccess::regenerate):
(JSC::PolymorphicAccess::aboutToDie):
* bytecode/PolymorphicAccess.h:
(JSC::AccessCase::doesCalls):
(JSC::AccessCase::isGetter):
(JSC::AccessCase::callLinkInfo):
* bytecode/StructureStubInfo.cpp:
(JSC::StructureStubInfo::deref):
(JSC::StructureStubInfo::aboutToDie):
(JSC::StructureStubInfo::addAccessCase):
* bytecode/StructureStubInfo.h:
* bytecode/ValueRecovery.h:
(JSC::ValueRecovery::isInJSValueRegs):
(JSC::ValueRecovery::fpr):
* dfg/DFGCommonData.cpp:
(JSC::DFG::CommonData::addCodeOrigin):
(JSC::DFG::CommonData::addCodeOriginUnconditionally):
(JSC::DFG::CommonData::lastCallSite):
(JSC::DFG::CommonData::removeCallSiteIndex):
(JSC::DFG::CommonData::shrinkToFit):
* dfg/DFGCommonData.h:
* dfg/DFGJITCode.cpp:
(JSC::DFG::JITCode::reconstruct):
(JSC::DFG::JITCode::liveRegistersToPreserveAtExceptionHandlingCallSite):
(JSC::DFG::JITCode::checkIfOptimizationThresholdReached):
* dfg/DFGJITCode.h:
(JSC::DFG::JITCode::osrEntryBlock):
(JSC::DFG::JITCode::setOSREntryBlock):
* dfg/DFGJITCompiler.cpp:
(JSC::DFG::JITCompiler::appendExceptionHandlingOSRExit):
* dfg/DFGOSRExit.cpp:
(JSC::DFG::OSRExit::OSRExit):
* dfg/DFGOSRExit.h:
* dfg/DFGSpeculativeJIT.cpp:
(JSC::DFG::SpeculativeJIT::compileIn):
* dfg/DFGSpeculativeJIT32_64.cpp:
(JSC::DFG::SpeculativeJIT::cachedGetById):
(JSC::DFG::SpeculativeJIT::cachedPutById):
* dfg/DFGSpeculativeJIT64.cpp:
(JSC::DFG::SpeculativeJIT::cachedGetById):
(JSC::DFG::SpeculativeJIT::cachedPutById):
* ftl/FTLCompile.cpp:
(JSC::FTL::mmAllocateDataSection):
* ftl/FTLJITCode.cpp:
(JSC::FTL::JITCode::validateReferences):
(JSC::FTL::JITCode::liveRegistersToPreserveAtExceptionHandlingCallSite):
* ftl/FTLJITCode.h:
(JSC::FTL::JITCode::handles):
(JSC::FTL::JITCode::dataSections):
* jit/GCAwareJITStubRoutine.cpp:
(JSC::GCAwareJITStubRoutine::GCAwareJITStubRoutine):
(JSC::GCAwareJITStubRoutine::~GCAwareJITStubRoutine):
(JSC::GCAwareJITStubRoutine::observeZeroRefCount):
(JSC::MarkingGCAwareJITStubRoutineWithOneObject::markRequiredObjectsInternal):
(JSC::GCAwareJITStubRoutineWithExceptionHandler::GCAwareJITStubRoutineWithExceptionHandler):
(JSC::GCAwareJITStubRoutineWithExceptionHandler::aboutToDie):
(JSC::GCAwareJITStubRoutineWithExceptionHandler::~GCAwareJITStubRoutineWithExceptionHandler):
(JSC::createJITStubRoutine):
* jit/GCAwareJITStubRoutine.h:
* jit/JITCode.cpp:
(JSC::NativeJITCode::addressForCall):
(JSC::JITCode::liveRegistersToPreserveAtExceptionHandlingCallSite):
* jit/JITCode.h:
* jit/JITInlineCacheGenerator.cpp:
(JSC::JITByIdGenerator::JITByIdGenerator):
(JSC::JITGetByIdGenerator::JITGetByIdGenerator):
(JSC::JITPutByIdGenerator::JITPutByIdGenerator):
* jit/JITInlineCacheGenerator.h:
(JSC::JITByIdGenerator::reportSlowPathCall):
* jit/JITPropertyAccess.cpp:
(JSC::JIT::emitGetByValWithCachedId):
(JSC::JIT::emitPutByValWithCachedId):
(JSC::JIT::emit_op_get_by_id):
(JSC::JIT::emit_op_put_by_id):
* jit/JITPropertyAccess32_64.cpp:
(JSC::JIT::emitGetByValWithCachedId):
(JSC::JIT::emitPutByValWithCachedId):
(JSC::JIT::emit_op_get_by_id):
(JSC::JIT::emit_op_put_by_id):
* jit/JITStubRoutine.h:
(JSC::JITStubRoutine::createSelfManagedRoutine):
(JSC::JITStubRoutine::aboutToDie):
* jit/RegisterSet.cpp:
(JSC::RegisterSet::webAssemblyCalleeSaveRegisters):
(JSC::RegisterSet::registersToNotSaveForCall):
(JSC::RegisterSet::allGPRs):
* jit/RegisterSet.h:
(JSC::RegisterSet::set):
(JSC::RegisterSet::clear):
* jit/ScratchRegisterAllocator.cpp:
(JSC::ScratchRegisterAllocator::allocateScratchGPR):
(JSC::ScratchRegisterAllocator::allocateScratchFPR):
(JSC::ScratchRegisterAllocator::preserveReusedRegistersByPushing):
(JSC::ScratchRegisterAllocator::restoreReusedRegistersByPopping):
(JSC::ScratchRegisterAllocator::usedRegistersForCall):
(JSC::ScratchRegisterAllocator::preserveUsedRegistersToScratchBufferForCall):
(JSC::ScratchRegisterAllocator::restoreUsedRegistersFromScratchBufferForCall):
(JSC::ScratchRegisterAllocator::preserveRegistersToStackForCall):
(JSC::ScratchRegisterAllocator::restoreRegistersFromStackForCall):
* jit/ScratchRegisterAllocator.h:
(JSC::ScratchRegisterAllocator::numberOfReusedRegisters):
(JSC::ScratchRegisterAllocator::usedRegisters):
* jsc.cpp:
(WTF::CustomGetter::CustomGetter):
(WTF::CustomGetter::createStructure):
(WTF::CustomGetter::create):
(WTF::CustomGetter::getOwnPropertySlot):
(WTF::CustomGetter::customGetter):
(WTF::Element::handleOwner):
(GlobalObject::finishCreation):
(functionCreateImpureGetter):
(functionCreateCustomGetterObject):
(functionSetImpureGetterDelegate):
* tests/stress/try-catch-custom-getter-as-get-by-id.js: Added.
(assert):
(bar):
(foo):
* tests/stress/try-catch-getter-as-get-by-id-register-restoration.js: Added.
(assert):
(o1.get f):
(bar):
(foo):
* tests/stress/try-catch-getter-as-get-by-id.js: Added.
(assert):
(o1.get f):
(bar):
(foo):
* tests/stress/try-catch-setter-as-put-by-id.js: Added.
(assert):
(o1.set f):
(bar):
(foo):
* tests/stress/try-catch-stub-routine-replaced.js: Added.
(assert):
(arr):
(hello):
(foo):
(objChain.get f):
(fakeOut.get f):
(o.get f):

LayoutTests:

* js/regress/custom-setter-getter-as-put-get-by-id-expected.txt: Added.
* js/regress/custom-setter-getter-as-put-get-by-id.html: Added.
* js/regress/script-tests/custom-setter-getter-as-put-get-by-id.js: Added.
(assert):
(test):

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@190735 268f45cc-cd09-0410-ab3c-d52691b4dbfc

44 files changed:
LayoutTests/ChangeLog
LayoutTests/js/regress/custom-setter-getter-as-put-get-by-id-expected.txt [new file with mode: 0644]
LayoutTests/js/regress/custom-setter-getter-as-put-get-by-id.html [new file with mode: 0644]
LayoutTests/js/regress/script-tests/custom-setter-getter-as-put-get-by-id.js [new file with mode: 0644]
Source/JavaScriptCore/ChangeLog
Source/JavaScriptCore/bytecode/CodeBlock.cpp
Source/JavaScriptCore/bytecode/CodeBlock.h
Source/JavaScriptCore/bytecode/PolymorphicAccess.cpp
Source/JavaScriptCore/bytecode/PolymorphicAccess.h
Source/JavaScriptCore/bytecode/StructureStubInfo.cpp
Source/JavaScriptCore/bytecode/StructureStubInfo.h
Source/JavaScriptCore/bytecode/ValueRecovery.h
Source/JavaScriptCore/dfg/DFGCommonData.cpp
Source/JavaScriptCore/dfg/DFGCommonData.h
Source/JavaScriptCore/dfg/DFGJITCode.cpp
Source/JavaScriptCore/dfg/DFGJITCode.h
Source/JavaScriptCore/dfg/DFGJITCompiler.cpp
Source/JavaScriptCore/dfg/DFGOSRExit.cpp
Source/JavaScriptCore/dfg/DFGOSRExit.h
Source/JavaScriptCore/dfg/DFGSpeculativeJIT.cpp
Source/JavaScriptCore/dfg/DFGSpeculativeJIT32_64.cpp
Source/JavaScriptCore/dfg/DFGSpeculativeJIT64.cpp
Source/JavaScriptCore/ftl/FTLCompile.cpp
Source/JavaScriptCore/ftl/FTLJITCode.cpp
Source/JavaScriptCore/ftl/FTLJITCode.h
Source/JavaScriptCore/jit/GCAwareJITStubRoutine.cpp
Source/JavaScriptCore/jit/GCAwareJITStubRoutine.h
Source/JavaScriptCore/jit/JITCode.cpp
Source/JavaScriptCore/jit/JITCode.h
Source/JavaScriptCore/jit/JITInlineCacheGenerator.cpp
Source/JavaScriptCore/jit/JITInlineCacheGenerator.h
Source/JavaScriptCore/jit/JITPropertyAccess.cpp
Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
Source/JavaScriptCore/jit/JITStubRoutine.h
Source/JavaScriptCore/jit/RegisterSet.cpp
Source/JavaScriptCore/jit/RegisterSet.h
Source/JavaScriptCore/jit/ScratchRegisterAllocator.cpp
Source/JavaScriptCore/jit/ScratchRegisterAllocator.h
Source/JavaScriptCore/jsc.cpp
Source/JavaScriptCore/tests/stress/try-catch-custom-getter-as-get-by-id.js [new file with mode: 0644]
Source/JavaScriptCore/tests/stress/try-catch-getter-as-get-by-id-register-restoration.js [new file with mode: 0644]
Source/JavaScriptCore/tests/stress/try-catch-getter-as-get-by-id.js [new file with mode: 0644]
Source/JavaScriptCore/tests/stress/try-catch-setter-as-put-by-id.js [new file with mode: 0644]
Source/JavaScriptCore/tests/stress/try-catch-stub-routine-replaced.js [new file with mode: 0644]

diff --git a/LayoutTests/ChangeLog b/LayoutTests/ChangeLog
index b453f27..2ade555 100644
@@ -1,3 +1,16 @@
+2015-10-08  Saam barati  <sbarati@apple.com>
+
+        We should be able to inline getter/setter calls inside an inline cache even when the SpillRegistersMode is NeedsToSpill
+        https://bugs.webkit.org/show_bug.cgi?id=149601
+
+        Reviewed by Filip Pizlo.
+
+        * js/regress/custom-setter-getter-as-put-get-by-id-expected.txt: Added.
+        * js/regress/custom-setter-getter-as-put-get-by-id.html: Added.
+        * js/regress/script-tests/custom-setter-getter-as-put-get-by-id.js: Added.
+        (assert):
+        (test):
+
 2015-10-08  Alexey Proskuryakov  <ap@apple.com>
 
         fast/events/scroll-after-click-on-tab-index.html is flaky
diff --git a/LayoutTests/js/regress/custom-setter-getter-as-put-get-by-id-expected.txt b/LayoutTests/js/regress/custom-setter-getter-as-put-get-by-id-expected.txt
new file mode 100644
index 0000000..4b27b09
--- /dev/null
+++ b/LayoutTests/js/regress/custom-setter-getter-as-put-get-by-id-expected.txt
@@ -0,0 +1,10 @@
+JSRegress/custom-setter-getter-as-put-get-by-id
+
+On success, you will see a series of "PASS" messages, followed by "TEST COMPLETE".
+
+
+PASS no exception thrown
+PASS successfullyParsed is true
+
+TEST COMPLETE
+
diff --git a/LayoutTests/js/regress/custom-setter-getter-as-put-get-by-id.html b/LayoutTests/js/regress/custom-setter-getter-as-put-get-by-id.html
new file mode 100644
index 0000000..56a5742
--- /dev/null
+++ b/LayoutTests/js/regress/custom-setter-getter-as-put-get-by-id.html
@@ -0,0 +1,12 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+<html>
+<head>
+<script src="../../resources/js-test-pre.js"></script>
+</head>
+<body>
+<script src="../../resources/regress-pre.js"></script>
+<script src="script-tests/custom-setter-getter-as-put-get-by-id.js"></script>
+<script src="../../resources/regress-post.js"></script>
+<script src="../../resources/js-test-post.js"></script>
+</body>
+</html>
diff --git a/LayoutTests/js/regress/script-tests/custom-setter-getter-as-put-get-by-id.js b/LayoutTests/js/regress/script-tests/custom-setter-getter-as-put-get-by-id.js
new file mode 100644
index 0000000..b3a6ed3
--- /dev/null
+++ b/LayoutTests/js/regress/script-tests/custom-setter-getter-as-put-get-by-id.js
@@ -0,0 +1,33 @@
+function assert(b) {
+    if (!b)
+        throw new Error("bad assertion");
+}
+noInline(assert);
+
+// RegExp.input is a handy custom getter/setter.
+var o1 = RegExp;
+function test(o) {
+    o.input = "bar";
+    return o.input;
+}
+noInline(test);
+
+var o2 = {
+    input: "hello"
+}
+
+var o3 = {
+    x: 20,
+    input: "hello"
+}
+
+// First compile as GetById node.
+for (let i = 0; i < 1000; i++) {
+    assert(test(i % 2 ? o2 : o3) === "bar");
+}
+
+// Cause the inline cache to generate customSetter/customGetter code on a GetById.
+for (let i = 0; i < 100; i++) {
+    assert(test(o1) === "bar");
+}
+
diff --git a/Source/JavaScriptCore/ChangeLog b/Source/JavaScriptCore/ChangeLog
index dfa6920..98f6210 100644
@@ -1,3 +1,196 @@
+2015-10-08  Saam barati  <sbarati@apple.com>
+
+        We should be able to inline getter/setter calls inside an inline cache even when the SpillRegistersMode is NeedsToSpill
+        https://bugs.webkit.org/show_bug.cgi?id=149601
+
+        Reviewed by Filip Pizlo.
+
+        Before, if we had a PolymorphicAccess and a StructureStubInfo
+        with a NeedToSpill spillMode, we wouldn't generate getter/setter
+        calls. This patch changes that: we now generate the
+        getter/setter call and do the necessary register spilling/filling
+        around the getter/setter call to preserve any "usedRegisters".
+
+        This has an interesting interaction with exception handling
+        inside the DFG. Because the GetById variants are considered a throwing call
+        site, we must make sure that we properly restore the registers spilled to the stack
+        in case an exception is thrown inside the getter/setter call. We do
+        this by having the inline cache register itself as a new exception handling
+        call site. When the inline cache "catches" the exception (i.e., genericUnwind
+        will jump to this code), it restores the registers it spilled that are
+        live inside the original catch handler, and then jumps to the original catch
+        handler. We make sure to only generate this makeshift catch handler when we
+        actually need to do cleanup. If we determine that we don't need to restore
+        any registers, we don't bother generating this makeshift catch handler.
+
+        * bytecode/CodeBlock.cpp:
+        (JSC::CodeBlock::~CodeBlock):
+        (JSC::CodeBlock::handlerForIndex):
+        (JSC::CodeBlock::newExceptionHandlingCallSiteIndex):
+        (JSC::CodeBlock::removeExceptionHandlerForCallSite):
+        (JSC::CodeBlock::lineNumberForBytecodeOffset):
+        * bytecode/CodeBlock.h:
+        (JSC::CodeBlock::appendExceptionHandler):
+        * bytecode/PolymorphicAccess.cpp:
+        (JSC::AccessGenerationState::AccessGenerationState):
+        (JSC::AccessGenerationState::restoreScratch):
+        (JSC::AccessGenerationState::succeed):
+        (JSC::AccessGenerationState::calculateLiveRegistersForCallAndExceptionHandling):
+        (JSC::AccessGenerationState::preserveLiveRegistersToStackForCall):
+        (JSC::AccessGenerationState::restoreLiveRegistersFromStackForCall):
+        (JSC::AccessGenerationState::restoreLiveRegistersFromStackForCallWithThrownException):
+        (JSC::AccessGenerationState::liveRegistersForCall):
+        (JSC::AccessGenerationState::callSiteIndexForExceptionHandlingOrOriginal):
+        (JSC::AccessGenerationState::callSiteIndexForExceptionHandling):
+        (JSC::AccessGenerationState::originalExceptionHandler):
+        (JSC::AccessGenerationState::numberOfStackBytesUsedForRegisterPreservation):
+        (JSC::AccessGenerationState::needsToRestoreRegistersIfException):
+        (JSC::AccessGenerationState::originalCallSiteIndex):
+        (JSC::AccessGenerationState::liveRegistersToPreserveAtExceptionHandlingCallSite):
+        (JSC::AccessCase::AccessCase):
+        (JSC::AccessCase::generate):
+        (JSC::PolymorphicAccess::regenerateWithCases):
+        (JSC::PolymorphicAccess::regenerate):
+        (JSC::PolymorphicAccess::aboutToDie):
+        * bytecode/PolymorphicAccess.h:
+        (JSC::AccessCase::doesCalls):
+        (JSC::AccessCase::isGetter):
+        (JSC::AccessCase::callLinkInfo):
+        * bytecode/StructureStubInfo.cpp:
+        (JSC::StructureStubInfo::deref):
+        (JSC::StructureStubInfo::aboutToDie):
+        (JSC::StructureStubInfo::addAccessCase):
+        * bytecode/StructureStubInfo.h:
+        * bytecode/ValueRecovery.h:
+        (JSC::ValueRecovery::isInJSValueRegs):
+        (JSC::ValueRecovery::fpr):
+        * dfg/DFGCommonData.cpp:
+        (JSC::DFG::CommonData::addCodeOrigin):
+        (JSC::DFG::CommonData::addCodeOriginUnconditionally):
+        (JSC::DFG::CommonData::lastCallSite):
+        (JSC::DFG::CommonData::removeCallSiteIndex):
+        (JSC::DFG::CommonData::shrinkToFit):
+        * dfg/DFGCommonData.h:
+        * dfg/DFGJITCode.cpp:
+        (JSC::DFG::JITCode::reconstruct):
+        (JSC::DFG::JITCode::liveRegistersToPreserveAtExceptionHandlingCallSite):
+        (JSC::DFG::JITCode::checkIfOptimizationThresholdReached):
+        * dfg/DFGJITCode.h:
+        (JSC::DFG::JITCode::osrEntryBlock):
+        (JSC::DFG::JITCode::setOSREntryBlock):
+        * dfg/DFGJITCompiler.cpp:
+        (JSC::DFG::JITCompiler::appendExceptionHandlingOSRExit):
+        * dfg/DFGOSRExit.cpp:
+        (JSC::DFG::OSRExit::OSRExit):
+        * dfg/DFGOSRExit.h:
+        * dfg/DFGSpeculativeJIT.cpp:
+        (JSC::DFG::SpeculativeJIT::compileIn):
+        * dfg/DFGSpeculativeJIT32_64.cpp:
+        (JSC::DFG::SpeculativeJIT::cachedGetById):
+        (JSC::DFG::SpeculativeJIT::cachedPutById):
+        * dfg/DFGSpeculativeJIT64.cpp:
+        (JSC::DFG::SpeculativeJIT::cachedGetById):
+        (JSC::DFG::SpeculativeJIT::cachedPutById):
+        * ftl/FTLCompile.cpp:
+        (JSC::FTL::mmAllocateDataSection):
+        * ftl/FTLJITCode.cpp:
+        (JSC::FTL::JITCode::validateReferences):
+        (JSC::FTL::JITCode::liveRegistersToPreserveAtExceptionHandlingCallSite):
+        * ftl/FTLJITCode.h:
+        (JSC::FTL::JITCode::handles):
+        (JSC::FTL::JITCode::dataSections):
+        * jit/GCAwareJITStubRoutine.cpp:
+        (JSC::GCAwareJITStubRoutine::GCAwareJITStubRoutine):
+        (JSC::GCAwareJITStubRoutine::~GCAwareJITStubRoutine):
+        (JSC::GCAwareJITStubRoutine::observeZeroRefCount):
+        (JSC::MarkingGCAwareJITStubRoutineWithOneObject::markRequiredObjectsInternal):
+        (JSC::GCAwareJITStubRoutineWithExceptionHandler::GCAwareJITStubRoutineWithExceptionHandler):
+        (JSC::GCAwareJITStubRoutineWithExceptionHandler::aboutToDie):
+        (JSC::GCAwareJITStubRoutineWithExceptionHandler::~GCAwareJITStubRoutineWithExceptionHandler):
+        (JSC::createJITStubRoutine):
+        * jit/GCAwareJITStubRoutine.h:
+        * jit/JITCode.cpp:
+        (JSC::NativeJITCode::addressForCall):
+        (JSC::JITCode::liveRegistersToPreserveAtExceptionHandlingCallSite):
+        * jit/JITCode.h:
+        * jit/JITInlineCacheGenerator.cpp:
+        (JSC::JITByIdGenerator::JITByIdGenerator):
+        (JSC::JITGetByIdGenerator::JITGetByIdGenerator):
+        (JSC::JITPutByIdGenerator::JITPutByIdGenerator):
+        * jit/JITInlineCacheGenerator.h:
+        (JSC::JITByIdGenerator::reportSlowPathCall):
+        * jit/JITPropertyAccess.cpp:
+        (JSC::JIT::emitGetByValWithCachedId):
+        (JSC::JIT::emitPutByValWithCachedId):
+        (JSC::JIT::emit_op_get_by_id):
+        (JSC::JIT::emit_op_put_by_id):
+        * jit/JITPropertyAccess32_64.cpp:
+        (JSC::JIT::emitGetByValWithCachedId):
+        (JSC::JIT::emitPutByValWithCachedId):
+        (JSC::JIT::emit_op_get_by_id):
+        (JSC::JIT::emit_op_put_by_id):
+        * jit/JITStubRoutine.h:
+        (JSC::JITStubRoutine::createSelfManagedRoutine):
+        (JSC::JITStubRoutine::aboutToDie):
+        * jit/RegisterSet.cpp:
+        (JSC::RegisterSet::webAssemblyCalleeSaveRegisters):
+        (JSC::RegisterSet::registersToNotSaveForCall):
+        (JSC::RegisterSet::allGPRs):
+        * jit/RegisterSet.h:
+        (JSC::RegisterSet::set):
+        (JSC::RegisterSet::clear):
+        * jit/ScratchRegisterAllocator.cpp:
+        (JSC::ScratchRegisterAllocator::allocateScratchGPR):
+        (JSC::ScratchRegisterAllocator::allocateScratchFPR):
+        (JSC::ScratchRegisterAllocator::preserveReusedRegistersByPushing):
+        (JSC::ScratchRegisterAllocator::restoreReusedRegistersByPopping):
+        (JSC::ScratchRegisterAllocator::usedRegistersForCall):
+        (JSC::ScratchRegisterAllocator::preserveUsedRegistersToScratchBufferForCall):
+        (JSC::ScratchRegisterAllocator::restoreUsedRegistersFromScratchBufferForCall):
+        (JSC::ScratchRegisterAllocator::preserveRegistersToStackForCall):
+        (JSC::ScratchRegisterAllocator::restoreRegistersFromStackForCall):
+        * jit/ScratchRegisterAllocator.h:
+        (JSC::ScratchRegisterAllocator::numberOfReusedRegisters):
+        (JSC::ScratchRegisterAllocator::usedRegisters):
+        * jsc.cpp:
+        (WTF::CustomGetter::CustomGetter):
+        (WTF::CustomGetter::createStructure):
+        (WTF::CustomGetter::create):
+        (WTF::CustomGetter::getOwnPropertySlot):
+        (WTF::CustomGetter::customGetter):
+        (WTF::Element::handleOwner):
+        (GlobalObject::finishCreation):
+        (functionCreateImpureGetter):
+        (functionCreateCustomGetterObject):
+        (functionSetImpureGetterDelegate):
+        * tests/stress/try-catch-custom-getter-as-get-by-id.js: Added.
+        (assert):
+        (bar):
+        (foo):
+        * tests/stress/try-catch-getter-as-get-by-id-register-restoration.js: Added.
+        (assert):
+        (o1.get f):
+        (bar):
+        (foo):
+        * tests/stress/try-catch-getter-as-get-by-id.js: Added.
+        (assert):
+        (o1.get f):
+        (bar):
+        (foo):
+        * tests/stress/try-catch-setter-as-put-by-id.js: Added.
+        (assert):
+        (o1.set f):
+        (bar):
+        (foo):
+        * tests/stress/try-catch-stub-routine-replaced.js: Added.
+        (assert):
+        (arr):
+        (hello):
+        (foo):
+        (objChain.get f):
+        (fakeOut.get f):
+        (o.get f):
+
 2015-10-08  Commit Queue  <commit-queue@webkit.org>
 
         Unreviewed, rolling out r190716.
diff --git a/Source/JavaScriptCore/bytecode/CodeBlock.cpp b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
index 8de2a53..8e44b77 100644
@@ -2263,8 +2263,11 @@ CodeBlock::~CodeBlock()
     // destructors.
 
 #if ENABLE(JIT)
-    for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter)
-        (*iter)->deref();
+    for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter) {
+        StructureStubInfo* stub = *iter;
+        stub->aboutToDie();
+        stub->deref();
+    }
 #endif // ENABLE(JIT)
 }
 
@@ -2982,6 +2985,38 @@ HandlerInfo* CodeBlock::handlerForIndex(unsigned index, RequiredHandler required
     return 0;
 }
 
+CallSiteIndex CodeBlock::newExceptionHandlingCallSiteIndex(CallSiteIndex originalCallSite)
+{
+#if ENABLE(DFG_JIT)
+    RELEASE_ASSERT(jitType() == JITCode::DFGJIT); // FIXME: When implementing FTL try/catch we should include that JITType here as well: https://bugs.webkit.org/show_bug.cgi?id=149409
+    RELEASE_ASSERT(canGetCodeOrigin(originalCallSite));
+    ASSERT(!!handlerForIndex(originalCallSite.bits()));
+    CodeOrigin originalOrigin = codeOrigin(originalCallSite);
+    return m_jitCode->dfgCommon()->addCodeOriginUnconditionally(originalOrigin);
+#else
+    // We never create new on-the-fly exception handling
+    // call sites outside the DFG/FTL inline caches.
+    RELEASE_ASSERT_NOT_REACHED();
+    return CallSiteIndex(0);
+#endif
+}
+
+void CodeBlock::removeExceptionHandlerForCallSite(CallSiteIndex callSiteIndex)
+{
+    RELEASE_ASSERT(m_rareData);
+    Vector<HandlerInfo>& exceptionHandlers = m_rareData->m_exceptionHandlers;
+    unsigned index = callSiteIndex.bits();
+    for (size_t i = 0; i < exceptionHandlers.size(); ++i) {
+        HandlerInfo& handler = exceptionHandlers[i];
+        if (handler.start <= index && handler.end > index) {
+            exceptionHandlers.remove(i);
+            return;
+        }
+    }
+
+    RELEASE_ASSERT_NOT_REACHED();
+}
+
 unsigned CodeBlock::lineNumberForBytecodeOffset(unsigned bytecodeOffset)
 {
     RELEASE_ASSERT(bytecodeOffset < instructions().size());
diff --git a/Source/JavaScriptCore/bytecode/CodeBlock.h b/Source/JavaScriptCore/bytecode/CodeBlock.h
index 1e4e3ec..3fffdbd 100644
@@ -217,6 +217,7 @@ public:
     };
     HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset, RequiredHandler = RequiredHandler::AnyHandler);
     HandlerInfo* handlerForIndex(unsigned, RequiredHandler = RequiredHandler::AnyHandler);
+    void removeExceptionHandlerForCallSite(CallSiteIndex);
     unsigned lineNumberForBytecodeOffset(unsigned bytecodeOffset);
     unsigned columnNumberForBytecodeOffset(unsigned bytecodeOffset);
     void expressionRangeForBytecodeOffset(unsigned bytecodeOffset, int& divot,
@@ -916,6 +917,8 @@ public:
         m_rareData->m_exceptionHandlers.append(handler);
     }
 
+    CallSiteIndex newExceptionHandlingCallSiteIndex(CallSiteIndex originalCallSite);
+
 protected:
     void finalizeLLIntInlineCaches();
     void finalizeBaselineJITInlineCaches();
diff --git a/Source/JavaScriptCore/bytecode/PolymorphicAccess.cpp b/Source/JavaScriptCore/bytecode/PolymorphicAccess.cpp
index 98fcf26..e394c16 100644
@@ -36,6 +36,7 @@
 #include "JITOperations.h"
 #include "JSCInlines.h"
 #include "LinkBuffer.h"
 #include "JITOperations.h"
 #include "JSCInlines.h"
 #include "LinkBuffer.h"
+#include "MaxFrameExtentForSlowPathCall.h"
 #include "ScratchRegisterAllocator.h"
 #include "StructureStubClearingWatchpoint.h"
 #include "StructureStubInfo.h"
 #include "ScratchRegisterAllocator.h"
 #include "StructureStubClearingWatchpoint.h"
 #include "StructureStubInfo.h"
@@ -47,9 +48,15 @@ namespace JSC {
 static const bool verbose = false;
 
 struct AccessGenerationState {
+    AccessGenerationState() 
+        : m_calculatedRegistersForCallAndExceptionHandling(false)
+        , m_needsToRestoreRegistersIfException(false)
+        , m_calculatedCallSiteIndex(false)
+    {
+    }
     CCallHelpers* jit { nullptr };
     ScratchRegisterAllocator* allocator;
-    size_t numberOfPaddingBytes { 0 };
+    unsigned numberOfBytesUsedToPreserveReusedRegisters { 0 };
     PolymorphicAccess* access { nullptr };
     StructureStubInfo* stubInfo { nullptr };
     CCallHelpers::JumpList success;
@@ -71,7 +78,7 @@ struct AccessGenerationState {
 
     void restoreScratch()
     {
-        allocator->restoreReusedRegistersByPopping(*jit, numberOfPaddingBytes);
+        allocator->restoreReusedRegistersByPopping(*jit, numberOfBytesUsedToPreserveReusedRegisters);
     }
 
     void succeed()
@@ -79,6 +86,127 @@ struct AccessGenerationState {
         restoreScratch();
         success.append(jit->jump());
     }
+
+    void calculateLiveRegistersForCallAndExceptionHandling()
+    {
+        if (!m_calculatedRegistersForCallAndExceptionHandling) {
+            m_calculatedRegistersForCallAndExceptionHandling = true;
+
+            m_liveRegistersToPreserveAtExceptionHandlingCallSite = jit->codeBlock()->jitCode()->liveRegistersToPreserveAtExceptionHandlingCallSite(jit->codeBlock(), stubInfo->callSiteIndex);
+            m_needsToRestoreRegistersIfException = m_liveRegistersToPreserveAtExceptionHandlingCallSite.numberOfSetRegisters() > 0;
+            if (m_needsToRestoreRegistersIfException)
+                RELEASE_ASSERT(JITCode::isOptimizingJIT(jit->codeBlock()->jitType()));
+
+            m_liveRegistersForCall = RegisterSet(m_liveRegistersToPreserveAtExceptionHandlingCallSite, allocator->usedRegisters());
+            m_liveRegistersForCall.exclude(RegisterSet::registersToNotSaveForCall());
+        }
+    }
+
+    void preserveLiveRegistersToStackForCall()
+    {
+        unsigned extraStackPadding = 0;
+        unsigned numberOfStackBytesUsedForRegisterPreservation = ScratchRegisterAllocator::preserveRegistersToStackForCall(*jit, liveRegistersForCall(), extraStackPadding);
+        if (m_numberOfStackBytesUsedForRegisterPreservation != std::numeric_limits<unsigned>::max())
+            RELEASE_ASSERT(numberOfStackBytesUsedForRegisterPreservation == m_numberOfStackBytesUsedForRegisterPreservation);
+        m_numberOfStackBytesUsedForRegisterPreservation = numberOfStackBytesUsedForRegisterPreservation;
+    }
+
+    void restoreLiveRegistersFromStackForCall(bool isGetter)
+    {
+        RegisterSet dontRestore;
+        if (isGetter) {
+            // This is the result value. We don't want to overwrite the result with what we stored to the stack.
+            // We sometimes have to store it to the stack just in case we throw an exception and need the original value.
+            dontRestore.set(valueRegs); 
+        }
+        restoreLiveRegistersFromStackForCall(dontRestore);
+    }
+
+    void restoreLiveRegistersFromStackForCallWithThrownException()
+    {
+        // Even if we're a getter, we don't want to ignore the result value like we normally do 
+        // because the getter threw, and therefore, didn't return a value that means anything. 
+        // Instead, we want to restore that register to what it was upon entering the getter 
+        // inline cache. The subtlety here is if the base and the result are the same register, 
+        // and the getter threw, we want OSR exit to see the original base value, not the result 
+        // of the getter call.
+        RegisterSet dontRestore = liveRegistersForCall();
+        // As an optimization here, we only need to restore what is live for exception handling.
+        // We can construct the dontRestore set to accomplish this goal by having it contain only
+        // what is live for call but not live for exception handling. By ignoring things that are 
+        // only live at the call but not the exception handler, we will only restore things live 
+        // at the exception handler.
+        dontRestore.exclude(liveRegistersToPreserveAtExceptionHandlingCallSite());
+        restoreLiveRegistersFromStackForCall(dontRestore);
+    }
+
+    void restoreLiveRegistersFromStackForCall(const RegisterSet& dontRestore)
+    {
+        unsigned extraStackPadding = 0;
+        ScratchRegisterAllocator::restoreRegistersFromStackForCall(*jit, liveRegistersForCall(), dontRestore, m_numberOfStackBytesUsedForRegisterPreservation, extraStackPadding);
+    }
+
+    const RegisterSet& liveRegistersForCall()
+    {
+        RELEASE_ASSERT(m_calculatedRegistersForCallAndExceptionHandling);
+        return m_liveRegistersForCall;
+    }
+
+    CallSiteIndex callSiteIndexForExceptionHandlingOrOriginal()
+    {
+        RELEASE_ASSERT(m_calculatedRegistersForCallAndExceptionHandling);
+
+        if (!m_calculatedCallSiteIndex) {
+            m_calculatedCallSiteIndex = true;
+
+            if (m_needsToRestoreRegistersIfException)
+                m_callSiteIndex = jit->codeBlock()->newExceptionHandlingCallSiteIndex(stubInfo->callSiteIndex);
+            else
+                m_callSiteIndex = originalCallSiteIndex();
+        }
+
+        return m_callSiteIndex;
+    }
+
+    CallSiteIndex callSiteIndexForExceptionHandling()
+    {
+        RELEASE_ASSERT(m_calculatedRegistersForCallAndExceptionHandling);
+        RELEASE_ASSERT(m_needsToRestoreRegistersIfException);
+        RELEASE_ASSERT(m_calculatedCallSiteIndex);
+        return m_callSiteIndex;
+    }
+
+    const HandlerInfo& originalExceptionHandler() const
+    { 
+        RELEASE_ASSERT(m_needsToRestoreRegistersIfException);
+        HandlerInfo* exceptionHandler = jit->codeBlock()->handlerForIndex(stubInfo->callSiteIndex.bits());
+        RELEASE_ASSERT(exceptionHandler);
+        return *exceptionHandler;
+    }
+
+    unsigned numberOfStackBytesUsedForRegisterPreservation() const 
+    {
+        RELEASE_ASSERT(m_calculatedRegistersForCallAndExceptionHandling);
+        return m_numberOfStackBytesUsedForRegisterPreservation; 
+    }
+
+    bool needsToRestoreRegistersIfException() const { return m_needsToRestoreRegistersIfException; }
+    CallSiteIndex originalCallSiteIndex() const { return stubInfo->callSiteIndex; }
+
+private:
+    const RegisterSet& liveRegistersToPreserveAtExceptionHandlingCallSite()
+    {
+        RELEASE_ASSERT(m_calculatedRegistersForCallAndExceptionHandling);
+        return m_liveRegistersToPreserveAtExceptionHandlingCallSite;
+    }
+    
+    RegisterSet m_liveRegistersToPreserveAtExceptionHandlingCallSite;
+    RegisterSet m_liveRegistersForCall;
+    CallSiteIndex m_callSiteIndex { CallSiteIndex(std::numeric_limits<unsigned>::max()) };
+    unsigned m_numberOfStackBytesUsedForRegisterPreservation { std::numeric_limits<unsigned>::max() };
+    bool m_calculatedRegistersForCallAndExceptionHandling : 1;
+    bool m_needsToRestoreRegistersIfException : 1;
+    bool m_calculatedCallSiteIndex : 1;
 };
 
 AccessCase::AccessCase()
@@ -497,208 +625,250 @@ void AccessCase::generate(AccessGenerationState& state)
 #endif
         }
 
-        // Stuff for custom getters.
+        if (m_type == Load) {
+            state.succeed();
+            return;
+        }
+
+        // Stuff for custom getters/setters.
         CCallHelpers::Call operationCall;
-        CCallHelpers::Call handlerCall;
+        CCallHelpers::Call lookupExceptionHandlerCall;
 
-        // Stuff for JS getters.
+        // Stuff for JS getters/setters.
         CCallHelpers::DataLabelPtr addressOfLinkFunctionCheck;
         CCallHelpers::Call fastPathCall;
         CCallHelpers::Call slowPathCall;
 
         CCallHelpers::Jump success;
         CCallHelpers::Jump fail;
-        if (m_type != Load && m_type != Miss) {
-            // Need to make sure that whenever this call is made in the future, we remember the
-            // place that we made it from.
-            jit.store32(
-                CCallHelpers::TrustedImm32(stubInfo.callSiteIndex.bits()),
-                CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
-
-            if (m_type == Getter || m_type == Setter) {
-                // Create a JS call using a JS call inline cache. Assume that:
-                //
-                // - SP is aligned and represents the extent of the calling compiler's stack usage.
-                //
-                // - FP is set correctly (i.e. it points to the caller's call frame header).
-                //
-                // - SP - FP is an aligned difference.
-                //
-                // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
-                //   code.
-                //
-                // Therefore, we temporarily grow the stack for the purpose of the call and then
-                // shrink it after.
-
-                RELEASE_ASSERT(!m_rareData->callLinkInfo);
-                m_rareData->callLinkInfo = std::make_unique<CallLinkInfo>();
-                
-                // FIXME: If we generated a polymorphic call stub that jumped back to the getter
-                // stub, which then jumped back to the main code, then we'd have a reachability
-                // situation that the GC doesn't know about. The GC would ensure that the polymorphic
-                // call stub stayed alive, and it would ensure that the main code stayed alive, but
-                // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
-                // be GC objects, and then we'd be able to say that the polymorphic call stub has a
-                // reference to the getter stub.
-                // https://bugs.webkit.org/show_bug.cgi?id=148914
-                m_rareData->callLinkInfo->disallowStubs();
-                
-                m_rareData->callLinkInfo->setUpCall(
-                    CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
-
-                CCallHelpers::JumpList done;
-
-                // There is a "this" argument.
-                unsigned numberOfParameters = 1;
-                // ... and a value argument if we're calling a setter.
-                if (m_type == Setter)
-                    numberOfParameters++;
-
-                // Get the accessor; if there ain't one then the result is jsUndefined().
-                if (m_type == Setter) {
-                    jit.loadPtr(
-                        CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
-                        loadedValueGPR);
-                } else {
-                    jit.loadPtr(
-                        CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
-                        loadedValueGPR);
-                }
 
-                CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
-                    CCallHelpers::Zero, loadedValueGPR);
+        // This also does the necessary calculations of whether or not we're an
+        // exception handling call site.
+        state.calculateLiveRegistersForCallAndExceptionHandling();
+        state.preserveLiveRegistersToStackForCall();
 
-                unsigned numberOfRegsForCall = JSStack::CallFrameHeaderSize + numberOfParameters;
+        // Need to make sure that whenever this call is made in the future, we remember the
+        // place that we made it from.
+        jit.store32(
+            CCallHelpers::TrustedImm32(state.callSiteIndexForExceptionHandlingOrOriginal().bits()),
+            CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
+
+        if (m_type == Getter || m_type == Setter) {
+            // Create a JS call using a JS call inline cache. Assume that:
+            //
+            // - SP is aligned and represents the extent of the calling compiler's stack usage.
+            //
+            // - FP is set correctly (i.e. it points to the caller's call frame header).
+            //
+            // - SP - FP is an aligned difference.
+            //
+            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
+            //   code.
+            //
+            // Therefore, we temporarily grow the stack for the purpose of the call and then
+            // shrink it after.
+
+            RELEASE_ASSERT(!m_rareData->callLinkInfo);
+            m_rareData->callLinkInfo = std::make_unique<CallLinkInfo>();
+            
+            // FIXME: If we generated a polymorphic call stub that jumped back to the getter
+            // stub, which then jumped back to the main code, then we'd have a reachability
+            // situation that the GC doesn't know about. The GC would ensure that the polymorphic
+            // call stub stayed alive, and it would ensure that the main code stayed alive, but
+            // it wouldn't know that the getter stub was alive. Ideally JIT stub routines would
+            // be GC objects, and then we'd be able to say that the polymorphic call stub has a
+            // reference to the getter stub.
+            // https://bugs.webkit.org/show_bug.cgi?id=148914
+            m_rareData->callLinkInfo->disallowStubs();
+            
+            m_rareData->callLinkInfo->setUpCall(
+                CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);
 
-                unsigned numberOfBytesForCall =
-                    numberOfRegsForCall * sizeof(Register) + sizeof(CallerFrameAndPC);
+            CCallHelpers::JumpList done;
 
-                unsigned alignedNumberOfBytesForCall =
-                    WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
+            // There is a "this" argument.
+            unsigned numberOfParameters = 1;
+            // ... and a value argument if we're calling a setter.
+            if (m_type == Setter)
+                numberOfParameters++;
 
-                jit.subPtr(
-                    CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
-                    CCallHelpers::stackPointerRegister);
+            // Get the accessor; if there ain't one then the result is jsUndefined().
+            if (m_type == Setter) {
+                jit.loadPtr(
+                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
+                    loadedValueGPR);
+            } else {
+                jit.loadPtr(
+                    CCallHelpers::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
+                    loadedValueGPR);
+            }
 
-                CCallHelpers::Address calleeFrame = CCallHelpers::Address(
-                    CCallHelpers::stackPointerRegister,
-                    -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
+            CCallHelpers::Jump returnUndefined = jit.branchTestPtr(
+                CCallHelpers::Zero, loadedValueGPR);
 
-                jit.store32(
-                    CCallHelpers::TrustedImm32(numberOfParameters),
-                    calleeFrame.withOffset(JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
+            unsigned numberOfRegsForCall = JSStack::CallFrameHeaderSize + numberOfParameters;
 
-                jit.storeCell(
-                    loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
+            unsigned numberOfBytesForCall =
+                numberOfRegsForCall * sizeof(Register) + sizeof(CallerFrameAndPC);
 
-                jit.storeCell(
-                    baseForGetGPR,
-                    calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));
+            unsigned alignedNumberOfBytesForCall =
+                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);
 
-                if (m_type == Setter) {
-                    jit.storeValue(
-                        valueRegs,
-                        calleeFrame.withOffset(
-                            virtualRegisterForArgument(1).offset() * sizeof(Register)));
-                }
+            jit.subPtr(
+                CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
+                CCallHelpers::stackPointerRegister);
 
-                CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
-                    CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
-                    CCallHelpers::TrustedImmPtr(0));
+            CCallHelpers::Address calleeFrame = CCallHelpers::Address(
+                CCallHelpers::stackPointerRegister,
+                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));
 
-                fastPathCall = jit.nearCall();
+            jit.store32(
+                CCallHelpers::TrustedImm32(numberOfParameters),
+                calleeFrame.withOffset(JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));
 
-                jit.addPtr(
-                    CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
-                    CCallHelpers::stackPointerRegister);
-                if (m_type == Getter)
-                    jit.setupResults(valueRegs);
+            jit.storeCell(
+                loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));
 
-                done.append(jit.jump());
-                slowCase.link(&jit);
+            jit.storeCell(
+                baseForGetGPR,
+                calleeFrame.withOffset(virtualRegisterForArgument(0).offset() * sizeof(Register)));
 
-                jit.move(loadedValueGPR, GPRInfo::regT0);
+            if (m_type == Setter) {
+                jit.storeValue(
+                    valueRegs,
+                    calleeFrame.withOffset(
+                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
+            }
+
+            CCallHelpers::Jump slowCase = jit.branchPtrWithPatch(
+                CCallHelpers::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
+                CCallHelpers::TrustedImmPtr(0));
+
+            fastPathCall = jit.nearCall();
+            if (m_type == Getter)
+                jit.setupResults(valueRegs);
+            done.append(jit.jump());
+
+            slowCase.link(&jit);
+            jit.move(loadedValueGPR, GPRInfo::regT0);
 #if USE(JSVALUE32_64)
-                // We *always* know that the getter/setter, if non-null, is a cell.
-                jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
+            // We *always* know that the getter/setter, if non-null, is a cell.
+            jit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
 #endif
-                jit.move(CCallHelpers::TrustedImmPtr(m_rareData->callLinkInfo.get()), GPRInfo::regT2);
-                slowPathCall = jit.nearCall();
-
-                jit.addPtr(
-                    CCallHelpers::TrustedImm32(alignedNumberOfBytesForCall),
-                    CCallHelpers::stackPointerRegister);
-                if (m_type == Getter)
-                    jit.setupResults(valueRegs);
+            jit.move(CCallHelpers::TrustedImmPtr(m_rareData->callLinkInfo.get()), GPRInfo::regT2);
+            slowPathCall = jit.nearCall();
+            if (m_type == Getter)
+                jit.setupResults(valueRegs);
+            done.append(jit.jump());
 
-                done.append(jit.jump());
-                returnUndefined.link(&jit);
+            returnUndefined.link(&jit);
+            if (m_type == Getter)
+                jit.moveTrustedValue(jsUndefined(), valueRegs);
 
-                if (m_type == Getter)
-                    jit.moveTrustedValue(jsUndefined(), valueRegs);
+            done.link(&jit);
 
-                done.link(&jit);
+            jit.addPtr(CCallHelpers::TrustedImm32((jit.codeBlock()->stackPointerOffset() * sizeof(Register)) - state.numberOfBytesUsedToPreserveReusedRegisters - state.numberOfStackBytesUsedForRegisterPreservation()),
+                GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
+            state.restoreLiveRegistersFromStackForCall(isGetter());
 
-                jit.addPtr(
-                    CCallHelpers::TrustedImm32(
-                        jit.codeBlock()->stackPointerOffset() * sizeof(Register)),
-                    GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
+            state.callbacks.append(
+                [=, &vm] (LinkBuffer& linkBuffer) {
+                    m_rareData->callLinkInfo->setCallLocations(
+                        linkBuffer.locationOfNearCall(slowPathCall),
+                        linkBuffer.locationOf(addressOfLinkFunctionCheck),
+                        linkBuffer.locationOfNearCall(fastPathCall));
+
+                    linkBuffer.link(
+                        slowPathCall,
+                        CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).code()));
+                });
+        } else {
+            unsigned stackOffset = 0;
+            // Need to make room for the C call so our spillage isn't overwritten.
+            if (state.numberOfStackBytesUsedForRegisterPreservation()) {
+                if (maxFrameExtentForSlowPathCall)
+                    stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), maxFrameExtentForSlowPathCall);
+            }
+            if (stackOffset) {
+                jit.subPtr(
+                    CCallHelpers::TrustedImm32(stackOffset),
+                    CCallHelpers::stackPointerRegister);
+            }
 
-                state.callbacks.append(
-                    [=, &vm] (LinkBuffer& linkBuffer) {
-                        m_rareData->callLinkInfo->setCallLocations(
-                            linkBuffer.locationOfNearCall(slowPathCall),
-                            linkBuffer.locationOf(addressOfLinkFunctionCheck),
-                            linkBuffer.locationOfNearCall(fastPathCall));
-
-                        linkBuffer.link(
-                            slowPathCall,
-                            CodeLocationLabel(vm.getCTIStub(linkCallThunkGenerator).code()));
-                    });
-            } else {
-                // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
-                // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
+            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
+            // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
 #if USE(JSVALUE64)
-                if (m_type == CustomGetter) {
-                    jit.setupArgumentsWithExecState(
-                        baseForAccessGPR, baseForGetGPR,
-                        CCallHelpers::TrustedImmPtr(ident.impl()));
-                } else
-                    jit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
+            if (m_type == CustomGetter) {
+                jit.setupArgumentsWithExecState(
+                    baseForAccessGPR, baseForGetGPR,
+                    CCallHelpers::TrustedImmPtr(ident.impl()));
+            } else
+                jit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
 #else
-                if (m_type == CustomGetter) {
-                    jit.setupArgumentsWithExecState(
-                        baseForAccessGPR, baseForGetGPR,
-                        CCallHelpers::TrustedImm32(JSValue::CellTag),
-                        CCallHelpers::TrustedImmPtr(ident.impl()));
-                } else {
-                    jit.setupArgumentsWithExecState(
-                        baseForAccessGPR, baseForGetGPR,
-                        CCallHelpers::TrustedImm32(JSValue::CellTag),
-                        valueRegs.payloadGPR(), valueRegs.tagGPR());
-                }
+            if (m_type == CustomGetter) {
+                jit.setupArgumentsWithExecState(
+                    baseForAccessGPR, baseForGetGPR,
+                    CCallHelpers::TrustedImm32(JSValue::CellTag),
+                    CCallHelpers::TrustedImmPtr(ident.impl()));
+            } else {
+                jit.setupArgumentsWithExecState(
+                    baseForAccessGPR, baseForGetGPR,
+                    CCallHelpers::TrustedImm32(JSValue::CellTag),
+                    valueRegs.payloadGPR(), valueRegs.tagGPR());
+            }
 #endif
-                jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
+            jit.storePtr(GPRInfo::callFrameRegister, &vm.topCallFrame);
 
-                operationCall = jit.call();
-                if (m_type == CustomGetter)
-                    jit.setupResults(valueRegs);
-                CCallHelpers::Jump noException =
-                    jit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
+            operationCall = jit.call();
+            if (m_type == CustomGetter)
+                jit.setupResults(valueRegs);
 
-                jit.copyCalleeSavesToVMCalleeSavesBuffer();
-                jit.setupArguments(CCallHelpers::TrustedImmPtr(&vm), GPRInfo::callFrameRegister);
-                handlerCall = jit.call();
-                jit.jumpToExceptionHandler();
-            
-                noException.link(&jit);
+            if (stackOffset) {
+                jit.addPtr(
+                    CCallHelpers::TrustedImm32(stackOffset),
+                    CCallHelpers::stackPointerRegister);
+            }
 
 
+                jit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);
+
+            bool didSetLookupExceptionHandler = false;
+            state.restoreLiveRegistersFromStackForCallWithThrownException();
+            state.restoreScratch();
+            jit.copyCalleeSavesToVMCalleeSavesBuffer();
+            if (state.needsToRestoreRegistersIfException()) {
+                // The JIT that produced the original exception handling
+                // call site expects the OSR exit to be arrived at from
+                // genericUnwind. Therefore we must model what genericUnwind
+                // does here, i.e., set callFrameForCatch and copy callee saves.
+
+                jit.storePtr(GPRInfo::callFrameRegister, vm.addressOfCallFrameForCatch());
+                CCallHelpers::Jump jumpToOSRExitExceptionHandler = jit.jump();
+
+                // We don't need to insert a new exception handler in the table
+                // because we're doing a manual exception check here, i.e., we'll
+                // never arrive here from genericUnwind().
+                HandlerInfo originalHandler = state.originalExceptionHandler();
                 state.callbacks.append(
                     [=] (LinkBuffer& linkBuffer) {
-                        linkBuffer.link(operationCall, FunctionPtr(m_rareData->customAccessor.opaque));
-                        linkBuffer.link(handlerCall, lookupExceptionHandler);
+                        linkBuffer.link(jumpToOSRExitExceptionHandler, originalHandler.nativeCode);
                     });
+            } else {
+                jit.setupArguments(CCallHelpers::TrustedImmPtr(&vm), GPRInfo::callFrameRegister);
+                lookupExceptionHandlerCall = jit.call();
+                didSetLookupExceptionHandler = true;
+                jit.jumpToExceptionHandler();
             }
+        
+            noException.link(&jit);
+            state.restoreLiveRegistersFromStackForCall(isGetter());
+
+            state.callbacks.append(
+                [=] (LinkBuffer& linkBuffer) {
+                    linkBuffer.link(operationCall, FunctionPtr(m_rareData->customAccessor.opaque));
+                    if (didSetLookupExceptionHandler)
+                        linkBuffer.link(lookupExceptionHandlerCall, lookupExceptionHandler);
+                });
         }
         state.succeed();
         return;
@@ -761,7 +931,7 @@ void AccessCase::generate(AccessGenerationState& state)
         else
             scratchGPR3 = InvalidGPRReg;
 
-        size_t numberOfPaddingBytes = allocator.preserveReusedRegistersByPushing(jit);
+        size_t numberOfBytesUsedToPreserveReusedRegisters = allocator.preserveReusedRegistersByPushing(jit);
 
         ASSERT(structure()->transitionWatchpointSetHasBeenInvalidated());
 
@@ -883,12 +1053,12 @@ void AccessCase::generate(AccessGenerationState& state)
                 });
         }
         
-        allocator.restoreReusedRegistersByPopping(jit, numberOfPaddingBytes);
+        allocator.restoreReusedRegistersByPopping(jit, numberOfBytesUsedToPreserveReusedRegisters);
         state.succeed();
 
         if (newStructure()->outOfLineCapacity() != structure()->outOfLineCapacity()) {
             slowPath.link(&jit);
-            allocator.restoreReusedRegistersByPopping(jit, numberOfPaddingBytes);
+            allocator.restoreReusedRegistersByPopping(jit, numberOfBytesUsedToPreserveReusedRegisters);
             allocator.preserveUsedRegistersToScratchBufferForCall(jit, scratchBuffer, scratchGPR);
 #if USE(JSVALUE64)
             jit.setupArgumentsWithExecState(
@@ -971,9 +1141,6 @@ MacroAssemblerCodePtr PolymorphicAccess::regenerateWithCases(
         if (found)
             continue;
         
-        if (myCase->doesCalls() && stubInfo.patch.spillMode == NeedToSpill)
-            return MacroAssemblerCodePtr();
-
         casesToAdd.append(WTF::move(myCase));
     }
 
@@ -1091,11 +1258,15 @@ MacroAssemblerCodePtr PolymorphicAccess::regenerate(
     CCallHelpers jit(&vm, codeBlock);
     state.jit = &jit;
 
-    state.numberOfPaddingBytes = allocator.preserveReusedRegistersByPushing(jit);
+    state.numberOfBytesUsedToPreserveReusedRegisters = allocator.preserveReusedRegistersByPushing(jit);
 
     bool allGuardedByStructureCheck = true;
-    for (auto& entry : cases)
+    bool hasJSGetterSetterCall = false;
+    for (auto& entry : cases) {
         allGuardedByStructureCheck &= entry->guardedByStructureCheck();
+        if (entry->type() == AccessCase::Getter || entry->type() == AccessCase::Setter)
+            hasJSGetterSetterCall = true;
+    }
 
     if (cases.isEmpty()) {
         // This is super unlikely, but we make it legal anyway.
@@ -1147,6 +1318,38 @@ MacroAssemblerCodePtr PolymorphicAccess::regenerate(
         failure = state.failAndRepatch;
     failure.append(jit.jump());
 
+    if (state.needsToRestoreRegistersIfException() && hasJSGetterSetterCall) {
+        // Emit the exception handler.
+        // Note that this code is only reachable when doing genericUnwind from a pure JS getter/setter.
+        // Note also that this is not reachable from a custom getter/setter. Custom getters/setters have
+        // their own exception handling logic that doesn't go through genericUnwind.
+        MacroAssembler::Label makeshiftCatchHandler = jit.label();
+
+        int stackPointerOffset = codeBlock->stackPointerOffset() * sizeof(EncodedJSValue);
+        stackPointerOffset -= state.numberOfBytesUsedToPreserveReusedRegisters;
+        stackPointerOffset -= state.numberOfStackBytesUsedForRegisterPreservation();
+
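+        // genericUnwind() stashes the call frame for the catch handler in the VM;
+        // reload it and recompute this frame's stack pointer relative to it.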
+        jit.loadPtr(vm.addressOfCallFrameForCatch(), GPRInfo::callFrameRegister);
+        jit.addPtr(CCallHelpers::TrustedImm32(stackPointerOffset), GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
+
+        state.restoreLiveRegistersFromStackForCallWithThrownException();
+        state.restoreScratch();
+        CCallHelpers::Jump jumpToOSRExitExceptionHandler = jit.jump();
+
+        HandlerInfo oldHandler = state.originalExceptionHandler();
+        CallSiteIndex newExceptionHandlingCallSite = state.callSiteIndexForExceptionHandling();
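+        // Register the makeshift catch handler under the new call site index so
+        // that genericUnwind() finds it for exceptions thrown under this call site.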
+        state.callbacks.append(
+            [=] (LinkBuffer& linkBuffer) {
+                linkBuffer.link(jumpToOSRExitExceptionHandler, oldHandler.nativeCode);
+
+                HandlerInfo handlerToRegister = oldHandler;
+                handlerToRegister.nativeCode = linkBuffer.locationOf(makeshiftCatchHandler);
+                handlerToRegister.start = newExceptionHandlingCallSite.bits();
+                handlerToRegister.end = newExceptionHandlingCallSite.bits() + 1;
+                codeBlock->appendExceptionHandler(handlerToRegister);
+            });
+    }
+
     LinkBuffer linkBuffer(vm, jit, codeBlock, JITCompilationCanFail);
     if (linkBuffer.didFailToAllocate()) {
         if (verbose)
@@ -1168,7 +1371,7 @@ MacroAssemblerCodePtr PolymorphicAccess::regenerate(
 
     if (verbose)
         dataLog(*codeBlock, " ", stubInfo.codeOrigin, ": Generating polymorphic access stub for ", listDump(cases), "\n");
-    
+
     MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
         codeBlock, linkBuffer,
         ("%s", toCString("Access stub for ", *codeBlock, " ", stubInfo.codeOrigin, " with return point ", successLabel, ": ", listDump(cases)).data()));
@@ -1177,7 +1380,15 @@ MacroAssemblerCodePtr PolymorphicAccess::regenerate(
     for (auto& entry : cases)
         doesCalls |= entry->doesCalls();
     
-    m_stubRoutine = createJITStubRoutine(code, vm, codeBlock, doesCalls);
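+    // If this stub registered a makeshift exception handler, the stub routine must
+    // know which CodeBlock owns that handler so it can be removed when the stub dies.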
+    CodeBlock* codeBlockThatOwnsExceptionHandlers = nullptr;
+    CallSiteIndex callSiteIndexForExceptionHandling = state.originalCallSiteIndex();
+    if (state.needsToRestoreRegistersIfException()) {
+        codeBlockThatOwnsExceptionHandlers = codeBlock;
+        ASSERT(JITCode::isOptimizingJIT(codeBlockThatOwnsExceptionHandlers->jitType()));
+        callSiteIndexForExceptionHandling = state.callSiteIndexForExceptionHandling();
+    }
+
+    m_stubRoutine = createJITStubRoutine(code, vm, codeBlock, doesCalls, nullptr, codeBlockThatOwnsExceptionHandlers, callSiteIndexForExceptionHandling);
     m_watchpoints = WTF::move(state.watchpoints);
     if (!state.weakReferences.isEmpty())
         m_weakReferences = std::make_unique<Vector<WriteBarrier<JSCell>>>(WTF::move(state.weakReferences));
@@ -1186,6 +1397,11 @@ MacroAssemblerCodePtr PolymorphicAccess::regenerate(
     return code.code();
 }
 
+void PolymorphicAccess::aboutToDie()
+{
+    m_stubRoutine->aboutToDie();
+}
+
 } // namespace JSC
 
 namespace WTF {
index cdd5113..ad09b55 100644 (file)
@@ -199,6 +199,17 @@ public:
         }
     }
 
+    bool isGetter() const
+    {
+        switch (type()) {
+        case Getter:
+        case CustomGetter:
+            return true;
+        default:
+            return false;
+        }
+    }
+
     CallLinkInfo* callLinkInfo() const
     {
         if (!m_rareData)
@@ -289,6 +300,8 @@ public:
     // If this returns false then we are requesting a reset of the owning StructureStubInfo.
     bool visitWeak(VM&) const;
 
+    void aboutToDie();
+
     void dump(PrintStream& out) const;
 
 private:
index 8728518..4affa46 100644 (file)
@@ -89,6 +89,21 @@ void StructureStubInfo::deref()
     RELEASE_ASSERT_NOT_REACHED();
 }
 
+void StructureStubInfo::aboutToDie()
+{
+    switch (cacheType) {
+    case CacheType::Stub:
+        u.stub->aboutToDie();
+        return;
+    case CacheType::Unset:
+    case CacheType::GetByIdSelf:
+    case CacheType::PutByIdReplace:
+        return;
+    }
+
+    RELEASE_ASSERT_NOT_REACHED();
+}
+
 MacroAssemblerCodePtr StructureStubInfo::addAccessCase(
     CodeBlock* codeBlock, const Identifier& ident, std::unique_ptr<AccessCase> accessCase)
 {
index 30eaa51..c1936ca 100644 (file)
@@ -35,7 +35,6 @@
 #include "Options.h"
 #include "PolymorphicAccess.h"
 #include "RegisterSet.h"
-#include "SpillRegistersMode.h"
 #include "Structure.h"
 #include "StructureStubClearingWatchpoint.h"
 
@@ -75,6 +74,7 @@ public:
     void reset(CodeBlock*);
 
     void deref();
+    void aboutToDie();
 
     // Check if the stub has weak references that are dead. If it does, then it resets itself,
     // either entirely or just enough to ensure that those dead pointers don't get used anymore.
@@ -131,7 +131,6 @@ public:
     } u;
 
     struct {
-        unsigned spillMode : 8;
         int8_t baseGPR;
 #if USE(JSVALUE32_64)
         int8_t valueTagGPR;
index 3a7125e..5f6ee9c 100644 (file)
@@ -326,7 +326,7 @@ public:
     {
         return isInGPR();
     }
-#endif
+#endif // USE(JSVALUE32_64)
     
     MacroAssembler::FPRegisterID fpr() const
     {
index 741bc16..751a061 100644 (file)
@@ -57,12 +57,29 @@ CallSiteIndex CommonData::addCodeOrigin(CodeOrigin codeOrigin)
     return CallSiteIndex(index);
 }
 
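+// Always allocates a CallSiteIndex, reusing a slot from the free list when one is
+// available, rather than deduplicating against an existing code origin.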
+CallSiteIndex CommonData::addCodeOriginUnconditionally(CodeOrigin codeOrigin)
+{
+    if (callSiteIndexFreeList.size())
+        return CallSiteIndex(callSiteIndexFreeList.takeAny());
+
+    codeOrigins.append(codeOrigin);
+    unsigned index = codeOrigins.size() - 1;
+    ASSERT(codeOrigins[index] == codeOrigin);
+    return CallSiteIndex(index);
+}
+
 CallSiteIndex CommonData::lastCallSite() const
 {
     RELEASE_ASSERT(codeOrigins.size());
     return CallSiteIndex(codeOrigins.size() - 1);
 }
 
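+// Returns the CallSiteIndex of a dead stub's exception handler to the free list so
+// that a future stub can reuse the slot.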
+void CommonData::removeCallSiteIndex(CallSiteIndex callSite)
+{
+    RELEASE_ASSERT(callSite.bits() < codeOrigins.size());
+    callSiteIndexFreeList.add(callSite.bits());
+}
+
 void CommonData::shrinkToFit()
 {
     codeOrigins.shrinkToFit();
index 4be6013..6bbf86f 100644 (file)
@@ -80,7 +80,9 @@ public:
     
     void notifyCompilingStructureTransition(Plan&, CodeBlock*, Node*);
     CallSiteIndex addCodeOrigin(CodeOrigin);
+    CallSiteIndex addCodeOriginUnconditionally(CodeOrigin);
     CallSiteIndex lastCallSite() const;
+    void removeCallSiteIndex(CallSiteIndex);
     
     void shrinkToFit();
     
@@ -116,6 +118,10 @@ public:
     
     unsigned frameRegisterCount;
     unsigned requiredRegisterCountForExit;
+
+private:
+    HashSet<unsigned, WTF::IntHash<unsigned>, WTF::UnsignedWithZeroKeyHashTraits<unsigned>> callSiteIndexFreeList;
+
 };
 
 } } // namespace JSC::DFG
index db044e5..48217d3 100644 (file)
@@ -87,6 +87,38 @@ void JITCode::reconstruct(
         result[i] = recoveries[i].recover(exec);
 }
 
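+// Finds the exception-handling OSR exit registered for this call site and reports
+// the registers that its value recoveries say are live there. An empty set means
+// nothing needs to be preserved.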
+RegisterSet JITCode::liveRegistersToPreserveAtExceptionHandlingCallSite(CodeBlock* codeBlock, CallSiteIndex callSiteIndex)
+{
+    for (OSRExit& exit : osrExit) {
+        if (exit.m_isExceptionHandler && exit.m_exceptionHandlerCallSiteIndex.bits() == callSiteIndex.bits()) {
+            Operands<ValueRecovery> valueRecoveries;
+            reconstruct(codeBlock, exit.m_codeOrigin, exit.m_streamIndex, valueRecoveries);
+            RegisterSet liveAtOSRExit;
+            for (size_t index = 0; index < valueRecoveries.size(); ++index) {
+                const ValueRecovery& recovery = valueRecoveries[index];
+                if (recovery.isInRegisters()) {
+                    if (recovery.isInGPR())
+                        liveAtOSRExit.set(recovery.gpr());
+                    else if (recovery.isInFPR())
+                        liveAtOSRExit.set(recovery.fpr());
+#if USE(JSVALUE32_64)
+                    else if (recovery.isInJSValueRegs()) {
+                        liveAtOSRExit.set(recovery.payloadGPR());
+                        liveAtOSRExit.set(recovery.tagGPR());
+                    }
+#endif
+                    else
+                        RELEASE_ASSERT_NOT_REACHED();
+                }
+            }
+
+            return liveAtOSRExit;
+        }
+    }
+
+    return RegisterSet();
+}
+
 #if ENABLE(FTL_JIT)
 bool JITCode::checkIfOptimizationThresholdReached(CodeBlock* codeBlock)
 {
index ff6c866..de23428 100644 (file)
@@ -116,6 +116,7 @@ public:
     
     void shrinkToFit();
 
+    RegisterSet liveRegistersToPreserveAtExceptionHandlingCallSite(CodeBlock*, CallSiteIndex) override;
 #if ENABLE(FTL_JIT)
     CodeBlock* osrEntryBlock() { return m_osrEntryBlock.get(); }
     void setOSREntryBlock(VM& vm, const JSCell* owner, CodeBlock* osrEntryBlock) { m_osrEntryBlock.set(vm, owner, osrEntryBlock); }
index 81fae73..b8f3881 100644 (file)
@@ -555,6 +555,7 @@ void JITCompiler::appendExceptionHandlingOSRExit(unsigned eventStreamIndex, Code
     exit.m_willArriveAtOSRExitFromGenericUnwind = jumpsToFail.empty(); // If jumps are empty, we're going to jump here from genericUnwind from a child call frame.
     exit.m_isExceptionHandler = true;
     exit.m_codeOrigin = opCatchOrigin;
+    exit.m_exceptionHandlerCallSiteIndex = callSite;
     OSRExitCompilationInfo& exitInfo = appendExitInfo(jumpsToFail);
     jitCode()->appendOSRExit(exit);
     m_exceptionHandlerOSRExitCallSites.append(ExceptionHandlingOSRExitInfo { exitInfo, *exceptionHandler, callSite });
index 867e233..0cfe3a9 100644 (file)
@@ -43,6 +43,7 @@ OSRExit::OSRExit(ExitKind kind, JSValueSource jsValueSource, MethodOfGettingAVal
     , m_patchableCodeOffset(0)
     , m_recoveryIndex(recoveryIndex)
     , m_streamIndex(streamIndex)
+    , m_exceptionHandlerCallSiteIndex(std::numeric_limits<unsigned>::max())
     , m_willArriveAtOSRExitFromGenericUnwind(false)
     , m_isExceptionHandler(false)
 {
index 13e7684..5ceefb0 100644 (file)
@@ -100,6 +100,7 @@ struct OSRExit : public OSRExitBase {
     void correctJump(LinkBuffer&);
 
     unsigned m_streamIndex;
+    CallSiteIndex m_exceptionHandlerCallSiteIndex;
 
     bool m_willArriveAtOSRExitFromGenericUnwind : 1;
     bool m_isExceptionHandler : 1;
index 58d434c..5ad0462 100755 (executable)
@@ -960,7 +960,6 @@ void SpeculativeJIT::compileIn(Node* node)
             stubInfo->patch.valueTagGPR = static_cast<int8_t>(InvalidGPRReg);
 #endif
             stubInfo->patch.usedRegisters = usedRegisters();
-            stubInfo->patch.spillMode = NeedToSpill;
 
             m_jit.addIn(InRecord(jump, done, slowPath.get(), stubInfo));
             addSlowPathGenerator(WTF::move(slowPath));
index 5a4955d..8b2a098 100644 (file)
@@ -183,11 +183,18 @@ void SpeculativeJIT::cachedGetById(
         basePayloadGPR = resultPayloadGPR;
     }
     
+    RegisterSet usedRegisters = this->usedRegisters();
+    if (spillMode == DontSpill) {
+        // We've already flushed registers to the stack; we don't need to spill these.
+        usedRegisters.set(JSValueRegs(baseTagGPROrNone, basePayloadGPR), false);
+        usedRegisters.set(JSValueRegs(resultTagGPR, resultPayloadGPR), false);
+    }
+    
     CallSiteIndex callSite = m_jit.recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(codeOrigin, m_stream->size());
     JITGetByIdGenerator gen(
-        m_jit.codeBlock(), codeOrigin, callSite, usedRegisters(),
+        m_jit.codeBlock(), codeOrigin, callSite, usedRegisters,
         JSValueRegs(baseTagGPROrNone, basePayloadGPR),
-        JSValueRegs(resultTagGPR, resultPayloadGPR), spillMode);
+        JSValueRegs(resultTagGPR, resultPayloadGPR));
     
     gen.generateFastPath(m_jit);
     
@@ -216,11 +223,17 @@ void SpeculativeJIT::cachedGetById(
 
 void SpeculativeJIT::cachedPutById(CodeOrigin codeOrigin, GPRReg basePayloadGPR, GPRReg valueTagGPR, GPRReg valuePayloadGPR, GPRReg scratchGPR, unsigned identifierNumber, PutKind putKind, JITCompiler::Jump slowPathTarget, SpillRegistersMode spillMode)
 {
+    RegisterSet usedRegisters = this->usedRegisters();
+    if (spillMode == DontSpill) {
+        // We've already flushed registers to the stack; we don't need to spill these.
+        usedRegisters.set(basePayloadGPR, false);
+        usedRegisters.set(JSValueRegs(valueTagGPR, valuePayloadGPR), false);
+    }
     CallSiteIndex callSite = m_jit.recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(codeOrigin, m_stream->size());
     JITPutByIdGenerator gen(
-        m_jit.codeBlock(), codeOrigin, callSite, usedRegisters(),
+        m_jit.codeBlock(), codeOrigin, callSite, usedRegisters,
         JSValueRegs::payloadOnly(basePayloadGPR), JSValueRegs(valueTagGPR, valuePayloadGPR),
-        scratchGPR, spillMode, m_jit.ecmaModeFor(codeOrigin), putKind);
+        scratchGPR, m_jit.ecmaModeFor(codeOrigin), putKind);
     
     gen.generateFastPath(m_jit);
     
index 264c4df..845bd8e 100644 (file)
@@ -155,10 +155,15 @@ GPRReg SpeculativeJIT::fillJSValue(Edge edge)
 void SpeculativeJIT::cachedGetById(CodeOrigin codeOrigin, GPRReg baseGPR, GPRReg resultGPR, unsigned identifierNumber, JITCompiler::Jump slowPathTarget, SpillRegistersMode spillMode)
 {
     CallSiteIndex callSite = m_jit.recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(codeOrigin, m_stream->size());
-
+    RegisterSet usedRegisters = this->usedRegisters();
+    if (spillMode == DontSpill) {
+        // We've already flushed registers to the stack; we don't need to spill these.
+        usedRegisters.set(baseGPR, false);
+        usedRegisters.set(resultGPR, false);
+    }
     JITGetByIdGenerator gen(
-        m_jit.codeBlock(), codeOrigin, callSite, usedRegisters(), JSValueRegs(baseGPR),
-        JSValueRegs(resultGPR), spillMode);
+        m_jit.codeBlock(), codeOrigin, callSite, usedRegisters, JSValueRegs(baseGPR),
+        JSValueRegs(resultGPR));
     gen.generateFastPath(m_jit);
     
     JITCompiler::JumpList slowCases;
@@ -177,10 +182,16 @@ void SpeculativeJIT::cachedGetById(CodeOrigin codeOrigin, GPRReg baseGPR, GPRReg
 void SpeculativeJIT::cachedPutById(CodeOrigin codeOrigin, GPRReg baseGPR, GPRReg valueGPR, GPRReg scratchGPR, unsigned identifierNumber, PutKind putKind, JITCompiler::Jump slowPathTarget, SpillRegistersMode spillMode)
 {
     CallSiteIndex callSite = m_jit.recordCallSiteAndGenerateExceptionHandlingOSRExitIfNeeded(codeOrigin, m_stream->size());
+    RegisterSet usedRegisters = this->usedRegisters();
+    if (spillMode == DontSpill) {
+        // We've already flushed registers to the stack; we don't need to spill these.
+        usedRegisters.set(baseGPR, false);
+        usedRegisters.set(valueGPR, false);
+    }
 
     JITPutByIdGenerator gen(
-        m_jit.codeBlock(), codeOrigin, callSite, usedRegisters(), JSValueRegs(baseGPR),
-        JSValueRegs(valueGPR), scratchGPR, spillMode, m_jit.ecmaModeFor(codeOrigin), putKind);
+        m_jit.codeBlock(), codeOrigin, callSite, usedRegisters, JSValueRegs(baseGPR),
+        JSValueRegs(valueGPR), scratchGPR, m_jit.ecmaModeFor(codeOrigin), putKind);
 
     gen.generateFastPath(m_jit);
     
index a72a0c7..eee4fab 100644 (file)
@@ -467,7 +467,7 @@ static void fixFunctionBasedOnStackMaps(
                 
                 JITGetByIdGenerator gen(
                     codeBlock, codeOrigin, getById.callSiteIndex(), usedRegisters, JSValueRegs(base),
-                    JSValueRegs(result), NeedToSpill);
+                    JSValueRegs(result));
                 
                 MacroAssembler::Label begin = slowPathJIT.label();
 
@@ -505,8 +505,7 @@ static void fixFunctionBasedOnStackMaps(
                 
                 JITPutByIdGenerator gen(
                     codeBlock, codeOrigin, putById.callSiteIndex(), usedRegisters, JSValueRegs(base),
-                    JSValueRegs(value), GPRInfo::patchpointScratchRegister, NeedToSpill,
-                    putById.ecmaMode(), putById.putKind());
+                    JSValueRegs(value), GPRInfo::patchpointScratchRegister, putById.ecmaMode(), putById.putKind());
                 
                 MacroAssembler::Label begin = slowPathJIT.label();
                 
@@ -545,7 +544,6 @@ static void fixFunctionBasedOnStackMaps(
                 stubInfo->patch.baseGPR = static_cast<int8_t>(obj);
                 stubInfo->patch.valueGPR = static_cast<int8_t>(result);
                 stubInfo->patch.usedRegisters = usedRegisters;
-                stubInfo->patch.spillMode = NeedToSpill;
 
                 MacroAssembler::Label begin = slowPathJIT.label();
 
index a0f4929..2bc6bbe 100644 (file)
@@ -144,6 +144,13 @@ void JITCode::validateReferences(const TrackedReferences& trackedReferences)
         exit.validateReferences(trackedReferences);
 }
 
+RegisterSet JITCode::liveRegistersToPreserveAtExceptionHandlingCallSite(CodeBlock*, CallSiteIndex)
+{
+    // FIXME: implement this when FTL implements try/catch.
+    // https://bugs.webkit.org/show_bug.cgi?id=149409
+    return RegisterSet();
+}
+
 } } // namespace JSC::FTL
 
 #endif // ENABLE(FTL_JIT)
index 7ecfa62..5b50568 100644 (file)
@@ -72,6 +72,8 @@ public:
     void initializeAddressForCall(CodePtr);
     
     void validateReferences(const TrackedReferences&) override;
+
+    RegisterSet liveRegistersToPreserveAtExceptionHandlingCallSite(CodeBlock*, CallSiteIndex) override;
     
     const Vector<RefPtr<ExecutableMemoryHandle>>& handles() const { return m_handles; }
     const Vector<RefPtr<DataSection>>& dataSections() const { return m_dataSections; }
index bfb07a0..58d45c8 100644 (file)
@@ -28,6 +28,8 @@
 
 #if ENABLE(JIT)
 
+#include "CodeBlock.h"
+#include "DFGCommonData.h"
 #include "Heap.h"
 #include "VM.h"
 #include "JSCInlines.h"
@@ -44,7 +46,7 @@ GCAwareJITStubRoutine::GCAwareJITStubRoutine(
 {
     vm.heap.m_jitStubRoutines.add(this);
 }
-    
+
 GCAwareJITStubRoutine::~GCAwareJITStubRoutine() { }
 
 void GCAwareJITStubRoutine::observeZeroRefCount()
@@ -94,16 +96,50 @@ void MarkingGCAwareJITStubRoutineWithOneObject::markRequiredObjectsInternal(Slot
     visitor.append(&m_object);
 }
 
+
+GCAwareJITStubRoutineWithExceptionHandler::GCAwareJITStubRoutineWithExceptionHandler(
+    const MacroAssemblerCodeRef& code, VM& vm, 
+    CodeBlock* codeBlockForExceptionHandlers, CallSiteIndex exceptionHandlerCallSiteIndex)
+    : GCAwareJITStubRoutine(code, vm)
+    , m_codeBlockWithExceptionHandler(codeBlockForExceptionHandlers)
+    , m_exceptionHandlerCallSiteIndex(exceptionHandlerCallSiteIndex)
+{
+    RELEASE_ASSERT(m_codeBlockWithExceptionHandler);
+}
+
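+// The owning CodeBlock is about to be destroyed; drop our pointer to it so the
+// destructor below doesn't touch its tables.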
+void GCAwareJITStubRoutineWithExceptionHandler::aboutToDie()
+{
+    m_codeBlockWithExceptionHandler = nullptr;
+}
+
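+// Unregister the makeshift exception handler and free its CallSiteIndex, unless
+// aboutToDie() already told us the CodeBlock is gone.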
+GCAwareJITStubRoutineWithExceptionHandler::~GCAwareJITStubRoutineWithExceptionHandler()
+{
+    if (m_codeBlockWithExceptionHandler) {
+        m_codeBlockWithExceptionHandler->jitCode()->dfgCommon()->removeCallSiteIndex(m_exceptionHandlerCallSiteIndex);
+        m_codeBlockWithExceptionHandler->removeExceptionHandlerForCallSite(m_exceptionHandlerCallSiteIndex);
+    }
+}
+    
+
 PassRefPtr<JITStubRoutine> createJITStubRoutine(
     const MacroAssemblerCodeRef& code,
     VM& vm,
     const JSCell* owner,
     bool makesCalls,
-    JSCell* object)
+    JSCell* object,
+    CodeBlock* codeBlockForExceptionHandlers,
+    CallSiteIndex exceptionHandlerCallSiteIndex)
 {
     if (!makesCalls)
         return adoptRef(new JITStubRoutine(code));
     
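+    // A stub that registered its own exception handler must clean that handler up
+    // when it dies, so it needs the stronger GC-aware stub routine.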
+    if (codeBlockForExceptionHandlers) {
+        RELEASE_ASSERT(!object); // We're not a marking stub routine.
+        RELEASE_ASSERT(JITCode::isOptimizingJIT(codeBlockForExceptionHandlers->jitType()));
+        return static_pointer_cast<JITStubRoutine>(
+            adoptRef(new GCAwareJITStubRoutineWithExceptionHandler(code, vm, codeBlockForExceptionHandlers, exceptionHandlerCallSiteIndex)));
+    }
+
     if (!object) {
         return static_pointer_cast<JITStubRoutine>(
             adoptRef(new GCAwareJITStubRoutine(code, vm)));
index 59cbb24..347b1cc 100644 (file)
@@ -89,6 +89,22 @@ private:
     WriteBarrier<JSCell> m_object;
 };
 
+
+// The stub has exception handlers in it, so it clears itself from the exception
+// handling table when it dies. It also frees its slot in the CodeOrigin table so
+// that new exception handlers can reuse the same CallSiteIndex.
+class GCAwareJITStubRoutineWithExceptionHandler : public GCAwareJITStubRoutine {
+public:
+    GCAwareJITStubRoutineWithExceptionHandler(const MacroAssemblerCodeRef&, VM&, CodeBlock*, CallSiteIndex);
+    ~GCAwareJITStubRoutineWithExceptionHandler() override;
+
+    void aboutToDie() override;
+
+private:
+    CodeBlock* m_codeBlockWithExceptionHandler;
+    CallSiteIndex m_exceptionHandlerCallSiteIndex;
+};
+
 // Helper for easily creating a GC-aware JIT stub routine. For the varargs,
 // pass zero or more JSCell*'s. This will either create a JITStubRoutine, a
 // GCAwareJITStubRoutine, or an ObjectMarkingGCAwareJITStubRoutine as
@@ -110,7 +126,8 @@ private:
 
 PassRefPtr<JITStubRoutine> createJITStubRoutine(
     const MacroAssemblerCodeRef&, VM&, const JSCell* owner, bool makesCalls,
-    JSCell* = nullptr);
+    JSCell* = nullptr, 
+    CodeBlock* codeBlockForExceptionHandlers = nullptr, CallSiteIndex exceptionHandlingCallSiteIndex = CallSiteIndex(std::numeric_limits<unsigned>::max()));
 
 // Helper for the creation of simple stub routines that need no help from the GC. Note
 // that codeBlock gets "executed" more than once.
index 0dc6f95..bf06d17 100644 (file)
@@ -218,6 +218,11 @@ JITCode::CodePtr NativeJITCode::addressForCall(ArityCheckMode)
     return m_ref.code();
 }
 
+RegisterSet JITCode::liveRegistersToPreserveAtExceptionHandlingCallSite(CodeBlock*, CallSiteIndex)
+{
+    return RegisterSet();
+}
+
 } // namespace JSC
 
 namespace WTF {
index 382f7ab..fca3c04 100644 (file)
@@ -32,6 +32,7 @@
 #include "JITStubs.h"
 #include "JSCJSValue.h"
 #include "MacroAssemblerCodeRef.h"
+#include "RegisterSet.h"
 
 namespace JSC {
 
@@ -193,6 +194,8 @@ public:
     
     virtual bool contains(void*) = 0;
 
+    virtual RegisterSet liveRegistersToPreserveAtExceptionHandlingCallSite(CodeBlock*, CallSiteIndex);
+
 private:
     JITType m_jitType;
 };
index 898a690..96e124f 100644 (file)
@@ -51,20 +51,13 @@ JITInlineCacheGenerator::JITInlineCacheGenerator(
 
 JITByIdGenerator::JITByIdGenerator(
     CodeBlock* codeBlock, CodeOrigin codeOrigin, CallSiteIndex callSite, AccessType accessType,
-    const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs value,
-    SpillRegistersMode spillMode)
+    const RegisterSet& usedRegisters, JSValueRegs base, JSValueRegs value)
     : JITInlineCacheGenerator(codeBlock, codeOrigin, callSite, accessType)
     , m_base(base)
     , m_value(value)
 {
-    m_stubInfo->patch.spillMode = spillMode;
     m_stubInfo->patch.usedRegisters = usedRegisters;
     
-    // This is a convenience - in cases where the only registers you're using are base/value,
-    // it allows you to pass RegisterSet() as the usedRegisters argument.
-    m_stubInfo->patch.usedRegisters.set(base);
-    m_stubInfo->patch.usedRegisters.set(value);
-    
     m_stubInfo->patch.baseGPR = static_cast<int8_t>(base.payloadGPR());
     m_stubInfo->patch.valueGPR = static_cast<int8_t>(value.payloadGPR());
 #if USE(JSVALUE32_64)
@@ -110,9 +103,9 @@ void JITByIdGenerator::generateFastPathChecks(MacroAssembler& jit)
 
 JITGetByIdGenerator::JITGetByIdGenerator(
     CodeBlock* codeBlock, CodeOrigin codeOrigin, CallSiteIndex callSite, const RegisterSet& usedRegisters,
-    JSValueRegs base, JSValueRegs value, SpillRegistersMode spillMode)
+    JSValueRegs base, JSValueRegs value)
     : JITByIdGenerator(
-        codeBlock, codeOrigin, callSite, AccessType::Get, usedRegisters, base, value, spillMode)
+        codeBlock, codeOrigin, callSite, AccessType::Get, usedRegisters, base, value)
 {
     RELEASE_ASSERT(base.payloadGPR() != value.tagGPR());
 }
@@ -136,10 +129,10 @@ void JITGetByIdGenerator::generateFastPath(MacroAssembler& jit)
 
 JITPutByIdGenerator::JITPutByIdGenerator(
     CodeBlock* codeBlock, CodeOrigin codeOrigin, CallSiteIndex callSite, const RegisterSet& usedRegisters,
-    JSValueRegs base, JSValueRegs value, GPRReg scratch, SpillRegistersMode spillMode,
+    JSValueRegs base, JSValueRegs value, GPRReg scratch, 
     ECMAMode ecmaMode, PutKind putKind)
     : JITByIdGenerator(
-        codeBlock, codeOrigin, callSite, AccessType::Put, usedRegisters, base, value, spillMode)
+        codeBlock, codeOrigin, callSite, AccessType::Put, usedRegisters, base, value)
     , m_ecmaMode(ecmaMode)
     , m_putKind(putKind)
 {
index 65fae04..0ada0f8 100644 (file)
@@ -58,7 +58,7 @@ protected:
 
     JITByIdGenerator(
         CodeBlock*, CodeOrigin, CallSiteIndex, AccessType, const RegisterSet&, JSValueRegs base,
-        JSValueRegs value, SpillRegistersMode spillMode);
+        JSValueRegs value);
     
 public:
     void reportSlowPathCall(MacroAssembler::Label slowPathBegin, MacroAssembler::Call call)
@@ -96,7 +96,7 @@ public:
 
     JITGetByIdGenerator(
         CodeBlock*, CodeOrigin, CallSiteIndex, const RegisterSet& usedRegisters, JSValueRegs base,
-        JSValueRegs value, SpillRegistersMode spillMode);
+        JSValueRegs value);
     
     void generateFastPath(MacroAssembler&);
 };
@@ -107,7 +107,7 @@ public:
 
     JITPutByIdGenerator(
         CodeBlock*, CodeOrigin, CallSiteIndex, const RegisterSet& usedRegisters, JSValueRegs base,
-        JSValueRegs, GPRReg scratch, SpillRegistersMode spillMode, ECMAMode, PutKind);
+        JSValueRegs, GPRReg scratch, ECMAMode, PutKind);
     
     void generateFastPath(MacroAssembler&);
     
index ff2fe53..e3c4998 100644 (file)
@@ -214,7 +214,7 @@ JITGetByIdGenerator JIT::emitGetByValWithCachedId(Instruction* currentInstructio
 
     JITGetByIdGenerator gen(
         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
-        JSValueRegs(regT0), JSValueRegs(regT0), DontSpill);
+        JSValueRegs(regT0), JSValueRegs(regT0));
     gen.generateFastPath(*this);
 
     fastDoneCase = jump();
@@ -422,7 +422,7 @@ JITPutByIdGenerator JIT::emitPutByValWithCachedId(Instruction* currentInstructio
 
     JITPutByIdGenerator gen(
         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
-        JSValueRegs(regT0), JSValueRegs(regT1), regT2, DontSpill, m_codeBlock->ecmaMode(), putKind);
+        JSValueRegs(regT0), JSValueRegs(regT1), regT2, m_codeBlock->ecmaMode(), putKind);
     gen.generateFastPath(*this);
     doneCases.append(jump());
 
@@ -547,7 +547,7 @@ void JIT::emit_op_get_by_id(Instruction* currentInstruction)
 
     JITGetByIdGenerator gen(
         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
-        JSValueRegs(regT0), JSValueRegs(regT0), DontSpill);
+        JSValueRegs(regT0), JSValueRegs(regT0));
     gen.generateFastPath(*this);
     addSlowCase(gen.slowPathJump());
     m_getByIds.append(gen);
@@ -593,7 +593,7 @@ void JIT::emit_op_put_by_id(Instruction* currentInstruction)
 
     JITPutByIdGenerator gen(
         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(m_bytecodeOffset), RegisterSet::stubUnavailableRegisters(),
-        JSValueRegs(regT0), JSValueRegs(regT1), regT2, DontSpill, m_codeBlock->ecmaMode(),
+        JSValueRegs(regT0), JSValueRegs(regT1), regT2, m_codeBlock->ecmaMode(),
         direct ? Direct : NotDirect);
     
     gen.generateFastPath(*this);
index ca7b437..d0477fd 100644 (file)
@@ -283,7 +283,7 @@ JITGetByIdGenerator JIT::emitGetByValWithCachedId(Instruction* currentInstructio
 
     JITGetByIdGenerator gen(
         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(currentInstruction), RegisterSet::stubUnavailableRegisters(),
-        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), DontSpill);
+        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0));
     gen.generateFastPath(*this);
 
     fastDoneCase = jump();
@@ -495,7 +495,7 @@ JITPutByIdGenerator JIT::emitPutByValWithCachedId(Instruction* currentInstructio
 
     JITPutByIdGenerator gen(
         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(currentInstruction), RegisterSet::stubUnavailableRegisters(),
-        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2), regT1, DontSpill, m_codeBlock->ecmaMode(), putKind);
+        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2), regT1, m_codeBlock->ecmaMode(), putKind);
     gen.generateFastPath(*this);
     doneCases.append(jump());
 
@@ -588,7 +588,7 @@ void JIT::emit_op_get_by_id(Instruction* currentInstruction)
 
     JITGetByIdGenerator gen(
         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(currentInstruction), RegisterSet::stubUnavailableRegisters(),
-        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), DontSpill);
+        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0));
     gen.generateFastPath(*this);
     addSlowCase(gen.slowPathJump());
     m_getByIds.append(gen);
@@ -634,7 +634,7 @@ void JIT::emit_op_put_by_id(Instruction* currentInstruction)
     JITPutByIdGenerator gen(
         m_codeBlock, CodeOrigin(m_bytecodeOffset), CallSiteIndex(currentInstruction), RegisterSet::stubUnavailableRegisters(),
         JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2),
-        regT1, DontSpill, m_codeBlock->ecmaMode(), direct ? Direct : NotDirect);
+        regT1, m_codeBlock->ecmaMode(), direct ? Direct : NotDirect);
     
     gen.generateFastPath(*this);
     addSlowCase(gen.slowPathJump());
index 3d75ef5..db9aaa7 100644 (file)
@@ -66,6 +66,7 @@ public:
     }
     
     virtual ~JITStubRoutine();
+    virtual void aboutToDie() { }
     
     // MacroAssemblerCodeRef is copyable, but at the cost of reference
     // counting churn. Returning a reference is a good way of reducing
index be4c68b..86b5bf6 100644 (file)
@@ -307,6 +307,11 @@ RegisterSet RegisterSet::webAssemblyCalleeSaveRegisters()
 }
 #endif
 
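+// Registers that never need saving around a call: the VM's callee saves are
+// preserved by the callee, and the stack and reserved hardware registers never
+// hold live values that a call could clobber.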
+RegisterSet RegisterSet::registersToNotSaveForCall()
+{
+    return RegisterSet(RegisterSet::vmCalleeSaveRegisters(), RegisterSet::stackRegisters(), RegisterSet::reservedHardwareRegisters());
+}
+
 RegisterSet RegisterSet::allGPRs()
 {
     RegisterSet result;
index 9f5cd5e..6400fd7 100644 (file)
@@ -61,6 +61,8 @@ public:
     static RegisterSet allGPRs();
     static RegisterSet allFPRs();
     static RegisterSet allRegisters();
+
+    static RegisterSet registersToNotSaveForCall();
     
     void set(Reg reg, bool value = true)
     {
@@ -68,11 +70,11 @@ public:
         m_vector.set(reg.index(), value);
     }
     
-    void set(JSValueRegs regs)
+    void set(JSValueRegs regs, bool value = true)
     {
         if (regs.tagGPR() != InvalidGPRReg)
-            set(regs.tagGPR());
-        set(regs.payloadGPR());
+            set(regs.tagGPR(), value);
+        set(regs.payloadGPR(), value);
     }
     
     void clear(Reg reg)
index 407fd43..66f6dc7 100644 (file)
@@ -102,64 +102,55 @@ typename BankInfo::RegisterType ScratchRegisterAllocator::allocateScratch()
 GPRReg ScratchRegisterAllocator::allocateScratchGPR() { return allocateScratch<GPRInfo>(); }
 FPRReg ScratchRegisterAllocator::allocateScratchFPR() { return allocateScratch<FPRInfo>(); }
 
-size_t ScratchRegisterAllocator::preserveReusedRegistersByPushing(MacroAssembler& jit)
+unsigned ScratchRegisterAllocator::preserveReusedRegistersByPushing(MacroAssembler& jit)
 {
     if (!didReuseRegisters())
         return 0;
 
-    size_t numberOfBytesPushed = 0;
-
+    RegisterSet registersToSpill;
     for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
         FPRReg reg = FPRInfo::toRegister(i);
-        if (m_scratchRegisters.getFPRByIndex(i) && m_usedRegisters.get(reg)) {
-            jit.pushToSave(reg);
-            numberOfBytesPushed += sizeof(double);
-        }
+        if (m_scratchRegisters.getFPRByIndex(i) && m_usedRegisters.get(reg))
+            registersToSpill.set(reg);
     }
     for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
         GPRReg reg = GPRInfo::toRegister(i);
-        if (m_scratchRegisters.getGPRByIndex(i) && m_usedRegisters.get(reg)) {
-            jit.pushToSave(reg);
-            numberOfBytesPushed += sizeof(uintptr_t);
-        }
+        if (m_scratchRegisters.getGPRByIndex(i) && m_usedRegisters.get(reg))
+            registersToSpill.set(reg);
     }
 
-    size_t totalStackAdjustmentBytes = numberOfBytesPushed + maxFrameExtentForSlowPathCall;
-    totalStackAdjustmentBytes = WTF::roundUpToMultipleOf(stackAlignmentBytes(), totalStackAdjustmentBytes);
-
-    // FIXME: We shouldn't have to do this.
-    // https://bugs.webkit.org/show_bug.cgi?id=149030
-    size_t numberOfPaddingBytes = totalStackAdjustmentBytes - numberOfBytesPushed;
-    jit.subPtr(MacroAssembler::TrustedImm32(numberOfPaddingBytes), MacroAssembler::stackPointerRegister);
+    unsigned extraStackBytesAtTopOfStack = maxFrameExtentForSlowPathCall;
+    unsigned stackAdjustmentSize = ScratchRegisterAllocator::preserveRegistersToStackForCall(jit, registersToSpill, extraStackBytesAtTopOfStack);
 
 
-    return numberOfPaddingBytes;
+    return stackAdjustmentSize;
 }
 
-void ScratchRegisterAllocator::restoreReusedRegistersByPopping(MacroAssembler& jit, size_t numberOfPaddingBytes)
+void ScratchRegisterAllocator::restoreReusedRegistersByPopping(MacroAssembler& jit, unsigned numberOfBytesUsedToPreserveReusedRegisters)
 {
     if (!didReuseRegisters())
         return;
 
-    jit.addPtr(MacroAssembler::TrustedImm32(numberOfPaddingBytes), MacroAssembler::stackPointerRegister);
-
+    RegisterSet registersToFill;
     for (unsigned i = GPRInfo::numberOfRegisters; i--;) {
         GPRReg reg = GPRInfo::toRegister(i);
         if (m_scratchRegisters.getGPRByIndex(i) && m_usedRegisters.get(reg))
-            jit.popToRestore(reg);
+            registersToFill.set(reg);
     }
     for (unsigned i = FPRInfo::numberOfRegisters; i--;) {
         FPRReg reg = FPRInfo::toRegister(i);
         if (m_scratchRegisters.getFPRByIndex(i) && m_usedRegisters.get(reg))
-            jit.popToRestore(reg);
+            registersToFill.set(reg);
     }
+
+    unsigned extraStackBytesAtTopOfStack = maxFrameExtentForSlowPathCall;
+    RegisterSet dontRestore; // Empty set. We want to restore everything.
+    ScratchRegisterAllocator::restoreRegistersFromStackForCall(jit, registersToFill, dontRestore, numberOfBytesUsedToPreserveReusedRegisters, extraStackBytesAtTopOfStack);
 }
 
 RegisterSet ScratchRegisterAllocator::usedRegistersForCall() const
 {
     RegisterSet result = m_usedRegisters;
-    result.exclude(RegisterSet::calleeSaveRegisters());
-    result.exclude(RegisterSet::stackRegisters());
-    result.exclude(RegisterSet::reservedHardwareRegisters());
+    result.exclude(RegisterSet::registersToNotSaveForCall());
     return result;
 }
 
@@ -176,8 +167,10 @@ void ScratchRegisterAllocator::preserveUsedRegistersToScratchBufferForCall(Macro
     
     unsigned count = 0;
     for (GPRReg reg = MacroAssembler::firstRegister(); reg <= MacroAssembler::lastRegister(); reg = MacroAssembler::nextRegister(reg)) {
-        if (usedRegisters.get(reg))
-            jit.storePtr(reg, static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer()) + (count++));
+        if (usedRegisters.get(reg)) {
+            jit.storePtr(reg, static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer()) + count);
+            count++;
+        }
         if (GPRInfo::toIndex(reg) != GPRInfo::InvalidIndex
             && scratchGPR == InvalidGPRReg
             && !m_lockedRegisters.get(reg) && !m_scratchRegisters.get(reg))
@@ -186,7 +179,8 @@ void ScratchRegisterAllocator::preserveUsedRegistersToScratchBufferForCall(Macro
     RELEASE_ASSERT(scratchGPR != InvalidGPRReg);
     for (FPRReg reg = MacroAssembler::firstFPRegister(); reg <= MacroAssembler::lastFPRegister(); reg = MacroAssembler::nextFPRegister(reg)) {
         if (usedRegisters.get(reg)) {
-            jit.move(MacroAssembler::TrustedImmPtr(static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer()) + (count++)), scratchGPR);
+            jit.move(MacroAssembler::TrustedImmPtr(static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer()) + count), scratchGPR);
+            count++;
             jit.storeDouble(reg, scratchGPR);
         }
     }
@@ -232,6 +226,74 @@ void ScratchRegisterAllocator::restoreUsedRegistersFromScratchBufferForCall(Macr
     }
 }
 
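+// Spills every register in usedRegisters below the stack pointer, leaving
+// extraBytesAtTopOfStack of headroom (e.g. for a slow path C call) and keeping the
+// stack aligned. Returns the total stack adjustment so the caller can undo it.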
+unsigned ScratchRegisterAllocator::preserveRegistersToStackForCall(MacroAssembler& jit, const RegisterSet& usedRegisters, unsigned extraBytesAtTopOfStack)
+{
+    RELEASE_ASSERT(extraBytesAtTopOfStack % sizeof(void*) == 0);
+    if (!usedRegisters.numberOfSetRegisters())
+        return 0;
+    
+    unsigned stackOffset = (usedRegisters.numberOfSetRegisters()) * sizeof(EncodedJSValue);
+    stackOffset += extraBytesAtTopOfStack;
+    stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), stackOffset);
+    jit.subPtr(
+        MacroAssembler::TrustedImm32(stackOffset),
+        MacroAssembler::stackPointerRegister);
+
+    unsigned count = 0;
+    for (GPRReg reg = MacroAssembler::firstRegister(); reg <= MacroAssembler::lastRegister(); reg = MacroAssembler::nextRegister(reg)) {
+        if (usedRegisters.get(reg)) {
+            jit.storePtr(reg, MacroAssembler::Address(MacroAssembler::stackPointerRegister, extraBytesAtTopOfStack + (count * sizeof(EncodedJSValue))));
+            count++;
+        }
+    }
+    for (FPRReg reg = MacroAssembler::firstFPRegister(); reg <= MacroAssembler::lastFPRegister(); reg = MacroAssembler::nextFPRegister(reg)) {
+        if (usedRegisters.get(reg)) {
+            jit.storeDouble(reg, MacroAssembler::Address(MacroAssembler::stackPointerRegister, extraBytesAtTopOfStack + (count * sizeof(EncodedJSValue))));
+            count++;
+        }
+    }
+
+    RELEASE_ASSERT(count == usedRegisters.numberOfSetRegisters());
+
+    return stackOffset;
+}
+
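+// Reverses preserveRegistersToStackForCall(). Registers in 'ignore' are skipped
+// (e.g. a register that now holds the call's result) but still occupy a slot so
+// the offsets stay in sync.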
+void ScratchRegisterAllocator::restoreRegistersFromStackForCall(MacroAssembler& jit, const RegisterSet& usedRegisters, const RegisterSet& ignore, unsigned numberOfStackBytesUsedForRegisterPreservation, unsigned extraBytesAtTopOfStack)
+{
+    RELEASE_ASSERT(extraBytesAtTopOfStack % sizeof(void*) == 0);
+    if (!usedRegisters.numberOfSetRegisters()) {
+        RELEASE_ASSERT(numberOfStackBytesUsedForRegisterPreservation == 0);
+        return;
+    }
+
+    unsigned count = 0;
+    for (GPRReg reg = MacroAssembler::firstRegister(); reg <= MacroAssembler::lastRegister(); reg = MacroAssembler::nextRegister(reg)) {
+        if (usedRegisters.get(reg)) {
+            if (!ignore.get(reg))
+                jit.loadPtr(MacroAssembler::Address(MacroAssembler::stackPointerRegister, extraBytesAtTopOfStack + (sizeof(EncodedJSValue) * count)), reg);
+            count++;
+        }
+    }
+    for (FPRReg reg = MacroAssembler::firstFPRegister(); reg <= MacroAssembler::lastFPRegister(); reg = MacroAssembler::nextFPRegister(reg)) {
+        if (usedRegisters.get(reg)) {
+            if (!ignore.get(reg))
+                jit.loadDouble(MacroAssembler::Address(MacroAssembler::stackPointerRegister, extraBytesAtTopOfStack + (sizeof(EncodedJSValue) * count)), reg);
+            count++;
+        }
+    }
+
+    unsigned stackOffset = (usedRegisters.numberOfSetRegisters()) * sizeof(EncodedJSValue);
+    stackOffset += extraBytesAtTopOfStack;
+    stackOffset = WTF::roundUpToMultipleOf(stackAlignmentBytes(), stackOffset);
+
+    RELEASE_ASSERT(count == usedRegisters.numberOfSetRegisters());
+    RELEASE_ASSERT(stackOffset == numberOfStackBytesUsedForRegisterPreservation);
+
+    jit.addPtr(
+        MacroAssembler::TrustedImm32(stackOffset),
+        MacroAssembler::stackPointerRegister);
+}
+
 } // namespace JSC
 
 #endif // ENABLE(JIT)
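Taken in isolation, the intended pairing of the two helpers added above looks roughly like this. This is only a sketch: the wrapper function's name, the register sets, and the result register are illustrative assumptions, not part of the patch.

    static void emitCallPreservingLiveRegisters(MacroAssembler& jit, const RegisterSet& liveRegisters, GPRReg resultGPR)
    {
        // Spill every live register below the stack pointer. The helper keeps the
        // stack pointer aligned and returns the number of bytes it claimed.
        unsigned bytesUsed = ScratchRegisterAllocator::preserveRegistersToStackForCall(jit, liveRegisters, 0);

        // ... emit the getter/setter call here ...

        // Refill everything except the register that now holds the call's result.
        // Passing bytesUsed back in lets the helper assert that the save and the
        // restore computed the same stack layout.
        RegisterSet ignore;
        ignore.set(resultGPR);
        ScratchRegisterAllocator::restoreRegistersFromStackForCall(jit, liveRegisters, ignore, bytesUsed, 0);
    }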
diff --git a/Source/JavaScriptCore/jit/ScratchRegisterAllocator.h b/Source/JavaScriptCore/jit/ScratchRegisterAllocator.h
index f311fa8..37869d8 100644
@@ -63,22 +63,26 @@ public:
     {
         return m_numberOfReusedRegisters;
     }
+
+    RegisterSet usedRegisters() const { return m_usedRegisters; }
     
     // preserveReusedRegistersByPushing() returns the number of padding bytes used to keep the stack
     // pointer properly aligned and to reserve room for calling a C helper. This number of padding
     // bytes must be provided to restoreReusedRegistersByPopping() in order to reverse the work done
     // by preserveReusedRegistersByPushing().
-    size_t preserveReusedRegistersByPushing(MacroAssembler& jit);
-    void restoreReusedRegistersByPopping(MacroAssembler& jit, size_t numberOfPaddingBytes);
+    unsigned preserveReusedRegistersByPushing(MacroAssembler& jit);
+    void restoreReusedRegistersByPopping(MacroAssembler& jit, unsigned numberOfBytesUsedToPreserveReusedRegisters);
     
     RegisterSet usedRegistersForCall() const;
     
     unsigned desiredScratchBufferSizeForCall() const;
     
     void preserveUsedRegistersToScratchBufferForCall(MacroAssembler& jit, ScratchBuffer* scratchBuffer, GPRReg scratchGPR = InvalidGPRReg);
-    
     void restoreUsedRegistersFromScratchBufferForCall(MacroAssembler& jit, ScratchBuffer* scratchBuffer, GPRReg scratchGPR = InvalidGPRReg);
-    
+
+    static unsigned preserveRegistersToStackForCall(MacroAssembler& jit, const RegisterSet& usedRegisters, unsigned extraPaddingInBytes);
+    static void restoreRegistersFromStackForCall(MacroAssembler& jit, const RegisterSet& usedRegisters, const RegisterSet& ignore, unsigned numberOfStackBytesUsedForRegisterPreservation, unsigned extraPaddingInBytes);
+
 private:
     RegisterSet m_usedRegisters;
     TempRegisterSet m_lockedRegisters;
diff --git a/Source/JavaScriptCore/jsc.cpp b/Source/JavaScriptCore/jsc.cpp
index 86a3281..95bcdad 100644
@@ -314,6 +314,52 @@ private:
     WriteBarrier<JSObject> m_delegate;
 };
 
+class CustomGetter : public JSNonFinalObject {
+public:
+    CustomGetter(VM& vm, Structure* structure)
+        : Base(vm, structure)
+    {
+    }
+
+    DECLARE_INFO;
+    typedef JSNonFinalObject Base;
+    static const unsigned StructureFlags = Base::StructureFlags | JSC::OverridesGetOwnPropertySlot;
+
+    static Structure* createStructure(VM& vm, JSGlobalObject* globalObject, JSValue prototype)
+    {
+        return Structure::create(vm, globalObject, prototype, TypeInfo(ObjectType, StructureFlags), info());
+    }
+
+    static CustomGetter* create(VM& vm, Structure* structure)
+    {
+        CustomGetter* getter = new (NotNull, allocateCell<CustomGetter>(vm.heap, sizeof(CustomGetter))) CustomGetter(vm, structure);
+        getter->finishCreation(vm);
+        return getter;
+    }
+
+    static bool getOwnPropertySlot(JSObject* object, ExecState* exec, PropertyName propertyName, PropertySlot& slot)
+    {
+        CustomGetter* thisObject = jsCast<CustomGetter*>(object);
+        if (propertyName == PropertyName(Identifier::fromString(exec, "customGetter"))) {
+            slot.setCacheableCustom(thisObject, DontDelete | ReadOnly | DontEnum, thisObject->customGetter);
+            return true;
+        }
+        return JSObject::getOwnPropertySlot(thisObject, exec, propertyName, slot);
+    }
+
+private:
+    static EncodedJSValue customGetter(ExecState* exec, JSObject*, EncodedJSValue thisValue, PropertyName)
+    {
+        CustomGetter* thisObject = jsDynamicCast<CustomGetter*>(JSValue::decode(thisValue));
+        if (!thisObject)
+            return throwVMTypeError(exec);
+        bool shouldThrow = thisObject->get(exec, PropertyName(Identifier::fromString(exec, "shouldThrow"))).toBoolean(exec);
+        if (shouldThrow)
+            return throwVMTypeError(exec);
+        return JSValue::encode(jsNumber(100));
+    }
+};
+
 class RuntimeArray : public JSArray {
 public:
     typedef JSArray Base;
@@ -422,6 +468,7 @@ const ClassInfo Element::s_info = { "Element", &Base::s_info, 0, CREATE_METHOD_T
 const ClassInfo Masquerader::s_info = { "Masquerader", &Base::s_info, 0, CREATE_METHOD_TABLE(Masquerader) };
 const ClassInfo Root::s_info = { "Root", &Base::s_info, 0, CREATE_METHOD_TABLE(Root) };
 const ClassInfo ImpureGetter::s_info = { "ImpureGetter", &Base::s_info, 0, CREATE_METHOD_TABLE(ImpureGetter) };
+const ClassInfo CustomGetter::s_info = { "CustomGetter", &Base::s_info, 0, CREATE_METHOD_TABLE(CustomGetter) };
 const ClassInfo RuntimeArray::s_info = { "RuntimeArray", &Base::s_info, 0, CREATE_METHOD_TABLE(RuntimeArray) };
 
 ElementHandleOwner* Element::handleOwner()
@@ -446,6 +493,7 @@ static bool fillBufferWithContentsOfFile(const String& fileName, Vector<char>& b
 static EncodedJSValue JSC_HOST_CALL functionCreateProxy(ExecState*);
 static EncodedJSValue JSC_HOST_CALL functionCreateRuntimeArray(ExecState*);
 static EncodedJSValue JSC_HOST_CALL functionCreateImpureGetter(ExecState*);
+static EncodedJSValue JSC_HOST_CALL functionCreateCustomGetterObject(ExecState*);
 static EncodedJSValue JSC_HOST_CALL functionSetImpureGetterDelegate(ExecState*);
 
 static EncodedJSValue JSC_HOST_CALL functionSetElementRoot(ExecState*);
@@ -663,6 +711,7 @@ protected:
         addFunction(vm, "createRuntimeArray", functionCreateRuntimeArray, 0);
 
         addFunction(vm, "createImpureGetter", functionCreateImpureGetter, 1);
+        addFunction(vm, "createCustomGetterObject", functionCreateCustomGetterObject, 0);
         addFunction(vm, "setImpureGetterDelegate", functionSetImpureGetterDelegate, 2);
 
         addFunction(vm, "dumpTypesForAllVariables", functionDumpTypesForAllVariables , 0);
@@ -1092,6 +1141,14 @@ EncodedJSValue JSC_HOST_CALL functionCreateImpureGetter(ExecState* exec)
     return JSValue::encode(result);
 }
 
+EncodedJSValue JSC_HOST_CALL functionCreateCustomGetterObject(ExecState* exec)
+{
+    JSLockHolder lock(exec);
+    Structure* structure = CustomGetter::createStructure(exec->vm(), exec->lexicalGlobalObject(), jsNull());
+    CustomGetter* result = CustomGetter::create(exec->vm(), structure);
+    return JSValue::encode(result);
+}
+
 EncodedJSValue JSC_HOST_CALL functionSetImpureGetterDelegate(ExecState* exec)
 {
     JSLockHolder lock(exec);
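In miniature, the shouldThrow protocol that CustomGetter implements is driven from script like this (a sketch using the shell function and property names defined above; the full version is the first test below):

    var o = createCustomGetterObject();
    o.shouldThrow = false;
    o.customGetter;      // the native getter returns 100
    o.shouldThrow = true;
    o.customGetter;      // the native getter now throws a TypeError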
diff --git a/Source/JavaScriptCore/tests/stress/try-catch-custom-getter-as-get-by-id.js b/Source/JavaScriptCore/tests/stress/try-catch-custom-getter-as-get-by-id.js
new file mode 100644
index 0000000..80af76c
--- /dev/null
@@ -0,0 +1,53 @@
+function assert(b) {
+    if (!b) throw new Error("b");
+}
+noInline(assert);
+
+let i;
+var o1 = createCustomGetterObject();
+o1.shouldThrow = false;
+
+var o2 = {
+    customGetter: 40
+}
+
+var o3 = { 
+    x: 100,
+    customGetter: 50 
+}
+
+i = -1000;
+bar(i);
+foo(i);
+function bar(i) {
+    if (i === -1000)
+        return o1;
+
+    if (i % 2)
+        return o3;
+    else
+        return o2;
+}
+noInline(bar);
+
+function foo(i) {
+    var o = bar(i);
+    var v;
+    try {
+        v = o.customGetter;
+    } catch(e) {
+        assert(o === o1);
+    }
+}
+noInline(foo);
+
+foo(i);
+for (i = 0; i < 1000; i++)
+    foo(i);
+
+i = -1000;
+for (let j = 0; j < 1000; j++) {
+    if (j > 10)
+        o1.shouldThrow = true;
+    foo(i);
+}
diff --git a/Source/JavaScriptCore/tests/stress/try-catch-getter-as-get-by-id-register-restoration.js b/Source/JavaScriptCore/tests/stress/try-catch-getter-as-get-by-id-register-restoration.js
new file mode 100644
index 0000000..77eb156
--- /dev/null
@@ -0,0 +1,57 @@
+function assert(b) {
+    if (!b) throw new Error("bad value");
+}
+noInline(assert);
+
+let i;
+var o1 = { 
+    get f() {
+        if (i === -1000)
+            throw new Error("hello");
+        return 20;
+    },
+    x: "x"
+};
+
+var o2 = {
+    f: 40
+}
+
+var o3 = {
+    x: 100,
+    f: "f"
+}
+
+function bar(i) {
+    if (i === -1000)
+        return o1;
+
+    if (i % 2)
+        return o3;
+    else
+        return o2;
+}
+noInline(bar);
+
+function foo(i) {
+    var o = bar(i);
+    let v;
+    let v2;
+    let v3;
+    try {
+        v2 = o.x;
+        v = o.f;
+    } catch(e) {
+        assert(v2 === "x");
+        assert(o === o1);
+    }
+}
+noInline(foo);
+
+foo(i);
+for (i = 0; i < 1000; i++)
+    foo(i);
+
+i = -1000;
+for (let j = 0; j < 1000; j++)
+    foo(i);
diff --git a/Source/JavaScriptCore/tests/stress/try-catch-getter-as-get-by-id.js b/Source/JavaScriptCore/tests/stress/try-catch-getter-as-get-by-id.js
new file mode 100644
index 0000000..6e8fb59
--- /dev/null
@@ -0,0 +1,53 @@
+function assert(b) {
+    if (!b) throw new Error("b");
+}
+noInline(assert);
+
+
+let i;
+var o1 = { 
+    get f() {
+        if (i === -1000)
+            throw new Error("hello");
+        return 20;
+    }
+};
+
+var o2 = {
+    f: 40
+}
+
+var o3 = { 
+    x: 100,
+    f: 50 
+}
+
+function bar(i) {
+    if (i === -1000)
+        return o1;
+
+    if (i % 2)
+        return o3;
+    else
+        return o2;
+}
+noInline(bar);
+
+function foo(i) {
+    var o = bar(i);
+    var v;
+    try {
+        v = o.f
+    } catch(e) {
+        assert(o === o1);
+    }
+}
+noInline(foo);
+
+foo(i);
+for (i = 0; i < 1000; i++)
+    foo(i);
+
+i = -1000;
+for (let j = 0; j < 1000; j++)
+    foo(i);
diff --git a/Source/JavaScriptCore/tests/stress/try-catch-setter-as-put-by-id.js b/Source/JavaScriptCore/tests/stress/try-catch-setter-as-put-by-id.js
new file mode 100644
index 0000000..265c719
--- /dev/null
@@ -0,0 +1,54 @@
+function assert(b) {
+    if (!b) 
+        throw new Error("bad assertion");
+}
+noInline(assert);
+
+
+let i;
+var o1 = { 
+    set f(v) {
+        if (i === -1000)
+            throw new Error("hello");
+        this._v = v;
+    }
+};
+
+var o2 = {
+    f: 40
+}
+
+var o3 = { 
+    x: 100,
+    f: 50 
+}
+
+function bar(i) {
+    if (i === -1000)
+        return o1;
+
+    if (i % 2)
+        return o3;
+    else
+        return o2;
+}
+noInline(bar);
+
+function foo(i) {
+    let o = bar(i);
+    let v = o.x;
+    try {
+        o.f = v;
+    } catch(e) {
+        assert(o === o1);
+    }
+}
+noInline(foo);
+
+foo(i);
+for (i = 0; i < 1000; i++)
+    foo(i);
+
+i = -1000;
+for (let j = 0; j < 1000; j++)
+    foo(i);
diff --git a/Source/JavaScriptCore/tests/stress/try-catch-stub-routine-replaced.js b/Source/JavaScriptCore/tests/stress/try-catch-stub-routine-replaced.js
new file mode 100644
index 0000000..321cc89
--- /dev/null
@@ -0,0 +1,79 @@
+// The main purpose of this test is to ensure that
+// we reuse no-longer-in-use CallSiteIndices for
+// inline cache stubs. See the relevant code in the
+// CodeBlock destructor, which calls:
+// DFG::CommonData::removeCallSiteIndex(.)
+// CodeBlock::removeExceptionHandlerForCallSite(.)
+// which add old call site indices to a free list (sketched after this test).
+
+function assert(b) {
+    if (!b)
+        throw new Error("bad value");
+}
+noInline(assert);
+
+var arr = []
+function allocate() {
+    for (var i = 0; i < 10000; i++)
+        arr.push({});
+}
+
+function hello() { return 20; }
+noInline(hello);
+
+function foo(o) {
+    let baz = hello();
+    let v;
+    try {
+        v = o.f;
+        v = o.f;
+        v = o.f;
+    } catch(e) {
+        assert(baz === 20);
+        assert(v === 2); // Really flagCount.
+    }
+    return v;
+}
+noInline(foo);
+
+var objChain = {f: 40};
+var fakeOut = {x: 30, f: 100};
+for (let i = 0; i < 1000; i++)
+    foo(i % 2 ? objChain : fakeOut);
+
+var i;
+var flag = "flag";
+var flagCount = 0;
+objChain = { 
+    get f() {
+        if (flagCount === 2)
+            throw new Error("I'm testing you.");
+        if (i === flag)
+            flagCount++;
+        return flagCount;
+    }
+};
+for (i = 0; i < 100; i++) {
+    allocate();
+    if (i === 99)
+        i = flag;
+    foo(objChain);
+}
+
+fakeOut = {x: 30, get f() { return 100}};
+for (i = 0; i < 100; i++) {
+    allocate();
+    if (i === 99)
+        i = flag;
+    foo(fakeOut);
+}
+
+var o = { 
+    get f() {
+        return flagCount;
+    },
+    x: 100
+};
+
+for (i = 0; i < 100; i++)
+    foo(o);
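The index recycling described in the comment at the top of this test can be pictured generically as a free list (a sketch of the shape of the idea, not JSC's actual data structure):

    #include <vector>

    class CallSiteIndexAllocator {
    public:
        unsigned allocate()
        {
            if (!m_freeList.empty()) {
                unsigned index = m_freeList.back(); // prefer reusing a retired index
                m_freeList.pop_back();
                return index;
            }
            return m_nextIndex++; // otherwise mint a fresh one
        }

        void release(unsigned index) { m_freeList.push_back(index); } // retire for reuse

    private:
        std::vector<unsigned> m_freeList;
        unsigned m_nextIndex { 0 };
    };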