Inline caches that refer to otherwise dead objects should be cleared
author:    fpizlo@apple.com <fpizlo@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
           Mon, 21 Nov 2011 04:45:17 +0000 (04:45 +0000)
committer: fpizlo@apple.com <fpizlo@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
           Mon, 21 Nov 2011 04:45:17 +0000 (04:45 +0000)
https://bugs.webkit.org/show_bug.cgi?id=72311

Reviewed by Geoff Garen.

DFG code blocks now participate in the weak reference harvester fixpoint,
so a code block considers itself live only if it is currently executing,
or if its owner is live and all of its weak references are live. Code
blocks that fail this test are jettisoned.

Inline caches in both the old JIT and the DFG are now cleared if any of
their references are not marked at the end of a GC.

This is performance-neutral on SunSpider, V8, and Kraken. Under the
clear-all-code-on-GC policy that we currently have, it yields a slight
reduction in memory usage. With that policy turned off, it is easy to
construct a program that causes ToT to exhibit linear heap growth,
whereas with this patch the heap stays small and remains constant in
size.
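
To make the protocol above concrete, here is a minimal, self-contained toy
model of the fixpoint (this is NOT WebKit code; every name in it is invented
for illustration). A block proves itself live only once all of its weak
references are marked; the GC re-runs iterations until nothing changes, and
blocks that never prove liveness get jettisoned:

    #include <cstdio>
    #include <vector>

    struct Object { bool marked = false; };

    struct ToyCodeBlock {
        bool executing = false;          // on-stack blocks are trivially live
        bool livenessProved = false;     // set once all weak refs are marked
        std::vector<Object*> weakRefs;   // objects the compiled code bakes in
        std::vector<Object*> strongRefs; // traced only after liveness is proved

        // One fixpoint iteration; returns true if it marked anything new,
        // which tells the GC to run another round.
        bool iterate() {
            if (livenessProved)
                return false;
            for (Object* o : weakRefs) {
                if (!o->marked)
                    return false;
            }
            livenessProved = true;
            for (Object* o : strongRefs)
                o->marked = true;
            return true;
        }
    };

    int main() {
        Object a, b, c;
        a.marked = true;                 // reached from some other root

        ToyCodeBlock block;
        block.weakRefs = { &a, &b };
        block.strongRefs = { &c };
        b.marked = true;                 // another harvester marks b later

        while (block.iterate()) { }      // run the fixpoint to completion

        if (block.executing || block.livenessProved)
            std::puts("block survives; its strong references were traced");
        else
            std::puts("jettison: block refers to otherwise dead objects");
        return 0;
    }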

* assembler/ARMv7Assembler.h:
(JSC::ARMv7Assembler::readCallTarget):
* assembler/MacroAssemblerARMv7.h:
(JSC::MacroAssemblerARMv7::readCallTarget):
* assembler/MacroAssemblerX86.h:
(JSC::MacroAssemblerX86::readCallTarget):
* assembler/MacroAssemblerX86_64.h:
(JSC::MacroAssemblerX86_64::readCallTarget):
* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::visitAggregate):
(JSC::CodeBlock::performTracingFixpointIteration):
(JSC::CodeBlock::visitWeakReferences):
(JSC::CodeBlock::finalizeUnconditionally):
(JSC::CodeBlock::stronglyVisitStrongReferences):
(JSC::MethodCallLinkInfo::reset):
(JSC::ProgramCodeBlock::jettison):
(JSC::EvalCodeBlock::jettison):
(JSC::FunctionCodeBlock::jettison):
* bytecode/CodeBlock.h:
(JSC::CodeBlock::reoptimize):
(JSC::CodeBlock::shouldImmediatelyAssumeLivenessDuringScan):
* bytecode/Instruction.h:
(JSC::PolymorphicAccessStructureList::visitWeak):
* bytecode/StructureStubInfo.cpp:
(JSC::StructureStubInfo::visitWeakReferences):
* bytecode/StructureStubInfo.h:
(JSC::isGetByIdAccess):
(JSC::isPutByIdAccess):
(JSC::StructureStubInfo::reset):
* dfg/DFGJITCompiler.cpp:
(JSC::DFG::JITCompiler::link):
* dfg/DFGOperations.cpp:
* dfg/DFGRepatch.cpp:
(JSC::DFG::dfgRepatchByIdSelfAccess):
(JSC::DFG::dfgResetGetByID):
(JSC::DFG::dfgResetPutByID):
* dfg/DFGRepatch.h:
(JSC::DFG::dfgResetGetByID):
(JSC::DFG::dfgResetPutByID):
* jit/JIT.h:
* jit/JITPropertyAccess.cpp:
(JSC::JIT::resetPatchGetById):
(JSC::JIT::resetPatchPutById):
* jit/JITPropertyAccess32_64.cpp:
(JSC::JIT::resetPatchGetById):
(JSC::JIT::resetPatchPutById):
* jit/JITStubs.cpp:
(JSC::DEFINE_STUB_FUNCTION):
* jit/JITWriteBarrier.h:
(JSC::JITWriteBarrierBase::clearToMaxUnsigned):

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@100880 268f45cc-cd09-0410-ab3c-d52691b4dbfc

19 files changed:
Source/JavaScriptCore/ChangeLog
Source/JavaScriptCore/assembler/ARMv7Assembler.h
Source/JavaScriptCore/assembler/MacroAssemblerARMv7.h
Source/JavaScriptCore/assembler/MacroAssemblerX86.h
Source/JavaScriptCore/assembler/MacroAssemblerX86_64.h
Source/JavaScriptCore/bytecode/CodeBlock.cpp
Source/JavaScriptCore/bytecode/CodeBlock.h
Source/JavaScriptCore/bytecode/Instruction.h
Source/JavaScriptCore/bytecode/StructureStubInfo.cpp
Source/JavaScriptCore/bytecode/StructureStubInfo.h
Source/JavaScriptCore/dfg/DFGJITCompiler.cpp
Source/JavaScriptCore/dfg/DFGOperations.cpp
Source/JavaScriptCore/dfg/DFGRepatch.cpp
Source/JavaScriptCore/dfg/DFGRepatch.h
Source/JavaScriptCore/jit/JIT.h
Source/JavaScriptCore/jit/JITPropertyAccess.cpp
Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
Source/JavaScriptCore/jit/JITStubs.cpp
Source/JavaScriptCore/jit/JITWriteBarrier.h

diff --git a/Source/JavaScriptCore/ChangeLog b/Source/JavaScriptCore/ChangeLog
index 9de9ce9..e182269 100644
@@ -1,3 +1,76 @@
+2011-11-18  Filip Pizlo  <fpizlo@apple.com>
+
+        Inline caches that refer to otherwise dead objects should be cleared
+        https://bugs.webkit.org/show_bug.cgi?id=72311
+
+        Reviewed by Geoff Garen.
+
+        DFG code blocks now participate in the weak reference harvester fixpoint,
+        so a code block considers itself live only if it is currently executing,
+        or if its owner is live and all of its weak references are live. Code
+        blocks that fail this test are jettisoned.
+
+        Inline caches in both the old JIT and the DFG are now cleared if any of
+        their references are not marked at the end of a GC.
+
+        This is performance-neutral on SunSpider, V8, and Kraken. Under the
+        clear-all-code-on-GC policy that we currently have, it yields a slight
+        reduction in memory usage. With that policy turned off, it is easy to
+        construct a program that causes ToT to exhibit linear heap growth,
+        whereas with this patch the heap stays small and remains constant in
+        size.
+
+        * assembler/ARMv7Assembler.h:
+        (JSC::ARMv7Assembler::readCallTarget):
+        * assembler/MacroAssemblerARMv7.h:
+        (JSC::MacroAssemblerARMv7::readCallTarget):
+        * assembler/MacroAssemblerX86.h:
+        (JSC::MacroAssemblerX86::readCallTarget):
+        * assembler/MacroAssemblerX86_64.h:
+        (JSC::MacroAssemblerX86_64::readCallTarget):
+        * bytecode/CodeBlock.cpp:
+        (JSC::CodeBlock::visitAggregate):
+        (JSC::CodeBlock::performTracingFixpointIteration):
+        (JSC::CodeBlock::visitWeakReferences):
+        (JSC::CodeBlock::finalizeUnconditionally):
+        (JSC::CodeBlock::stronglyVisitStrongReferences):
+        (JSC::MethodCallLinkInfo::reset):
+        (JSC::ProgramCodeBlock::jettison):
+        (JSC::EvalCodeBlock::jettison):
+        (JSC::FunctionCodeBlock::jettison):
+        * bytecode/CodeBlock.h:
+        (JSC::CodeBlock::reoptimize):
+        (JSC::CodeBlock::shouldImmediatelyAssumeLivenessDuringScan):
+        * bytecode/Instruction.h:
+        (JSC::PolymorphicAccessStructureList::visitWeak):
+        * bytecode/StructureStubInfo.cpp:
+        (JSC::StructureStubInfo::visitWeakReferences):
+        * bytecode/StructureStubInfo.h:
+        (JSC::isGetByIdAccess):
+        (JSC::isPutByIdAccess):
+        (JSC::StructureStubInfo::reset):
+        * dfg/DFGJITCompiler.cpp:
+        (JSC::DFG::JITCompiler::link):
+        * dfg/DFGOperations.cpp:
+        * dfg/DFGRepatch.cpp:
+        (JSC::DFG::dfgRepatchByIdSelfAccess):
+        (JSC::DFG::dfgResetGetByID):
+        (JSC::DFG::dfgResetPutByID):
+        * dfg/DFGRepatch.h:
+        (JSC::DFG::dfgResetGetByID):
+        (JSC::DFG::dfgResetPutByID):
+        * jit/JIT.h:
+        * jit/JITPropertyAccess.cpp:
+        (JSC::JIT::resetPatchGetById):
+        (JSC::JIT::resetPatchPutById):
+        * jit/JITPropertyAccess32_64.cpp:
+        (JSC::JIT::resetPatchGetById):
+        (JSC::JIT::resetPatchPutById):
+        * jit/JITStubs.cpp:
+        (JSC::DEFINE_STUB_FUNCTION):
+        * jit/JITWriteBarrier.h:
+        (JSC::JITWriteBarrierBase::clearToMaxUnsigned):
+
 2011-11-20  Filip Pizlo  <fpizlo@apple.com>
 
         Showing the data overlay in OpenStreetMap doesn't work, zooming partially broken
diff --git a/Source/JavaScriptCore/assembler/ARMv7Assembler.h b/Source/JavaScriptCore/assembler/ARMv7Assembler.h
index 4948916..23c8d1b 100644
@@ -1894,6 +1894,11 @@ public:
 
         setPointer(reinterpret_cast<uint16_t*>(from) - 1, to);
     }
+    
+    static void* readCallTarget(void* from)
+    {
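+        // setPointer() above writes the target via the movw/movt pair ending just before the call; read the same slot back.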
+        return readPointer(reinterpret_cast<uint16_t*>(from) - 1);
+    }
 
     static void repatchInt32(void* where, int32_t value)
     {
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerARMv7.h b/Source/JavaScriptCore/assembler/MacroAssemblerARMv7.h
index 0a8c4b8..c255553 100644
@@ -1561,6 +1561,11 @@ protected:
     {
         return static_cast<ARMv7Assembler::Condition>(cond);
     }
+    
+    static FunctionPtr readCallTarget(CodeLocationCall call)
+    {
+        return ARMv7Assembler::readCallTarget(call.dataLocation());
+    }
 
 private:
     friend class LinkBuffer;
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerX86.h b/Source/JavaScriptCore/assembler/MacroAssemblerX86.h
index 3e5f7b1..cb2450f 100644
@@ -205,6 +205,12 @@ public:
     static bool supportsFloatingPointTruncate() { return isSSE2Present(); }
     static bool supportsFloatingPointSqrt() { return isSSE2Present(); }
     static bool supportsFloatingPointAbs() { return isSSE2Present(); }
+    
+    static FunctionPtr readCallTarget(CodeLocationCall call)
+    {
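+        // A near call stores its rel32 displacement in the four bytes before the return address; the target is that address plus the displacement.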
+        intptr_t offset = reinterpret_cast<int32_t*>(call.dataLocation())[-1];
+        return FunctionPtr(reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(call.dataLocation()) + offset));
+    }
 
 private:
     friend class LinkBuffer;
diff --git a/Source/JavaScriptCore/assembler/MacroAssemblerX86_64.h b/Source/JavaScriptCore/assembler/MacroAssemblerX86_64.h
index 5826eec..8460499 100644
@@ -497,6 +497,11 @@ public:
     static bool supportsFloatingPointTruncate() { return true; }
     static bool supportsFloatingPointSqrt() { return true; }
     static bool supportsFloatingPointAbs() { return true; }
+    
+    static FunctionPtr readCallTarget(CodeLocationCall call)
+    {
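+        // Repatchable calls on x86-64 first load the target into r11; read the pointer back out of that load.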
+        return FunctionPtr(X86Assembler::readPointer(call.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11).dataLocation()));
+    }
 
 private:
     friend class LinkBuffer;
diff --git a/Source/JavaScriptCore/bytecode/CodeBlock.cpp b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
index 49d0610..5eff2ca 100644
 #include "BytecodeGenerator.h"
 #include "DFGCapabilities.h"
 #include "DFGNode.h"
+#include "DFGRepatch.h"
 #include "Debugger.h"
 #include "Interpreter.h"
 #include "JIT.h"
+#include "JITStubs.h"
 #include "JSActivation.h"
 #include "JSFunction.h"
 #include "JSStaticScopeObject.h"
@@ -1587,6 +1589,210 @@ void CodeBlock::visitAggregate(SlotVisitor& visitor)
 {
     if (!!m_alternative)
         m_alternative->visitAggregate(visitor);
+
+    // There are three things that may use unconditional finalizers: lazy bytecode freeing,
+    // inline cache clearing, and jettisoning. The probability that we want to
+    // do at least one of those things is quite close to 1, so we add one no
+    // matter what; when it runs, it figures out whether it has any work to do.
+    visitor.addUnconditionalFinalizer(this);
+    
+    if (shouldImmediatelyAssumeLivenessDuringScan()) {
+        // This code block is live, so scan all references strongly and return.
+        stronglyVisitStrongReferences(visitor);
+        stronglyVisitWeakReferences(visitor);
+        return;
+    }
+    
+#if ENABLE(DFG_JIT)
+    // We get here if we're live in the sense that our owner executable is live,
+    // but we're not yet live for sure in another sense: we may yet decide that this
+    // code block should be jettisoned based on its outgoing weak references being
+    // stale. Set a flag to indicate that we're still assuming that we're dead, and
+    // perform one round of determining if we're live. The GC may determine, based on
+    // either us marking additional objects, or by other objects being marked for
+    // other reasons, that this iteration should run again; it will notify us of this
+    // decision by calling harvestWeakReferences().
+    
+    m_dfgData->livenessHasBeenProved = false;
+    m_dfgData->allTransitionsHaveBeenMarked = false;
+    
+    performTracingFixpointIteration(visitor);
+
+    // GC doesn't have enough information yet for us to decide whether to keep our DFG
+    // data, so we need to register a handler to run again at the end of GC, when more
+    // information is available.
+    if (!(m_dfgData->livenessHasBeenProved && m_dfgData->allTransitionsHaveBeenMarked))
+        visitor.addWeakReferenceHarvester(this);
+    
+#else // ENABLE(DFG_JIT)
+    ASSERT_NOT_REACHED();
+#endif // ENABLE(DFG_JIT)
+}
+
+void CodeBlock::performTracingFixpointIteration(SlotVisitor& visitor)
+{
+    UNUSED_PARAM(visitor);
+    
+#if ENABLE(DFG_JIT)
+    // Evaluate our weak reference transitions, if there are still some to evaluate.
+    if (!m_dfgData->allTransitionsHaveBeenMarked) {
+        bool allAreMarkedSoFar = true;
+        for (unsigned i = 0; i < m_dfgData->transitions.size(); ++i) {
+            if ((!m_dfgData->transitions[i].m_codeOrigin
+                 || Heap::isMarked(m_dfgData->transitions[i].m_codeOrigin.get()))
+                && Heap::isMarked(m_dfgData->transitions[i].m_from.get())) {
+                // If the following three things are live, then the target of the
+                // transition is also live:
+                // - This code block. We know it's live already because otherwise
+                //   we wouldn't be scanning ourselves.
+                // - The code origin of the transition. Transitions may arise from
+                //   code that was inlined. They are not relevant if the user's
+                //   object that is required for the inlinee to run is no longer
+                //   live.
+                // - The source of the transition. The transition checks if some
+                //   heap location holds the source, and if so, stores the target.
+                //   Hence the source must be live for the transition to be live.
+                visitor.append(&m_dfgData->transitions[i].m_to);
+            } else
+                allAreMarkedSoFar = false;
+        }
+        
+        if (allAreMarkedSoFar)
+            m_dfgData->allTransitionsHaveBeenMarked = true;
+    }
+    
+    // Check if we have any remaining work to do.
+    if (m_dfgData->livenessHasBeenProved)
+        return;
+    
+    // Now check all of our weak references. If all of them are live, then we
+    // have proved liveness and so we scan our strong references. If at end of
+    // GC we still have not proved liveness, then this code block is toast.
+    bool allAreLiveSoFar = true;
+    for (unsigned i = 0; i < m_dfgData->weakReferences.size(); ++i) {
+        if (!Heap::isMarked(m_dfgData->weakReferences[i].get())) {
+            allAreLiveSoFar = false;
+            break;
+        }
+    }
+    
+    // If some weak references are dead, then this fixpoint iteration was
+    // unsuccessful.
+    if (!allAreLiveSoFar)
+        return;
+    
+    // All weak references are live. Record this information so we don't
+    // come back here again, and scan the strong references.
+    m_dfgData->livenessHasBeenProved = true;
+    stronglyVisitStrongReferences(visitor);
+#endif // ENABLE(DFG_JIT)
+}
+
+void CodeBlock::visitWeakReferences(SlotVisitor& visitor)
+{
+    performTracingFixpointIteration(visitor);
+}
+
+void CodeBlock::finalizeUnconditionally()
+{
+#if ENABLE(JIT_VERBOSE_OSR)
+    static const bool verboseUnlinking = true;
+#else
+    static const bool verboseUnlinking = false;
+#endif
+    
+#if ENABLE(DFG_JIT)
+    // Check whether we failed to prove liveness; if so, jettison.
+    if (!(shouldImmediatelyAssumeLivenessDuringScan() || m_dfgData->livenessHasBeenProved)) {
+        if (verboseUnlinking)
+            printf("Code block %p has dead weak references, jettisoning during GC.\n", this);
+
+        // Make sure that the baseline JIT knows that it should re-warm-up before
+        // optimizing.
+        alternative()->optimizeAfterWarmUp();
+        
+        jettison();
+        return;
+    }
+#endif // ENABLE(DFG_JIT)
+    
+#if ENABLE(JIT)
+    // Handle inline caches.
+    if (!!getJITCode()) {
+        RepatchBuffer repatchBuffer(this);
+        for (unsigned i = 0; i < numberOfCallLinkInfos(); ++i) {
+            if (callLinkInfo(i).isLinked() && !Heap::isMarked(callLinkInfo(i).callee.get())) {
+                if (verboseUnlinking)
+                    printf("Clearing call from %p.\n", this);
+                callLinkInfo(i).unlink(*m_globalData, repatchBuffer);
+            }
+            if (!!callLinkInfo(i).lastSeenCallee
+                && !Heap::isMarked(callLinkInfo(i).lastSeenCallee.get()))
+                callLinkInfo(i).lastSeenCallee.clear();
+        }
+        for (size_t size = m_globalResolveInfos.size(), i = 0; i < size; ++i) {
+            if (m_globalResolveInfos[i].structure && !Heap::isMarked(m_globalResolveInfos[i].structure.get())) {
+                if (verboseUnlinking)
+                    printf("Clearing resolve info in %p.\n", this);
+                m_globalResolveInfos[i].structure.clear();
+            }
+        }
+
+        for (size_t size = m_structureStubInfos.size(), i = 0; i < size; ++i) {
+            StructureStubInfo& stubInfo = m_structureStubInfos[i];
+            
+            AccessType accessType = static_cast<AccessType>(stubInfo.accessType);
+            
+            if (stubInfo.visitWeakReferences())
+                continue;
+            
+            if (verboseUnlinking)
+                printf("Clearing structure cache (kind %d) in %p.\n", stubInfo.accessType, this);
+            
+            if (isGetByIdAccess(accessType)) {
+                if (getJITCode().jitType() == JITCode::DFGJIT)
+                    DFG::dfgResetGetByID(repatchBuffer, stubInfo);
+                else
+                    JIT::resetPatchGetById(repatchBuffer, &stubInfo);
+            } else {
+                ASSERT(isPutByIdAccess(accessType));
+                if (getJITCode().jitType() == JITCode::DFGJIT)
+                    DFG::dfgResetPutByID(repatchBuffer, stubInfo);
+                else 
+                    JIT::resetPatchPutById(repatchBuffer, &stubInfo);
+            }
+            
+            stubInfo.reset();
+        }
+
+        for (size_t size = m_methodCallLinkInfos.size(), i = 0; i < size; ++i) {
+            if (!m_methodCallLinkInfos[i].cachedStructure)
+                continue;
+            
+            ASSERT(m_methodCallLinkInfos[i].seenOnce());
+            ASSERT(!!m_methodCallLinkInfos[i].cachedPrototypeStructure);
+
+            if (!Heap::isMarked(m_methodCallLinkInfos[i].cachedStructure.get())
+                || !Heap::isMarked(m_methodCallLinkInfos[i].cachedPrototypeStructure.get())
+                || !Heap::isMarked(m_methodCallLinkInfos[i].cachedFunction.get())
+                || !Heap::isMarked(m_methodCallLinkInfos[i].cachedPrototype.get())) {
+                if (verboseUnlinking)
+                    printf("Clearing method call in %p.\n", this);
+                m_methodCallLinkInfos[i].reset(repatchBuffer, getJITType());
+            }
+        }
+    }
+#endif
+
+    // Handle the bytecode discarding chore.
+    if (m_shouldDiscardBytecode) {
+        discardBytecode();
+        m_shouldDiscardBytecode = false;
+    }
+}
+
+void CodeBlock::stronglyVisitStrongReferences(SlotVisitor& visitor)
+{
     visitor.append(&m_globalObject);
     visitor.append(&m_ownerExecutable);
     if (m_rareData) {
@@ -1601,42 +1807,12 @@ void CodeBlock::visitAggregate(SlotVisitor& visitor)
         visitor.append(&m_functionExprs[i]);
     for (size_t i = 0; i < m_functionDecls.size(); ++i)
         visitor.append(&m_functionDecls[i]);
-#if ENABLE(JIT)
-    for (unsigned i = 0; i < numberOfCallLinkInfos(); ++i) {
-        if (callLinkInfo(i).isLinked())
-            visitor.append(&callLinkInfo(i).callee);
-        if (!!callLinkInfo(i).lastSeenCallee)
-            visitor.append(&callLinkInfo(i).lastSeenCallee);
-    }
-#endif
 #if ENABLE(INTERPRETER)
     for (size_t size = m_propertyAccessInstructions.size(), i = 0; i < size; ++i)
         visitStructures(visitor, &instructions()[m_propertyAccessInstructions[i]]);
     for (size_t size = m_globalResolveInstructions.size(), i = 0; i < size; ++i)
         visitStructures(visitor, &instructions()[m_globalResolveInstructions[i]]);
 #endif
-#if ENABLE(JIT)
-    for (size_t size = m_globalResolveInfos.size(), i = 0; i < size; ++i) {
-        if (m_globalResolveInfos[i].structure)
-            visitor.append(&m_globalResolveInfos[i].structure);
-    }
-
-    for (size_t size = m_structureStubInfos.size(), i = 0; i < size; ++i)
-        m_structureStubInfos[i].visitAggregate(visitor);
-
-    for (size_t size = m_methodCallLinkInfos.size(), i = 0; i < size; ++i) {
-        if (m_methodCallLinkInfos[i].cachedStructure) {
-            // These members must be filled at the same time, and only after
-            // the MethodCallLinkInfo is set as seen.
-            ASSERT(m_methodCallLinkInfos[i].seenOnce());
-            visitor.append(&m_methodCallLinkInfos[i].cachedStructure);
-            ASSERT(!!m_methodCallLinkInfos[i].cachedPrototypeStructure);
-            visitor.append(&m_methodCallLinkInfos[i].cachedPrototypeStructure);
-            visitor.append(&m_methodCallLinkInfos[i].cachedFunction);
-            visitor.append(&m_methodCallLinkInfos[i].cachedPrototype);
-        }
-    }
-#endif
 
 #if ENABLE(DFG_JIT)
     if (hasCodeOrigins()) {
@@ -1653,15 +1829,6 @@ void CodeBlock::visitAggregate(SlotVisitor& visitor)
     for (unsigned profileIndex = 0; profileIndex < numberOfValueProfiles(); ++profileIndex)
         valueProfile(profileIndex)->computeUpdatedPrediction();
 #endif
-    
-#if ENABLE(JIT) && !ENABLE(OPCODE_SAMPLING)
-    // Kill off some bytecode. We can't do it here because we don't want to accidentally
-    // call into malloc while in stop-the-world GC mode.
-    if (hasInstructions() && m_shouldDiscardBytecode)
-        visitor.addUnconditionalFinalizer(this);
-#endif
-    
-    stronglyVisitWeakReferences(visitor);
 }
 
 void CodeBlock::stronglyVisitWeakReferences(SlotVisitor& visitor)
@@ -1867,6 +2034,25 @@ void CallLinkInfo::unlink(JSGlobalData& globalData, RepatchBuffer& repatchBuffer
         remove();
 }
 
+void MethodCallLinkInfo::reset(RepatchBuffer& repatchBuffer, JITCode::JITType jitType)
+{
+    cachedStructure.clearToMaxUnsigned();
+    cachedPrototype.clear();
+    cachedPrototypeStructure.clearToMaxUnsigned();
+    cachedFunction.clear();
+    
+    if (jitType == JITCode::DFGJIT) {
+#if ENABLE(DFG_JIT)
+        repatchBuffer.relink(callReturnLocation, operationGetMethodOptimize);
+#else
+        ASSERT_NOT_REACHED();
+#endif
+    } else {
+        ASSERT(jitType == JITCode::BaselineJIT);
+        repatchBuffer.relink(callReturnLocation, cti_op_get_by_id_method_check);
+    }
+}
+
 void CodeBlock::unlinkCalls()
 {
     if (!!m_alternative)
@@ -1982,37 +2168,28 @@ bool FunctionCodeBlock::canCompileWithDFG()
     return DFG::canCompileFunctionForCall(this);
 }
 
-void ProgramCodeBlock::jettison(JSGlobalData& globalData)
+void ProgramCodeBlock::jettison()
 {
     ASSERT(getJITType() != JITCode::BaselineJIT);
     ASSERT(this == replacement());
-    static_cast<ProgramExecutable*>(ownerExecutable())->jettisonOptimizedCode(globalData);
+    static_cast<ProgramExecutable*>(ownerExecutable())->jettisonOptimizedCode(*globalData());
 }
 
-void EvalCodeBlock::jettison(JSGlobalData& globalData)
+void EvalCodeBlock::jettison()
 {
     ASSERT(getJITType() != JITCode::BaselineJIT);
     ASSERT(this == replacement());
-    static_cast<EvalExecutable*>(ownerExecutable())->jettisonOptimizedCode(globalData);
+    static_cast<EvalExecutable*>(ownerExecutable())->jettisonOptimizedCode(*globalData());
 }
 
-void FunctionCodeBlock::jettison(JSGlobalData& globalData)
+void FunctionCodeBlock::jettison()
 {
     ASSERT(getJITType() != JITCode::BaselineJIT);
     ASSERT(this == replacement());
-    static_cast<FunctionExecutable*>(ownerExecutable())->jettisonOptimizedCodeFor(globalData, m_isConstructor ? CodeForConstruct : CodeForCall);
+    static_cast<FunctionExecutable*>(ownerExecutable())->jettisonOptimizedCodeFor(*globalData(), m_isConstructor ? CodeForConstruct : CodeForCall);
 }
 #endif
 
-void CodeBlock::finalizeUnconditionally()
-{
-#if ENABLE(OPCODE_SAMPLING) || !ENABLE(JIT)
-    ASSERT_NOT_REACHED();
-#endif
-    ASSERT(m_shouldDiscardBytecode);
-    discardBytecode();
-}
-
 #if ENABLE(VALUE_PROFILER)
 bool CodeBlock::shouldOptimizeNow()
 {
diff --git a/Source/JavaScriptCore/bytecode/CodeBlock.h b/Source/JavaScriptCore/bytecode/CodeBlock.h
index 6c0fc64..c9901ef 100644
@@ -171,6 +171,8 @@ namespace JSC {
         {
             seen = true;
         }
+        
+        void reset(RepatchBuffer&, JITCode::JITType);
 
         unsigned bytecodeIndex;
         CodeLocationCall callReturnLocation;
@@ -250,7 +252,7 @@ namespace JSC {
     }
 #endif
 
-    class CodeBlock : public UnconditionalFinalizer {
+    class CodeBlock : public UnconditionalFinalizer, public WeakReferenceHarvester {
         WTF_MAKE_FAST_ALLOCATED;
         friend class JIT;
     public:
@@ -294,10 +296,6 @@ namespace JSC {
         bool canProduceCopyWithBytecode() { return hasInstructions(); }
 
         void visitAggregate(SlotVisitor&);
-        
-        // Call this if you are not jettisoning a code block, and thus
-        // have no evidence to suggest that it will never be called into again.
-        void stronglyVisitWeakReferences(SlotVisitor&);
 
         static void dumpStatistics();
 
@@ -537,7 +535,7 @@ namespace JSC {
         JITCode::JITType getJITType() { return m_jitCode.jitType(); }
         ExecutableMemoryHandle* executableMemory() { return getJITCode().getExecutableMemory(); }
         virtual JSObject* compileOptimized(ExecState*, ScopeChainNode*) = 0;
-        virtual void jettison(JSGlobalData&) = 0;
+        virtual void jettison() = 0;
         virtual CodeBlock* replacement() = 0;
         virtual bool canCompileWithDFG() = 0;
         bool hasOptimizedReplacement()
@@ -1060,10 +1058,10 @@ namespace JSC {
 #endif
         
 #if ENABLE(JIT)
-        void reoptimize(JSGlobalData& globalData)
+        void reoptimize()
         {
             ASSERT(replacement() != this);
-            replacement()->jettison(globalData);
+            replacement()->jettison();
             countReoptimization();
             optimizeAfterWarmUp();
         }
@@ -1085,6 +1083,7 @@ namespace JSC {
         bool m_shouldDiscardBytecode;
 
     protected:
+        virtual void visitWeakReferences(SlotVisitor&);
         virtual void finalizeUnconditionally();
         
     private:
@@ -1101,6 +1100,33 @@ namespace JSC {
         void printPutByIdOp(ExecState*, int location, Vector<Instruction>::const_iterator&, const char* op) const;
 #endif
         void visitStructures(SlotVisitor&, Instruction* vPC) const;
+        
+#if ENABLE(DFG_JIT)
+        bool shouldImmediatelyAssumeLivenessDuringScan()
+        {
+            // Null m_dfgData means that this is a baseline JIT CodeBlock. Baseline JIT
+            // CodeBlocks don't need to be jettisoned when their weak references go
+            // stale. So if a baseline JIT CodeBlock gets scanned, we can
+            // assume that it is live.
+            if (!m_dfgData)
+                return true;
+            
+            // For simplicity, we don't attempt to jettison code blocks during GC if
+            // they are executing. Instead we strongly mark their weak references to
+            // allow them to continue to execute soundly.
+            if (m_dfgData->mayBeExecuting)
+                return true;
+
+            return false;
+        }
+#else
+        bool shouldImmediatelyAssumeLivenessDuringScan() { return true; }
+#endif
+        
+        void performTracingFixpointIteration(SlotVisitor&);
+        
+        void stronglyVisitStrongReferences(SlotVisitor&);
+        void stronglyVisitWeakReferences(SlotVisitor&);
 
         void createRareDataIfNecessary()
         {
@@ -1177,6 +1203,8 @@ namespace JSC {
             Vector<WriteBarrier<JSCell> > weakReferences;
             bool mayBeExecuting;
             bool isJettisoned;
+            bool livenessHasBeenProved; // Initialized and used on every GC.
+            bool allTransitionsHaveBeenMarked; // Initialized and used on every GC.
         };
         
         OwnPtr<DFGData> m_dfgData;
@@ -1278,7 +1306,7 @@ namespace JSC {
 #if ENABLE(JIT)
     protected:
         virtual JSObject* compileOptimized(ExecState*, ScopeChainNode*);
-        virtual void jettison(JSGlobalData&);
+        virtual void jettison();
         virtual CodeBlock* replacement();
         virtual bool canCompileWithDFG();
 #endif
@@ -1312,7 +1340,7 @@ namespace JSC {
 #if ENABLE(JIT)
     protected:
         virtual JSObject* compileOptimized(ExecState*, ScopeChainNode*);
-        virtual void jettison(JSGlobalData&);
+        virtual void jettison();
         virtual CodeBlock* replacement();
         virtual bool canCompileWithDFG();
 #endif
@@ -1349,7 +1377,7 @@ namespace JSC {
 #if ENABLE(JIT)
     protected:
         virtual JSObject* compileOptimized(ExecState*, ScopeChainNode*);
-        virtual void jettison(JSGlobalData&);
+        virtual void jettison();
         virtual CodeBlock* replacement();
         virtual bool canCompileWithDFG();
 #endif
diff --git a/Source/JavaScriptCore/bytecode/Instruction.h b/Source/JavaScriptCore/bytecode/Instruction.h
index 88d48fb..7e44130 100644
@@ -114,7 +114,7 @@ namespace JSC {
             list[0].set(globalData, owner, stubRoutine, firstBase, firstChain, isDirect);
         }
 
-        void visitAggregate(SlotVisitor& visitor, int count)
+        bool visitWeak(int count)
         {
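+            // Returns false if any structure in this list has died; the caller must then discard the whole polymorphic cache.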
             for (int i = 0; i < count; ++i) {
                 PolymorphicStubInfo& info = list[i];
@@ -124,12 +124,17 @@ namespace JSC {
                     continue;
                 }
                 
-                visitor.append(&info.base);
-                if (info.u.proto && !info.isChain)
-                    visitor.append(&info.u.proto);
-                if (info.u.chain && info.isChain)
-                    visitor.append(&info.u.chain);
+                if (!Heap::isMarked(info.base.get()))
+                    return false;
+                if (info.u.proto && !info.isChain
+                    && !Heap::isMarked(info.u.proto.get()))
+                    return false;
+                if (info.u.chain && info.isChain
+                    && !Heap::isMarked(info.u.chain.get()))
+                    return false;
             }
+            
+            return true;
         }
     };
 
diff --git a/Source/JavaScriptCore/bytecode/StructureStubInfo.cpp b/Source/JavaScriptCore/bytecode/StructureStubInfo.cpp
index 3d8025c..6ce299f 100644
@@ -62,48 +62,55 @@ void StructureStubInfo::deref()
     }
 }
 
-void StructureStubInfo::visitAggregate(SlotVisitor& visitor)
+bool StructureStubInfo::visitWeakReferences()
 {
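+    // Returns false if any cell this stub references has died, so the owning CodeBlock can reset the inline cache.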
     switch (accessType) {
     case access_get_by_id_self:
-        visitor.append(&u.getByIdSelf.baseObjectStructure);
-        return;
+        if (!Heap::isMarked(u.getByIdSelf.baseObjectStructure.get()))
+            return false;
+        break;
     case access_get_by_id_proto:
-        visitor.append(&u.getByIdProto.baseObjectStructure);
-        visitor.append(&u.getByIdProto.prototypeStructure);
-        return;
+        if (!Heap::isMarked(u.getByIdProto.baseObjectStructure.get())
+            || !Heap::isMarked(u.getByIdProto.prototypeStructure.get()))
+            return false;
+        break;
     case access_get_by_id_chain:
-        visitor.append(&u.getByIdChain.baseObjectStructure);
-        visitor.append(&u.getByIdChain.chain);
-        return;
+        if (!Heap::isMarked(u.getByIdChain.baseObjectStructure.get())
+            || !Heap::isMarked(u.getByIdChain.chain.get()))
+            return false;
+        break;
     case access_get_by_id_self_list: {
         PolymorphicAccessStructureList* polymorphicStructures = u.getByIdSelfList.structureList;
-        polymorphicStructures->visitAggregate(visitor, u.getByIdSelfList.listSize);
-        return;
+        if (!polymorphicStructures->visitWeak(u.getByIdSelfList.listSize)) {
+            delete polymorphicStructures;
+            return false;
+        }
+        break;
     }
     case access_get_by_id_proto_list: {
         PolymorphicAccessStructureList* polymorphicStructures = u.getByIdProtoList.structureList;
-        polymorphicStructures->visitAggregate(visitor, u.getByIdProtoList.listSize);
-        return;
+        if (!polymorphicStructures->visitWeak(u.getByIdProtoList.listSize)) {
+            delete polymorphicStructures;
+            return false;
+        }
+        break;
     }
     case access_put_by_id_transition:
-        visitor.append(&u.putByIdTransition.previousStructure);
-        visitor.append(&u.putByIdTransition.structure);
-        visitor.append(&u.putByIdTransition.chain);
-        return;
+        if (!Heap::isMarked(u.putByIdTransition.previousStructure.get())
+            || !Heap::isMarked(u.putByIdTransition.structure.get())
+            || !Heap::isMarked(u.putByIdTransition.chain.get()))
+            return false;
+        break;
     case access_put_by_id_replace:
-        visitor.append(&u.putByIdReplace.baseObjectStructure);
-        return;
-    case access_unset:
-    case access_get_by_id_generic:
-    case access_put_by_id_generic:
-    case access_get_array_length:
-    case access_get_string_length:
-        // These instructions don't need to mark anything
-        return;
+        if (!Heap::isMarked(u.putByIdReplace.baseObjectStructure.get()))
+            return false;
+        break;
     default:
-        ASSERT_NOT_REACHED();
+        // The rest of the instructions don't require references, so there is no need to
+        // do anything.
+        break;
     }
+    return true;
 }
 #endif
 
diff --git a/Source/JavaScriptCore/bytecode/StructureStubInfo.h b/Source/JavaScriptCore/bytecode/StructureStubInfo.h
index 20044fc..3aaabd9 100644
@@ -50,6 +50,35 @@ namespace JSC {
         access_get_string_length,
     };
 
+    inline bool isGetByIdAccess(AccessType accessType)
+    {
+        switch (accessType) {
+        case access_get_by_id_self:
+        case access_get_by_id_proto:
+        case access_get_by_id_chain:
+        case access_get_by_id_self_list:
+        case access_get_by_id_proto_list:
+        case access_get_by_id_generic:
+        case access_get_array_length:
+        case access_get_string_length:
+            return true;
+        default:
+            return false;
+        }
+    }
+    
+    inline bool isPutByIdAccess(AccessType accessType)
+    {
+        switch (accessType) {
+        case access_put_by_id_transition:
+        case access_put_by_id_replace:
+        case access_put_by_id_generic:
+            return true;
+        default:
+            return false;
+        }
+    }
+
     struct StructureStubInfo {
         StructureStubInfo()
             : accessType(access_unset)
@@ -113,10 +142,18 @@ namespace JSC {
     
             u.putByIdReplace.baseObjectStructure.set(globalData, owner, baseObjectStructure);
         }
+        
+        void reset()
+        {
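+            // Return to the unset state and drop the stub routine reference so its executable memory can be reclaimed.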
+            accessType = access_unset;
+            
+            stubRoutine = MacroAssemblerCodeRef();
+        }
 
         void deref();
-        void visitAggregate(SlotVisitor&);
 
+        bool visitWeakReferences();
+        
         bool seenOnce()
         {
             return seen;
@@ -142,17 +179,18 @@ namespace JSC {
         int16_t deltaCallToDone;
         int16_t deltaCallToStructCheck;
         int16_t deltaCallToSlowCase;
+        int16_t deltaCheckImmToCall;
+#if USE(JSVALUE64)
+        int16_t deltaCallToLoadOrStore;
+#else
+        int16_t deltaCallToTagLoadOrStore;
+        int16_t deltaCallToPayloadLoadOrStore;
 #endif
+#endif // ENABLE(DFG_JIT)
 
         union {
             struct {
-                int16_t deltaCheckImmToCall;
-#if USE(JSVALUE64)
-                int16_t deltaCallToLoadOrStore;
-#elif USE(JSVALUE32_64)
-                int16_t deltaCallToTagLoadOrStore;
-                int16_t deltaCallToPayloadLoadOrStore;
-#endif
+                // It would be unwise to put anything here, as it will surely be overwritten.
             } unset;
             struct {
                 WriteBarrierBase<Structure> baseObjectStructure;
diff --git a/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp b/Source/JavaScriptCore/dfg/DFGJITCompiler.cpp
index 776b08d..2bdb252 100644
@@ -157,13 +157,13 @@ void JITCompiler::link(LinkBuffer& linkBuffer)
         StructureStubInfo& info = m_codeBlock->structureStubInfo(i);
         CodeLocationCall callReturnLocation = linkBuffer.locationOf(m_propertyAccesses[i].m_functionCall);
         info.callReturnLocation = callReturnLocation;
-        info.u.unset.deltaCheckImmToCall = differenceBetweenCodePtr(linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCheckImmToCall), callReturnLocation);
+        info.deltaCheckImmToCall = differenceBetweenCodePtr(linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCheckImmToCall), callReturnLocation);
         info.deltaCallToStructCheck = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToStructCheck));
 #if USE(JSVALUE64)
-        info.u.unset.deltaCallToLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToLoadOrStore));
+        info.deltaCallToLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToLoadOrStore));
 #else
-        info.u.unset.deltaCallToTagLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToTagLoadOrStore));
-        info.u.unset.deltaCallToPayloadLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToPayloadLoadOrStore));
+        info.deltaCallToTagLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToTagLoadOrStore));
+        info.deltaCallToPayloadLoadOrStore = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToPayloadLoadOrStore));
 #endif
         info.deltaCallToSlowCase = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToSlowCase));
         info.deltaCallToDone = differenceBetweenCodePtr(callReturnLocation, linkBuffer.locationOf(m_propertyAccesses[i].m_deltaCallToDone));
diff --git a/Source/JavaScriptCore/dfg/DFGOperations.cpp b/Source/JavaScriptCore/dfg/DFGOperations.cpp
index 1a79215..9ae9ba2 100644
@@ -325,8 +325,9 @@ EncodedJSValue DFG_OPERATION operationGetMethodOptimizeWithReturnAddress(ExecSta
     JSValue baseValue(base);
     PropertySlot slot(baseValue);
     JSValue result = baseValue.get(exec, *propertyName, slot);
-
-    MethodCallLinkInfo& methodInfo = exec->codeBlock()->getMethodCallLinkInfo(returnAddress);
+    
+    CodeBlock* codeBlock = exec->codeBlock();
+    MethodCallLinkInfo& methodInfo = codeBlock->getMethodCallLinkInfo(returnAddress);
     if (methodInfo.seenOnce())
         dfgRepatchGetMethod(exec, baseValue, *propertyName, slot, methodInfo);
     else
@@ -367,7 +368,7 @@ EncodedJSValue DFG_OPERATION operationGetByIdOptimizeWithReturnAddress(ExecState
     JSValue baseValue(base);
     PropertySlot slot(baseValue);
     JSValue result = baseValue.get(exec, *propertyName, slot);
-
+    
     StructureStubInfo& stubInfo = exec->codeBlock()->getStubInfo(returnAddress);
     if (stubInfo.seen)
         dfgRepatchGetByID(exec, baseValue, *propertyName, slot, stubInfo);
diff --git a/Source/JavaScriptCore/dfg/DFGRepatch.cpp b/Source/JavaScriptCore/dfg/DFGRepatch.cpp
index b01a014..e8f660c 100644
@@ -49,19 +49,19 @@ static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& st
     repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);
 
     // Patch the structure check & the offset of the load.
-    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.u.unset.deltaCheckImmToCall), structure);
+    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.deltaCheckImmToCall), structure);
 #if USE(JSVALUE64)
     if (compact)
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.u.unset.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
     else
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.u.unset.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
 #elif USE(JSVALUE32_64)
     if (compact) {
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.u.unset.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.u.unset.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
     } else {
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.u.unset.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
-        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.u.unset.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
     }
 #endif
 }
@@ -651,6 +651,44 @@ void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCo
     repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualConstruct);
 }
 
+void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
+{
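+    // Relink the slow-path call to the optimizing thunk so a fresh cache can be built on a later miss.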
+    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
+    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
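+    // The inline structure check now compares against an impossible pointer; zero the patched load offsets as well.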
+#if USE(JSVALUE64)
+    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToLoadOrStore), 0);
+#else
+    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToTagLoadOrStore), 0);
+    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.deltaCallToPayloadLoadOrStore), 0);
+#endif
+    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToSlowCase));
+}
+
+void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
+{
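+    // Figure out which flavor of put_by_id is currently linked so we can relink to the matching optimizing version.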
+    V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
+    V_DFGOperation_EJCI optimizedFunction;
+    if (unoptimizedFunction == operationPutByIdStrict)
+        optimizedFunction = operationPutByIdStrictOptimize;
+    else if (unoptimizedFunction == operationPutByIdNonStrict)
+        optimizedFunction = operationPutByIdNonStrictOptimize;
+    else if (unoptimizedFunction == operationPutByIdDirectStrict)
+        optimizedFunction = operationPutByIdDirectStrictOptimize;
+    else {
+        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict);
+        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
+    }
+    repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
+    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
+#if USE(JSVALUE64)
+    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToLoadOrStore), 0);
+#else
+    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToTagLoadOrStore), 0);
+    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.deltaCallToPayloadLoadOrStore), 0);
+#endif
+    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToSlowCase));
+}
+
 } } // namespace JSC::DFG
 
 #endif
diff --git a/Source/JavaScriptCore/dfg/DFGRepatch.h b/Source/JavaScriptCore/dfg/DFGRepatch.h
index e9b695b..bcc0e06 100644
@@ -39,8 +39,21 @@ void dfgBuildGetByIDList(ExecState*, JSValue, const Identifier&, const PropertyS
 void dfgBuildGetByIDProtoList(ExecState*, JSValue, const Identifier&, const PropertySlot&, StructureStubInfo&);
 void dfgRepatchPutByID(ExecState*, JSValue, const Identifier&, const PutPropertySlot&, StructureStubInfo&, PutKind);
 void dfgLinkFor(ExecState*, CallLinkInfo&, CodeBlock*, JSFunction* callee, MacroAssemblerCodePtr, CodeSpecializationKind);
+void dfgResetGetByID(RepatchBuffer&, StructureStubInfo&);
+void dfgResetPutByID(RepatchBuffer&, StructureStubInfo&);
 
 } } // namespace JSC::DFG
 
-#endif
-#endif
+#else // ENABLE(DFG_JIT)
+
+#include <wtf/Assertions.h>
+
+namespace JSC { namespace DFG {
+
+inline void dfgResetGetByID(RepatchBuffer&, StructureStubInfo&) { ASSERT_NOT_REACHED(); }
+inline void dfgResetPutByID(RepatchBuffer&, StructureStubInfo&) { ASSERT_NOT_REACHED(); }
+
+} } // namespace JSC::DFG
+
+#endif // ENABLE(DFG_JIT)
+#endif // DFGRepatch_h
diff --git a/Source/JavaScriptCore/jit/JIT.h b/Source/JavaScriptCore/jit/JIT.h
index f3d9194..05c5068 100644
@@ -254,6 +254,8 @@ namespace JSC {
             return jit.privateCompileCTINativeCall(globalData, func);
         }
 
+        static void resetPatchGetById(RepatchBuffer&, StructureStubInfo*);
+        static void resetPatchPutById(RepatchBuffer&, StructureStubInfo*);
         static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
         static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
         static void patchMethodCallProto(JSGlobalData&, CodeBlock* codeblock, MethodCallLinkInfo&, JSObject*, Structure*, JSObject*, ReturnAddressPtr);
diff --git a/Source/JavaScriptCore/jit/JITPropertyAccess.cpp b/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
index fedc05b..7f66f9a 100644
@@ -1044,6 +1044,21 @@ void JIT::emit_op_put_global_var(Instruction* currentInstruction)
     emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
 }
 
+void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
+{
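+    // Restore the generic get_by_id stub, poison the inline structure check, zero the patched offset, and point the structure-check branch at the slow case.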
+    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
+    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), reinterpret_cast<void*>(-1));
+    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset), 0);
+    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase), stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));
+}
+
+void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
+{
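+    // Likewise for put_by_id: restore the generic stub and poison the inline structure check so it can never match a real Structure.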
+    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
+    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), reinterpret_cast<void*>(-1));
+    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetPutByIdPropertyMapOffset), 0);
+}
+
 #endif // USE(JSVALUE64)
 
 void JIT::emitWriteBarrier(RegisterID owner, RegisterID value, RegisterID scratch, RegisterID scratch2, WriteBarrierMode mode, WriteBarrierUseKind useKind)
diff --git a/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp b/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
index 4ddc488..5fb1250 100644
@@ -1107,6 +1107,23 @@ void JIT::emit_op_put_global_var(Instruction* currentInstruction)
     map(m_bytecodeOffset + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
 }
 
+void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
+{
+    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
+    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), reinterpret_cast<void*>(-1));
+    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset1), 0);
+    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetGetByIdPropertyMapOffset2), 0);
+    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase), stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));
+}
+
+void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
+{
+    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
+    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), reinterpret_cast<void*>(-1));
+    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetPutByIdPropertyMapOffset1), 0);
+    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(patchOffsetPutByIdPropertyMapOffset2), 0);
+}
+
 } // namespace JSC
 
 #endif // USE(JSVALUE32_64)
diff --git a/Source/JavaScriptCore/jit/JITStubs.cpp b/Source/JavaScriptCore/jit/JITStubs.cpp
index 384e079..fe942f5 100644
@@ -1938,7 +1938,7 @@ DEFINE_STUB_FUNCTION(void, optimize_from_loop)
 #if ENABLE(JIT_VERBOSE_OSR)
             printf("Triggering reoptimization of %p(%p) (in loop).\n", codeBlock, codeBlock->replacement());
 #endif
-            codeBlock->reoptimize(callFrame->globalData());
+            codeBlock->reoptimize();
             return;
         }
     } else {
@@ -2008,7 +2008,7 @@ DEFINE_STUB_FUNCTION(void, optimize_from_loop)
 #if ENABLE(JIT_VERBOSE_OSR)
         printf("Triggering reoptimization of %p(%p) (in loop after OSR fail).\n", codeBlock, codeBlock->replacement());
 #endif
-        codeBlock->reoptimize(callFrame->globalData());
+        codeBlock->reoptimize();
         return;
     }
 
@@ -2040,12 +2040,10 @@ DEFINE_STUB_FUNCTION(void, optimize_from_ret)
 #if ENABLE(JIT_VERBOSE_OSR)
             printf("Triggering reoptimization of %p(%p) (in return).\n", codeBlock, codeBlock->replacement());
 #endif
-            codeBlock->reoptimize(callFrame->globalData());
+            codeBlock->reoptimize();
         }
         
         codeBlock->optimizeSoon();
-
-        codeBlock->optimizeSoon();
         return;
     }
     
diff --git a/Source/JavaScriptCore/jit/JITWriteBarrier.h b/Source/JavaScriptCore/jit/JITWriteBarrier.h
index 944a214..81a3653 100644
@@ -69,6 +69,7 @@ public:
     }
     
     void clear() { clear(0); }
+    void clearToMaxUnsigned() { clear(reinterpret_cast<void*>(-1)); }
 
 protected:
     JITWriteBarrierBase()