Harden how the compiler references GC objects
diff --git a/Source/JavaScriptCore/bytecode/CodeBlock.cpp b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
index 6bb3e78..1cc783c 100644
--- a/Source/JavaScriptCore/bytecode/CodeBlock.cpp
+++ b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
@@ -1769,8 +1769,8 @@ void CodeBlock::dumpBytecode(
     if (!exitSites.isEmpty()) {
         out.print(" !! frequent exits: ");
         CommaPrinter comma;
-        for (unsigned i = 0; i < exitSites.size(); ++i)
-            out.print(comma, exitSites[i].kind(), " ", exitSites[i].jitType());
+        for (auto& exitSite : exitSites)
+            out.print(comma, exitSite.kind(), " ", exitSite.jitType());
     }
 #else // ENABLE(DFG_JIT)
     UNUSED_PARAM(location);
@@ -2437,13 +2437,6 @@ void CodeBlock::setNumParameters(int newValue)
     m_argumentValueProfiles = RefCountedArray<ValueProfile>(newValue);
 }
 
-void DirectEvalCodeCache::visitAggregate(SlotVisitor& visitor)
-{
-    EvalCacheMap::iterator end = m_cacheMap.end();
-    for (EvalCacheMap::iterator ptr = m_cacheMap.begin(); ptr != end; ++ptr)
-        visitor.append(&ptr->value);
-}
-
 CodeBlock* CodeBlock::specialOSREntryBlockOrNull()
 {
 #if ENABLE(FTL_JIT)
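
The stronglyVisitStrongReferences hunk further down still calls m_directEvalCodeCache.visitAggregate(visitor), so the definition deleted here has presumably moved out of CodeBlock.cpp rather than gone away. A minimal sketch of the modernized traversal, assuming the cache values are still WriteBarriers (inferred from the removed loop, not copied from the patch):

    void DirectEvalCodeCache::visitAggregate(SlotVisitor& visitor)
    {
        // Same traversal as the removed definition, but using range-based
        // for and the hardened append(WriteBarrier&) overload.
        for (auto& entry : m_cacheMap)
            visitor.append(entry.value);
    }
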
@@ -2468,7 +2461,7 @@ void CodeBlock::visitWeakly(SlotVisitor& visitor)
         return;
 
     if (shouldVisitStrongly(locker)) {
-        visitor.appendUnbarrieredReadOnlyPointer(this);
+        visitor.appendUnbarriered(this);
         return;
     }
     
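
This hunk introduces the two renamed entry points that give the patch its name. A sketch of the SlotVisitor API shape implied by the new call sites (implied declarations only, not the actual header):

    class SlotVisitor {
    public:
        // Takes the barriered slot itself rather than a pointer to it, so a
        // caller can no longer hand the visitor an arbitrary raw address.
        template<typename T> void append(WriteBarrier<T>&);

        // Explicitly named escape hatch for cells referenced without a write
        // barrier (formerly appendUnbarrieredReadOnlyPointer).
        void appendUnbarriered(JSCell*);
    };
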
@@ -2483,7 +2476,7 @@ void CodeBlock::visitWeakly(SlotVisitor& visitor)
 
     // If we jettison ourselves we'll install our alternative, so make sure that it
     // survives GC even if we don't.
-    visitor.append(&m_alternative);
+    visitor.append(m_alternative);
     
     // There are two things that we use weak reference harvesters for: DFG fixpoint for
     // jettisoning, and trying to find structures that would be live based on some
@@ -2535,7 +2528,7 @@ void CodeBlock::visitChildren(SlotVisitor& visitor)
     visitor.addUnconditionalFinalizer(&m_unconditionalFinalizer);
 
     if (CodeBlock* otherBlock = specialOSREntryBlockOrNull())
-        visitor.appendUnbarrieredReadOnlyPointer(otherBlock);
+        visitor.appendUnbarriered(otherBlock);
 
     if (m_jitCode)
         visitor.reportExtraMemoryVisited(m_jitCode->size());
@@ -2661,7 +2654,7 @@ void CodeBlock::propagateTransitions(const ConcurrentJSLocker&, SlotVisitor& vis
                 Structure* newStructure =
                     m_vm->heap.structureIDTable().get(newStructureID);
                 if (Heap::isMarkedConcurrently(oldStructure))
-                    visitor.appendUnbarrieredReadOnlyPointer(newStructure);
+                    visitor.appendUnbarriered(newStructure);
                 else
                     allAreMarkedSoFar = false;
                 break;
@@ -2684,9 +2677,9 @@ void CodeBlock::propagateTransitions(const ConcurrentJSLocker&, SlotVisitor& vis
         DFG::CommonData* dfgCommon = m_jitCode->dfgCommon();
         for (auto& weakReference : dfgCommon->weakStructureReferences)
             allAreMarkedSoFar &= weakReference->markIfCheap(visitor);
-        
-        for (unsigned i = 0; i < dfgCommon->transitions.size(); ++i) {
-            if (shouldMarkTransition(dfgCommon->transitions[i])) {
+
+        for (auto& transition : dfgCommon->transitions) {
+            if (shouldMarkTransition(transition)) {
                 // If the following three things are live, then the target of the
                 // transition is also live:
                 //
@@ -2705,8 +2698,8 @@ void CodeBlock::propagateTransitions(const ConcurrentJSLocker&, SlotVisitor& vis
                 // We also short-circuit the liveness if the structure is harmless
                 // to mark (i.e. its global object and prototype are both already
                 // live).
-                
-                visitor.append(&dfgCommon->transitions[i].m_to);
+
+                visitor.append(transition.m_to);
             } else
                 allAreMarkedSoFar = false;
         }
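
The comment above spells out the liveness rule that shouldMarkTransition implements. A hedged reconstruction of that predicate, inferred from the comment rather than copied from the shipped definition elsewhere in CodeBlock.cpp:

    // Inferred sketch, not the shipped code: a transition's target may be
    // marked once the code origin (when present) and the source structure
    // are themselves known to be live.
    bool CodeBlock::shouldMarkTransition(DFG::WeakReferenceTransition& transition)
    {
        if (!!transition.m_codeOrigin && !Heap::isMarkedConcurrently(transition.m_codeOrigin.get()))
            return false;
        if (!Heap::isMarkedConcurrently(transition.m_from.get()))
            return false;
        return true;
    }
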
@@ -2732,6 +2725,7 @@ void CodeBlock::determineLiveness(const ConcurrentJSLocker&, SlotVisitor& visito
     // GC we still have not proved liveness, then this code block is toast.
     bool allAreLiveSoFar = true;
     for (unsigned i = 0; i < dfgCommon->weakReferences.size(); ++i) {
+        ASSERT(!jsDynamicCast<CodeBlock*>(dfgCommon->weakReferences[i].get()));
         if (!Heap::isMarkedConcurrently(dfgCommon->weakReferences[i].get())) {
             allAreLiveSoFar = false;
             break;
@@ -2754,7 +2748,7 @@ void CodeBlock::determineLiveness(const ConcurrentJSLocker&, SlotVisitor& visito
     // All weak references are live. Record this information so we don't
     // come back here again, and scan the strong references.
     dfgCommon->livenessHasBeenProved = true;
-    visitor.appendUnbarrieredReadOnlyPointer(this);
+    visitor.appendUnbarriered(this);
 #endif // ENABLE(DFG_JIT)
 }
 
@@ -2902,15 +2896,11 @@ void CodeBlock::UnconditionalFinalizer::finalizeUnconditionally()
     
     codeBlock->updateAllPredictions();
     
-#if ENABLE(DFG_JIT)
-    if (codeBlock->shouldJettisonDueToWeakReference()) {
-        codeBlock->jettison(Profiler::JettisonDueToWeakReference);
-        return;
-    }
-#endif // ENABLE(DFG_JIT)
-
-    if (codeBlock->shouldJettisonDueToOldAge(NoLockingNecessary)) {
-        codeBlock->jettison(Profiler::JettisonDueToOldAge);
+    if (!Heap::isMarked(codeBlock)) {
+        if (codeBlock->shouldJettisonDueToWeakReference())
+            codeBlock->jettison(Profiler::JettisonDueToWeakReference);
+        else
+            codeBlock->jettison(Profiler::JettisonDueToOldAge);
         return;
     }
 
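
This is the one behavioral change in the finalizer: both jettison reasons are now gated on the GC having failed to mark the block, and the weak-reference reason takes priority over old age. Restated as a hypothetical helper (the patch keeps the logic inline; this only condenses the decision):

    // Restates the hunk above; not part of the patch. Only an unmarked
    // CodeBlock is jettisoned, preferring the weak-reference reason.
    static Profiler::JettisonReason jettisonReasonFor(CodeBlock* codeBlock)
    {
        ASSERT(!Heap::isMarked(codeBlock));
        return codeBlock->shouldJettisonDueToWeakReference()
            ? Profiler::JettisonDueToWeakReference
            : Profiler::JettisonDueToOldAge;
    }
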
@@ -3056,14 +3046,14 @@ void CodeBlock::visitOSRExitTargets(const ConcurrentJSLocker&, SlotVisitor& visi
     // guaranteeing that it matches the details of the CodeBlock we compiled
     // the OSR exit against.
 
-    visitor.append(&m_alternative);
+    visitor.append(m_alternative);
 
 #if ENABLE(DFG_JIT)
     DFG::CommonData* dfgCommon = m_jitCode->dfgCommon();
     if (dfgCommon->inlineCallFrames) {
         for (auto* inlineCallFrame : *dfgCommon->inlineCallFrames) {
             ASSERT(inlineCallFrame->baselineCodeBlock);
-            visitor.append(&inlineCallFrame->baselineCodeBlock);
+            visitor.append(inlineCallFrame->baselineCodeBlock);
         }
     }
 #endif
@@ -3073,22 +3063,22 @@ void CodeBlock::stronglyVisitStrongReferences(const ConcurrentJSLocker& locker,
 {
     UNUSED_PARAM(locker);
     
-    visitor.append(&m_globalObject);
-    visitor.append(&m_ownerExecutable);
-    visitor.append(&m_unlinkedCode);
+    visitor.append(m_globalObject);
+    visitor.append(m_ownerExecutable);
+    visitor.append(m_unlinkedCode);
     if (m_rareData)
         m_rareData->m_directEvalCodeCache.visitAggregate(visitor);
     visitor.appendValues(m_constantRegisters.data(), m_constantRegisters.size());
-    for (size_t i = 0; i < m_functionExprs.size(); ++i)
-        visitor.append(&m_functionExprs[i]);
-    for (size_t i = 0; i < m_functionDecls.size(); ++i)
-        visitor.append(&m_functionDecls[i]);
-    for (unsigned i = 0; i < m_objectAllocationProfiles.size(); ++i)
-        m_objectAllocationProfiles[i].visitAggregate(visitor);
+    for (auto& functionExpr : m_functionExprs)
+        visitor.append(functionExpr);
+    for (auto& functionDecl : m_functionDecls)
+        visitor.append(functionDecl);
+    for (auto& objectAllocationProfile : m_objectAllocationProfiles)
+        objectAllocationProfile.visitAggregate(visitor);
 
 #if ENABLE(JIT)
     for (ByValInfo* byValInfo : m_byValInfos)
-        visitor.append(&byValInfo->cachedSymbol);
+        visitor.append(byValInfo->cachedSymbol);
 #endif
 
 #if ENABLE(DFG_JIT)
@@ -3107,18 +3097,18 @@ void CodeBlock::stronglyVisitWeakReferences(const ConcurrentJSLocker&, SlotVisit
     
     DFG::CommonData* dfgCommon = m_jitCode->dfgCommon();
 
-    for (unsigned i = 0; i < dfgCommon->transitions.size(); ++i) {
-        if (!!dfgCommon->transitions[i].m_codeOrigin)
-            visitor.append(&dfgCommon->transitions[i].m_codeOrigin); // Almost certainly not necessary, since the code origin should also be a weak reference. Better to be safe, though.
-        visitor.append(&dfgCommon->transitions[i].m_from);
-        visitor.append(&dfgCommon->transitions[i].m_to);
+    for (auto& transition : dfgCommon->transitions) {
+        if (!!transition.m_codeOrigin)
+            visitor.append(transition.m_codeOrigin); // Almost certainly not necessary, since the code origin should also be a weak reference. Better to be safe, though.
+        visitor.append(transition.m_from);
+        visitor.append(transition.m_to);
     }
-    
-    for (unsigned i = 0; i < dfgCommon->weakReferences.size(); ++i)
-        visitor.append(&dfgCommon->weakReferences[i]);
 
-    for (unsigned i = 0; i < dfgCommon->weakStructureReferences.size(); ++i)
-        visitor.append(&dfgCommon->weakStructureReferences[i]);
+    for (auto& weakReference : dfgCommon->weakReferences)
+        visitor.append(weakReference);
+
+    for (auto& weakStructureReference : dfgCommon->weakStructureReferences)
+        visitor.append(weakStructureReference);
 
     dfgCommon->livenessHasBeenProved = true;
 #endif    
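
After the conversion, every loop in these two visitors has the same shape: range-for over a container of WriteBarriers, appending each one. A hypothetical helper, not introduced by the patch, makes that uniformity visible:

    // Hypothetical convenience, not in this patch: visit every WriteBarrier
    // in a container through the hardened append() overload.
    template<typename BarrierContainer>
    static void appendAll(SlotVisitor& visitor, BarrierContainer& barriers)
    {
        for (auto& barrier : barriers)
            visitor.append(barrier);
    }
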
@@ -3387,8 +3377,7 @@ void CodeBlock::jettison(Profiler::JettisonReason reason, ReoptimizationMode mod
         if (DFG::shouldDumpDisassembly()) {
             dataLog(*this, " will be jettisoned because of the following dead references:\n");
             DFG::CommonData* dfgCommon = m_jitCode->dfgCommon();
-            for (unsigned i = 0; i < dfgCommon->transitions.size(); ++i) {
-                DFG::WeakReferenceTransition& transition = dfgCommon->transitions[i];
+            for (auto& transition : dfgCommon->transitions) {
                 JSCell* origin = transition.m_codeOrigin.get();
                 JSCell* from = transition.m_from.get();
                 JSCell* to = transition.m_to.get();
@@ -3917,9 +3906,9 @@ bool CodeBlock::shouldReoptimizeFromLoopNow()
 
 ArrayProfile* CodeBlock::getArrayProfile(const ConcurrentJSLocker&, unsigned bytecodeOffset)
 {
-    for (unsigned i = 0; i < m_arrayProfiles.size(); ++i) {
-        if (m_arrayProfiles[i].bytecodeOffset() == bytecodeOffset)
-            return &m_arrayProfiles[i];
+    for (auto& arrayProfile : m_arrayProfiles) {
+        if (arrayProfile.bytecodeOffset() == bytecodeOffset)
+            return &arrayProfile;
     }
     return 0;
 }
@@ -4075,10 +4064,8 @@ void CodeBlock::tallyFrequentExitSites()
     switch (jitType()) {
     case JITCode::DFGJIT: {
         DFG::JITCode* jitCode = m_jitCode->dfg();
-        for (unsigned i = 0; i < jitCode->osrExit.size(); ++i) {
-            DFG::OSRExit& exit = jitCode->osrExit[i];
+        for (auto& exit : jitCode->osrExit)
             exit.considerAddingAsFrequentExitSite(profiledBlock);
-        }
         break;
     }
 
@@ -4216,10 +4203,10 @@ bool CodeBlock::usesOpcode(OpcodeID opcodeID)
 
 String CodeBlock::nameForRegister(VirtualRegister virtualRegister)
 {
-    for (unsigned i = 0; i < m_constantRegisters.size(); i++) {
-        if (m_constantRegisters[i].get().isEmpty())
+    for (auto& constantRegister : m_constantRegisters) {
+        if (constantRegister.get().isEmpty())
             continue;
-        if (SymbolTable* symbolTable = jsDynamicCast<SymbolTable*>(m_constantRegisters[i].get())) {
+        if (SymbolTable* symbolTable = jsDynamicCast<SymbolTable*>(constantRegister.get())) {
             ConcurrentJSLocker locker(symbolTable->m_lock);
             auto end = symbolTable->end(locker);
             for (auto ptr = symbolTable->begin(locker); ptr != end; ++ptr) {
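
The same convention extends to any visitChildren written against the hardened API. A minimal sketch for a hypothetical cell class (MyCell, m_foo, and m_bar are illustrative names, not from the patch):

    // Hypothetical cell, shown only to illustrate the post-patch idiom:
    // barriered members are passed to append() directly, never by address.
    void MyCell::visitChildren(JSCell* cell, SlotVisitor& visitor)
    {
        MyCell* thisObject = jsCast<MyCell*>(cell);
        Base::visitChildren(cell, visitor);
        visitor.append(thisObject->m_foo); // WriteBarrier<Structure>
        visitor.append(thisObject->m_bar); // WriteBarrier<Unknown>
    }
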