Harden how the compiler references GC objects
diff --git a/Source/JavaScriptCore/dfg/DFGGraph.cpp b/Source/JavaScriptCore/dfg/DFGGraph.cpp
index 9b2cc90..4dd916a 100644
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2011, 2013-2016 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -40,6 +40,8 @@
 #include "DFGClobbersExitState.h"
 #include "DFGControlEquivalenceAnalysis.h"
 #include "DFGDominators.h"
+#include "DFGFlowIndexing.h"
+#include "DFGFlowMap.h"
 #include "DFGJITCode.h"
 #include "DFGMayExit.h"
 #include "DFGNaturalLoops.h"
@@ -66,11 +68,12 @@ static const char* dfgOpNames[] = {
 #undef STRINGIZE_DFG_OP_ENUM
 };
 
-Graph::Graph(VM& vm, Plan& plan)
+Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
     : m_vm(vm)
     , m_plan(plan)
     , m_codeBlock(m_plan.codeBlock)
     , m_profiledBlock(m_codeBlock->alternative())
+    , m_allocator(longLivedState.m_allocator)
     , m_cfg(std::make_unique<CFG>(*this))
     , m_nextMachineLocal(0)
     , m_fixpointState(BeforeFixpoint)
@@ -82,10 +85,28 @@ Graph::Graph(VM& vm, Plan& plan)
     ASSERT(m_profiledBlock);
     
     m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();
+    
+    m_indexingCache = std::make_unique<FlowIndexing>(*this);
+    m_abstractValuesCache = std::make_unique<FlowMap<AbstractValue>>(*this);
+
+    registerStructure(vm.structureStructure.get());
+    this->stringStructure = registerStructure(vm.stringStructure.get());
+    this->symbolStructure = registerStructure(vm.symbolStructure.get());
 }
 
 Graph::~Graph()
 {
+    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
+        BasicBlock* block = this->block(blockIndex);
+        if (!block)
+            continue;
+
+        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
+            m_allocator.free(block->phis[phiIndex]);
+        for (unsigned nodeIndex = block->size(); nodeIndex--;)
+            m_allocator.free(block->at(nodeIndex));
+    }
+    m_allocator.freeAll();
 }
 
 const char *Graph::opName(NodeType op)
@@ -219,6 +240,8 @@ void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext*
         out.print(comma, node->scopeOffset());
     if (node->hasDirectArgumentsOffset())
         out.print(comma, node->capturedArgumentsOffset());
+    if (node->hasArgumentIndex())
+        out.print(comma, node->argumentIndex());
     if (node->hasRegisterPointer())
         out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
     if (node->hasIdentifier())
@@ -226,15 +249,15 @@ void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext*
     if (node->hasPromotedLocationDescriptor())
         out.print(comma, node->promotedLocationDescriptor());
     if (node->hasStructureSet())
-        out.print(comma, inContext(node->structureSet(), context));
+        out.print(comma, inContext(node->structureSet().toStructureSet(), context));
     if (node->hasStructure())
-        out.print(comma, inContext(*node->structure(), context));
+        out.print(comma, inContext(*node->structure().get(), context));
     if (node->hasTransition()) {
         out.print(comma, pointerDumpInContext(node->transition(), context));
 #if USE(JSVALUE64)
         out.print(", ID:", node->transition()->next->id());
 #else
-        out.print(", ID:", RawPointer(node->transition()->next));
+        out.print(", ID:", RawPointer(node->transition()->next.get()));
 #endif
     }
     if (node->hasCellOperand()) {
@@ -255,6 +278,8 @@ void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext*
             }
         }
     }
+    if (node->hasSpeculatedTypeForQuery())
+        out.print(comma, SpeculationDump(node->speculatedTypeForQuery()));
     if (node->hasStorageAccessData()) {
         StorageAccessData& storageAccessData = node->storageAccessData();
         out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
@@ -273,7 +298,7 @@ void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext*
         for (unsigned i = 0; i < data.variants.size(); ++i)
             out.print(comma, inContext(data.variants[i], context));
     }
-    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
+    ASSERT(node->hasVariableAccessData(*this) == node->accessesStack(*this));
     if (node->hasVariableAccessData(*this)) {
         VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
         if (variableAccessData) {
@@ -334,6 +359,11 @@ void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext*
         out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
         out.print(", limit = ", data->limit);
     }
+    if (node->hasCallDOMGetterData()) {
+        CallDOMGetterData* data = node->callDOMGetterData();
+        out.print(comma, "id", data->identifierNumber, "{", identifiers()[data->identifierNumber], "}");
+        out.print(", domJIT = ", RawPointer(data->domJIT));
+    }
     if (node->isConstant())
         out.print(comma, pointerDumpInContext(node->constant(), context));
     if (node->isJump())
@@ -370,7 +400,7 @@ void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext*
         out.print(comma, "WasHoisted");
     out.print(")");
 
-    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
+    if (node->accessesStack(*this) && node->tryGetVariableAccessData())
         out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
     else if (node->hasHeapPrediction())
         out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
@@ -547,7 +577,7 @@ void Graph::dump(PrintStream& out, DumpContext* context)
             RELEASE_ASSERT(block->ssa);
             out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
             out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
-            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
+            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtTail, context), "\n");
             break;
         } }
         out.print("\n");
@@ -567,9 +597,70 @@ void Graph::dump(PrintStream& out, DumpContext* context)
     }
 }
 
+void Graph::addNodeToMapByIndex(Node* node)
+{
+    if (m_nodeIndexFreeList.isEmpty()) {
+        node->m_index = m_nodesByIndex.size();
+        m_nodesByIndex.append(node);
+        return;
+    }
+    unsigned index = m_nodeIndexFreeList.takeLast();
+    node->m_index = index;
+    ASSERT(!m_nodesByIndex[index]);
+    m_nodesByIndex[index] = node;
+}
+
 void Graph::deleteNode(Node* node)
 {
-    m_nodes.remove(node);
+    if (validationEnabled() && m_form == SSA) {
+        for (BasicBlock* block : blocksInNaturalOrder()) {
+            DFG_ASSERT(*this, node, !block->ssa->liveAtHead.contains(node));
+            DFG_ASSERT(*this, node, !block->ssa->liveAtTail.contains(node));
+        }
+    }
+
+    RELEASE_ASSERT(m_nodesByIndex[node->m_index] == node);
+    unsigned nodeIndex = node->m_index;
+    m_nodesByIndex[nodeIndex] = nullptr;
+    m_nodeIndexFreeList.append(nodeIndex);
+
+    m_allocator.free(node);
+}
+
+void Graph::packNodeIndices()
+{
+    if (m_nodeIndexFreeList.isEmpty())
+        return;
+
+    unsigned holeIndex = 0;
+    unsigned endIndex = m_nodesByIndex.size();
+
+    while (true) {
+        while (holeIndex < endIndex && m_nodesByIndex[holeIndex])
+            ++holeIndex;
+
+        if (holeIndex == endIndex)
+            break;
+        ASSERT(holeIndex < m_nodesByIndex.size());
+        ASSERT(!m_nodesByIndex[holeIndex]);
+
+        do {
+            --endIndex;
+        } while (!m_nodesByIndex[endIndex] && endIndex > holeIndex);
+
+        if (holeIndex == endIndex)
+            break;
+        ASSERT(endIndex > holeIndex);
+        ASSERT(m_nodesByIndex[endIndex]);
+
+        auto& value = m_nodesByIndex[endIndex];
+        value->m_index = holeIndex;
+        m_nodesByIndex[holeIndex] = WTFMove(value);
+        ++holeIndex;
+    }
+
+    m_nodeIndexFreeList.resize(0);
+    m_nodesByIndex.resize(endIndex);
 }
 
 void Graph::dethread()
@@ -737,6 +828,8 @@ void Graph::computeRefCounts()
 
 void Graph::killBlockAndItsContents(BasicBlock* block)
 {
+    if (auto& ssaData = block->ssa)
+        ssaData->invalidate();
     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
         deleteNode(block->phis[phiIndex]);
     for (Node* node : *block)
@@ -747,9 +840,8 @@ void Graph::killBlockAndItsContents(BasicBlock* block)
 
 void Graph::killUnreachableBlocks()
 {
-    // FIXME: This probably creates dangling references from Upsilons to Phis in SSA.
-    // https://bugs.webkit.org/show_bug.cgi?id=159164
-    
+    invalidateNodeLiveness();
+
     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
         BasicBlock* block = this->block(blockIndex);
         if (!block)
@@ -771,6 +863,15 @@ void Graph::invalidateCFG()
     m_backwardsCFG = nullptr;
 }
 
+void Graph::invalidateNodeLiveness()
+{
+    if (m_form != SSA)
+        return;
+
+    for (BasicBlock* block : blocksInNaturalOrder())
+        block->ssa->invalidate();
+}
+
 void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
 {
     for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
@@ -1074,6 +1175,13 @@ BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
     return result;
 }
 
+unsigned Graph::parameterSlotsForArgCount(unsigned argCount)
+{
+    size_t frameSize = CallFrame::headerSizeInRegisters + argCount;
+    size_t alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
+    return alignedFrameSize - CallerFrameAndPC::sizeInRegisters;
+}
+
 unsigned Graph::frameRegisterCount()
 {
     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
@@ -1103,7 +1211,7 @@ unsigned Graph::requiredRegisterCountForExecutionAndExit()
 }
 
 JSValue Graph::tryGetConstantProperty(
-    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
+    JSValue base, const RegisteredStructureSet& structureSet, PropertyOffset offset)
 {
     if (!base || !base.isObject())
         return JSValue();
@@ -1111,8 +1219,7 @@ JSValue Graph::tryGetConstantProperty(
     JSObject* object = asObject(base);
     
     for (unsigned i = structureSet.size(); i--;) {
-        Structure* structure = structureSet[i];
-        assertIsRegistered(structure);
+        RegisteredStructure structure = structureSet[i];
         
         WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
         if (!set || !set->isStillValid())
@@ -1144,7 +1251,7 @@ JSValue Graph::tryGetConstantProperty(
     // incompatible with the getDirect we're trying to do. The easiest way to do that is to
     // determine if the structure belongs to the proven set.
     
-    if (!structureSet.contains(object->structure()))
+    if (!structureSet.toStructureSet().contains(object->structure()))
         return JSValue();
     
     return object->getDirect(offset);
@@ -1152,7 +1259,7 @@ JSValue Graph::tryGetConstantProperty(
 
 JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
 {
-    return tryGetConstantProperty(base, StructureSet(structure), offset);
+    return tryGetConstantProperty(base, RegisteredStructureSet(registerStructure(structure)), offset);
 }
 
 JSValue Graph::tryGetConstantProperty(
@@ -1174,13 +1281,13 @@ JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset
 }
 
 AbstractValue Graph::inferredValueForProperty(
-    const StructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
+    const RegisteredStructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
 {
     AbstractValue result;
     base.forEach(
-        [&] (Structure* structure) {
+        [&] (RegisteredStructure structure) {
             AbstractValue value;
-            value.set(*this, inferredTypeForProperty(structure, uid));
+            value.set(*this, inferredTypeForProperty(structure.get(), uid));
             result.merge(value);
         });
     if (clobberState == StructuresAreClobbered)
@@ -1219,7 +1326,7 @@ JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
     JSValue value;
     WatchpointSet* set;
     {
-        ConcurrentJITLocker locker(symbolTable->m_lock);
+        ConcurrentJSLocker locker(symbolTable->m_lock);
         
         SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
         if (!entry)
@@ -1306,61 +1413,8 @@ void Graph::registerFrozenValues()
 void Graph::visitChildren(SlotVisitor& visitor)
 {
     for (FrozenValue* value : m_frozenValues) {
-        visitor.appendUnbarrieredReadOnlyValue(value->value());
-        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
-    }
-    
-    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
-        BasicBlock* block = this->block(blockIndex);
-        if (!block)
-            continue;
-        
-        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
-            Node* node = block->at(nodeIndex);
-            
-            switch (node->op()) {
-            case CheckStructure:
-                for (unsigned i = node->structureSet().size(); i--;)
-                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
-                break;
-                
-            case NewObject:
-            case ArrayifyToStructure:
-            case NewStringObject:
-                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
-                break;
-                
-            case PutStructure:
-            case AllocatePropertyStorage:
-            case ReallocatePropertyStorage:
-                visitor.appendUnbarrieredReadOnlyPointer(
-                    node->transition()->previous);
-                visitor.appendUnbarrieredReadOnlyPointer(
-                    node->transition()->next);
-                break;
-                
-            case MultiGetByOffset:
-                for (const MultiGetByOffsetCase& getCase : node->multiGetByOffsetData().cases) {
-                    for (Structure* structure : getCase.set())
-                        visitor.appendUnbarrieredReadOnlyPointer(structure);
-                }
-                break;
-                    
-            case MultiPutByOffset:
-                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
-                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
-                    const StructureSet& set = variant.oldStructure();
-                    for (unsigned j = set.size(); j--;)
-                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
-                    if (variant.kind() == PutByIdVariant::Transition)
-                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
-                }
-                break;
-                
-            default:
-                break;
-            }
-        }
+        visitor.appendUnbarriered(value->value());
+        visitor.appendUnbarriered(value->structure());
     }
 }
 
@@ -1368,6 +1422,12 @@ FrozenValue* Graph::freeze(JSValue value)
 {
     if (UNLIKELY(!value))
         return FrozenValue::emptySingleton();
+
+    // There are weird relationships in how optimized CodeBlocks
+    // point to other CodeBlocks. We don't want to have them be
+    // part of the weak pointer set. For example, an optimized CodeBlock
+    // having a weak pointer to itself will cause it to get collected.
+    RELEASE_ASSERT(!jsDynamicCast<CodeBlock*>(value));
     
     auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
     if (LIKELY(!result.isNewEntry))
@@ -1407,12 +1467,14 @@ void Graph::convertToStrongConstant(Node* node, JSValue value)
     convertToConstant(node, freezeStrong(value));
 }
 
-StructureRegistrationResult Graph::registerStructure(Structure* structure)
+RegisteredStructure Graph::registerStructure(Structure* structure, StructureRegistrationResult& result)
 {
     m_plan.weakReferences.addLazily(structure);
     if (m_plan.watchpoints.consider(structure))
-        return StructureRegisteredAndWatched;
-    return StructureRegisteredNormally;
+        result = StructureRegisteredAndWatched;
+    else
+        result = StructureRegisteredNormally;
+    return RegisteredStructure::createPrivate(structure);
 }
 
 void Graph::assertIsRegistered(Structure* structure)
@@ -1509,45 +1571,47 @@ ControlEquivalenceAnalysis& Graph::ensureControlEquivalenceAnalysis()
     return *m_controlEquivalenceAnalysis;
 }
 
-MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* node)
-{
-    while (node) {
-        CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
-        
-        if (node->hasLocal(*this)) {
-            ValueProfile* result = [&] () -> ValueProfile* {
-                if (!node->local().isArgument())
-                    return nullptr;
-                int argument = node->local().toArgument();
-                Node* argumentNode = m_arguments[argument];
-                if (!argumentNode)
-                    return nullptr;
-                if (node->variableAccessData() != argumentNode->variableAccessData())
-                    return nullptr;
-                return profiledBlock->valueProfileForArgument(argument);
-            }();
-            if (result)
-                return result;
-            
-            if (node->op() == GetLocal) {
-                return MethodOfGettingAValueProfile::fromLazyOperand(
-                    profiledBlock,
-                    LazyOperandValueProfileKey(
-                        node->origin.semantic.bytecodeIndex, node->local()));
+MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* currentNode, Node* operandNode)
+{
+    for (Node* node = operandNode; node;) {
+        // currentNode is null when we're doing speculation checks for checkArgumentTypes().
+        if (!currentNode || node->origin != currentNode->origin) {
+            CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
+
+            if (node->accessesStack(*this)) {
+                ValueProfile* result = [&] () -> ValueProfile* {
+                    if (!node->local().isArgument())
+                        return nullptr;
+                    int argument = node->local().toArgument();
+                    Node* argumentNode = m_arguments[argument];
+                    if (!argumentNode)
+                        return nullptr;
+                    if (node->variableAccessData() != argumentNode->variableAccessData())
+                        return nullptr;
+                    return profiledBlock->valueProfileForArgument(argument);
+                }();
+                if (result)
+                    return result;
+
+                if (node->op() == GetLocal) {
+                    return MethodOfGettingAValueProfile::fromLazyOperand(
+                        profiledBlock,
+                        LazyOperandValueProfileKey(
+                            node->origin.semantic.bytecodeIndex, node->local()));
+                }
             }
-        }
-        
-        if (node->hasHeapPrediction())
-            return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
-        
-        {
+
+            if (node->hasHeapPrediction())
+                return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
+
             if (profiledBlock->hasBaselineJITProfiling()) {
                 if (ArithProfile* result = profiledBlock->arithProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex))
                     return result;
             }
         }
-        
+
         switch (node->op()) {
+        case BooleanToNumber:
         case Identity:
         case ValueRep:
         case DoubleRep: