Use constexpr instead of const in symbol definitions that are obviously constexpr.
[WebKit-https.git] / Source / JavaScriptCore / dfg / DFGPutStackSinkingPhase.cpp
index 92447d9..8727760 100644 (file)
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2014, 2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2014-2019 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -42,7 +42,9 @@ namespace JSC { namespace DFG {
 
 namespace {
 
-bool verbose = false;
+namespace DFGPutStackSinkingPhaseInternal {
+static constexpr bool verbose = false;
+}
 
 class PutStackSinkingPhase : public Phase {
 public:
@@ -57,17 +59,26 @@ public:
         // for sunken PutStacks in the presence of interesting control flow merges, and where the
         // value being PutStack'd is also otherwise live in the DFG code. We could work around this
         // by doing the sinking over CPS, or maybe just by doing really smart hoisting. It's also
-        // possible that the duplicate Phi graph can be deduplicated by LLVM. It would be best if we
+        // possible that the duplicate Phi graph can be deduplicated by B3. It would be best if we
         // could observe that there is already a Phi graph in place that does what we want. In
         // principle if we have a request to place a Phi at a particular place, we could just check
         // if there is already a Phi that does what we want. Because PutStackSinkingPhase runs just
         // after SSA conversion, we have almost a guarantee that the Phi graph we produce here would
         // be trivially redundant to the one we already have.
         
-        if (verbose) {
+        // FIXME: This phase doesn't adequately use KillStacks. KillStack can be viewed as a def.
+        // This is mostly inconsequential; it would be a bug to have a local live at a KillStack.
+        // More important is that KillStack should swallow any deferral. After a KillStack, the
+        // local should behave like a TOP deferral because it would be invalid for anyone to trust
+        // the stack. It's not clear whether this matters in practice.
+        // https://bugs.webkit.org/show_bug.cgi?id=145296
+        
+        if (DFGPutStackSinkingPhaseInternal::verbose) {
             dataLog("Graph before PutStack sinking:\n");
             m_graph.dump();
         }
+
+        m_graph.ensureSSADominators();
         
         SSACalculator ssaCalculator(m_graph);
         InsertionSet insertionSet(m_graph);
@@ -96,28 +107,34 @@ public:
                 Operands<bool> live = liveAtTail[block];
                 for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                     Node* node = block->at(nodeIndex);
-                    if (verbose)
+                    if (DFGPutStackSinkingPhaseInternal::verbose)
                         dataLog("Live at ", node, ": ", live, "\n");
                     
+                    Vector<VirtualRegister, 4> reads;
+                    Vector<VirtualRegister, 4> writes;
                     auto escapeHandler = [&] (VirtualRegister operand) {
                         if (operand.isHeader())
                             return;
-                        if (verbose)
+                        if (DFGPutStackSinkingPhaseInternal::verbose)
                             dataLog("    ", operand, " is live at ", node, "\n");
-                        live.operand(operand) = true;
+                        reads.append(operand);
                     };
-                    
+
+                    auto writeHandler = [&] (VirtualRegister operand) {
+                        if (operand.isHeader())
+                            return;
+                        RELEASE_ASSERT(node->op() == PutStack || node->op() == LoadVarargs || node->op() == ForwardVarargs || node->op() == KillStack);
+                        writes.append(operand);
+                    };
+
                     preciseLocalClobberize(
-                        m_graph, node, escapeHandler, escapeHandler,
-                        [&] (VirtualRegister operand, Node* source) {
-                            if (source == node) {
-                                // This is a load. Ignore it.
-                                return;
-                            }
-                            
-                            RELEASE_ASSERT(node->op() == PutStack);
-                            live.operand(operand) = false;
-                        });
+                        m_graph, node, escapeHandler, writeHandler,
+                        [&] (VirtualRegister, LazyNode) { });
+
+                    for (VirtualRegister operand : writes)
+                        live.operand(operand) = false;
+                    for (VirtualRegister operand : reads)
+                        live.operand(operand) = true;
                 }
                 
                 if (live == liveAtHead[block])
@@ -190,10 +207,13 @@ public:
         //     Represents the fact that the original code would have done a PutStack but we haven't
         //     identified an operation that would have observed that PutStack.
         //
-        // This code has some interesting quirks because of the fact that neither liveness nor
-        // deferrals are very precise. They are only precise enough to be able to correctly tell us
-        // when we may [sic] need to execute PutStacks. This means that they may report the need to
-        // execute a PutStack in cases where we actually don't really need it, and that's totally OK.
+        // We need to be precise about liveness in this phase because imprecision
+        // could cause us to insert a PutStack before a node that we wrongly believed
+        // escapes a value it doesn't really escape. Sinking this PutStack above such a node
+        // may cause us to insert a GetStack that we forward to the Phi we're feeding into
+        // the sunken PutStack. Inserting such a GetStack could cause us to load garbage and
+        // could confuse the AI into claiming untrue things (like that the program will exit
+        // when it really won't).
         BlockMap<Operands<FlushFormat>> deferredAtHead(m_graph);
         BlockMap<Operands<FlushFormat>> deferredAtTail(m_graph);
         
@@ -203,8 +223,9 @@ public:
             deferredAtTail[block] =
                 Operands<FlushFormat>(OperandsLike, block->variablesAtHead);
         }
-        
-        deferredAtHead.atIndex(0).fill(ConflictingFlush);
+
+        for (unsigned local = deferredAtHead.atIndex(0).numberOfLocals(); local--;)
+            deferredAtHead.atIndex(0).local(local) = ConflictingFlush;
         
         do {
             changed = false;
@@ -213,37 +234,75 @@ public:
                 Operands<FlushFormat> deferred = deferredAtHead[block];
                 
                 for (Node* node : *block) {
-                    if (verbose)
+                    if (DFGPutStackSinkingPhaseInternal::verbose)
                         dataLog("Deferred at ", node, ":", deferred, "\n");
                     
-                    if (node->op() == KillStack) {
-                        deferred.operand(node->unlinkedLocal()) = ConflictingFlush;
-                        continue;
-                    }
-                    
                     if (node->op() == GetStack) {
+                        // Handle the case that the input doesn't match our requirements. This is
+                        // really a bug, but it's a benign one if we simply don't run this phase.
+                        // It usually arises because of patterns like:
+                        //
+                        // if (thing)
+                        //     PutStack()
+                        // ...
+                        // if (thing)
+                        //     GetStack()
+                        //
+                        // Or:
+                        //
+                        // if (never happens)
+                        //     GetStack()
+                        //
+                        // Because this phase runs early in SSA, it should be sensible to enforce
+                        // that no such code pattern has arisen yet. So, when validation is
+                        // enabled, we assert that we aren't seeing this. But with validation
+                        // disabled we silently let this fly and we just abort this phase.
+                        // FIXME: Get rid of all remaining cases of conflicting GetStacks.
+                        // https://bugs.webkit.org/show_bug.cgi?id=150398
+
+                        bool isConflicting =
+                            deferred.operand(node->stackAccessData()->local) == ConflictingFlush;
+                        
+                        if (validationEnabled())
+                            DFG_ASSERT(m_graph, node, !isConflicting);
+
+                        if (isConflicting) {
+                            // Oh noes! Abort!!
+                            return false;
+                        }
+
                         // A GetStack doesn't affect anything, since we know which local we are reading
                         // from.
                         continue;
+                    } else if (node->op() == PutStack) {
+                        VirtualRegister operand = node->stackAccessData()->local;
+                        deferred.operand(operand) = node->stackAccessData()->format;
+                        continue;
+                    } else if (node->op() == KillStack) {
+                        // We don't want to sink a PutStack past a KillStack.
+                        deferred.operand(node->unlinkedLocal()) = ConflictingFlush;
+                        continue;
                     }
                     
                     auto escapeHandler = [&] (VirtualRegister operand) {
+                        if (DFGPutStackSinkingPhaseInternal::verbose)
+                            dataLog("For ", node, " escaping ", operand, "\n");
                         if (operand.isHeader())
                             return;
                         // We will materialize just before any reads.
                         deferred.operand(operand) = DeadFlush;
                     };
+
+                    auto writeHandler = [&] (VirtualRegister operand) {
+                        if (operand.isHeader())
+                            return;
+                        RELEASE_ASSERT(node->op() == LoadVarargs || node->op() == ForwardVarargs);
+                        deferred.operand(operand) = DeadFlush;
+                    };
                     
                     preciseLocalClobberize(
-                        m_graph, node, escapeHandler, escapeHandler,
-                        [&] (VirtualRegister operand, Node* source) {
-                            if (source == node) {
-                                // This is a load. Ignore it.
-                                return;
-                            }
-                            
-                            deferred.operand(operand) = node->stackAccessData()->format;
-                        });
+                        m_graph, node, escapeHandler, writeHandler,
+                        [&] (VirtualRegister, LazyNode) { });
                 }
                 
                 if (deferred == deferredAtTail[block])
@@ -254,13 +313,13 @@ public:
                 
                 for (BasicBlock* successor : block->successors()) {
                     for (size_t i = deferred.size(); i--;) {
-                        if (verbose)
+                        if (DFGPutStackSinkingPhaseInternal::verbose)
                             dataLog("Considering ", VirtualRegister(deferred.operandForIndex(i)), " at ", pointerDump(block), "->", pointerDump(successor), ": ", deferred[i], " and ", deferredAtHead[successor][i], " merges to ");
 
                         deferredAtHead[successor][i] =
                             merge(deferredAtHead[successor][i], deferred[i]);
                         
-                        if (verbose)
+                        if (DFGPutStackSinkingPhaseInternal::verbose)
                             dataLog(deferredAtHead[successor][i], "\n");
                     }
                 }
@@ -303,13 +362,13 @@ public:
             indexToOperand.append(operand);
         }
         
-        HashSet<Node*> putLocalsToSink;
+        HashSet<Node*> putStacksToSink;
         
         for (BasicBlock* block : m_graph.blocksInNaturalOrder()) {
             for (Node* node : *block) {
                 switch (node->op()) {
                 case PutStack:
-                    putLocalsToSink.add(node);
+                    putStacksToSink.add(node);
                     ssaCalculator.newDef(
                         operandToVariable.operand(node->stackAccessData()->local),
                         block, node->child1().node());
@@ -338,10 +397,10 @@ public:
                 if (!isConcrete(format))
                     return nullptr;
 
-                if (verbose)
+                if (DFGPutStackSinkingPhaseInternal::verbose)
                     dataLog("Adding Phi for ", operand, " at ", pointerDump(block), "\n");
                 
-                Node* phiNode = m_graph.addNode(SpecHeapTop, Phi, NodeOrigin());
+                Node* phiNode = m_graph.addNode(SpecHeapTop, Phi, block->at(0)->origin.withInvalidExit());
                 phiNode->mergeFlags(resultFor(format));
                 return phiNode;
             });
@@ -362,7 +421,7 @@ public:
                 mapping.operand(operand) = def->value();
             }
             
-            if (verbose)
+            if (DFGPutStackSinkingPhaseInternal::verbose)
                 dataLog("Mapping at top of ", pointerDump(block), ": ", mapping, "\n");
             
             for (SSACalculator::Def* phiDef : ssaCalculator.phisForBlock(block)) {
@@ -370,7 +429,7 @@ public:
                 
                 insertionSet.insert(0, phiDef->value());
                 
-                if (verbose)
+                if (DFGPutStackSinkingPhaseInternal::verbose)
                     dataLog("   Mapping ", operand, " to ", phiDef->value(), "\n");
                 mapping.operand(operand) = phiDef->value();
             }
@@ -378,7 +437,7 @@ public:
             deferred = deferredAtHead[block];
             for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
                 Node* node = block->at(nodeIndex);
-                if (verbose)
+                if (DFGPutStackSinkingPhaseInternal::verbose)
                     dataLog("Deferred at ", node, ":", deferred, "\n");
                 
                 switch (node->op()) {
@@ -386,24 +445,24 @@ public:
                     StackAccessData* data = node->stackAccessData();
                     VirtualRegister operand = data->local;
                     deferred.operand(operand) = data->format;
-                    if (verbose)
+                    if (DFGPutStackSinkingPhaseInternal::verbose)
                         dataLog("   Mapping ", operand, " to ", node->child1().node(), " at ", node, "\n");
                     mapping.operand(operand) = node->child1().node();
                     break;
                 }
                     
-                case KillStack: {
-                    deferred.operand(node->unlinkedLocal()) = ConflictingFlush;
-                    break;
-                }
-                    
                 case GetStack: {
                     StackAccessData* data = node->stackAccessData();
                     FlushFormat format = deferred.operand(data->local);
                     if (!isConcrete(format)) {
+                        DFG_ASSERT(
+                            m_graph, node,
+                            deferred.operand(data->local) != ConflictingFlush, deferred.operand(data->local));
+                        
                         // This means there is no deferral. No deferral means that the most
                         // authoritative value for this stack slot is what is stored in the stack. So,
                         // keep the GetStack.
+                        mapping.operand(data->local) = node;
                         break;
                     }
                     
@@ -411,25 +470,36 @@ public:
                     // would have stored a value with a certain format. That format must match our
                     // format. But more importantly, we can simply use the value that the PutStack would
                     // have stored and get rid of the GetStack.
-                    DFG_ASSERT(m_graph, node, format == data->format);
+                    DFG_ASSERT(m_graph, node, format == data->format, format, data->format);
                     
                     Node* incoming = mapping.operand(data->local);
-                    node->convertToIdentity();
                     node->child1() = incoming->defaultEdge();
+                    node->convertToIdentity();
+                    break;
+                }
+
+                case KillStack: {
+                    deferred.operand(node->unlinkedLocal()) = ConflictingFlush;
                     break;
                 }
                 
                 default: {
                     auto escapeHandler = [&] (VirtualRegister operand) {
+                        if (DFGPutStackSinkingPhaseInternal::verbose)
+                            dataLog("For ", node, " escaping ", operand, "\n");
+
                         if (operand.isHeader())
                             return;
                     
                         FlushFormat format = deferred.operand(operand);
-                        if (!isConcrete(format))
+                        if (!isConcrete(format)) {
+                            // It's dead now, rather than conflicting.
+                            deferred.operand(operand) = DeadFlush;
                             return;
+                        }
                     
                         // Gotta insert a PutStack.
-                        if (verbose)
+                        if (DFGPutStackSinkingPhaseInternal::verbose)
                             dataLog("Inserting a PutStack for ", operand, " at ", node, "\n");
 
                         Node* incoming = mapping.operand(operand);
@@ -438,58 +508,61 @@ public:
                         insertionSet.insertNode(
                             nodeIndex, SpecNone, PutStack, node->origin,
                             OpInfo(m_graph.m_stackAccessData.add(operand, format)),
-                            Edge(incoming, useKindFor(format)));
+                            Edge(incoming, uncheckedUseKindFor(format)));
                     
                         deferred.operand(operand) = DeadFlush;
                     };
-                
+
+                    auto writeHandler = [&] (VirtualRegister operand) {
+                        if (operand.isHeader())
+                            return;
+                        // LoadVarargs and ForwardVarargs are unconditional writes to the stack
+                        // locations they claim to write to. They do not read from the stack 
+                        // locations they write to. This makes those stack locations dead right 
+                        // before a LoadVarargs/ForwardVarargs. This means we should never sink
+                        // PutStacks right to this point.
+                        RELEASE_ASSERT(node->op() == LoadVarargs || node->op() == ForwardVarargs);
+                        deferred.operand(operand) = DeadFlush;
+                    };
+
                     preciseLocalClobberize(
-                        m_graph, node, escapeHandler, escapeHandler,
-                        [&] (VirtualRegister, Node*) { });
+                        m_graph, node, escapeHandler, writeHandler,
+                        [&] (VirtualRegister, LazyNode) { });
                     break;
                 } }
             }
             
-            size_t upsilonInsertionPoint = block->size() - 1;
-            NodeOrigin upsilonOrigin = block->last()->origin;
+            NodeAndIndex terminal = block->findTerminal();
+            size_t upsilonInsertionPoint = terminal.index;
+            NodeOrigin upsilonOrigin = terminal.node->origin;
             for (BasicBlock* successorBlock : block->successors()) {
                 for (SSACalculator::Def* phiDef : ssaCalculator.phisForBlock(successorBlock)) {
                     Node* phiNode = phiDef->value();
                     SSACalculator::Variable* variable = phiDef->variable();
                     VirtualRegister operand = indexToOperand[variable->index()];
-                    if (verbose)
+                    if (DFGPutStackSinkingPhaseInternal::verbose)
                         dataLog("Creating Upsilon for ", operand, " at ", pointerDump(block), "->", pointerDump(successorBlock), "\n");
                     FlushFormat format = deferredAtHead[successorBlock].operand(operand);
-                    DFG_ASSERT(m_graph, nullptr, isConcrete(format));
-                    UseKind useKind = useKindFor(format);
-                    Node* incoming = mapping.operand(operand);
-                    if (!incoming) {
-                        // This can totally happen, see tests/stress/put-local-conservative.js.
-                        // This arises because deferral and liveness are both conservative.
-                        // Conservative liveness means that a load from a *different* closure
-                        // variable may lead us to believe that our local is live. Conservative
-                        // deferral may lead us to believe that the local doesn't have a top deferral
-                        // because someone has done something that would have forced it to be
-                        // materialized. The basic pattern is:
-                        //
-                        // GetClosureVar(loc42) // loc42's deferral is now bottom
-                        // if (predicate1)
-                        //     PutClosureVar(loc42) // prevent GCSE of our GetClosureVar's
-                        // if (predicate2)
-                        //     PutStack(loc42) // we now have a concrete deferral
-                        // // we still have the concrete deferral because we merged with bottom
-                        // GetClosureVar(loc42) // force materialization
-                        //
-                        // We will have a Phi with no incoming value form the basic block that
-                        // bypassed the PutStack.
-                        
-                        // Note: we sort of could have used the equivalent of LLVM's undef here. The
-                        // point is that it's OK to just leave random bits in the local if we're
-                        // coming down this path. But, we don't have a way of saying that in our IR
-                        // right now and anyway it probably doesn't matter that much.
-                        
-                        incoming = insertionSet.insertBottomConstantForUse(
-                            upsilonInsertionPoint, upsilonOrigin, useKind).node();
+                    DFG_ASSERT(m_graph, nullptr, isConcrete(format), format);
+                    UseKind useKind = uncheckedUseKindFor(format);
+                    
+                    // We need to get a value for the stack slot. This phase doesn't really have a
+                    // good way of determining if a stack location got clobbered. It just knows if
+                    // there is a deferral. The lack of a deferral might mean that a PutStack or
+                    // GetStack had never happened, or it might mean that the value was read, or
+                    // that it was written. It's OK for us to make some bad decisions here, since
+                    // GCSE will clean it up anyway.
+                    Node* incoming;
+                    if (isConcrete(deferred.operand(operand))) {
+                        incoming = mapping.operand(operand);
+                        DFG_ASSERT(m_graph, phiNode, incoming);
+                    } else {
+                        // Issue a GetStack to get the value. This might introduce some redundancy
+                        // into the code, but if it's bad enough, GCSE will clean it up.
+                        incoming = insertionSet.insertNode(
+                            upsilonInsertionPoint, SpecNone, GetStack, upsilonOrigin,
+                            OpInfo(m_graph.m_stackAccessData.add(operand, format)));
+                        incoming->setResult(resultFor(format));
                     }
                     
                     insertionSet.insertNode(
@@ -501,25 +574,18 @@ public:
             insertionSet.execute(block);
         }
         
-        // Finally eliminate the sunken PutStacks by turning them into Phantoms. This keeps whatever
-        // type check they were doing. Also prepend KillLocals to them to ensure that we know that
-        // the relevant value was *not* stored to the stack.
+        // Finally eliminate the sunken PutStacks by turning them into Checks. This keeps whatever
+        // type check they were doing.
         for (BasicBlock* block : m_graph.blocksInNaturalOrder()) {
-            for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
-                Node* node = block->at(nodeIndex);
-                
-                if (!putLocalsToSink.contains(node))
+            for (auto* node : *block) {
+                if (!putStacksToSink.contains(node))
                     continue;
                 
-                insertionSet.insertNode(
-                    nodeIndex, SpecNone, KillStack, node->origin, OpInfo(node->stackAccessData()->local.offset()));
-                node->convertToPhantom();
+                node->remove(m_graph);
             }
-            
-            insertionSet.execute(block);
         }
         
-        if (verbose) {
+        if (DFGPutStackSinkingPhaseInternal::verbose) {
             dataLog("Graph after PutStack sinking:\n");
             m_graph.dump();
         }
@@ -532,7 +598,6 @@ public:
     
 bool performPutStackSinking(Graph& graph)
 {
-    SamplingRegion samplingRegion("DFG PutStack Sinking Phase");
     return runPhase<PutStackSinkingPhase>(graph);
 }