[resource-timing] Report performance entries with all HTTP status codes
[WebKit-https.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
1 /*
2  * Copyright (C) 2011-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGGraph.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "BytecodeKills.h"
32 #include "BytecodeLivenessAnalysisInlines.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGBackwardsCFG.h"
36 #include "DFGBackwardsDominators.h"
37 #include "DFGBlockWorklist.h"
38 #include "DFGCFG.h"
39 #include "DFGClobberSet.h"
40 #include "DFGClobbersExitState.h"
41 #include "DFGControlEquivalenceAnalysis.h"
42 #include "DFGDominators.h"
43 #include "DFGFlowIndexing.h"
44 #include "DFGFlowMap.h"
45 #include "DFGJITCode.h"
46 #include "DFGMayExit.h"
47 #include "DFGNaturalLoops.h"
48 #include "DFGVariableAccessDataDump.h"
49 #include "FullBytecodeLiveness.h"
50 #include "FunctionExecutableDump.h"
51 #include "GetterSetter.h"
52 #include "JIT.h"
53 #include "JSLexicalEnvironment.h"
54 #include "MaxFrameExtentForSlowPathCall.h"
55 #include "OperandsInlines.h"
56 #include "JSCInlines.h"
57 #include "StackAlignment.h"
58 #include <wtf/CommaPrinter.h>
59 #include <wtf/ListDump.h>
60
61 namespace JSC { namespace DFG {
62
63 static constexpr bool dumpOSRAvailabilityData = false;
64
65 // Creates an array of stringized names.
66 static const char* dfgOpNames[] = {
67 #define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
68     FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
69 #undef STRINGIZE_DFG_OP_ENUM
70 };
71
// Constructs a DFG graph for the given compilation plan. The graph starts in
// LoadStore form, before the fixpoint, with everything considered live; later
// phases advance these states. Commonly referenced structures (Structure,
// string, symbol) are registered up front so the rest of the compiler can use
// them freely.
Graph::Graph(VM& vm, Plan& plan)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock())
    , m_profiledBlock(m_codeBlock->alternative())
    , m_ssaCFG(makeUnique<SSACFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);
    
    // Record whether the profiled block carries debugging opcodes (or the
    // option forces them), since that constrains what the DFG may do.
    m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();
    
    m_indexingCache = makeUnique<FlowIndexing>(*this);
    m_abstractValuesCache = makeUnique<FlowMap<AbstractValue>>(*this);

    // Register structures used throughout compilation; string and symbol
    // structures are additionally cached as members for quick access.
    registerStructure(vm.structureStructure.get());
    this->stringStructure = registerStructure(vm.stringStructure.get());
    this->symbolStructure = registerStructure(vm.symbolStructure.get());
}
96
97 Graph::~Graph()
98 {
99 }
100
101 const char *Graph::opName(NodeType op)
102 {
103     return dfgOpNames[op];
104 }
105
106 static void printWhiteSpace(PrintStream& out, unsigned amount)
107 {
108     while (amount-- > 0)
109         out.print(" ");
110 }
111
112 bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefixStr, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
113 {
114     Prefix myPrefix(prefixStr);
115     Prefix& prefix = prefixStr ? myPrefix : m_prefix;
116
117     if (!currentNode->origin.semantic)
118         return false;
119     
120     Node* previousNode = previousNodeRef;
121     previousNodeRef = currentNode;
122
123     if (!previousNode)
124         return false;
125     
126     if (previousNode->origin.semantic.inlineCallFrame() == currentNode->origin.semantic.inlineCallFrame())
127         return false;
128     
129     Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
130     Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
131     unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
132     unsigned indexOfDivergence = commonSize;
133     for (unsigned i = 0; i < commonSize; ++i) {
134         if (previousInlineStack[i].inlineCallFrame() != currentInlineStack[i].inlineCallFrame()) {
135             indexOfDivergence = i;
136             break;
137         }
138     }
139     
140     bool hasPrinted = false;
141     
142     // Print the pops.
143     for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
144         out.print(prefix);
145         printWhiteSpace(out, i * 2);
146         out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame(), context), "\n");
147         hasPrinted = true;
148     }
149     
150     // Print the pushes.
151     for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
152         out.print(prefix);
153         printWhiteSpace(out, i * 2);
154         out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame(), context), "\n");
155         hasPrinted = true;
156     }
157     
158     return hasPrinted;
159 }
160
161 int Graph::amountOfNodeWhiteSpace(Node* node)
162 {
163     return (node->origin.semantic.inlineDepth() - 1) * 2;
164 }
165
// Prints the inlining-depth indentation for the node (see amountOfNodeWhiteSpace).
void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
{
    printWhiteSpace(out, amountOfNodeWhiteSpace(node));
}
170
// Dumps a single node in the textual DFG IR format: index, ref count, virtual
// register, opcode, children, then every payload the node type carries,
// followed by read/write effects, exit information, and predictions. The
// exact output order below is what DFG dump consumers expect; do not reorder
// casually.
void Graph::dump(PrintStream& out, const char* prefixStr, Node* node, DumpContext* context)
{
    Prefix myPrefix(prefixStr);
    Prefix& prefix = prefixStr ? myPrefix : m_prefix;

    NodeType op = node->op();

    // The printed ref count excludes the implicit 'mustGenerate' reference;
    // that reference is indicated by the '!' marker instead.
    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count it prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. The may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    // Children: either the var-args child list or up to three fixed children.
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        // Empty child slots before a non-empty one are still printed, to keep
        // child positions aligned.
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    // Node-type-specific payloads, one `has*` probe per payload kind.
    if (node->hasNumberOfArgumentsToSkip())
        out.print(comma, "numberOfArgumentsToSkip = ", node->numberOfArgumentsToSkip());
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithUnaryType())
        out.print(comma, "Type:", node->arithUnaryType());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasArithRoundingMode())
        out.print(comma, "Rounding:", node->arithRoundingMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasArgumentIndex())
        out.print(comma, node->argumentIndex());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasClassInfo())
        out.print(comma, *node->classInfo());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet().toStructureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure().get(), context));
    if (node->op() == CPUIntrinsic)
        out.print(comma, intrinsicName(node->intrinsic()));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
        // Structure IDs are only meaningful on 64-bit; elsewhere print the pointer.
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next.get()));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                // If the cell is callable, also describe its executable.
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(m_vm, executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasQueriedType())
        out.print(comma, node->queriedType());
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    if (node->hasMatchStructureData()) {
        for (MatchStructureVariant& variant : node->matchStructureData().variants)
            out.print(comma, inContext(*variant.structure.get(), context), "=>", variant.result);
    }
    ASSERT(node->hasVariableAccessData(*this) == node->accessesStack(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal()) 
        out.print(comma, node->unlinkedLocal());
    if (node->hasVectorLengthHint())
        out.print(comma, "vectorLengthHint = ", node->vectorLengthHint());
    if (node->hasLazyJSValue())
        out.print(comma, node->lazyJSValue());
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingMode()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->hasIsInternalPromise())
        out.print(comma, "isInternalPromise = ", node->isInternalPromise());
    if (node->hasInternalFieldIndex())
        out.print(comma, "internalFieldIndex = ", node->internalFieldIndex());
    if (node->hasCallDOMGetterData()) {
        CallDOMGetterData* data = node->callDOMGetterData();
        out.print(comma, "id", data->identifierNumber, "{", identifiers()[data->identifierNumber], "}");
        out.print(", domJIT = ", RawPointer(data->domJIT));
    }
    if (node->hasIgnoreLastIndexIsWritable())
        out.print(comma, "ignoreLastIndexIsWritable = ", node->ignoreLastIndexIsWritable());
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    if (node->hasCallLinkStatus())
        out.print(comma, *node->callLinkStatus());
    if (node->hasGetByIdStatus())
        out.print(comma, *node->getByIdStatus());
    if (node->hasInByIdStatus())
        out.print(comma, *node->inByIdStatus());
    if (node->hasPutByIdStatus())
        out.print(comma, *node->putByIdStatus());
    // Control-flow payloads: jump targets, branch targets, switch cases.
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    if (node->isEntrySwitch()) {
        EntrySwitchData* data = node->entrySwitchData();
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, BranchTarget(data->cases[i]));
    }
    // Abstract-heap effects of this node (reads/writes), per the clobberize model.
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    // Code origin: bytecode index, plus the exit origin when it differs.
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex());
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    out.print(comma, node->origin.exitOK ? "ExitValid" : "ExitInvalid");
    if (node->origin.wasHoisted)
        out.print(comma, "WasHoisted");
    out.print(")");

    // Prediction: prefer the variable access data's prediction; otherwise the
    // node's heap prediction, if it has one.
    if (node->accessesStack(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}
423
424 bool Graph::terminalsAreValid()
425 {
426     for (BasicBlock* block : blocksInNaturalOrder()) {
427         if (!block->terminal())
428             return false;
429     }
430     return true;
431 }
432
// Overloads that let the loop-printing code in dumpBlockHeader treat CPSCFG
// nodes and raw BasicBlock pointers uniformly.
static BasicBlock* unboxLoopNode(const CPSCFG::Node& node) { return node.node(); }
static BasicBlock* unboxLoopNode(BasicBlock* block) { return block; }
435
// Dumps a block's header: title line, execution count, predecessor/successor
// lists, then (when the relevant analyses have been computed) dominator,
// backwards-dominator, control-equivalence and natural-loop information, and
// finally the block's Phi nodes (all of them, or only live ones, per
// phiNodeDumpMode).
void Graph::dumpBlockHeader(PrintStream& out, const char* prefixStr, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    Prefix myPrefix(prefixStr);
    Prefix& prefix = prefixStr ? myPrefix : m_prefix;

    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):",
        block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", block->isCatchEntrypoint ? " (Catch Entrypoint)" : "", "\n");
    // Self-comparison is false only for NaN, so this prints the execution
    // count only when it is a real number (presumably NaN means "no count";
    // verify against how executionCount is initialized).
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    // Successors can only be enumerated through the terminal node.
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
        }
    } else
        out.print(" <invalid>");
    out.print("\n");

    // Generic over SSA and CPS dominators (different List types).
    auto printDominators = [&] (auto& dominators) {
        out.print(prefix, "  Dominated by: ", dominators.dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", dominators.blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", dominators.dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ",
            dominators.iteratedDominanceFrontierOf(typename std::remove_reference<decltype(dominators)>::type::List { block }), "\n");
    };

    // The analyses below are only trustworthy when every block has a terminal.
    if (terminalsAreValid()) {
        if (m_ssaDominators)
            printDominators(*m_ssaDominators);
        else if (m_cpsDominators)
            printDominators(*m_cpsDominators);
    }

    if (m_backwardsDominators && terminalsAreValid()) {
        out.print(prefix, "  Backwards dominates by: ", m_backwardsDominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Backwards dominates: ", m_backwardsDominators->blocksDominatedBy(block), "\n");
    }
    if (m_controlEquivalenceAnalysis && terminalsAreValid()) {
        out.print(prefix, "  Control equivalent to:");
        for (BasicBlock* otherBlock : blocksInNaturalOrder()) {
            if (m_controlEquivalenceAnalysis->areEquivalent(block, otherBlock))
                out.print(" ", *otherBlock);
        }
        out.print("\n");
    }

    // Generic over SSA and CPS natural loops; unboxLoopNode bridges the
    // different block representations.
    auto printNaturalLoops = [&] (auto& naturalLoops) {
        if (const auto* loop = naturalLoops->headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            // Sort member block indices so the dump is deterministic.
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(unboxLoopNode(loop->at(i))->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        auto containingLoops = naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *unboxLoopNode(containingLoops[i]->header()));
            out.print("\n");
        }
    };

    if (m_ssaNaturalLoops)
        printNaturalLoops(m_ssaNaturalLoops);
    else if (m_cpsNaturalLoops)
        printNaturalLoops(m_cpsNaturalLoops);

    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}
532
// Dumps the entire graph: a header describing compilation state, then every
// block (header, CFA state and values at head, nodes, CFA state and values at
// tail), followed by GC-visible frozen values, watchpoints, and any structures
// accumulated in the dump context.
void Graph::dump(PrintStream& out, DumpContext* context)
{
    Prefix& prefix = m_prefix;
    // Use a local context when the caller did not supply one, so structure
    // references accumulated during the dump can be printed at the end.
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print(prefix, "DFG for ", CodeBlockWithJITType(m_codeBlock, JITType::DFGJIT), ":\n");
    out.print(prefix, "  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    // Argument formats are per-entrypoint in SSA; otherwise arguments hang off roots.
    if (m_form == SSA) {
        for (unsigned entrypointIndex = 0; entrypointIndex < m_argumentFormats.size(); ++entrypointIndex)
            out.print(prefix, "  Argument formats for entrypoint index: ", entrypointIndex, " : ", listDump(m_argumentFormats[entrypointIndex]), "\n");
    }
    else {
        for (auto pair : m_rootToArguments)
            out.print(prefix, "  Arguments for block#", pair.key->index, ": ", listDump(pair.value), "\n");
    }
    out.print("\n");
    
    // lastNode is threaded through dumpCodeOrigin so it can show inline-stack
    // transitions between consecutive nodes (across block boundaries too).
    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        prefix.blockIndex = block->index;
        dumpBlockHeader(out, Prefix::noString, block, DumpAllPhis, context);
        out.print(prefix, "  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        // At-head state: per-variable for CPS forms, per-node for SSA.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print(prefix, "  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print(prefix, "  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print(prefix, "  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            if (dumpOSRAvailabilityData)
                out.print(prefix, "  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print(prefix, "  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print(prefix, "  Values: ", nodeValuePairListDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            prefix.clearNodeIndex();
            dumpCodeOrigin(out, Prefix::noString, lastNode, block->at(i), context);
            prefix.nodeIndex = i;
            dump(out, Prefix::noString, block->at(i), context);
        }
        prefix.clearNodeIndex();
        out.print(prefix, "  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        // At-tail state, mirroring the at-head dump above.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print(prefix, "  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print(prefix, "  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            if (dumpOSRAvailabilityData)
                out.print(prefix, "  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print(prefix, "  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print(prefix, "  Values: ", nodeValuePairListDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    prefix.clearBlockIndex();

    out.print(prefix, "GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print(prefix, "    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));
    
    // Flush anything the dump context accumulated (e.g. structure dumps).
    if (!myContext.isEmpty()) {
        StringPrintStream prefixStr;
        prefixStr.print(prefix);
        myContext.dump(out, prefixStr.toCString().data());
        out.print("\n");
    }
}
645
646 void Graph::deleteNode(Node* node)
647 {
648     if (validationEnabled() && m_form == SSA) {
649         for (BasicBlock* block : blocksInNaturalOrder()) {
650             DFG_ASSERT(*this, node, !block->ssa->liveAtHead.contains(node));
651             DFG_ASSERT(*this, node, !block->ssa->liveAtTail.contains(node));
652         }
653     }
654
655     m_nodes.remove(node);
656 }
657
// Asks the node collection to pack its indices (e.g. after node deletions
// have left gaps in the numbering).
void Graph::packNodeIndices()
{
    m_nodes.packIndices();
}
662
663 void Graph::dethread()
664 {
665     if (m_form == LoadStore || m_form == SSA)
666         return;
667     
668     if (logCompilationChanges())
669         dataLog("Dethreading DFG graph.\n");
670     
671     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
672         BasicBlock* block = m_blocks[blockIndex].get();
673         if (!block)
674             continue;
675         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
676             Node* phi = block->phis[phiIndex];
677             phi->children.reset();
678         }
679     }
680     
681     m_form = LoadStore;
682 }
683
684 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
685 {
686     if (!successor->isReachable) {
687         successor->isReachable = true;
688         worklist.append(successor);
689     }
690     
691     if (!successor->predecessors.contains(block))
692         successor->predecessors.append(block);
693 }
694
695 void Graph::determineReachability()
696 {
697     Vector<BasicBlock*, 16> worklist;
698     for (BasicBlock* entrypoint : m_roots) {
699         entrypoint->isReachable = true;
700         worklist.append(entrypoint);
701     }
702     while (!worklist.isEmpty()) {
703         BasicBlock* block = worklist.takeLast();
704         for (unsigned i = block->numSuccessors(); i--;)
705             handleSuccessor(worklist, block, block->successor(i));
706     }
707 }
708
709 void Graph::resetReachability()
710 {
711     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
712         BasicBlock* block = m_blocks[blockIndex].get();
713         if (!block)
714             continue;
715         block->isReachable = false;
716         block->predecessors.clear();
717     }
718     
719     determineReachability();
720 }
721
namespace {

// Recomputes every node's ref count from scratch. A node's ref count reflects
// the reasons it must be generated: being must-generate, being the target of
// an unproven type check, or being used by another node that will be
// generated. The computation is a worklist-driven transitive closure over
// those roots.
class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                // postfixRef() returns the prior count, so a zero result means
                // this is the first reference and the node's uses still need
                // to be visited.
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
        // Outer loop: the SSA Upsilon scan below can discover new live nodes,
        // which re-populate the worklist and require another inner pass.
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        // A dead Upsilon feeding a live Phi must itself be live.
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    void countNode(Node* node)
    {
        // Enqueue only on the 0 -> 1 transition; later references just bump
        // the count.
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};

} // anonymous namespace
822
823 void Graph::computeRefCounts()
824 {
825     RefCountCalculator calculator(*this);
826     calculator.calculate();
827 }
828
829 void Graph::killBlockAndItsContents(BasicBlock* block)
830 {
831     if (auto& ssaData = block->ssa)
832         ssaData->invalidate();
833     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
834         deleteNode(block->phis[phiIndex]);
835     for (Node* node : *block)
836         deleteNode(node);
837     
838     killBlock(block);
839 }
840
841 void Graph::killUnreachableBlocks()
842 {
843     invalidateNodeLiveness();
844
845     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
846         BasicBlock* block = this->block(blockIndex);
847         if (!block)
848             continue;
849         if (block->isReachable)
850             continue;
851
852         dataLogIf(Options::verboseDFGBytecodeParsing(), "Basic block #", blockIndex, " was killed because it was unreachable\n");
853         killBlockAndItsContents(block);
854     }
855 }
856
// Discards all cached CFG-derived analyses (dominators, natural loops,
// control equivalence, backwards CFG). They are rebuilt lazily by the
// corresponding ensure*() accessors.
void Graph::invalidateCFG()
{
    m_cpsDominators = nullptr;
    m_ssaDominators = nullptr;
    m_cpsNaturalLoops = nullptr;
    m_ssaNaturalLoops = nullptr;
    m_controlEquivalenceAnalysis = nullptr;
    m_backwardsDominators = nullptr;
    m_backwardsCFG = nullptr;
    m_cpsCFG = nullptr;
}
868
869 void Graph::invalidateNodeLiveness()
870 {
871     if (m_form != SSA)
872         return;
873
874     for (BasicBlock* block : blocksInNaturalOrder())
875         block->ssa->invalidate();
876 }
877
// Replaces the next GetLocal of |variableAccessData| (scanning forward from
// |startIndexInBlock|) with |newGetLocal|, stopping at the first SetLocal of
// the same local since that ends the value's live range.
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            // A store to the same local terminates the scan.
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            // Keep variablesAtTail coherent if the node we replaced was the
            // last mention of this local in the block.
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}
908
909 BlockList Graph::blocksInPreOrder()
910 {
911     BlockList result;
912     result.reserveInitialCapacity(m_blocks.size());
913     BlockWorklist worklist;
914     for (BasicBlock* entrypoint : m_roots)
915         worklist.push(entrypoint);
916     while (BasicBlock* block = worklist.pop()) {
917         result.append(block);
918         for (unsigned i = block->numSuccessors(); i--;)
919             worklist.push(block->successor(i));
920     }
921
922     if (validationEnabled()) {
923         // When iterating over pre order, we should see dominators
924         // before things they dominate.
925         auto validateResults = [&] (auto& dominators) {
926             for (unsigned i = 0; i < result.size(); ++i) {
927                 BasicBlock* a = result[i];
928                 if (!a)
929                     continue;
930                 for (unsigned j = 0; j < result.size(); ++j) {
931                     BasicBlock* b = result[j];
932                     if (!b || a == b)
933                         continue;
934                     if (dominators.dominates(a, b))
935                         RELEASE_ASSERT(i < j);
936                 }
937             }
938         };
939
940         if (m_form == SSA || m_isInSSAConversion)
941             validateResults(ensureSSADominators());
942         else
943             validateResults(ensureCPSDominators());
944     }
945     return result;
946 }
947
// Returns the blocks in post order (each block after all blocks it dominates),
// using an explicit pre/post worklist instead of recursion. When
// |isSafeToValidate| is false the caller has not built enough of the CFG for
// dominators to be computed, so validation is skipped.
BlockList Graph::blocksInPostOrder(bool isSafeToValidate)
{
    BlockList result;
    result.reserveInitialCapacity(m_blocks.size());
    PostOrderBlockWorklist worklist;
    for (BasicBlock* entrypoint : m_roots)
        worklist.push(entrypoint);
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case VisitOrder::Pre:
            // First encounter: schedule the post visit, then the successors.
            worklist.pushPost(item.node);
            for (unsigned i = item.node->numSuccessors(); i--;)
                worklist.push(item.node->successor(i));
            break;
        case VisitOrder::Post:
            // All successors have been handled; the block can be emitted.
            result.append(item.node);
            break;
        }
    }

    if (isSafeToValidate && validationEnabled()) { // There are users of this where we haven't yet built of the CFG enough to be able to run dominators.
        auto validateResults = [&] (auto& dominators) {
            // When iterating over reverse post order, we should see dominators
            // before things they dominate.
            for (unsigned i = 0; i < result.size(); ++i) {
                BasicBlock* a = result[i];
                if (!a)
                    continue;
                for (unsigned j = 0; j < result.size(); ++j) {
                    BasicBlock* b = result[j];
                    if (!b || a == b)
                        continue;
                    if (dominators.dominates(a, b))
                        RELEASE_ASSERT(i > j);
                }
            }
        };

        if (m_form == SSA || m_isInSSAConversion)
            validateResults(ensureSSADominators());
        else
            validateResults(ensureCPSDominators());
    }

    return result;
}
994
995 void Graph::clearReplacements()
996 {
997     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
998         BasicBlock* block = m_blocks[blockIndex].get();
999         if (!block)
1000             continue;
1001         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
1002             block->phis[phiIndex]->setReplacement(nullptr);
1003         for (unsigned nodeIndex = block->size(); nodeIndex--;)
1004             block->at(nodeIndex)->setReplacement(nullptr);
1005     }
1006 }
1007
1008 void Graph::clearEpochs()
1009 {
1010     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
1011         BasicBlock* block = m_blocks[blockIndex].get();
1012         if (!block)
1013             continue;
1014         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
1015             block->phis[phiIndex]->setEpoch(Epoch());
1016         for (unsigned nodeIndex = block->size(); nodeIndex--;)
1017             block->at(nodeIndex)->setEpoch(Epoch());
1018     }
1019 }
1020
1021 void Graph::initializeNodeOwners()
1022 {
1023     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
1024         BasicBlock* block = m_blocks[blockIndex].get();
1025         if (!block)
1026             continue;
1027         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
1028             block->phis[phiIndex]->owner = block;
1029         for (unsigned nodeIndex = block->size(); nodeIndex--;)
1030             block->at(nodeIndex)->owner = block;
1031     }
1032 }
1033
1034 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
1035 {
1036     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
1037         BasicBlock* block = m_blocks[blockIndex].get();
1038         if (!block)
1039             continue;
1040         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
1041             block->phis[phiIndex]->clearFlags(flags);
1042         for (unsigned nodeIndex = block->size(); nodeIndex--;)
1043             block->at(nodeIndex)->clearFlags(flags);
1044     }
1045 }
1046
1047 bool Graph::watchCondition(const ObjectPropertyCondition& key)
1048 {
1049     if (!key.isWatchable())
1050         return false;
1051
1052     DesiredWeakReferences& weakReferences = m_plan.weakReferences();
1053     weakReferences.addLazily(key.object());
1054     if (key.hasPrototype())
1055         weakReferences.addLazily(key.prototype());
1056     if (key.hasRequiredValue())
1057         weakReferences.addLazily(key.requiredValue());
1058
1059     m_plan.watchpoints().addLazily(key);
1060
1061     if (key.kind() == PropertyCondition::Presence)
1062         m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
1063     
1064     return true;
1065 }
1066
1067 bool Graph::watchConditions(const ObjectPropertyConditionSet& keys)
1068 {
1069     if (!keys.isValid())
1070         return false;
1071
1072     for (const ObjectPropertyCondition& key : keys) {
1073         if (!watchCondition(key))
1074             return false;
1075     }
1076     return true;
1077 }
1078
// Returns true if a previously watched Presence condition (see
// watchCondition()) guarantees that loading |offset| from |base| is safe.
bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
{
    return m_safeToLoad.contains(std::make_pair(base, offset));
}
1083
// Registers a desired watchpoint on a global property so compiled code may
// rely on it. Returns false if the property's watchpoint set already got
// invalidated, in which case the caller is expected to emit ForceOSRExit.
bool Graph::watchGlobalProperty(JSGlobalObject* globalObject, unsigned identifierNumber)
{
    UniquedStringImpl* uid = identifiers()[identifierNumber];
    // If we already have a WatchpointSet, and it is already invalidated, it means that this scope operation must be changed from GlobalProperty to GlobalLexicalVar,
    // but we still have stale metadata here since we have not yet executed this bytecode operation since the invalidation. Just emitting ForceOSRExit to update the
    // metadata when it reaches to this code.
    if (auto* watchpoint = globalObject->getReferencedPropertyWatchpointSet(uid)) {
        if (!watchpoint->isStillValid())
            return false;
    }
    globalProperties().addLazily(DesiredGlobalProperty(globalObject, identifierNumber));
    return true;
}
1097
1098 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
1099 {
1100     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
1101     if (iter != m_bytecodeLiveness.end())
1102         return *iter->value;
1103     
1104     std::unique_ptr<FullBytecodeLiveness> liveness = makeUnique<FullBytecodeLiveness>();
1105     codeBlock->livenessAnalysis().computeFullLiveness(codeBlock, *liveness);
1106     FullBytecodeLiveness& result = *liveness;
1107     m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
1108     return result;
1109 }
1110
// Convenience overload: liveness for the baseline CodeBlock of an inline call
// frame (null |inlineCallFrame| means the machine code block).
FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}
1115
1116 BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
1117 {
1118     HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
1119     if (iter != m_bytecodeKills.end())
1120         return *iter->value;
1121     
1122     std::unique_ptr<BytecodeKills> kills = makeUnique<BytecodeKills>();
1123     codeBlock->livenessAnalysis().computeKills(codeBlock, *kills);
1124     BytecodeKills& result = *kills;
1125     m_bytecodeKills.add(codeBlock, WTFMove(kills));
1126     return result;
1127 }
1128
// Convenience overload: kill information for the baseline CodeBlock of an
// inline call frame (null |inlineCallFrame| means the machine code block).
BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
{
    return killsFor(baselineCodeBlockFor(inlineCallFrame));
}
1133
// Returns whether |operand| is live at |codeOrigin|, walking outward through
// the inline stack one frame at a time until the operand falls inside some
// frame's register space.
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    static constexpr bool verbose = false;
    
    if (verbose)
        dataLog("Checking of operand is live: ", operand, "\n");
    CodeOrigin* codeOriginPtr = &codeOrigin;
    for (;;) {
        // Translate the operand into the register space of the current frame.
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());
        
        if (verbose)
            dataLog("reg = ", reg, "\n");

        auto* inlineCallFrame = codeOriginPtr->inlineCallFrame();
        if (operand.offset() < codeOriginPtr->stackOffset() + CallFrame::headerSizeInRegisters) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < CallFrame::headerSizeInRegisters);

                // NOTE(review): this branch dereferences inlineCallFrame with no
                // null check — presumably callers never ask about the machine
                // frame's header slots; confirm.
                if (inlineCallFrame->isClosureCall
                    && reg.offset() == CallFrameSlot::callee) {
                    if (verbose)
                        dataLog("Looks like a callee.\n");
                    return true;
                }
                
                if (inlineCallFrame->isVarargs()
                    && reg.offset() == CallFrameSlot::argumentCount) {
                    if (verbose)
                        dataLog("Looks like the argument count.\n");
                    return true;
                }
                
                return false;
            }

            // A local of this frame: defer to the bytecode liveness analysis.
            if (verbose)
                dataLog("Asking the bytecode liveness.\n");
            return livenessFor(inlineCallFrame).operandIsLive(reg.offset(), codeOriginPtr->bytecodeIndex());
        }

        if (!inlineCallFrame) {
            if (verbose)
                dataLog("Ran out of stack, returning true.\n");
            return true;
        }

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->argumentsWithFixup.size()) {
            if (verbose)
                dataLog("Argument is live.\n");
            return true;
        }

        // We need to handle tail callers because we may decide to exit to the
        // the return bytecode following the tail call.
        codeOriginPtr = &inlineCallFrame->directCaller;
    }
    
    RELEASE_ASSERT_NOT_REACHED();
}
1198
1199 BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
1200 {
1201     BitVector result;
1202     result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
1203     forAllLocalsLiveInBytecode(
1204         codeOrigin,
1205         [&] (VirtualRegister reg) {
1206             ASSERT(reg.isLocal());
1207             result.quickSet(reg.toLocal());
1208         });
1209     return result;
1210 }
1211
1212 unsigned Graph::parameterSlotsForArgCount(unsigned argCount)
1213 {
1214     size_t frameSize = CallFrame::headerSizeInRegisters + argCount;
1215     size_t alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
1216     return alignedFrameSize - CallerFrameAndPC::sizeInRegisters;
1217 }
1218
1219 unsigned Graph::frameRegisterCount()
1220 {
1221     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
1222     return roundLocalRegisterCountForFramePointerOffset(result);
1223 }
1224
// Offset of the stack pointer: the virtual register just past the frame's last
// local.
unsigned Graph::stackPointerOffset()
{
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}
1229
1230 unsigned Graph::requiredRegisterCountForExit()
1231 {
1232     unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
1233     for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames()->begin(); !!iter; ++iter) {
1234         InlineCallFrame* inlineCallFrame = *iter;
1235         CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
1236         unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
1237         count = std::max(count, requiredCount);
1238     }
1239     return count;
1240 }
1241
// Conservative register count covering both normal execution and OSR exit.
unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    // FIXME: We should make sure that frameRegisterCount() and requiredRegisterCountForExit()
    // never overflows. https://bugs.webkit.org/show_bug.cgi?id=173852
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}
1248
// Tries to constant-fold a property load from a constant object base, given
// the set of structures the base is proven to have. Succeeds only if every
// structure's property-replacement watchpoint is still valid (and we start
// watching each one) and the object's current structure is in the proven set.
JSValue Graph::tryGetConstantProperty(
    JSValue base, const RegisteredStructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        RegisteredStructure structure = structureSet[i];

        // If the property was ever replaced, the fold would be stale.
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We only shrink the propertyStorage while
    // holding the Structure's lock. So, for this to fail, you'd need an access on a constant
    // object pointer such that the inline caches told us that the object had a structure that it
    // did not *yet* have, and then later,the object transitioned to that structure that the inline
    // caches had already seen. And then the processor reordered the stores. Seems unlikely and
    // difficult to test. I believe that this is worth revisiting but it isn't worth losing sleep
    // over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.

    Structure* structure = object->structure(m_vm);
    if (!structureSet.toStructureSet().contains(structure))
        return JSValue();

    return object->getDirectConcurrently(structure, offset);
}
1297
// Convenience overload for a single known structure.
JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
{
    return tryGetConstantProperty(base, RegisteredStructureSet(registerStructure(structure)), offset);
}
1302
1303 JSValue Graph::tryGetConstantProperty(
1304     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1305 {
1306     if (structure.isInfinite()) {
1307         // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
1308         // watching to constant-fold the property.
1309         // https://bugs.webkit.org/show_bug.cgi?id=147271
1310         return JSValue();
1311     }
1312     
1313     return tryGetConstantProperty(base, structure.set(), offset);
1314 }
1315
// Convenience overload unpacking an AbstractValue's constant and structure.
JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}
1320
1321 AbstractValue Graph::inferredValueForProperty(
1322     const AbstractValue& base, PropertyOffset offset,
1323     StructureClobberState clobberState)
1324 {
1325     if (JSValue value = tryGetConstantProperty(base, offset)) {
1326         AbstractValue result;
1327         result.set(*this, *freeze(value), clobberState);
1328         return result;
1329     }
1330
1331     return AbstractValue::heapTop();
1332 }
1333
// Tries to constant-fold a closure-variable load. Succeeds only if the symbol
// table entry has a still-watched watchpoint set; we then register the set so
// that any later store invalidates this compilation.
JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(m_vm, base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        // Entry and watchpoint state are read under the symbol table's lock.
        ConcurrentJSLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        // Only a still-watched set guarantees the variable hasn't been stored to.
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    watchpoints().addLazily(set);
    
    return value;
}
1372
// Convenience overload for an abstract value's constant.
JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
{
    return tryGetConstantClosureVar(value.m_value, offset);
}
1377
1378 JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1379 {
1380     if (!node->hasConstant())
1381         return JSValue();
1382     return tryGetConstantClosureVar(node->asJSValue(), offset);
1383 }
1384
// Returns |value| as a typed-array view we may constant-fold against, or null.
// The view is registered with the watchpoints so the compilation gets
// invalidated if the view changes.
JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
{
    if (!value)
        return nullptr;
    JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(m_vm, value);
    if (!view)
        return nullptr;
    if (!view->length())
        return nullptr;
    // NOTE(review): fence orders the length read before later dependent reads —
    // presumably pairing with a store fence where the view is mutated; confirm.
    WTF::loadLoadFence();
    watchpoints().addLazily(view);
    return view;
}
1398
1399 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1400 {
1401     if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
1402         return nullptr;
1403     return tryGetFoldableView(value);
1404 }
1405
// Rebuilds the CodeBlock's constant pool from the graph's frozen values:
// weak values become weak references owned by the plan, strong values get
// real constant-pool slots. Holds the CodeBlock's lock throughout.
void Graph::registerFrozenValues()
{
    ConcurrentJSLocker locker(m_codeBlock->m_lock);
    m_codeBlock->constants().shrink(0);
    m_codeBlock->constantsSourceCodeRepresentation().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        // Values not pointing into the heap need no GC registration.
        if (!value->pointsToHeap())
            continue;
        
        ASSERT(value->structure());
        ASSERT(m_plan.weakReferences().contains(value->structure()));

        switch (value->strength()) {
        case WeakValue: {
            m_plan.weakReferences().addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily(locker);
            // We already have a barrier on the code block.
            m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
    m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
}
1433
1434 void Graph::visitChildren(SlotVisitor& visitor)
1435 {
1436     for (FrozenValue* value : m_frozenValues) {
1437         visitor.appendUnbarriered(value->value());
1438         visitor.appendUnbarriered(value->structure());
1439     }
1440 }
1441
// Returns the canonical FrozenValue for |value|, creating it on first use.
// Freezing also registers the value's structure, if it has one.
FrozenValue* Graph::freeze(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();

    // There are weird relationships in how optimized CodeBlocks
    // point to other CodeBlocks. We don't want to have them be
    // part of the weak pointer set. For example, an optimized CodeBlock
    // having a weak pointer to itself will cause it to get collected.
    RELEASE_ASSERT(!jsDynamicCast<CodeBlock*>(m_vm, value));
    
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    // NOTE(review): presumably recorded so the JIT can treat these uint32
    // constants specially; confirm against users of m_uint32ValuesInUse.
    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());
    
    FrozenValue frozenValue = FrozenValue::freeze(value);
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);
    
    // Store the interned FrozenValue back into the map entry we reserved above.
    return result.iterator->value = m_frozenValues.add(frozenValue);
}
1466
1467 FrozenValue* Graph::freezeStrong(JSValue value)
1468 {
1469     FrozenValue* result = freeze(value);
1470     result->strengthenTo(StrongValue);
1471     return result;
1472 }
1473
1474 void Graph::convertToConstant(Node* node, FrozenValue* value)
1475 {
1476     if (value->structure())
1477         assertIsRegistered(value->structure());
1478     node->convertToConstant(value);
1479 }
1480
// Convenience overload: freezes |value| (weakly) and converts |node| to it.
void Graph::convertToConstant(Node* node, JSValue value)
{
    convertToConstant(node, freeze(value));
}
1485
// Convenience overload: freezes |value| strongly and converts |node| to it.
void Graph::convertToStrongConstant(Node* node, JSValue value)
{
    convertToConstant(node, freezeStrong(value));
}
1490
1491 RegisteredStructure Graph::registerStructure(Structure* structure, StructureRegistrationResult& result)
1492 {
1493     m_plan.weakReferences().addLazily(structure);
1494     if (m_plan.watchpoints().consider(structure))
1495         result = StructureRegisteredAndWatched;
1496     else
1497         result = StructureRegisteredNormally;
1498     return RegisteredStructure::createPrivate(structure);
1499 }
1500
// Registers |structure| as a weak reference and unconditionally watches its
// transition watchpoint set.
void Graph::registerAndWatchStructureTransition(Structure* structure)
{
    m_plan.weakReferences().addLazily(structure);
    m_plan.watchpoints().addLazily(structure->transitionWatchpointSet());
}
1506
// Validation hook: crashes the compilation if |structure| was never registered,
// or if it should be watched but isn't.
void Graph::assertIsRegistered(Structure* structure)
{
    // It's convenient to be able to call this with a maybe-null structure.
    if (!structure)
        return;

    DFG_ASSERT(*this, nullptr, m_plan.weakReferences().contains(structure));

    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;
    
    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}
1522
// Shared failure reporter for the logAssertionFailure() overloads below: dumps
// the assertion and location, the context text, and the whole graph. The
// assertion/location lines are printed both before and after the (potentially
// huge) graph dump — presumably so they survive log truncation at either end.
static void logDFGAssertionFailure(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
}
1538
// Assertion-failure logging with no particular context object.
void Graph::logAssertionFailure(
    std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
{
    logDFGAssertionFailure(*this, "", file, line, function, assertion);
}
1544
// Assertion-failure logging with the offending node as context.
void Graph::logAssertionFailure(
    Node* node, const char* file, int line, const char* function, const char* assertion)
{
    logDFGAssertionFailure(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
}
1550
// Assertion-failure entry point carrying the BasicBlock being processed, so
// the dump identifies which block the failing phase was handling.
void Graph::logAssertionFailure(
    BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
{
    logDFGAssertionFailure(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
}
1556
1557 CPSCFG& Graph::ensureCPSCFG()
1558 {
1559     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1560     if (!m_cpsCFG)
1561         m_cpsCFG = makeUnique<CPSCFG>(*this);
1562     return *m_cpsCFG;
1563 }
1564
1565 CPSDominators& Graph::ensureCPSDominators()
1566 {
1567     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1568     if (!m_cpsDominators)
1569         m_cpsDominators = makeUnique<CPSDominators>(*this);
1570     return *m_cpsDominators;
1571 }
1572
1573 SSADominators& Graph::ensureSSADominators()
1574 {
1575     RELEASE_ASSERT(m_form == SSA || m_isInSSAConversion);
1576     if (!m_ssaDominators)
1577         m_ssaDominators = makeUnique<SSADominators>(*this);
1578     return *m_ssaDominators;
1579 }
1580
1581 CPSNaturalLoops& Graph::ensureCPSNaturalLoops()
1582 {
1583     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1584     ensureCPSDominators();
1585     if (!m_cpsNaturalLoops)
1586         m_cpsNaturalLoops = makeUnique<CPSNaturalLoops>(*this);
1587     return *m_cpsNaturalLoops;
1588 }
1589
1590 SSANaturalLoops& Graph::ensureSSANaturalLoops()
1591 {
1592     RELEASE_ASSERT(m_form == SSA);
1593     ensureSSADominators();
1594     if (!m_ssaNaturalLoops)
1595         m_ssaNaturalLoops = makeUnique<SSANaturalLoops>(*this);
1596     return *m_ssaNaturalLoops;
1597 }
1598
1599 BackwardsCFG& Graph::ensureBackwardsCFG()
1600 {
1601     // We could easily relax this in the future to work over CPS, but today, it's only used in SSA.
1602     RELEASE_ASSERT(m_form == SSA); 
1603     if (!m_backwardsCFG)
1604         m_backwardsCFG = makeUnique<BackwardsCFG>(*this);
1605     return *m_backwardsCFG;
1606 }
1607
1608 BackwardsDominators& Graph::ensureBackwardsDominators()
1609 {
1610     RELEASE_ASSERT(m_form == SSA);
1611     if (!m_backwardsDominators)
1612         m_backwardsDominators = makeUnique<BackwardsDominators>(*this);
1613     return *m_backwardsDominators;
1614 }
1615
1616 ControlEquivalenceAnalysis& Graph::ensureControlEquivalenceAnalysis()
1617 {
1618     RELEASE_ASSERT(m_form == SSA);
1619     if (!m_controlEquivalenceAnalysis)
1620         m_controlEquivalenceAnalysis = makeUnique<ControlEquivalenceAnalysis>(*this);
1621     return *m_controlEquivalenceAnalysis;
1622 }
1623
// Finds baseline profiling data usable for speculating on operandNode's value
// when it flows into currentNode. Returns an empty MethodOfGettingAValueProfile
// if no profile is found.
MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* currentNode, Node* operandNode)
{
    // This represents IR like `CurrentNode(@operandNode)`. For example: `GetByVal(..., Int32:@GetLocal)`.

    // Walk down through value-forwarding nodes looking for one with a usable
    // baseline profile.
    for (Node* node = operandNode; node;) {
        // currentNode is null when we're doing speculation checks for checkArgumentTypes().
        // Only consult profiles when the operand comes from a different bytecode
        // than currentNode (or currentNode has no result); otherwise the profile
        // would describe currentNode's own result.
        if (!currentNode || node->origin.semantic != currentNode->origin.semantic || !currentNode->hasResult()) {
            CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);

            if (node->accessesStack(*this)) {
                if (m_form != SSA && node->local().isArgument()) {
                    int argument = node->local().toArgument();
                    // Only arguments of the primary entrypoint (block(0)) are matched here.
                    Node* argumentNode = m_rootToArguments.find(block(0))->value[argument];
                    // FIXME: We should match SetArgumentDefinitely nodes at other entrypoints as well:
                    // https://bugs.webkit.org/show_bug.cgi?id=175841
                    if (argumentNode && node->variableAccessData() == argumentNode->variableAccessData())
                        return &profiledBlock->valueProfileForArgument(argument);
                }

                if (node->op() == GetLocal) {
                    // Use the lazy operand profile keyed by (bytecode index, operand).
                    return MethodOfGettingAValueProfile::fromLazyOperand(
                        profiledBlock,
                        LazyOperandValueProfileKey(
                            node->origin.semantic.bytecodeIndex(), node->local()));
                }
            }

            // Nodes with heap predictions have a per-bytecode value profile.
            if (node->hasHeapPrediction())
                return &profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex());

            // Fall back to arithmetic profiling when the baseline JIT collected it.
            if (profiledBlock->hasBaselineJITProfiling()) {
                if (ArithProfile* result = profiledBlock->arithProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex()))
                    return result;
            }
        }

        // These ops forward their child's value, so the child's profile applies;
        // keep searching through them. Anything else terminates the walk.
        switch (node->op()) {
        case BooleanToNumber:
        case Identity:
        case ValueRep:
        case DoubleRep:
        case Int52Rep:
            node = node->child1().node();
            break;
        default:
            node = nullptr;
        }
    }
    
    // No usable profile was found.
    return MethodOfGettingAValueProfile();
}
1675
1676 bool Graph::getRegExpPrototypeProperty(JSObject* regExpPrototype, Structure* regExpPrototypeStructure, UniquedStringImpl* uid, JSValue& returnJSValue)
1677 {
1678     unsigned attributesUnused;
1679     PropertyOffset offset = regExpPrototypeStructure->getConcurrently(uid, attributesUnused);
1680     if (!isValidOffset(offset))
1681         return false;
1682
1683     JSValue value = tryGetConstantProperty(regExpPrototype, regExpPrototypeStructure, offset);
1684     if (!value)
1685         return false;
1686
1687     // We only care about functions and getters at this point. If you want to access other properties
1688     // you'll have to add code for those types.
1689     JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, value);
1690     if (!function) {
1691         GetterSetter* getterSetter = jsDynamicCast<GetterSetter*>(m_vm, value);
1692
1693         if (!getterSetter)
1694             return false;
1695
1696         returnJSValue = JSValue(getterSetter);
1697         return true;
1698     }
1699
1700     returnJSValue = value;
1701     return true;
1702 }
1703
1704 bool Graph::isStringPrototypeMethodSane(JSGlobalObject* globalObject, UniquedStringImpl* uid)
1705 {
1706     ObjectPropertyConditionSet conditions = generateConditionsForPrototypeEquivalenceConcurrently(m_vm, globalObject, globalObject->stringObjectStructure(), globalObject->stringPrototype(), uid);
1707
1708     if (!conditions.isValid())
1709         return false;
1710
1711     ObjectPropertyCondition equivalenceCondition = conditions.slotBaseCondition();
1712     RELEASE_ASSERT(equivalenceCondition.hasRequiredValue());
1713     JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, equivalenceCondition.condition().requiredValue());
1714     if (!function)
1715         return false;
1716
1717     if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
1718         return false;
1719     
1720     return watchConditions(conditions);
1721 }
1722
1723
1724 bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
1725 {
1726     if (hasExitSite(codeOrigin, BadCache) || hasExitSite(codeOrigin, BadConstantCache))
1727         return false;
1728
1729     JSGlobalObject* globalObject = globalObjectFor(codeOrigin);
1730     Structure* stringObjectStructure = globalObjectFor(codeOrigin)->stringObjectStructure();
1731     registerStructure(stringObjectStructure);
1732     ASSERT(stringObjectStructure->storedPrototype().isObject());
1733     ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo(stringObjectStructure->storedPrototype().asCell()->vm()) == StringPrototype::info());
1734
1735     if (!watchConditions(generateConditionsForPropertyMissConcurrently(m_vm, globalObject, stringObjectStructure, m_vm.propertyNames->toPrimitiveSymbol.impl())))
1736         return false;
1737
1738     // We're being conservative here. We want DFG's ToString on StringObject to be
1739     // used in both numeric contexts (that would call valueOf()) and string contexts
1740     // (that would call toString()). We don't want the DFG to have to distinguish
1741     // between the two, just because that seems like it would get confusing. So we
1742     // just require both methods to be sane.
1743     if (!isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->valueOf.impl()))
1744         return false;
1745     return isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->toString.impl());
1746 }
1747
// Determines whether an exception thrown at codeOrigin would be caught within
// this machine frame. On success, reports the catch's CodeOrigin and handler
// through the out-parameters and returns true; returns false if the exception
// would propagate past the machine frame.
bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
{
    // Fast path: this compilation has no exception handlers at all.
    if (!m_hasExceptionHandlers)
        return false;

    // Walk outward through the inline stack: check the current frame's baseline
    // code block for a handler, then retry at the caller's call site.
    unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex();
    while (1) {
        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame();
        CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
        if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
            opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
            catchHandlerOut = handler;
            return true;
        }

        // Reached the machine (non-inlined) frame without finding a handler.
        if (!inlineCallFrame)
            return false;

        // Continue the search at the bytecode of the inlined call site.
        bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex();
        codeOrigin = inlineCallFrame->directCaller;
    }

    RELEASE_ASSERT_NOT_REACHED();
}
1772
1773 bool Graph::canDoFastSpread(Node* node, const AbstractValue& value)
1774 {
1775     // The parameter 'value' is the AbstractValue for child1 (the thing being spread).
1776     ASSERT(node->op() == Spread);
1777
1778     if (node->child1().useKind() != ArrayUse) {
1779         // Note: we only speculate on ArrayUse when we've set up the necessary watchpoints
1780         // to prove that the iteration protocol is non-observable starting from ArrayPrototype.
1781         return false;
1782     }
1783
1784     // FIXME: We should add profiling of the incoming operand to Spread
1785     // so we can speculate in such a way that we guarantee that this
1786     // function would return true:
1787     // https://bugs.webkit.org/show_bug.cgi?id=171198
1788
1789     if (!value.m_structure.isFinite())
1790         return false;
1791
1792     ArrayPrototype* arrayPrototype = globalObjectFor(node->child1()->origin.semantic)->arrayPrototype();
1793     bool allGood = true;
1794     value.m_structure.forEach([&] (RegisteredStructure structure) {
1795         allGood &= structure->hasMonoProto()
1796             && structure->storedPrototype() == arrayPrototype
1797             && !structure->isDictionary()
1798             && structure->getConcurrently(m_vm.propertyNames->iteratorSymbol.impl()) == invalidOffset
1799             && !structure->mayInterceptIndexedAccesses();
1800     });
1801
1802     return allGood;
1803 }
1804
1805 void Graph::clearCPSCFGData()
1806 {
1807     m_cpsNaturalLoops = nullptr;
1808     m_cpsDominators = nullptr;
1809     m_cpsCFG = nullptr;
1810 }
1811
1812 void Prefix::dump(PrintStream& out) const
1813 {
1814     if (!m_enabled)
1815         return;
1816
1817     if (!noHeader) {
1818         if (nodeIndex >= 0)
1819             out.printf("%3d ", nodeIndex);
1820         else
1821             out.printf("    ");
1822
1823         if (blockIndex >= 0)
1824             out.printf("%2d ", blockIndex);
1825         else
1826             out.printf("   ");
1827
1828         if (phaseNumber >= 0)
1829             out.printf("%2d: ", phaseNumber);
1830         else
1831             out.printf("  : ");
1832     }
1833     if (prefixStr)
1834         out.printf("%s", prefixStr);
1835 }
1836
1837 } } // namespace JSC::DFG
1838
1839 #endif // ENABLE(DFG_JIT)