Don't dump OSRAvailabilityData in Graph::dump because a stale Availability may point...
Source/JavaScriptCore/dfg/DFGGraph.cpp
/*
 * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGGraph.h"

#if ENABLE(DFG_JIT)

#include "BytecodeKills.h"
#include "BytecodeLivenessAnalysisInlines.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGBackwardsCFG.h"
#include "DFGBackwardsDominators.h"
#include "DFGBlockWorklist.h"
#include "DFGCFG.h"
#include "DFGClobberSet.h"
#include "DFGClobbersExitState.h"
#include "DFGControlEquivalenceAnalysis.h"
#include "DFGDominators.h"
#include "DFGFlowIndexing.h"
#include "DFGFlowMap.h"
#include "DFGJITCode.h"
#include "DFGMayExit.h"
#include "DFGNaturalLoops.h"
#include "DFGVariableAccessDataDump.h"
#include "FullBytecodeLiveness.h"
#include "FunctionExecutableDump.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JSLexicalEnvironment.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "OperandsInlines.h"
#include "JSCInlines.h"
#include "StackAlignment.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>

namespace JSC { namespace DFG {

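// Dumping the OSR availability data is disabled by default: as the change
// description above notes, a stale Availability may point at a node that has
// since been deleted, so printing it from Graph::dump is not always safe.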
static constexpr bool dumpOSRAvailabilityData = false;

// Creates an array of stringized names.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};

Graph::Graph(VM& vm, Plan& plan)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock())
    , m_profiledBlock(m_codeBlock->alternative())
    , m_ssaCFG(std::make_unique<SSACFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);

    m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();

    m_indexingCache = std::make_unique<FlowIndexing>(*this);
    m_abstractValuesCache = std::make_unique<FlowMap<AbstractValue>>(*this);

    registerStructure(vm.structureStructure.get());
    this->stringStructure = registerStructure(vm.stringStructure.get());
    this->symbolStructure = registerStructure(vm.symbolStructure.get());
}

Graph::~Graph()
{
}

const char* Graph::opName(NodeType op)
{
    return dfgOpNames[op];
}

static void printWhiteSpace(PrintStream& out, unsigned amount)
{
    while (amount-- > 0)
        out.print(" ");
}

bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;

    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    if (!previousNode)
        return false;

    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;

    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
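    // Worked example (hypothetical inline frames): if the previous node's inline
    // stack is [A, B, C] and the current node's is [A, D], they diverge at index 1.
    // The loops below pop C and B (indices 2 and 1 of the previous stack), then
    // push D (index 1 of the current stack).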

    bool hasPrinted = false;

    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }

    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }

    return hasPrinted;
}

int Graph::amountOfNodeWhiteSpace(Node* node)
{
    return (node->origin.semantic.inlineDepth() - 1) * 2;
}

void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
{
    printWhiteSpace(out, amountOfNodeWhiteSpace(node));
}

void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count is prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. They may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasNumberOfArgumentsToSkip())
        out.print(comma, "numberOfArgumentsToSkip = ", node->numberOfArgumentsToSkip());
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithUnaryType())
        out.print(comma, "Type:", node->arithUnaryType());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasArithRoundingMode())
        out.print(comma, "Rounding:", node->arithRoundingMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasArgumentIndex())
        out.print(comma, node->argumentIndex());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasClassInfo())
        out.print(comma, *node->classInfo());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet().toStructureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure().get(), context));
    if (node->op() == CPUIntrinsic)
        out.print(comma, intrinsicName(node->intrinsic()));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next.get()));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(m_vm, executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasSpeculatedTypeForQuery())
        out.print(comma, SpeculationDump(node->speculatedTypeForQuery()));
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    if (node->hasMatchStructureData()) {
        for (MatchStructureVariant& variant : node->matchStructureData().variants)
            out.print(comma, inContext(*variant.structure.get(), context), "=>", variant.result);
    }
    ASSERT(node->hasVariableAccessData(*this) == node->accessesStack(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasVectorLengthHint())
        out.print(comma, "vectorLengthHint = ", node->vectorLengthHint());
    if (node->hasLazyJSValue())
        out.print(comma, node->lazyJSValue());
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingMode()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->hasCallDOMGetterData()) {
        CallDOMGetterData* data = node->callDOMGetterData();
        out.print(comma, "id", data->identifierNumber, "{", identifiers()[data->identifierNumber], "}");
        out.print(", domJIT = ", RawPointer(data->domJIT));
    }
    if (node->hasIgnoreLastIndexIsWritable())
        out.print(comma, "ignoreLastIndexIsWritable = ", node->ignoreLastIndexIsWritable());
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    if (node->hasCallLinkStatus())
        out.print(comma, *node->callLinkStatus());
    if (node->hasGetByIdStatus())
        out.print(comma, *node->getByIdStatus());
    if (node->hasInByIdStatus())
        out.print(comma, *node->inByIdStatus());
    if (node->hasPutByIdStatus())
        out.print(comma, *node->putByIdStatus());
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    if (node->isEntrySwitch()) {
        EntrySwitchData* data = node->entrySwitchData();
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, BranchTarget(data->cases[i]));
    }
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    out.print(comma, node->origin.exitOK ? "ExitValid" : "ExitInvalid");
    if (node->origin.wasHoisted)
        out.print(comma, "WasHoisted");
    out.print(")");

    if (node->accessesStack(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));

    out.print("\n");
}

bool Graph::terminalsAreValid()
{
    for (BasicBlock* block : blocksInNaturalOrder()) {
        if (!block->terminal())
            return false;
    }
    return true;
}

static BasicBlock* unboxLoopNode(const CPSCFG::Node& node) { return node.node(); }
static BasicBlock* unboxLoopNode(BasicBlock* block) { return block; }

void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):",
        block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", block->isCatchEntrypoint ? " (Catch Entrypoint)" : "", "\n");
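    // The self-comparison below is an intentional NaN filter: an execution count
    // that was never recorded is NaN and compares unequal to itself, so the count
    // is only printed when it holds a real number.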
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
        }
    } else
        out.print(" <invalid>");
    out.print("\n");

    auto printDominators = [&] (auto& dominators) {
        out.print(prefix, "  Dominated by: ", dominators.dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", dominators.blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", dominators.dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ",
            dominators.iteratedDominanceFrontierOf(typename std::remove_reference<decltype(dominators)>::type::List { block }), "\n");
    };

    if (terminalsAreValid()) {
        if (m_ssaDominators)
            printDominators(*m_ssaDominators);
        else if (m_cpsDominators)
            printDominators(*m_cpsDominators);
    }

    if (m_backwardsDominators && terminalsAreValid()) {
        out.print(prefix, "  Backwards dominated by: ", m_backwardsDominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Backwards dominates: ", m_backwardsDominators->blocksDominatedBy(block), "\n");
    }
    if (m_controlEquivalenceAnalysis && terminalsAreValid()) {
        out.print(prefix, "  Control equivalent to:");
        for (BasicBlock* otherBlock : blocksInNaturalOrder()) {
            if (m_controlEquivalenceAnalysis->areEquivalent(block, otherBlock))
                out.print(" ", *otherBlock);
        }
        out.print("\n");
    }

    auto printNaturalLoops = [&] (auto& naturalLoops) {
        if (const auto* loop = naturalLoops->headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(unboxLoopNode(loop->at(i))->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }

        auto containingLoops = naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *unboxLoopNode(containingLoops[i]->header()));
            out.print("\n");
        }
    };

    if (m_ssaNaturalLoops)
        printNaturalLoops(m_ssaNaturalLoops);
    else if (m_cpsNaturalLoops)
        printNaturalLoops(m_cpsNaturalLoops);

    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}

void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;

    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA) {
        for (unsigned entrypointIndex = 0; entrypointIndex < m_argumentFormats.size(); ++entrypointIndex)
            out.print("  Argument formats for entrypoint index: ", entrypointIndex, " : ", listDump(m_argumentFormats[entrypointIndex]), "\n");
    } else {
        for (auto pair : m_rootToArguments)
            out.print("  Arguments for block#", pair.key->index, ": ", listDump(pair.value), "\n");
    }
    out.print("\n");

    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }

        case SSA: {
            RELEASE_ASSERT(block->ssa);
            if (dumpOSRAvailabilityData)
                out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }

        case SSA: {
            RELEASE_ASSERT(block->ssa);
            if (dumpOSRAvailabilityData)
                out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }

    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));

    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}

void Graph::deleteNode(Node* node)
{
    if (validationEnabled() && m_form == SSA) {
        for (BasicBlock* block : blocksInNaturalOrder()) {
            DFG_ASSERT(*this, node, !block->ssa->liveAtHead.contains(node));
            DFG_ASSERT(*this, node, !block->ssa->liveAtTail.contains(node));
        }
    }

    m_nodes.remove(node);
}

void Graph::packNodeIndices()
{
    m_nodes.packIndices();
}

void Graph::dethread()
{
    if (m_form == LoadStore || m_form == SSA)
        return;

    if (logCompilationChanges())
        dataLog("Dethreading DFG graph.\n");

    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
            Node* phi = block->phis[phiIndex];
            phi->children.reset();
        }
    }

    m_form = LoadStore;
}

void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
{
    if (!successor->isReachable) {
        successor->isReachable = true;
        worklist.append(successor);
    }

    if (!successor->predecessors.contains(block))
        successor->predecessors.append(block);
}

void Graph::determineReachability()
{
    Vector<BasicBlock*, 16> worklist;
    for (BasicBlock* entrypoint : m_roots) {
        entrypoint->isReachable = true;
        worklist.append(entrypoint);
    }
    while (!worklist.isEmpty()) {
        BasicBlock* block = worklist.takeLast();
        for (unsigned i = block->numSuccessors(); i--;)
            handleSuccessor(worklist, block, block->successor(i));
    }
}

void Graph::resetReachability()
{
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        block->isReachable = false;
        block->predecessors.clear();
    }

    determineReachability();
}

namespace {

class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }

    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }

        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }

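        // Iterate to a fixpoint. The inner loop propagates liveness along ordinary
        // data-flow edges. In SSA, a Phi's dependence on its Upsilons is meta-data
        // rather than an edge, so once the worklist drains we rescan for Upsilons
        // whose Phi became live, count them, and go around again until nothing
        // changes.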
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }

            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }

private:
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }

    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }

    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }

    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};

} // anonymous namespace

void Graph::computeRefCounts()
{
    RefCountCalculator calculator(*this);
    calculator.calculate();
}

void Graph::killBlockAndItsContents(BasicBlock* block)
{
    if (auto& ssaData = block->ssa)
        ssaData->invalidate();
    for (unsigned phiIndex = block->phis.size(); phiIndex--;)
        deleteNode(block->phis[phiIndex]);
    for (Node* node : *block)
        deleteNode(node);

    killBlock(block);
}

void Graph::killUnreachableBlocks()
{
    invalidateNodeLiveness();

    for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        if (block->isReachable)
            continue;

        dataLogIf(Options::verboseDFGBytecodeParsing(), "Basic block #", blockIndex, " was killed because it was unreachable\n");
        killBlockAndItsContents(block);
    }
}

void Graph::invalidateCFG()
{
    m_cpsDominators = nullptr;
    m_ssaDominators = nullptr;
    m_cpsNaturalLoops = nullptr;
    m_ssaNaturalLoops = nullptr;
    m_controlEquivalenceAnalysis = nullptr;
    m_backwardsDominators = nullptr;
    m_backwardsCFG = nullptr;
    m_cpsCFG = nullptr;
}

void Graph::invalidateNodeLiveness()
{
    if (m_form != SSA)
        return;

    for (BasicBlock* block : blocksInNaturalOrder())
        block->ssa->invalidate();
}

void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }

        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }

        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}

BlockList Graph::blocksInPreOrder()
{
    BlockList result;
    BlockWorklist worklist;
    for (BasicBlock* entrypoint : m_roots)
        worklist.push(entrypoint);
    while (BasicBlock* block = worklist.pop()) {
        result.append(block);
        for (unsigned i = block->numSuccessors(); i--;)
            worklist.push(block->successor(i));
    }

    if (validationEnabled()) {
        // When iterating over pre order, we should see dominators
        // before things they dominate.
        auto validateResults = [&] (auto& dominators) {
            for (unsigned i = 0; i < result.size(); ++i) {
                BasicBlock* a = result[i];
                if (!a)
                    continue;
                for (unsigned j = 0; j < result.size(); ++j) {
                    BasicBlock* b = result[j];
                    if (!b || a == b)
                        continue;
                    if (dominators.dominates(a, b))
                        RELEASE_ASSERT(i < j);
                }
            }
        };

        if (m_form == SSA || m_isInSSAConversion)
            validateResults(ensureSSADominators());
        else
            validateResults(ensureCPSDominators());
    }
    return result;
}

BlockList Graph::blocksInPostOrder(bool isSafeToValidate)
{
    BlockList result;
    PostOrderBlockWorklist worklist;
    for (BasicBlock* entrypoint : m_roots)
        worklist.push(entrypoint);
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case VisitOrder::Pre:
            worklist.pushPost(item.node);
            for (unsigned i = item.node->numSuccessors(); i--;)
                worklist.push(item.node->successor(i));
            break;
        case VisitOrder::Post:
            result.append(item.node);
            break;
        }
    }

    if (isSafeToValidate && validationEnabled()) { // There are users of this where we haven't yet built up the CFG enough to be able to run dominators.
        auto validateResults = [&] (auto& dominators) {
            // When iterating over reverse post order, we should see dominators
            // before things they dominate.
            for (unsigned i = 0; i < result.size(); ++i) {
                BasicBlock* a = result[i];
                if (!a)
                    continue;
                for (unsigned j = 0; j < result.size(); ++j) {
                    BasicBlock* b = result[j];
                    if (!b || a == b)
                        continue;
                    if (dominators.dominates(a, b))
                        RELEASE_ASSERT(i > j);
                }
            }
        };

        if (m_form == SSA || m_isInSSAConversion)
            validateResults(ensureSSADominators());
        else
            validateResults(ensureCPSDominators());
    }

    return result;
}

void Graph::clearReplacements()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setReplacement(nullptr);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setReplacement(nullptr);
    }
}

void Graph::clearEpochs()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setEpoch(Epoch());
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setEpoch(Epoch());
    }
}

void Graph::initializeNodeOwners()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->owner = block;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->owner = block;
    }
}

void Graph::clearFlagsOnAllNodes(NodeFlags flags)
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->clearFlags(flags);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->clearFlags(flags);
    }
}

bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    if (!key.isWatchable())
        return false;

    DesiredWeakReferences& weakReferences = m_plan.weakReferences();
    weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        weakReferences.addLazily(key.requiredValue());

    m_plan.watchpoints().addLazily(key);

    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));

    return true;
}

bool Graph::watchConditions(const ObjectPropertyConditionSet& keys)
{
    if (!keys.isValid())
        return false;

    for (const ObjectPropertyCondition& key : keys) {
        if (!watchCondition(key))
            return false;
    }
    return true;
}

bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
{
    return m_safeToLoad.contains(std::make_pair(base, offset));
}

InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    assertIsRegistered(key.structure());

    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    m_inferredTypes.add(key, typeDescriptor);

    m_plan.weakReferences().addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints().addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}

FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
    if (iter != m_bytecodeLiveness.end())
        return *iter->value;

    std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
    codeBlock->livenessAnalysis().computeFullLiveness(codeBlock, *liveness);
    FullBytecodeLiveness& result = *liveness;
    m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
    return result;
}

FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}

BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
    if (iter != m_bytecodeKills.end())
        return *iter->value;

    std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
    codeBlock->livenessAnalysis().computeKills(codeBlock, *kills);
    BytecodeKills& result = *kills;
    m_bytecodeKills.add(codeBlock, WTFMove(kills));
    return result;
}

BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
{
    return killsFor(baselineCodeBlockFor(inlineCallFrame));
}

bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
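    // Walk from the given code origin out through its inline call frames: translate
    // the operand into a frame-relative register at each level, answer directly for
    // call-frame-header slots, and otherwise consult that frame's bytecode liveness.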
    static const bool verbose = false;

    if (verbose)
        dataLog("Checking if operand is live: ", operand, "\n");
    CodeOrigin* codeOriginPtr = &codeOrigin;
    for (;;) {
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());

        if (verbose)
            dataLog("reg = ", reg, "\n");

        if (operand.offset() < codeOriginPtr->stackOffset() + CallFrame::headerSizeInRegisters) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < CallFrame::headerSizeInRegisters);

                if (codeOriginPtr->inlineCallFrame->isClosureCall
                    && reg.offset() == CallFrameSlot::callee) {
                    if (verbose)
                        dataLog("Looks like a callee.\n");
                    return true;
                }

                if (codeOriginPtr->inlineCallFrame->isVarargs()
                    && reg.offset() == CallFrameSlot::argumentCount) {
                    if (verbose)
                        dataLog("Looks like the argument count.\n");
                    return true;
                }

                return false;
            }

            if (verbose)
                dataLog("Asking the bytecode liveness.\n");
            return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
                reg.offset(), codeOriginPtr->bytecodeIndex);
        }

        InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
        if (!inlineCallFrame) {
            if (verbose)
                dataLog("Ran out of stack, returning true.\n");
            return true;
        }

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->argumentsWithFixup.size()) {
            if (verbose)
                dataLog("Argument is live.\n");
            return true;
        }

        codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();

        // The first inline call frame could be an inline tail call.
        if (!codeOriginPtr) {
            if (verbose)
                dataLog("Dead because of tail inlining.\n");
            return false;
        }
    }

    RELEASE_ASSERT_NOT_REACHED();
}

BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
{
    BitVector result;
    result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
    forAllLocalsLiveInBytecode(
        codeOrigin,
        [&] (VirtualRegister reg) {
            ASSERT(reg.isLocal());
            result.quickSet(reg.toLocal());
        });
    return result;
}

unsigned Graph::parameterSlotsForArgCount(unsigned argCount)
{
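    // Worked example (assuming the typical 64-bit values of a 5-register call frame
    // header and 2-register stack alignment): argCount = 3 gives frameSize = 8,
    // which is already aligned, so we return 8 - CallerFrameAndPC::sizeInRegisters = 6.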
    size_t frameSize = CallFrame::headerSizeInRegisters + argCount;
    size_t alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
    return alignedFrameSize - CallerFrameAndPC::sizeInRegisters;
}

unsigned Graph::frameRegisterCount()
{
    unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
    return roundLocalRegisterCountForFramePointerOffset(result);
}

unsigned Graph::stackPointerOffset()
{
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}

unsigned Graph::requiredRegisterCountForExit()
{
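    // An OSR exit must be able to reconstruct every inlined baseline frame, so take
    // the max, over all inline call frames, of the frame's start local plus that
    // baseline code block's own register requirement.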
    unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
    for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames()->begin(); !!iter; ++iter) {
        InlineCallFrame* inlineCallFrame = *iter;
        CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
        unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
        count = std::max(count, requiredCount);
    }
    return count;
}

unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    // FIXME: We should make sure that frameRegisterCount() and requiredRegisterCountForExit()
    // never overflow. https://bugs.webkit.org/show_bug.cgi?id=173852
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const RegisteredStructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();

    JSObject* object = asObject(base);

    for (unsigned i = structureSet.size(); i--;) {
        RegisteredStructure structure = structureSet[i];

        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();

        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());

        watchpoints().addLazily(set);
    }

    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We only shrink the propertyStorage while
    // holding the Structure's lock. So, for this to fail, you'd need an access on a constant
    // object pointer such that the inline caches told us that the object had a structure that it
    // did not *yet* have, and then later, the object transitioned to that structure that the inline
    // caches had already seen. And then the processor reordered the stores. Seems unlikely and
    // difficult to test. I believe that this is worth revisiting but it isn't worth losing sleep
    // over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.

    Structure* structure = object->structure(m_vm);
    if (!structureSet.toStructureSet().contains(structure))
        return JSValue();

    return object->getDirectConcurrently(structure, offset);
}

JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
{
    return tryGetConstantProperty(base, RegisteredStructureSet(registerStructure(structure)), offset);
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
{
    if (structure.isInfinite()) {
        // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
        // watching to constant-fold the property.
        // https://bugs.webkit.org/show_bug.cgi?id=147271
        return JSValue();
    }

    return tryGetConstantProperty(base, structure.set(), offset);
}

JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}

AbstractValue Graph::inferredValueForProperty(
    const RegisteredStructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
{
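    // The result is the join over the structure set: each structure contributes the
    // inferred type it proves for uid, and merging widens the value until it is
    // valid for all of them.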
1325     AbstractValue result;
1326     base.forEach(
1327         [&] (RegisteredStructure structure) {
1328             AbstractValue value;
1329             value.set(*this, inferredTypeForProperty(structure.get(), uid));
1330             result.merge(value);
1331         });
1332     if (clobberState == StructuresAreClobbered)
1333         result.clobberStructures();
1334     return result;
1335 }
1336
1337 AbstractValue Graph::inferredValueForProperty(
1338     const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
1339     StructureClobberState clobberState)
1340 {
1341     if (JSValue value = tryGetConstantProperty(base, offset)) {
1342         AbstractValue result;
1343         result.set(*this, *freeze(value), clobberState);
1344         return result;
1345     }
1346
1347     if (base.m_structure.isFinite())
1348         return inferredValueForProperty(base.m_structure.set(), uid, clobberState);
1349
1350     return AbstractValue::heapTop();
1351 }
1352
1353 JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
1354 {
1355     // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
1356     
1357     if (!base)
1358         return JSValue();
1359     
1360     JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(m_vm, base);
1361     if (!activation)
1362         return JSValue();
1363     
1364     SymbolTable* symbolTable = activation->symbolTable();
1365     JSValue value;
1366     WatchpointSet* set;
1367     {
1368         ConcurrentJSLocker locker(symbolTable->m_lock);
1369         
1370         SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
1371         if (!entry)
1372             return JSValue();
1373         
1374         set = entry->watchpointSet();
1375         if (!set)
1376             return JSValue();
1377         
1378         if (set->state() != IsWatched)
1379             return JSValue();
1380         
1381         ASSERT(entry->scopeOffset() == offset);
1382         value = activation->variableAt(offset).get();
1383         if (!value)
1384             return JSValue();
1385     }
1386     
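    // A hedged note on ordering: the value and the watchpoint's IsWatched state
    // were both sampled under the symbol table's lock above; registering the
    // set lazily means it is re-checked when the compilation is installed, so
    // a racing store to the variable invalidates rather than mis-compiles.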
1387     watchpoints().addLazily(set);
1388     
1389     return value;
1390 }
1391
1392 JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
1393 {
1394     return tryGetConstantClosureVar(value.m_value, offset);
1395 }
1396
1397 JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1398 {
1399     if (!node->hasConstant())
1400         return JSValue();
1401     return tryGetConstantClosureVar(node->asJSValue(), offset);
1402 }
1403
1404 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
1405 {
1406     if (!value)
1407         return nullptr;
1408     JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(m_vm, value);
1409     if (!view)
1410         return nullptr;
1411     if (!view->length())
1412         return nullptr;
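    // A hedged note: this fence orders the length() load above before any later
    // loads from the view, so the non-zero length we just observed cannot be
    // paired with staler view state (e.g. from a concurrently detached buffer).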
1413     WTF::loadLoadFence();
1414     watchpoints().addLazily(view);
1415     return view;
1416 }
1417
1418 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1419 {
1420     if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
1421         return nullptr;
1422     return tryGetFoldableView(value);
1423 }
1424
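// Rebuilds the CodeBlock's constant pool from the frozen-value set. A WeakValue
// only contributes a weak reference (the code is jettisoned if the cell is ever
// collected); a StrongValue is additionally appended to the constant pool so
// that GC keeps it alive for as long as the code itself.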
1425 void Graph::registerFrozenValues()
1426 {
1427     m_codeBlock->constants().shrink(0);
1428     m_codeBlock->constantsSourceCodeRepresentation().resize(0);
1429     for (FrozenValue* value : m_frozenValues) {
1430         if (!value->pointsToHeap())
1431             continue;
1432         
1433         ASSERT(value->structure());
1434         ASSERT(m_plan.weakReferences().contains(value->structure()));
1435
1436         switch (value->strength()) {
1437         case WeakValue: {
1438             m_plan.weakReferences().addLazily(value->value().asCell());
1439             break;
1440         }
1441         case StrongValue: {
1442             unsigned constantIndex = m_codeBlock->addConstantLazily();
1443             // We already have a barrier on the code block.
1444             m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
1445             break;
1446         } }
1447     }
1448     m_codeBlock->constants().shrinkToFit();
1449     m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
1450 }
1451
1452 void Graph::visitChildren(SlotVisitor& visitor)
1453 {
1454     for (FrozenValue* value : m_frozenValues) {
1455         visitor.appendUnbarriered(value->value());
1456         visitor.appendUnbarriered(value->structure());
1457     }
1458 }
1459
1460 FrozenValue* Graph::freeze(JSValue value)
1461 {
1462     if (UNLIKELY(!value))
1463         return FrozenValue::emptySingleton();
1464
1465     // There are weird relationships in how optimized CodeBlocks
1466     // point to other CodeBlocks. We don't want to have them be
1467     // part of the weak pointer set. For example, an optimized CodeBlock
1468     // having a weak pointer to itself will cause it to get collected.
1469     RELEASE_ASSERT(!jsDynamicCast<CodeBlock*>(m_vm, value));
1470     
1471     auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
1472     if (LIKELY(!result.isNewEntry))
1473         return result.iterator->value;
1474
1475     if (value.isUInt32())
1476         m_uint32ValuesInUse.append(value.asUInt32());
1477     
1478     FrozenValue frozenValue = FrozenValue::freeze(value);
1479     if (Structure* structure = frozenValue.structure())
1480         registerStructure(structure);
1481     
1482     return result.iterator->value = m_frozenValues.add(frozenValue);
1483 }
1484
1485 FrozenValue* Graph::freezeStrong(JSValue value)
1486 {
1487     FrozenValue* result = freeze(value);
1488     result->strengthenTo(StrongValue);
1489     return result;
1490 }
1491
1492 void Graph::convertToConstant(Node* node, FrozenValue* value)
1493 {
1494     if (value->structure())
1495         assertIsRegistered(value->structure());
1496     node->convertToConstant(value);
1497 }
1498
1499 void Graph::convertToConstant(Node* node, JSValue value)
1500 {
1501     convertToConstant(node, freeze(value));
1502 }
1503
1504 void Graph::convertToStrongConstant(Node* node, JSValue value)
1505 {
1506     convertToConstant(node, freezeStrong(value));
1507 }
1508
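// A hedged usage sketch (illustrative, not code from this file): a
// strength-reduction rule that has proven a node always produces the same
// number could fold it with
//
//     graph.convertToConstant(node, jsNumber(42));
//
// reaching for convertToStrongConstant() when the value must be kept alive by
// the constant pool rather than merely referenced weakly.
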
1509 RegisteredStructure Graph::registerStructure(Structure* structure, StructureRegistrationResult& result)
1510 {
1511     m_plan.weakReferences().addLazily(structure);
1512     if (m_plan.watchpoints().consider(structure))
1513         result = StructureRegisteredAndWatched;
1514     else
1515         result = StructureRegisteredNormally;
1516     return RegisteredStructure::createPrivate(structure);
1517 }
1518
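// Unlike registerStructure(), which only watches the transition set when the
// structure is considered watchable, this variant adds the transition
// watchpoint unconditionally; use it when correctness depends on the structure
// never transitioning.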
1519 void Graph::registerAndWatchStructureTransition(Structure* structure)
1520 {
1521     m_plan.weakReferences().addLazily(structure);
1522     m_plan.watchpoints().addLazily(structure->transitionWatchpointSet());
1523 }
1524
1525 void Graph::assertIsRegistered(Structure* structure)
1526 {
1527     // It's convenient to be able to call this with a maybe-null structure.
1528     if (!structure)
1529         return;
1530
1531     DFG_ASSERT(*this, nullptr, m_plan.weakReferences().contains(structure));
1532
1533     if (!structure->dfgShouldWatch())
1534         return;
1535     if (watchpoints().isWatched(structure->transitionWatchpointSet()))
1536         return;
1537     
1538     DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
1539 }
1540
1541 static void logDFGAssertionFailure(
1542     Graph& graph, const CString& whileText, const char* file, int line, const char* function,
1543     const char* assertion)
1544 {
1545     startCrashing();
1546     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1547     dataLog(file, "(", line, ") : ", function, "\n");
1548     dataLog("\n");
1549     dataLog(whileText);
1550     dataLog("Graph at time of failure:\n");
1551     graph.dump();
1552     dataLog("\n");
1553     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1554     dataLog(file, "(", line, ") : ", function, "\n");
1555 }
1556
1557 void Graph::logAssertionFailure(
1558     std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
1559 {
1560     logDFGAssertionFailure(*this, "", file, line, function, assertion);
1561 }
1562
1563 void Graph::logAssertionFailure(
1564     Node* node, const char* file, int line, const char* function, const char* assertion)
1565 {
1566     logDFGAssertionFailure(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
1567 }
1568
1569 void Graph::logAssertionFailure(
1570     BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
1571 {
1572     logDFGAssertionFailure(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
1573 }
1574
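// The ensure*() accessors below build each control-flow analysis on first use
// and cache it; the RELEASE_ASSERTs document which graph form (CPS vs. SSA)
// each analysis is defined over, since the two forms have different CFGs.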
1575 CPSCFG& Graph::ensureCPSCFG()
1576 {
1577     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1578     if (!m_cpsCFG)
1579         m_cpsCFG = std::make_unique<CPSCFG>(*this);
1580     return *m_cpsCFG;
1581 }
1582
1583 CPSDominators& Graph::ensureCPSDominators()
1584 {
1585     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1586     if (!m_cpsDominators)
1587         m_cpsDominators = std::make_unique<CPSDominators>(*this);
1588     return *m_cpsDominators;
1589 }
1590
1591 SSADominators& Graph::ensureSSADominators()
1592 {
1593     RELEASE_ASSERT(m_form == SSA || m_isInSSAConversion);
1594     if (!m_ssaDominators)
1595         m_ssaDominators = std::make_unique<SSADominators>(*this);
1596     return *m_ssaDominators;
1597 }
1598
1599 CPSNaturalLoops& Graph::ensureCPSNaturalLoops()
1600 {
1601     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1602     ensureCPSDominators();
1603     if (!m_cpsNaturalLoops)
1604         m_cpsNaturalLoops = std::make_unique<CPSNaturalLoops>(*this);
1605     return *m_cpsNaturalLoops;
1606 }
1607
1608 SSANaturalLoops& Graph::ensureSSANaturalLoops()
1609 {
1610     RELEASE_ASSERT(m_form == SSA);
1611     ensureSSADominators();
1612     if (!m_ssaNaturalLoops)
1613         m_ssaNaturalLoops = std::make_unique<SSANaturalLoops>(*this);
1614     return *m_ssaNaturalLoops;
1615 }
1616
1617 BackwardsCFG& Graph::ensureBackwardsCFG()
1618 {
1619     // We could easily relax this in the future to work over CPS, but today, it's only used in SSA.
1620     RELEASE_ASSERT(m_form == SSA); 
1621     if (!m_backwardsCFG)
1622         m_backwardsCFG = std::make_unique<BackwardsCFG>(*this);
1623     return *m_backwardsCFG;
1624 }
1625
1626 BackwardsDominators& Graph::ensureBackwardsDominators()
1627 {
1628     RELEASE_ASSERT(m_form == SSA);
1629     if (!m_backwardsDominators)
1630         m_backwardsDominators = std::make_unique<BackwardsDominators>(*this);
1631     return *m_backwardsDominators;
1632 }
1633
1634 ControlEquivalenceAnalysis& Graph::ensureControlEquivalenceAnalysis()
1635 {
1636     RELEASE_ASSERT(m_form == SSA);
1637     if (!m_controlEquivalenceAnalysis)
1638         m_controlEquivalenceAnalysis = std::make_unique<ControlEquivalenceAnalysis>(*this);
1639     return *m_controlEquivalenceAnalysis;
1640 }
1641
1642 MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* currentNode, Node* operandNode)
1643 {
1644     // This represents IR like `CurrentNode(@operandNode)`. For example: `GetByVal(..., Int32:@GetLocal)`.
1645
1646     for (Node* node = operandNode; node;) {
1647         // currentNode is null when we're doing speculation checks for checkArgumentTypes().
1648         if (!currentNode || node->origin.semantic != currentNode->origin.semantic || !currentNode->hasResult()) {
1649             CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
1650
1651             if (node->accessesStack(*this)) {
1652                 if (m_form != SSA && node->local().isArgument()) {
1653                     int argument = node->local().toArgument();
1654                     Node* argumentNode = m_rootToArguments.find(block(0))->value[argument];
1655                     // FIXME: We should match SetArgument nodes at other entrypoints as well:
1656                     // https://bugs.webkit.org/show_bug.cgi?id=175841
1657                     if (argumentNode && node->variableAccessData() == argumentNode->variableAccessData())
1658                         return &profiledBlock->valueProfileForArgument(argument);
1659                 }
1660
1661                 if (node->op() == GetLocal) {
1662                     return MethodOfGettingAValueProfile::fromLazyOperand(
1663                         profiledBlock,
1664                         LazyOperandValueProfileKey(
1665                             node->origin.semantic.bytecodeIndex, node->local()));
1666                 }
1667             }
1668
1669             if (node->hasHeapPrediction())
1670                 return &profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
1671
1672             if (profiledBlock->hasBaselineJITProfiling()) {
1673                 if (ArithProfile* result = profiledBlock->arithProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex))
1674                     return result;
1675             }
1676         }
1677
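        // These ops are transparent for profiling purposes: the operand's
        // profile is the best information we have for this node, so keep
        // walking down the chain in search of a node that carries profiling.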
1678         switch (node->op()) {
1679         case BooleanToNumber:
1680         case Identity:
1681         case ValueRep:
1682         case DoubleRep:
1683         case Int52Rep:
1684             node = node->child1().node();
1685             break;
1686         default:
1687             node = nullptr;
1688         }
1689     }
1690     
1691     return MethodOfGettingAValueProfile();
1692 }
1693
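// A hedged note: callers use this to fold a RegExp.prototype property (e.g.
// exec) to a compile-time constant; it succeeds only if the offset is valid
// and tryGetConstantProperty() can prove the value under the concurrency rules
// documented above.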
1694 bool Graph::getRegExpPrototypeProperty(JSObject* regExpPrototype, Structure* regExpPrototypeStructure, UniquedStringImpl* uid, JSValue& returnJSValue)
1695 {
1696     unsigned attributesUnused;
1697     PropertyOffset offset = regExpPrototypeStructure->getConcurrently(uid, attributesUnused);
1698     if (!isValidOffset(offset))
1699         return false;
1700
1701     JSValue value = tryGetConstantProperty(regExpPrototype, regExpPrototypeStructure, offset);
1702     if (!value)
1703         return false;
1704
1705     // We only care about functions and getters at this point. If you want to access other properties
1706     // you'll have to add code for those types.
1707     JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, value);
1708     if (!function) {
1709         GetterSetter* getterSetter = jsDynamicCast<GetterSetter*>(m_vm, value);
1710
1711         if (!getterSetter)
1712             return false;
1713
1714         returnJSValue = JSValue(getterSetter);
1715         return true;
1716     }
1717
1718     returnJSValue = value;
1719     return true;
1720 }
1721
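// "Sane" here means the named String.prototype method is still the built-in
// that returns the underlying string (the StringPrototypeValueOf intrinsic),
// and that the prototype-equivalence conditions keeping it so are watchable.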
1722 bool Graph::isStringPrototypeMethodSane(JSGlobalObject* globalObject, UniquedStringImpl* uid)
1723 {
1724     ObjectPropertyConditionSet conditions = generateConditionsForPrototypeEquivalenceConcurrently(m_vm, globalObject, globalObject->stringObjectStructure(), globalObject->stringPrototype(), uid);
1725
1726     if (!conditions.isValid())
1727         return false;
1728
1729     ObjectPropertyCondition equivalenceCondition = conditions.slotBaseCondition();
1730     RELEASE_ASSERT(equivalenceCondition.hasRequiredValue());
1731     JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, equivalenceCondition.condition().requiredValue());
1732     if (!function)
1733         return false;
1734
1735     if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
1736         return false;
1737     
1738     return watchConditions(conditions);
1739 }
1740
1742 bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
1743 {
1744     if (hasExitSite(codeOrigin, NotStringObject))
1745         return false;
1746
1747     JSGlobalObject* globalObject = globalObjectFor(codeOrigin);
1748     Structure* stringObjectStructure = globalObject->stringObjectStructure();
1749     registerStructure(stringObjectStructure);
1750     ASSERT(stringObjectStructure->storedPrototype().isObject());
1751     ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo(*stringObjectStructure->storedPrototype().asCell()->vm()) == StringPrototype::info());
1752
1753     if (!watchConditions(generateConditionsForPropertyMissConcurrently(m_vm, globalObject, stringObjectStructure, m_vm.propertyNames->toPrimitiveSymbol.impl())))
1754         return false;
1755
1756     // We're being conservative here. We want DFG's ToString on StringObject to be
1757     // used in both numeric contexts (that would call valueOf()) and string contexts
1758     // (that would call toString()). We don't want the DFG to have to distinguish
1759     // between the two, just because that seems like it would get confusing. So we
1760     // just require both methods to be sane.
1761     if (!isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->valueOf.impl()))
1762         return false;
1763     return isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->toString.impl());
1764 }
1765
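// Walks outward through the inline call frames of codeOrigin, asking each
// frame's baseline CodeBlock for a handler covering the corresponding bytecode
// index. Returns true, filling in the out-arguments, iff some frame inlined
// into this machine frame would catch the exception.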
1766 bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
1767 {
1768     if (!m_hasExceptionHandlers)
1769         return false;
1770
1771     unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
1772     while (1) {
1773         InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
1774         CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
1775         if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
1776             opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
1777             catchHandlerOut = handler;
1778             return true;
1779         }
1780
1781         if (!inlineCallFrame)
1782             return false;
1783
1784         bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
1785         codeOrigin = inlineCallFrame->directCaller;
1786     }
1787
1788     RELEASE_ASSERT_NOT_REACHED();
1789 }
1790
1791 bool Graph::canDoFastSpread(Node* node, const AbstractValue& value)
1792 {
1793     // The parameter 'value' is the AbstractValue for child1 (the thing being spread).
1794     ASSERT(node->op() == Spread);
1795
1796     if (node->child1().useKind() != ArrayUse) {
1797         // Note: we only speculate on ArrayUse when we've set up the necessary watchpoints
1798         // to prove that the iteration protocol is non-observable starting from ArrayPrototype.
1799         return false;
1800     }
1801
1802     // FIXME: We should add profiling of the incoming operand to Spread
1803     // so we can speculate in such a way that we guarantee that this
1804     // function would return true:
1805     // https://bugs.webkit.org/show_bug.cgi?id=171198
1806
1807     if (!value.m_structure.isFinite())
1808         return false;
1809
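    // Every structure in the proven set must make iteration non-observable:
    // exactly one prototype, which is this global object's Array.prototype,
    // not a dictionary, no own Symbol.iterator property, and no interception
    // of indexed accesses.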
1810     ArrayPrototype* arrayPrototype = globalObjectFor(node->child1()->origin.semantic)->arrayPrototype();
1811     bool allGood = true;
1812     value.m_structure.forEach([&] (RegisteredStructure structure) {
1813         allGood &= structure->hasMonoProto()
1814             && structure->storedPrototype() == arrayPrototype
1815             && !structure->isDictionary()
1816             && structure->getConcurrently(m_vm.propertyNames->iteratorSymbol.impl()) == invalidOffset
1817             && !structure->mayInterceptIndexedAccesses();
1818     });
1819
1820     return allGood;
1821 }
1822
1823 void Graph::clearCPSCFGData()
1824 {
1825     m_cpsNaturalLoops = nullptr;
1826     m_cpsDominators = nullptr;
1827     m_cpsCFG = nullptr;
1828 }
1829
1830 } } // namespace JSC::DFG
1831
1832 #endif // ENABLE(DFG_JIT)