We should support CreateThis in the FTL
Source/JavaScriptCore/dfg/DFGGraph.cpp
/*
 * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGGraph.h"

#if ENABLE(DFG_JIT)

#include "BytecodeKills.h"
#include "BytecodeLivenessAnalysisInlines.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGBackwardsCFG.h"
#include "DFGBackwardsDominators.h"
#include "DFGBlockWorklist.h"
#include "DFGCFG.h"
#include "DFGClobberSet.h"
#include "DFGClobbersExitState.h"
#include "DFGControlEquivalenceAnalysis.h"
#include "DFGDominators.h"
#include "DFGFlowIndexing.h"
#include "DFGFlowMap.h"
#include "DFGJITCode.h"
#include "DFGMayExit.h"
#include "DFGNaturalLoops.h"
#include "DFGVariableAccessDataDump.h"
#include "FullBytecodeLiveness.h"
#include "FunctionExecutableDump.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JSCInlines.h"
#include "JSLexicalEnvironment.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "OperandsInlines.h"
#include "StackAlignment.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>

namespace JSC { namespace DFG {

// Creates an array of stringized names.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};

Graph::Graph(VM& vm, Plan& plan)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock)
    , m_profiledBlock(m_codeBlock->alternative())
    , m_ssaCFG(std::make_unique<SSACFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);

    m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();

    m_indexingCache = std::make_unique<FlowIndexing>(*this);
    m_abstractValuesCache = std::make_unique<FlowMap<AbstractValue>>(*this);

    registerStructure(vm.structureStructure.get());
    this->stringStructure = registerStructure(vm.stringStructure.get());
    this->symbolStructure = registerStructure(vm.symbolStructure.get());
}

Graph::~Graph()
{
}

const char* Graph::opName(NodeType op)
{
    return dfgOpNames[op];
}

static void printWhiteSpace(PrintStream& out, unsigned amount)
{
    while (amount-- > 0)
        out.print(" ");
}

bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;

    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    if (!previousNode)
        return false;

    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;

    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }

    bool hasPrinted = false;

    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }

    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }

    return hasPrinted;
}

int Graph::amountOfNodeWhiteSpace(Node* node)
{
    return (node->origin.semantic.inlineDepth() - 1) * 2;
}

void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
{
    printWhiteSpace(out, amountOfNodeWhiteSpace(node));
}

void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count is prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. They may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasNumberOfArgumentsToSkip())
        out.print(comma, "numberOfArgumentsToSkip = ", node->numberOfArgumentsToSkip());
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithUnaryType())
        out.print(comma, "Type:", node->arithUnaryType());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasArithRoundingMode())
        out.print(comma, "Rounding:", node->arithRoundingMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasArgumentIndex())
        out.print(comma, node->argumentIndex());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasClassInfo())
        out.print(comma, *node->classInfo());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet().toStructureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure().get(), context));
    if (node->op() == CPUIntrinsic)
        out.print(comma, intrinsicName(node->intrinsic()));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next.get()));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(m_vm, executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasSpeculatedTypeForQuery())
        out.print(comma, SpeculationDump(node->speculatedTypeForQuery()));
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    if (node->hasMatchStructureData()) {
        for (MatchStructureVariant& variant : node->matchStructureData().variants)
            out.print(comma, inContext(*variant.structure.get(), context), "=>", variant.result);
    }
    ASSERT(node->hasVariableAccessData(*this) == node->accessesStack(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasVectorLengthHint())
        out.print(comma, "vectorLengthHint = ", node->vectorLengthHint());
    if (node->hasLazyJSValue())
        out.print(comma, node->lazyJSValue());
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingMode()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->hasCallDOMGetterData()) {
        CallDOMGetterData* data = node->callDOMGetterData();
        out.print(comma, "id", data->identifierNumber, "{", identifiers()[data->identifierNumber], "}");
        out.print(", domJIT = ", RawPointer(data->domJIT));
    }
    if (node->hasIgnoreLastIndexIsWritable())
        out.print(comma, "ignoreLastIndexIsWritable = ", node->ignoreLastIndexIsWritable());
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    if (node->hasCallLinkStatus())
        out.print(comma, *node->callLinkStatus());
    if (node->hasGetByIdStatus())
        out.print(comma, *node->getByIdStatus());
    if (node->hasInByIdStatus())
        out.print(comma, *node->inByIdStatus());
    if (node->hasPutByIdStatus())
        out.print(comma, *node->putByIdStatus());
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    if (node->isEntrySwitch()) {
        EntrySwitchData* data = node->entrySwitchData();
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, BranchTarget(data->cases[i]));
    }
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    out.print(comma, node->origin.exitOK ? "ExitValid" : "ExitInvalid");
    if (node->origin.wasHoisted)
        out.print(comma, "WasHoisted");
    out.print(")");

    if (node->accessesStack(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));

    out.print("\n");
}

bool Graph::terminalsAreValid()
{
    for (BasicBlock* block : blocksInNaturalOrder()) {
        if (!block->terminal())
            return false;
    }
    return true;
}

static BasicBlock* unboxLoopNode(const CPSCFG::Node& node) { return node.node(); }
static BasicBlock* unboxLoopNode(BasicBlock* block) { return block; }

void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):",
        block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", block->isCatchEntrypoint ? " (Catch Entrypoint)" : "", "\n");
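    // Note: a double compares unequal to itself only when it is NaN, so this
    // self-comparison presumably skips printing when no execution count has been
    // recorded.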
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
        }
    } else
        out.print(" <invalid>");
    out.print("\n");

    auto printDominators = [&] (auto& dominators) {
        out.print(prefix, "  Dominated by: ", dominators.dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", dominators.blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", dominators.dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ",
            dominators.iteratedDominanceFrontierOf(typename std::remove_reference<decltype(dominators)>::type::List { block }), "\n");
    };

    if (terminalsAreValid()) {
        if (m_ssaDominators)
            printDominators(*m_ssaDominators);
        else if (m_cpsDominators)
            printDominators(*m_cpsDominators);
    }

    if (m_backwardsDominators && terminalsAreValid()) {
        out.print(prefix, "  Backwards dominated by: ", m_backwardsDominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Backwards dominates: ", m_backwardsDominators->blocksDominatedBy(block), "\n");
    }
    if (m_controlEquivalenceAnalysis && terminalsAreValid()) {
        out.print(prefix, "  Control equivalent to:");
        for (BasicBlock* otherBlock : blocksInNaturalOrder()) {
            if (m_controlEquivalenceAnalysis->areEquivalent(block, otherBlock))
                out.print(" ", *otherBlock);
        }
        out.print("\n");
    }

    auto printNaturalLoops = [&] (auto& naturalLoops) {
        if (const auto* loop = naturalLoops->headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(unboxLoopNode(loop->at(i))->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }

        auto containingLoops = naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *unboxLoopNode(containingLoops[i]->header()));
            out.print("\n");
        }
    };

    if (m_ssaNaturalLoops)
        printNaturalLoops(m_ssaNaturalLoops);
    else if (m_cpsNaturalLoops)
        printNaturalLoops(m_cpsNaturalLoops);

    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}

void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;

    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA) {
        for (unsigned entrypointIndex = 0; entrypointIndex < m_argumentFormats.size(); ++entrypointIndex)
            out.print("  Argument formats for entrypoint index: ", entrypointIndex, " : ", listDump(m_argumentFormats[entrypointIndex]), "\n");
    } else {
        for (auto pair : m_rootToArguments)
            out.print("  Arguments for block#", pair.key->index, ": ", listDump(pair.value), "\n");
    }
    out.print("\n");

    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }

        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }

        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }

    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));

    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}

void Graph::deleteNode(Node* node)
{
    if (validationEnabled() && m_form == SSA) {
        for (BasicBlock* block : blocksInNaturalOrder()) {
            DFG_ASSERT(*this, node, !block->ssa->liveAtHead.contains(node));
            DFG_ASSERT(*this, node, !block->ssa->liveAtTail.contains(node));
        }
    }

    m_nodes.remove(node);
}

void Graph::packNodeIndices()
{
    m_nodes.packIndices();
}

void Graph::dethread()
{
    if (m_form == LoadStore || m_form == SSA)
        return;

    if (logCompilationChanges())
        dataLog("Dethreading DFG graph.\n");

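    // In ThreadedCPS form, data flow is threaded through the Phi nodes' children.
    // Clearing those children is what reverts the graph to LoadStore form below.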
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
            Node* phi = block->phis[phiIndex];
            phi->children.reset();
        }
    }

    m_form = LoadStore;
}

void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
{
    if (!successor->isReachable) {
        successor->isReachable = true;
        worklist.append(successor);
    }

    if (!successor->predecessors.contains(block))
        successor->predecessors.append(block);
}

void Graph::determineReachability()
{
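    // Simple forward flood fill from every root: mark each block reachable and
    // record predecessor edges along the way (see handleSuccessor above).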
    Vector<BasicBlock*, 16> worklist;
    for (BasicBlock* entrypoint : m_roots) {
        entrypoint->isReachable = true;
        worklist.append(entrypoint);
    }
    while (!worklist.isEmpty()) {
        BasicBlock* block = worklist.takeLast();
        for (unsigned i = block->numSuccessors(); i--;)
            handleSuccessor(worklist, block, block->successor(i));
    }
}

void Graph::resetReachability()
{
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        block->isReachable = false;
        block->predecessors.clear();
    }

    determineReachability();
}

namespace {

class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }

    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }

        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }

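        // Propagate ref counts to a fixpoint. In SSA, a live Phi implicitly keeps
        // its Upsilons alive, and those edges exist only as metadata on the
        // Upsilon, so rescan for newly live Upsilons until nothing changes.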
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }

            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }

private:
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }

    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }

    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }

    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};

} // anonymous namespace

void Graph::computeRefCounts()
{
    RefCountCalculator calculator(*this);
    calculator.calculate();
}

void Graph::killBlockAndItsContents(BasicBlock* block)
{
    if (auto& ssaData = block->ssa)
        ssaData->invalidate();
    for (unsigned phiIndex = block->phis.size(); phiIndex--;)
        deleteNode(block->phis[phiIndex]);
    for (Node* node : *block)
        deleteNode(node);

    killBlock(block);
}

void Graph::killUnreachableBlocks()
{
    invalidateNodeLiveness();

    for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        if (block->isReachable)
            continue;

        dataLogIf(Options::verboseDFGBytecodeParsing(), "Basic block #", blockIndex, " was killed because it was unreachable\n");
        killBlockAndItsContents(block);
    }
}

void Graph::invalidateCFG()
{
    m_cpsDominators = nullptr;
    m_ssaDominators = nullptr;
    m_cpsNaturalLoops = nullptr;
    m_ssaNaturalLoops = nullptr;
    m_controlEquivalenceAnalysis = nullptr;
    m_backwardsDominators = nullptr;
    m_backwardsCFG = nullptr;
    m_cpsCFG = nullptr;
}

void Graph::invalidateNodeLiveness()
{
    if (m_form != SSA)
        return;

    for (BasicBlock* block : blocksInNaturalOrder())
        block->ssa->invalidate();
}

void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
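    // Walk forward from startIndexInBlock: redirect the first GetLocal of this
    // variable to newGetLocal, stopping early at any SetLocal to the same local,
    // since that kills the value being substituted.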
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }

        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }

        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}

BlockList Graph::blocksInPreOrder()
{
    BlockList result;
    BlockWorklist worklist;
    for (BasicBlock* entrypoint : m_roots)
        worklist.push(entrypoint);
    while (BasicBlock* block = worklist.pop()) {
        result.append(block);
        for (unsigned i = block->numSuccessors(); i--;)
            worklist.push(block->successor(i));
    }

    if (validationEnabled()) {
        // When iterating over pre order, we should see dominators
        // before things they dominate.
        auto validateResults = [&] (auto& dominators) {
            for (unsigned i = 0; i < result.size(); ++i) {
                BasicBlock* a = result[i];
                if (!a)
                    continue;
                for (unsigned j = 0; j < result.size(); ++j) {
                    BasicBlock* b = result[j];
                    if (!b || a == b)
                        continue;
                    if (dominators.dominates(a, b))
                        RELEASE_ASSERT(i < j);
                }
            }
        };

        if (m_form == SSA || m_isInSSAConversion)
            validateResults(ensureSSADominators());
        else
            validateResults(ensureCPSDominators());
    }
    return result;
}

BlockList Graph::blocksInPostOrder(bool isSafeToValidate)
{
    BlockList result;
    PostOrderBlockWorklist worklist;
    for (BasicBlock* entrypoint : m_roots)
        worklist.push(entrypoint);
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case VisitOrder::Pre:
            worklist.pushPost(item.node);
            for (unsigned i = item.node->numSuccessors(); i--;)
                worklist.push(item.node->successor(i));
            break;
        case VisitOrder::Post:
            result.append(item.node);
            break;
        }
    }

    if (isSafeToValidate && validationEnabled()) { // There are users of this where we haven't yet built the CFG enough to be able to run dominators.
        auto validateResults = [&] (auto& dominators) {
            // When iterating over reverse post order, we should see dominators
            // before things they dominate.
            for (unsigned i = 0; i < result.size(); ++i) {
                BasicBlock* a = result[i];
                if (!a)
                    continue;
                for (unsigned j = 0; j < result.size(); ++j) {
                    BasicBlock* b = result[j];
                    if (!b || a == b)
                        continue;
                    if (dominators.dominates(a, b))
                        RELEASE_ASSERT(i > j);
                }
            }
        };

        if (m_form == SSA || m_isInSSAConversion)
            validateResults(ensureSSADominators());
        else
            validateResults(ensureCPSDominators());
    }

    return result;
}

void Graph::clearReplacements()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setReplacement(nullptr);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setReplacement(nullptr);
    }
}

void Graph::clearEpochs()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setEpoch(Epoch());
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setEpoch(Epoch());
    }
}

void Graph::initializeNodeOwners()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->owner = block;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->owner = block;
    }
}

void Graph::clearFlagsOnAllNodes(NodeFlags flags)
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->clearFlags(flags);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->clearFlags(flags);
    }
}

bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    if (!key.isWatchable())
        return false;

    m_plan.weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        m_plan.weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        m_plan.weakReferences.addLazily(key.requiredValue());

    m_plan.watchpoints.addLazily(key);

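    // A watched Presence condition pins the property at this offset on this exact
    // object, so record that loads from (object, offset) are safe; see isSafeToLoad
    // below.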
    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));

    return true;
}

bool Graph::watchConditions(const ObjectPropertyConditionSet& keys)
{
    if (!keys.isValid())
        return false;

    for (const ObjectPropertyCondition& key : keys) {
        if (!watchCondition(key))
            return false;
    }
    return true;
}

bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
{
    return m_safeToLoad.contains(std::make_pair(base, offset));
}

InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    assertIsRegistered(key.structure());

    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    m_inferredTypes.add(key, typeDescriptor);

    m_plan.weakReferences.addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}

FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
    if (iter != m_bytecodeLiveness.end())
        return *iter->value;

    std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
    codeBlock->livenessAnalysis().computeFullLiveness(codeBlock, *liveness);
    FullBytecodeLiveness& result = *liveness;
    m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
    return result;
}

FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}

BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
    if (iter != m_bytecodeKills.end())
        return *iter->value;

    std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
    codeBlock->livenessAnalysis().computeKills(codeBlock, *kills);
    BytecodeKills& result = *kills;
    m_bytecodeKills.add(codeBlock, WTFMove(kills));
    return result;
}

BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
{
    return killsFor(baselineCodeBlockFor(inlineCallFrame));
}

bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    static const bool verbose = false;

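    // Walk up the inline call frame chain, rebasing the operand into each frame's
    // coordinate system, until some frame's bytecode liveness (or a call-frame
    // header special case) can answer the question.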
    if (verbose)
        dataLog("Checking if operand is live: ", operand, "\n");
    CodeOrigin* codeOriginPtr = &codeOrigin;
    for (;;) {
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());

        if (verbose)
            dataLog("reg = ", reg, "\n");

        if (operand.offset() < codeOriginPtr->stackOffset() + CallFrame::headerSizeInRegisters) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < CallFrame::headerSizeInRegisters);

                if (codeOriginPtr->inlineCallFrame->isClosureCall
                    && reg.offset() == CallFrameSlot::callee) {
                    if (verbose)
                        dataLog("Looks like a callee.\n");
                    return true;
                }

                if (codeOriginPtr->inlineCallFrame->isVarargs()
                    && reg.offset() == CallFrameSlot::argumentCount) {
                    if (verbose)
                        dataLog("Looks like the argument count.\n");
                    return true;
                }

                return false;
            }

            if (verbose)
                dataLog("Asking the bytecode liveness.\n");
            return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
                reg.offset(), codeOriginPtr->bytecodeIndex);
        }

        InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
        if (!inlineCallFrame) {
            if (verbose)
                dataLog("Ran out of stack, returning true.\n");
            return true;
        }

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->argumentsWithFixup.size()) {
            if (verbose)
                dataLog("Argument is live.\n");
            return true;
        }

        codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();

        // The first inline call frame could be an inline tail call.
        if (!codeOriginPtr) {
            if (verbose)
                dataLog("Dead because of tail inlining.\n");
            return false;
        }
    }

    RELEASE_ASSERT_NOT_REACHED();
}

BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
{
    BitVector result;
    result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
    forAllLocalsLiveInBytecode(
        codeOrigin,
        [&] (VirtualRegister reg) {
            ASSERT(reg.isLocal());
            result.quickSet(reg.toLocal());
        });
    return result;
}

unsigned Graph::parameterSlotsForArgCount(unsigned argCount)
{
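    // Presumably: the callee frame needs its header slots plus the argument slots,
    // rounded up to the stack alignment; the CallerFrameAndPC slots are established
    // by the call itself, so they are excluded from the parameter slot count.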
    size_t frameSize = CallFrame::headerSizeInRegisters + argCount;
    size_t alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
    return alignedFrameSize - CallerFrameAndPC::sizeInRegisters;
}

unsigned Graph::frameRegisterCount()
{
    unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
    return roundLocalRegisterCountForFramePointerOffset(result);
}

unsigned Graph::stackPointerOffset()
{
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}

unsigned Graph::requiredRegisterCountForExit()
{
    unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
    for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
        InlineCallFrame* inlineCallFrame = *iter;
        CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
        unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
        count = std::max(count, requiredCount);
    }
    return count;
}

unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    // FIXME: We should make sure that frameRegisterCount() and requiredRegisterCountForExit()
    // never overflow. https://bugs.webkit.org/show_bug.cgi?id=173852
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const RegisteredStructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();

    JSObject* object = asObject(base);

    for (unsigned i = structureSet.size(); i--;) {
        RegisteredStructure structure = structureSet[i];

        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();

        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());

        watchpoints().addLazily(set);
    }

    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We only shrink the propertyStorage while
    // holding the Structure's lock. So, for this to fail, you'd need an access on a constant
    // object pointer such that the inline caches told us that the object had a structure that it
    // did not *yet* have, and then later, the object transitioned to that structure that the inline
1278     // caches had already seen. And then the processor reordered the stores. Seems unlikely and
1279     // difficult to test. I believe that this is worth revisiting but it isn't worth losing sleep
1280     // over. Filed:
1281     // https://bugs.webkit.org/show_bug.cgi?id=134641
1282     //
1283     // For now, we just do the minimal thing: defend against the structure right now being
1284     // incompatible with the getDirect we're trying to do. The easiest way to do that is to
1285     // determine if the structure belongs to the proven set.
1286
1287     Structure* structure = object->structure(m_vm);
1288     if (!structureSet.toStructureSet().contains(structure))
1289         return JSValue();
1290
1291     return object->getDirectConcurrently(structure, offset);
1292 }
1293
1294 JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
1295 {
1296     return tryGetConstantProperty(base, RegisteredStructureSet(registerStructure(structure)), offset);
1297 }
1298
1299 JSValue Graph::tryGetConstantProperty(
1300     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1301 {
1302     if (structure.isInfinite()) {
1303         // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
1304         // watching to constant-fold the property.
1305         // https://bugs.webkit.org/show_bug.cgi?id=147271
1306         return JSValue();
1307     }
1308     
1309     return tryGetConstantProperty(base, structure.set(), offset);
1310 }
1311
1312 JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
1313 {
1314     return tryGetConstantProperty(base.m_value, base.m_structure, offset);
1315 }
1316
1317 AbstractValue Graph::inferredValueForProperty(
1318     const RegisteredStructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
1319 {
1320     AbstractValue result;
1321     base.forEach(
1322         [&] (RegisteredStructure structure) {
1323             AbstractValue value;
1324             value.set(*this, inferredTypeForProperty(structure.get(), uid));
1325             result.merge(value);
1326         });
1327     if (clobberState == StructuresAreClobbered)
1328         result.clobberStructures();
1329     return result;
1330 }
1331
1332 AbstractValue Graph::inferredValueForProperty(
1333     const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
1334     StructureClobberState clobberState)
1335 {
1336     if (JSValue value = tryGetConstantProperty(base, offset)) {
1337         AbstractValue result;
1338         result.set(*this, *freeze(value), clobberState);
1339         return result;
1340     }
1341
1342     if (base.m_structure.isFinite())
1343         return inferredValueForProperty(base.m_structure.set(), uid, clobberState);
1344
1345     return AbstractValue::heapTop();
1346 }
1347
1348 JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
1349 {
1350     // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.

    if (!base)
        return JSValue();

    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(m_vm, base);
    if (!activation)
        return JSValue();

    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        ConcurrentJSLocker locker(symbolTable->m_lock);

        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();

        set = entry->watchpointSet();
        if (!set)
            return JSValue();

        if (set->state() != IsWatched)
            return JSValue();

        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }

    watchpoints().addLazily(set);

    return value;
}

JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
{
    return tryGetConstantClosureVar(value.m_value, offset);
}

JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
{
    if (!node->hasConstant())
        return JSValue();
    return tryGetConstantClosureVar(node->asJSValue(), offset);
}

JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
{
    if (!value)
        return nullptr;
    JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(m_vm, value);
    if (!view)
        return nullptr;
    if (!view->length())
        return nullptr;
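    // Presumably, this load-load fence keeps the length check above from being
    // reordered with later loads from the view on weakly ordered hardware.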
    WTF::loadLoadFence();
    watchpoints().addLazily(view);
    return view;
}

JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
{
    if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
        return nullptr;
    return tryGetFoldableView(value);
}

void Graph::registerFrozenValues()
{
    m_codeBlock->constants().shrink(0);
    m_codeBlock->constantsSourceCodeRepresentation().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        if (!value->pointsToHeap())
            continue;

        ASSERT(value->structure());
        ASSERT(m_plan.weakReferences.contains(value->structure()));

        switch (value->strength()) {
        case WeakValue: {
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            // We already have a barrier on the code block.
            m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
            break;
        }
        }
    }
    m_codeBlock->constants().shrinkToFit();
    m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
}

void Graph::visitChildren(SlotVisitor& visitor)
{
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarriered(value->value());
        visitor.appendUnbarriered(value->structure());
    }
}

FrozenValue* Graph::freeze(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();

    // There are weird relationships in how optimized CodeBlocks
    // point to other CodeBlocks. We don't want to have them be
    // part of the weak pointer set. For example, an optimized CodeBlock
    // having a weak pointer to itself will cause it to get collected.
    RELEASE_ASSERT(!jsDynamicCast<CodeBlock*>(m_vm, value));

    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());

    FrozenValue frozenValue = FrozenValue::freeze(value);
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);

    return result.iterator->value = m_frozenValues.add(frozenValue);
}

FrozenValue* Graph::freezeStrong(JSValue value)
{
    FrozenValue* result = freeze(value);
    result->strengthenTo(StrongValue);
    return result;
}

void Graph::convertToConstant(Node* node, FrozenValue* value)
{
    if (value->structure())
        assertIsRegistered(value->structure());
    node->convertToConstant(value);
}

void Graph::convertToConstant(Node* node, JSValue value)
{
    convertToConstant(node, freeze(value));
}

void Graph::convertToStrongConstant(Node* node, JSValue value)
{
    convertToConstant(node, freezeStrong(value));
}

RegisteredStructure Graph::registerStructure(Structure* structure, StructureRegistrationResult& result)
{
    m_plan.weakReferences.addLazily(structure);
    if (m_plan.watchpoints.consider(structure))
        result = StructureRegisteredAndWatched;
    else
        result = StructureRegisteredNormally;
    return RegisteredStructure::createPrivate(structure);
}

void Graph::registerAndWatchStructureTransition(Structure* structure)
{
    m_plan.weakReferences.addLazily(structure);
    m_plan.watchpoints.addLazily(structure->transitionWatchpointSet());
}

void Graph::assertIsRegistered(Structure* structure)
{
    // It's convenient to be able to call this with a maybe-null structure.
    if (!structure)
        return;

    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));

    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;

    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}

static void logDFGAssertionFailure(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
}

void Graph::logAssertionFailure(
    std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
{
    logDFGAssertionFailure(*this, "", file, line, function, assertion);
}

void Graph::logAssertionFailure(
    Node* node, const char* file, int line, const char* function, const char* assertion)
{
    logDFGAssertionFailure(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
}

void Graph::logAssertionFailure(
    BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
{
    logDFGAssertionFailure(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
}

CPSCFG& Graph::ensureCPSCFG()
{
    RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
    if (!m_cpsCFG)
        m_cpsCFG = std::make_unique<CPSCFG>(*this);
    return *m_cpsCFG;
}

CPSDominators& Graph::ensureCPSDominators()
{
    RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
    if (!m_cpsDominators)
        m_cpsDominators = std::make_unique<CPSDominators>(*this);
    return *m_cpsDominators;
}

SSADominators& Graph::ensureSSADominators()
{
    RELEASE_ASSERT(m_form == SSA || m_isInSSAConversion);
    if (!m_ssaDominators)
        m_ssaDominators = std::make_unique<SSADominators>(*this);
    return *m_ssaDominators;
}

CPSNaturalLoops& Graph::ensureCPSNaturalLoops()
{
    RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
    ensureCPSDominators();
    if (!m_cpsNaturalLoops)
        m_cpsNaturalLoops = std::make_unique<CPSNaturalLoops>(*this);
    return *m_cpsNaturalLoops;
}

SSANaturalLoops& Graph::ensureSSANaturalLoops()
{
    RELEASE_ASSERT(m_form == SSA);
    ensureSSADominators();
    if (!m_ssaNaturalLoops)
        m_ssaNaturalLoops = std::make_unique<SSANaturalLoops>(*this);
    return *m_ssaNaturalLoops;
}

BackwardsCFG& Graph::ensureBackwardsCFG()
{
    // We could easily relax this in the future to work over CPS, but today, it's only used in SSA.
    RELEASE_ASSERT(m_form == SSA);
    if (!m_backwardsCFG)
        m_backwardsCFG = std::make_unique<BackwardsCFG>(*this);
    return *m_backwardsCFG;
}

BackwardsDominators& Graph::ensureBackwardsDominators()
{
    RELEASE_ASSERT(m_form == SSA);
    if (!m_backwardsDominators)
        m_backwardsDominators = std::make_unique<BackwardsDominators>(*this);
    return *m_backwardsDominators;
}

ControlEquivalenceAnalysis& Graph::ensureControlEquivalenceAnalysis()
{
    RELEASE_ASSERT(m_form == SSA);
    if (!m_controlEquivalenceAnalysis)
        m_controlEquivalenceAnalysis = std::make_unique<ControlEquivalenceAnalysis>(*this);
    return *m_controlEquivalenceAnalysis;
}

MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* currentNode, Node* operandNode)
{
    // This represents IR like `CurrentNode(@operandNode)`. For example: `GetByVal(..., Int32:@GetLocal)`.
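    // We walk from operandNode through value-preserving nodes (Identity, the
    // representation changes, etc.) until we find one whose bytecode origin
    // carries a usable value profile in the baseline code block.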

    for (Node* node = operandNode; node;) {
        // currentNode is null when we're doing speculation checks for checkArgumentTypes().
        if (!currentNode || node->origin.semantic != currentNode->origin.semantic || !currentNode->hasResult()) {
            CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);

            if (node->accessesStack(*this)) {
                if (m_form != SSA && node->local().isArgument()) {
                    int argument = node->local().toArgument();
                    Node* argumentNode = m_rootToArguments.find(block(0))->value[argument];
                    // FIXME: We should match SetArgument nodes at other entrypoints as well:
                    // https://bugs.webkit.org/show_bug.cgi?id=175841
                    if (argumentNode && node->variableAccessData() == argumentNode->variableAccessData())
                        return &profiledBlock->valueProfileForArgument(argument);
                }

                if (node->op() == GetLocal) {
                    return MethodOfGettingAValueProfile::fromLazyOperand(
                        profiledBlock,
                        LazyOperandValueProfileKey(
                            node->origin.semantic.bytecodeIndex, node->local()));
                }
            }

            if (node->hasHeapPrediction())
                return &profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);

            if (profiledBlock->hasBaselineJITProfiling()) {
                if (ArithProfile* result = profiledBlock->arithProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex))
                    return result;
            }
        }

        switch (node->op()) {
        case BooleanToNumber:
        case Identity:
        case ValueRep:
        case DoubleRep:
        case Int52Rep:
            node = node->child1().node();
            break;
        default:
            node = nullptr;
        }
    }

    return MethodOfGettingAValueProfile();
}

bool Graph::getRegExpPrototypeProperty(JSObject* regExpPrototype, Structure* regExpPrototypeStructure, UniquedStringImpl* uid, JSValue& returnJSValue)
{
    unsigned attributesUnused;
    PropertyOffset offset = regExpPrototypeStructure->getConcurrently(uid, attributesUnused);
    if (!isValidOffset(offset))
        return false;

    JSValue value = tryGetConstantProperty(regExpPrototype, regExpPrototypeStructure, offset);
    if (!value)
        return false;

    // We only care about functions and getters at this point. If you want to access other
    // properties, you'll have to add code for those types.
    JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, value);
    if (!function) {
        GetterSetter* getterSetter = jsDynamicCast<GetterSetter*>(m_vm, value);

        if (!getterSetter)
            return false;

        returnJSValue = JSValue(getterSetter);
        return true;
    }

    returnJSValue = value;
    return true;
}

bool Graph::isStringPrototypeMethodSane(JSGlobalObject* globalObject, UniquedStringImpl* uid)
{
    ObjectPropertyConditionSet conditions = generateConditionsForPrototypeEquivalenceConcurrently(m_vm, globalObject, globalObject->stringObjectStructure(), globalObject->stringPrototype(), uid);

    if (!conditions.isValid())
        return false;

    ObjectPropertyCondition equivalenceCondition = conditions.slotBaseCondition();
    RELEASE_ASSERT(equivalenceCondition.hasRequiredValue());
    JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, equivalenceCondition.condition().requiredValue());
    if (!function)
        return false;

    if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
        return false;

    return watchConditions(conditions);
}

bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
{
    if (hasExitSite(codeOrigin, NotStringObject))
        return false;

    JSGlobalObject* globalObject = globalObjectFor(codeOrigin);
    Structure* stringObjectStructure = globalObject->stringObjectStructure();
    registerStructure(stringObjectStructure);
    ASSERT(stringObjectStructure->storedPrototype().isObject());
    ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo(*stringObjectStructure->storedPrototype().asCell()->vm()) == StringPrototype::info());

    if (!watchConditions(generateConditionsForPropertyMissConcurrently(m_vm, globalObject, stringObjectStructure, m_vm.propertyNames->toPrimitiveSymbol.impl())))
        return false;

    // We're being conservative here. We want DFG's ToString on StringObject to be
    // used in both numeric contexts (that would call valueOf()) and string contexts
    // (that would call toString()). We don't want the DFG to have to distinguish
    // between the two, since that would get confusing. So we just require both
    // methods to be sane.
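    // Illustratively: `new String("a") + 1` would reach valueOf() while
    // `String(new String("a"))` would reach toString(); both must still be the
    // original StringPrototype methods for us to use the wrapped string directly.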
    if (!isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->valueOf.impl()))
        return false;
    return isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->toString.impl());
}

bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
{
    if (!m_hasExceptionHandlers)
        return false;

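    // Walk from the given origin up through the inline call frames, asking each
    // level's baseline CodeBlock for a handler covering the bytecode index.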
    unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
    while (true) {
        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
        CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
        if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
            opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
            catchHandlerOut = handler;
            return true;
        }

        if (!inlineCallFrame)
            return false;

        bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
        codeOrigin = inlineCallFrame->directCaller;
    }

    RELEASE_ASSERT_NOT_REACHED();
}

bool Graph::canDoFastSpread(Node* node, const AbstractValue& value)
{
    // The parameter 'value' is the AbstractValue for child1 (the thing being spread).
    ASSERT(node->op() == Spread);

    if (node->child1().useKind() != ArrayUse) {
        // Note: we only speculate on ArrayUse when we've set up the necessary watchpoints
        // to prove that the iteration protocol is non-observable starting from ArrayPrototype.
        return false;
    }

    // FIXME: We should add profiling of the incoming operand to Spread
    // so we can speculate in a way that guarantees this function returns true:
    // https://bugs.webkit.org/show_bug.cgi?id=171198

    if (!value.m_structure.isFinite())
        return false;

    ArrayPrototype* arrayPrototype = globalObjectFor(node->child1()->origin.semantic)->arrayPrototype();
    bool allGood = true;
    value.m_structure.forEach([&] (RegisteredStructure structure) {
        allGood &= structure->hasMonoProto()
            && structure->storedPrototype() == arrayPrototype
            && !structure->isDictionary()
            && structure->getConcurrently(m_vm.propertyNames->iteratorSymbol.impl()) == invalidOffset
            && !structure->mayInterceptIndexedAccesses();
    });
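    // Illustratively: spreading a plain `[1, 2, 3]` passes every check above,
    // while an array whose Symbol.iterator has been redefined as an own property
    // fails the getConcurrently() check and takes the slow path.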
    return allGood;
}

void Graph::clearCPSCFGData()
{
    m_cpsNaturalLoops = nullptr;
    m_cpsDominators = nullptr;
    m_cpsCFG = nullptr;
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)