Try ripping out inferred types because it might be a performance improvement
[WebKit-https.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
1 /*
2  * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGGraph.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "BytecodeKills.h"
32 #include "BytecodeLivenessAnalysisInlines.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGBackwardsCFG.h"
36 #include "DFGBackwardsDominators.h"
37 #include "DFGBlockWorklist.h"
38 #include "DFGCFG.h"
39 #include "DFGClobberSet.h"
40 #include "DFGClobbersExitState.h"
41 #include "DFGControlEquivalenceAnalysis.h"
42 #include "DFGDominators.h"
43 #include "DFGFlowIndexing.h"
44 #include "DFGFlowMap.h"
45 #include "DFGJITCode.h"
46 #include "DFGMayExit.h"
47 #include "DFGNaturalLoops.h"
48 #include "DFGVariableAccessDataDump.h"
49 #include "FullBytecodeLiveness.h"
50 #include "FunctionExecutableDump.h"
51 #include "GetterSetter.h"
52 #include "JIT.h"
53 #include "JSLexicalEnvironment.h"
54 #include "MaxFrameExtentForSlowPathCall.h"
55 #include "OperandsInlines.h"
56 #include "JSCInlines.h"
57 #include "StackAlignment.h"
58 #include <wtf/CommaPrinter.h>
59 #include <wtf/ListDump.h>
60
61 namespace JSC { namespace DFG {
62
// When true, Graph::dump() also prints per-block OSR availability maps
// (availabilityAtHead / availabilityAtTail) for SSA-form graphs.
63 static constexpr bool dumpOSRAvailabilityData = false;
64
65 // Creates an array of stringized names.
// Indexed by NodeType: FOR_EACH_DFG_OP expands each opcode into its own
// string literal, so dfgOpNames[op] is the printable name of opcode `op`.
66 static const char* dfgOpNames[] = {
67 #define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
68     FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
69 #undef STRINGIZE_DFG_OP_ENUM
70 };
71
// Constructs a Graph for one compilation plan. The code block being compiled
// comes from the plan; the baseline ("profiled") block is fetched through
// alternative() so profiling data can be consulted during compilation.
72 Graph::Graph(VM& vm, Plan& plan)
73     : m_vm(vm)
74     , m_plan(plan)
75     , m_codeBlock(m_plan.codeBlock())
76     , m_profiledBlock(m_codeBlock->alternative())
77     , m_ssaCFG(std::make_unique<SSACFG>(*this))
78     , m_nextMachineLocal(0)
79     , m_fixpointState(BeforeFixpoint)
80     , m_structureRegistrationState(HaveNotStartedRegistering)
81     , m_form(LoadStore)
82     , m_unificationState(LocallyUnified)
83     , m_refCountState(EverythingIsLive)
84 {
85     ASSERT(m_profiledBlock);
86     
// Record whether the profiled block was compiled with debugging opcodes
// (or the option forces it); later phases consult this flag.
87     m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();
88     
// Caches used by the flow-sensitive analyses over this graph.
89     m_indexingCache = std::make_unique<FlowIndexing>(*this);
90     m_abstractValuesCache = std::make_unique<FlowMap<AbstractValue>>(*this);
91
// Eagerly register the structures the compiler relies on unconditionally.
92     registerStructure(vm.structureStructure.get());
93     this->stringStructure = registerStructure(vm.stringStructure.get());
94     this->symbolStructure = registerStructure(vm.symbolStructure.get());
95 }
96
97 Graph::~Graph()
98 {
99 }
100
101 const char *Graph::opName(NodeType op)
102 {
103     return dfgOpNames[op];
104 }
105
106 static void printWhiteSpace(PrintStream& out, unsigned amount)
107 {
108     while (amount-- > 0)
109         out.print(" ");
110 }
111
// Prints the change in inlining context between the previously dumped node
// (previousNodeRef) and currentNode: "<--" lines for inline frames exited,
// "-->" lines for inline frames entered, each indented by stack depth.
// Updates previousNodeRef to currentNode. Returns true if anything printed.
112 bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
113 {
114     if (!currentNode->origin.semantic)
115         return false;
116     
117     Node* previousNode = previousNodeRef;
118     previousNodeRef = currentNode;
119
120     if (!previousNode)
121         return false;
122     
// Same inline call frame as the previous node: context unchanged.
123     if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
124         return false;
125     
// Find where the two inline stacks diverge: frames above the divergence in
// the previous stack were popped; frames at/after it in the current stack
// were pushed.
126     Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
127     Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
128     unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
129     unsigned indexOfDivergence = commonSize;
130     for (unsigned i = 0; i < commonSize; ++i) {
131         if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
132             indexOfDivergence = i;
133             break;
134         }
135     }
136     
137     bool hasPrinted = false;
138     
139     // Print the pops.
140     for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
141         out.print(prefix);
142         printWhiteSpace(out, i * 2);
143         out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
144         hasPrinted = true;
145     }
146     
147     // Print the pushes.
148     for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
149         out.print(prefix);
150         printWhiteSpace(out, i * 2);
151         out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
152         hasPrinted = true;
153     }
154     
155     return hasPrinted;
156 }
157
158 int Graph::amountOfNodeWhiteSpace(Node* node)
159 {
160     return (node->origin.semantic.inlineDepth() - 1) * 2;
161 }
162
// Prints the inline-depth-based indentation computed by
// amountOfNodeWhiteSpace() for this node.
163 void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
164 {
165     printWhiteSpace(out, amountOfNodeWhiteSpace(node));
166 }
167
// Dumps a single node on one line: index, ref count, virtual register,
// opcode name, children, then whatever per-opcode metadata the node carries.
// The long chain of hasXXX() checks below appends each payload the node has;
// most nodes only hit a few of them.
168 void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
169 {
170     NodeType op = node->op();
171
// Display the "real" ref count: the mustGenerate ref is implicit, so it is
// subtracted here and signalled with a '!' prefix instead.
172     unsigned refCount = node->refCount();
173     bool mustGenerate = node->mustGenerate();
174     if (mustGenerate)
175         --refCount;
176
177     out.print(prefix);
178     printNodeWhiteSpace(out, node);
179
180     // Example/explanation of dataflow dump output
181     //
182     //   14:   <!2:7>  GetByVal(@3, @13)
183     //   ^1     ^2 ^3     ^4       ^5
184     //
185     // (1) The nodeIndex of this operation.
186     // (2) The reference count. The number printed is the 'real' count,
187     //     not including the 'mustGenerate' ref. If the node is
188     //     'mustGenerate' then the count it prefixed with '!'.
189     // (3) The virtual register slot assigned to this node.
190     // (4) The name of the operation.
191     // (5) The arguments to the operation. The may be of the form:
192     //         @#   - a NodeIndex referencing a prior node in the graph.
193     //         arg# - an argument number.
194     //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
195     //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
196     out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
197     if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
198         out.print(node->virtualRegister());
199     else
200         out.print("-");
201     out.print(">\t", opName(op), "(");
202     CommaPrinter comma;
// Var-args nodes keep their children out-of-line in m_varArgChildren.
203     if (node->flags() & NodeHasVarArgs) {
204         for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
205             if (!m_varArgChildren[childIdx])
206                 continue;
207             out.print(comma, m_varArgChildren[childIdx]);
208         }
209     } else {
210         if (!!node->child1() || !!node->child2() || !!node->child3())
211             out.print(comma, node->child1());
212         if (!!node->child2() || !!node->child3())
213             out.print(comma, node->child2());
214         if (!!node->child3())
215             out.print(comma, node->child3());
216     }
217
218     if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
219         out.print(comma, NodeFlagsDump(node->flags()));
220     if (node->prediction())
221         out.print(comma, SpeculationDump(node->prediction()));
222     if (node->hasNumberOfArgumentsToSkip())
223         out.print(comma, "numberOfArgumentsToSkip = ", node->numberOfArgumentsToSkip());
224     if (node->hasArrayMode())
225         out.print(comma, node->arrayMode());
226     if (node->hasArithUnaryType())
227         out.print(comma, "Type:", node->arithUnaryType());
228     if (node->hasArithMode())
229         out.print(comma, node->arithMode());
230     if (node->hasArithRoundingMode())
231         out.print(comma, "Rounding:", node->arithRoundingMode());
232     if (node->hasScopeOffset())
233         out.print(comma, node->scopeOffset());
234     if (node->hasDirectArgumentsOffset())
235         out.print(comma, node->capturedArgumentsOffset());
236     if (node->hasArgumentIndex())
237         out.print(comma, node->argumentIndex());
238     if (node->hasRegisterPointer())
239         out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
240     if (node->hasIdentifier())
241         out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
242     if (node->hasPromotedLocationDescriptor())
243         out.print(comma, node->promotedLocationDescriptor());
244     if (node->hasClassInfo())
245         out.print(comma, *node->classInfo());
246     if (node->hasStructureSet())
247         out.print(comma, inContext(node->structureSet().toStructureSet(), context));
248     if (node->hasStructure())
249         out.print(comma, inContext(*node->structure().get(), context));
250     if (node->op() == CPUIntrinsic)
251         out.print(comma, intrinsicName(node->intrinsic()));
252     if (node->hasTransition()) {
253         out.print(comma, pointerDumpInContext(node->transition(), context));
254 #if USE(JSVALUE64)
255         out.print(", ID:", node->transition()->next->id());
256 #else
257         out.print(", ID:", RawPointer(node->transition()->next.get()));
258 #endif
259     }
260     if (node->hasCellOperand()) {
261         if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
262             out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
263         else {
264             out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
265             if (node->cellOperand()->value().isCell()) {
266                 CallVariant variant(node->cellOperand()->value().asCell());
267                 if (ExecutableBase* executable = variant.executable()) {
268                     if (executable->isHostFunction())
269                         out.print(comma, "<host function>");
270                     else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(m_vm, executable))
271                         out.print(comma, FunctionExecutableDump(functionExecutable));
272                     else
273                         out.print(comma, "<non-function executable>");
274                 }
275             }
276         }
277     }
278     if (node->hasSpeculatedTypeForQuery())
279         out.print(comma, SpeculationDump(node->speculatedTypeForQuery()));
280     if (node->hasStorageAccessData()) {
281         StorageAccessData& storageAccessData = node->storageAccessData();
282         out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
283         out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
284     }
285     if (node->hasMultiGetByOffsetData()) {
286         MultiGetByOffsetData& data = node->multiGetByOffsetData();
287         out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
288         for (unsigned i = 0; i < data.cases.size(); ++i)
289             out.print(comma, inContext(data.cases[i], context));
290     }
291     if (node->hasMultiPutByOffsetData()) {
292         MultiPutByOffsetData& data = node->multiPutByOffsetData();
293         out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
294         for (unsigned i = 0; i < data.variants.size(); ++i)
295             out.print(comma, inContext(data.variants[i], context));
296     }
297     if (node->hasMatchStructureData()) {
298         for (MatchStructureVariant& variant : node->matchStructureData().variants)
299             out.print(comma, inContext(*variant.structure.get(), context), "=>", variant.result);
300     }
301     ASSERT(node->hasVariableAccessData(*this) == node->accessesStack(*this));
302     if (node->hasVariableAccessData(*this)) {
303         VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
304         if (variableAccessData) {
305             VirtualRegister operand = variableAccessData->local();
306             out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
307             operand = variableAccessData->machineLocal();
308             if (operand.isValid())
309                 out.print(comma, "machine:", operand);
310         }
311     }
312     if (node->hasStackAccessData()) {
313         StackAccessData* data = node->stackAccessData();
314         out.print(comma, data->local);
315         if (data->machineLocal.isValid())
316             out.print(comma, "machine:", data->machineLocal);
317         out.print(comma, data->format);
318     }
319     if (node->hasUnlinkedLocal()) 
320         out.print(comma, node->unlinkedLocal());
321     if (node->hasVectorLengthHint())
322         out.print(comma, "vectorLengthHint = ", node->vectorLengthHint());
323     if (node->hasLazyJSValue())
324         out.print(comma, node->lazyJSValue());
325     if (node->hasIndexingType())
326         out.print(comma, IndexingTypeDump(node->indexingMode()));
327     if (node->hasTypedArrayType())
328         out.print(comma, node->typedArrayType());
329     if (node->hasPhi())
330         out.print(comma, "^", node->phi()->index());
331     if (node->hasExecutionCounter())
332         out.print(comma, RawPointer(node->executionCounter()));
333     if (node->hasWatchpointSet())
334         out.print(comma, RawPointer(node->watchpointSet()));
335     if (node->hasStoragePointer())
336         out.print(comma, RawPointer(node->storagePointer()));
337     if (node->hasObjectMaterializationData())
338         out.print(comma, node->objectMaterializationData());
339     if (node->hasCallVarargsData())
340         out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
341     if (node->hasLoadVarargsData()) {
342         LoadVarargsData* data = node->loadVarargsData();
343         out.print(comma, "start = ", data->start, ", count = ", data->count);
344         if (data->machineStart.isValid())
345             out.print(", machineStart = ", data->machineStart);
346         if (data->machineCount.isValid())
347             out.print(", machineCount = ", data->machineCount);
348         out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
349         out.print(", limit = ", data->limit);
350     }
351     if (node->hasCallDOMGetterData()) {
352         CallDOMGetterData* data = node->callDOMGetterData();
353         out.print(comma, "id", data->identifierNumber, "{", identifiers()[data->identifierNumber], "}");
354         out.print(", domJIT = ", RawPointer(data->domJIT));
355     }
356     if (node->hasIgnoreLastIndexIsWritable())
357         out.print(comma, "ignoreLastIndexIsWritable = ", node->ignoreLastIndexIsWritable());
358     if (node->isConstant())
359         out.print(comma, pointerDumpInContext(node->constant(), context));
360     if (node->hasCallLinkStatus())
361         out.print(comma, *node->callLinkStatus());
362     if (node->hasGetByIdStatus())
363         out.print(comma, *node->getByIdStatus());
364     if (node->hasInByIdStatus())
365         out.print(comma, *node->inByIdStatus());
366     if (node->hasPutByIdStatus())
367         out.print(comma, *node->putByIdStatus());
// Control-flow payloads: branch/switch targets.
368     if (node->isJump())
369         out.print(comma, "T:", *node->targetBlock());
370     if (node->isBranch())
371         out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
372     if (node->isSwitch()) {
373         SwitchData* data = node->switchData();
374         out.print(comma, data->kind);
375         for (unsigned i = 0; i < data->cases.size(); ++i)
376             out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
377         out.print(comma, "default:", data->fallThrough);
378     }
379     if (node->isEntrySwitch()) {
380         EntrySwitchData* data = node->entrySwitchData();
381         for (unsigned i = 0; i < data->cases.size(); ++i)
382             out.print(comma, BranchTarget(data->cases[i]));
383     }
// Clobberize summary: which abstract heap locations this node reads/writes.
384     ClobberSet reads;
385     ClobberSet writes;
386     addReadsAndWrites(*this, node, reads, writes);
387     if (!reads.isEmpty())
388         out.print(comma, "R:", sortedListDump(reads.direct(), ","));
389     if (!writes.isEmpty())
390         out.print(comma, "W:", sortedListDump(writes.direct(), ","));
391     ExitMode exitMode = mayExit(*this, node);
392     if (exitMode != DoesNotExit)
393         out.print(comma, exitMode);
394     if (clobbersExitState(*this, node))
395         out.print(comma, "ClobbersExit");
// Bytecode origin and OSR-exit annotations.
396     if (node->origin.isSet()) {
397         out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
398         if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
399             out.print(comma, "exit: ", node->origin.forExit);
400     }
401     out.print(comma, node->origin.exitOK ? "ExitValid" : "ExitInvalid");
402     if (node->origin.wasHoisted)
403         out.print(comma, "WasHoisted");
404     out.print(")");
405
// Trailing speculation summary: the variable's prediction for nodes that
// access the stack, otherwise the heap prediction when the opcode has one.
406     if (node->accessesStack(*this) && node->tryGetVariableAccessData())
407         out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
408     else if (node->hasHeapPrediction())
409         out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
410     
411     out.print("\n");
412 }
413
414 bool Graph::terminalsAreValid()
415 {
416     for (BasicBlock* block : blocksInNaturalOrder()) {
417         if (!block->terminal())
418             return false;
419     }
420     return true;
421 }
422
// Overloads that normalize a natural-loop member to a BasicBlock*: CPS-CFG
// loops store CPSCFG::Node wrappers while other loops store raw BasicBlock
// pointers, and dumpBlockHeader's generic loop printer handles both.
423 static BasicBlock* unboxLoopNode(const CPSCFG::Node& node) { return node.node(); }
424 static BasicBlock* unboxLoopNode(BasicBlock* block) { return block; }
425
// Dumps a block's header: label and flags, execution count, predecessor and
// successor lists, then whichever dominator / control-equivalence /
// natural-loop analysis results are currently available, and finally the
// block's Phi nodes (all of them, or only live ones, per phiNodeDumpMode).
426 void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
427 {
428     out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):",
429         block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", block->isCatchEntrypoint ? " (Catch Entrypoint)" : "", "\n");
// NOTE: the self-comparison is deliberate, not a typo. executionCount is a
// floating-point value and x == x is false only when x is NaN, so this prints
// the execution count only when a real (non-NaN) value is present.
430     if (block->executionCount == block->executionCount)
431         out.print(prefix, "  Execution count: ", block->executionCount, "\n");
432     out.print(prefix, "  Predecessors:");
433     for (size_t i = 0; i < block->predecessors.size(); ++i)
434         out.print(" ", *block->predecessors[i]);
435     out.print("\n");
436     out.print(prefix, "  Successors:");
// Successor lists are only meaningful when the block has a terminal.
437     if (block->terminal()) {
438         for (BasicBlock* successor : block->successors()) {
439             out.print(" ", *successor);
440         }
441     } else
442         out.print(" <invalid>");
443     out.print("\n");
444
// Generic lambda so the same printing code serves both SSA and CPS dominators.
445     auto printDominators = [&] (auto& dominators) {
446         out.print(prefix, "  Dominated by: ", dominators.dominatorsOf(block), "\n");
447         out.print(prefix, "  Dominates: ", dominators.blocksDominatedBy(block), "\n");
448         out.print(prefix, "  Dominance Frontier: ", dominators.dominanceFrontierOf(block), "\n");
449         out.print(prefix, "  Iterated Dominance Frontier: ",
450             dominators.iteratedDominanceFrontierOf(typename std::remove_reference<decltype(dominators)>::type::List { block }), "\n");
451     };
452
453     if (terminalsAreValid()) {
454         if (m_ssaDominators)
455             printDominators(*m_ssaDominators);
456         else if (m_cpsDominators)
457             printDominators(*m_cpsDominators);
458     }
459
460     if (m_backwardsDominators && terminalsAreValid()) {
461         out.print(prefix, "  Backwards dominates by: ", m_backwardsDominators->dominatorsOf(block), "\n");
462         out.print(prefix, "  Backwards dominates: ", m_backwardsDominators->blocksDominatedBy(block), "\n");
463     }
464     if (m_controlEquivalenceAnalysis && terminalsAreValid()) {
465         out.print(prefix, "  Control equivalent to:");
466         for (BasicBlock* otherBlock : blocksInNaturalOrder()) {
467             if (m_controlEquivalenceAnalysis->areEquivalent(block, otherBlock))
468                 out.print(" ", *otherBlock);
469         }
470         out.print("\n");
471     }
472
// unboxLoopNode() papers over the member-type difference between the SSA and
// CPS natural-loop representations (see overloads above).
473     auto printNaturalLoops = [&] (auto& naturalLoops) {
474         if (const auto* loop = naturalLoops->headerOf(block)) {
475             out.print(prefix, "  Loop header, contains:");
476             Vector<BlockIndex> sortedBlockList;
477             for (unsigned i = 0; i < loop->size(); ++i)
478                 sortedBlockList.append(unboxLoopNode(loop->at(i))->index);
479             std::sort(sortedBlockList.begin(), sortedBlockList.end());
480             for (unsigned i = 0; i < sortedBlockList.size(); ++i)
481                 out.print(" #", sortedBlockList[i]);
482             out.print("\n");
483         }
484         
485         auto containingLoops = naturalLoops->loopsOf(block);
486         if (!containingLoops.isEmpty()) {
487             out.print(prefix, "  Containing loop headers:");
488             for (unsigned i = 0; i < containingLoops.size(); ++i)
489                 out.print(" ", *unboxLoopNode(containingLoops[i]->header()));
490             out.print("\n");
491         }
492     };
493
494     if (m_ssaNaturalLoops)
495         printNaturalLoops(m_ssaNaturalLoops);
496     else if (m_cpsNaturalLoops)
497         printNaturalLoops(m_cpsNaturalLoops);
498
499     if (!block->phis.isEmpty()) {
500         out.print(prefix, "  Phi Nodes:");
501         for (size_t i = 0; i < block->phis.size(); ++i) {
502             Node* phiNode = block->phis[i];
// In DumpLivePhisOnly mode, skip Phis that won't be generated.
503             if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
504                 continue;
505             out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
506             if (phiNode->child1()) {
507                 out.print("@", phiNode->child1()->index());
508                 if (phiNode->child2()) {
509                     out.print(", @", phiNode->child2()->index());
510                     if (phiNode->child3())
511                         out.print(", @", phiNode->child3()->index());
512                 }
513             }
514             out.print(")", i + 1 < block->phis.size() ? "," : "");
515         }
516         out.print("\n");
517     }
518 }
519
// Dumps the whole graph: a header with the compilation state, then for each
// block its header, CFA state and values at head, every node, CFA state and
// values at tail (layout depends on CPS vs SSA form), followed by the
// heap-referencing frozen values and the desired watchpoints.
520 void Graph::dump(PrintStream& out, DumpContext* context)
521 {
// Fall back to a local DumpContext when the caller did not provide one, so
// deferred dump entries can still be flushed at the end of this function.
522     DumpContext myContext;
523     myContext.graph = this;
524     if (!context)
525         context = &myContext;
526     
527     out.print("\n");
528     out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
529     out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
530     if (m_form == SSA) {
531         for (unsigned entrypointIndex = 0; entrypointIndex < m_argumentFormats.size(); ++entrypointIndex)
532             out.print("  Argument formats for entrypoint index: ", entrypointIndex, " : ", listDump(m_argumentFormats[entrypointIndex]), "\n");
533     }
534     else {
535         for (auto pair : m_rootToArguments)
536             out.print("  Arguments for block#", pair.key->index, ": ", listDump(pair.value), "\n");
537     }
538     out.print("\n");
539     
// lastNode threads through dumpCodeOrigin so inline-frame transitions are
// printed between consecutive nodes, even across block boundaries.
540     Node* lastNode = nullptr;
541     for (size_t b = 0; b < m_blocks.size(); ++b) {
542         BasicBlock* block = m_blocks[b].get();
543         if (!block)
544             continue;
545         dumpBlockHeader(out, "", block, DumpAllPhis, context);
546         out.print("  States: ", block->cfaStructureClobberStateAtHead);
547         if (!block->cfaHasVisited)
548             out.print(", CurrentlyCFAUnreachable");
549         if (!block->intersectionOfCFAHasVisited)
550             out.print(", CFAUnreachable");
551         out.print("\n");
// Head-of-block state; representation differs between CPS forms and SSA.
552         switch (m_form) {
553         case LoadStore:
554         case ThreadedCPS: {
555             out.print("  Vars Before: ");
556             if (block->cfaHasVisited)
557                 out.print(inContext(block->valuesAtHead, context));
558             else
559                 out.print("<empty>");
560             out.print("\n");
561             out.print("  Intersected Vars Before: ");
562             if (block->intersectionOfCFAHasVisited)
563                 out.print(inContext(block->intersectionOfPastValuesAtHead, context));
564             else
565                 out.print("<empty>");
566             out.print("\n");
567             out.print("  Var Links: ", block->variablesAtHead, "\n");
568             break;
569         }
570             
571         case SSA: {
572             RELEASE_ASSERT(block->ssa);
573             if (dumpOSRAvailabilityData)
574                 out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
575             out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
576             out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtHead, context), "\n");
577             break;
578         } }
579         for (size_t i = 0; i < block->size(); ++i) {
580             dumpCodeOrigin(out, "", lastNode, block->at(i), context);
581             dump(out, "", block->at(i), context);
582         }
// Tail-of-block state, mirroring the head dump above.
583         out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
584         if (!block->cfaDidFinish)
585             out.print(", CFAInvalidated");
586         out.print("\n");
587         switch (m_form) {
588         case LoadStore:
589         case ThreadedCPS: {
590             out.print("  Vars After: ");
591             if (block->cfaHasVisited)
592                 out.print(inContext(block->valuesAtTail, context));
593             else
594                 out.print("<empty>");
595             out.print("\n");
596             out.print("  Var Links: ", block->variablesAtTail, "\n");
597             break;
598         }
599             
600         case SSA: {
601             RELEASE_ASSERT(block->ssa);
602             if (dumpOSRAvailabilityData)
603                 out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
604             out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
605             out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtTail, context), "\n");
606             break;
607         } }
608         out.print("\n");
609     }
610     
611     out.print("GC Values:\n");
612     for (FrozenValue* value : m_frozenValues) {
613         if (value->pointsToHeap())
614             out.print("    ", inContext(*value, &myContext), "\n");
615     }
616
617     out.print(inContext(watchpoints(), &myContext));
618     
// Flush any deferred dump entries the local context accumulated.
619     if (!myContext.isEmpty()) {
620         myContext.dump(out);
621         out.print("\n");
622     }
623 }
624
// Removes a node from the graph's backing store. With validation enabled in
// SSA form, first asserts that no block still lists the node as live at its
// head or tail — deleting a still-live node would leave dangling references.
625 void Graph::deleteNode(Node* node)
626 {
627     if (validationEnabled() && m_form == SSA) {
628         for (BasicBlock* block : blocksInNaturalOrder()) {
629             DFG_ASSERT(*this, node, !block->ssa->liveAtHead.contains(node));
630             DFG_ASSERT(*this, node, !block->ssa->liveAtTail.contains(node));
631         }
632     }
633
634     m_nodes.remove(node);
635 }
636
// Delegates to the node pool to repack node indices (typically after a batch
// of deleteNode() calls has left holes in the index space).
637 void Graph::packNodeIndices()
638 {
639     m_nodes.packIndices();
640 }
641
642 void Graph::dethread()
643 {
644     if (m_form == LoadStore || m_form == SSA)
645         return;
646     
647     if (logCompilationChanges())
648         dataLog("Dethreading DFG graph.\n");
649     
650     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
651         BasicBlock* block = m_blocks[blockIndex].get();
652         if (!block)
653             continue;
654         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
655             Node* phi = block->phis[phiIndex];
656             phi->children.reset();
657         }
658     }
659     
660     m_form = LoadStore;
661 }
662
663 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
664 {
665     if (!successor->isReachable) {
666         successor->isReachable = true;
667         worklist.append(successor);
668     }
669     
670     if (!successor->predecessors.contains(block))
671         successor->predecessors.append(block);
672 }
673
674 void Graph::determineReachability()
675 {
676     Vector<BasicBlock*, 16> worklist;
677     for (BasicBlock* entrypoint : m_roots) {
678         entrypoint->isReachable = true;
679         worklist.append(entrypoint);
680     }
681     while (!worklist.isEmpty()) {
682         BasicBlock* block = worklist.takeLast();
683         for (unsigned i = block->numSuccessors(); i--;)
684             handleSuccessor(worklist, block, block->successor(i));
685     }
686 }
687
688 void Graph::resetReachability()
689 {
690     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
691         BasicBlock* block = m_blocks[blockIndex].get();
692         if (!block)
693             continue;
694         block->isReachable = false;
695         block->predecessors.clear();
696     }
697     
698     determineReachability();
699 }
700
701 namespace {
702
703 class RefCountCalculator {
704 public:
// Remembers the graph to operate on; all the work happens in calculate().
705     RefCountCalculator(Graph& graph)
706         : m_graph(graph)
707     {
708     }
709     
710     void calculate()
711     {
        // Recomputes every node's ref count from scratch: zero all counts,
        // seed a worklist with the roots (must-generate nodes and nodes
        // reachable from type checks), then transitively count edges —
        // including the SSA Phi->Upsilon meta-edges handled at the bottom.
712         // First reset the counts to 0 for all nodes.
713         for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
714             BasicBlock* block = m_graph.block(blockIndex);
715             if (!block)
716                 continue;
717             for (unsigned indexInBlock = block->size(); indexInBlock--;)
718                 block->at(indexInBlock)->setRefCount(0);
719             for (unsigned phiIndex = block->phis.size(); phiIndex--;)
720                 block->phis[phiIndex]->setRefCount(0);
721         }
722     
723         // Now find the roots:
724         // - Nodes that are must-generate.
725         // - Nodes that are reachable from type checks.
726         // Set their ref counts to 1 and put them on the worklist.
727         for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
728             BasicBlock* block = m_graph.block(blockIndex);
729             if (!block)
730                 continue;
731             for (unsigned indexInBlock = block->size(); indexInBlock--;) {
732                 Node* node = block->at(indexInBlock);
733                 DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
734                 if (!(node->flags() & NodeMustGenerate))
735                     continue;
        // postfixRef() returns the previous count, so a node is enqueued
        // only on its 0 -> 1 transition (i.e. at most once).
736                 if (!node->postfixRef())
737                     m_worklist.append(node);
738             }
739         }
740         
        // Drain the worklist transitively. The outer loop repeats because
        // the SSA Upsilon scan below can discover new work.
741         while (!m_worklist.isEmpty()) {
742             while (!m_worklist.isEmpty()) {
743                 Node* node = m_worklist.last();
744                 m_worklist.removeLast();
745                 ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
746                 DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
747             }
748             
749             if (m_graph.m_form == SSA) {
750                 // Find Phi->Upsilon edges, which are represented as meta-data in the
751                 // Upsilon.
752                 for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
753                     BasicBlock* block = m_graph.block(blockIndex);
754                     if (!block)
755                         continue;
756                     for (unsigned nodeIndex = block->size(); nodeIndex--;) {
757                         Node* node = block->at(nodeIndex);
758                         if (node->op() != Upsilon)
759                             continue;
760                         if (node->shouldGenerate())
761                             continue;
762                         if (node->phi()->shouldGenerate())
763                             countNode(node);
764                     }
765                 }
766             }
767         }
768     }
769     
770 private:
771     void findTypeCheckRoot(Node*, Edge edge)
772     {
773         // We may have an "unproved" untyped use for code that is unreachable. The CFA
774         // will just not have gotten around to it.
775         if (edge.isProved() || edge.willNotHaveCheck())
776             return;
777         if (!edge->postfixRef())
778             m_worklist.append(edge.node());
779     }
780     
781     void countNode(Node* node)
782     {
783         if (node->postfixRef())
784             return;
785         m_worklist.append(node);
786     }
787     
788     void countEdge(Node*, Edge edge)
789     {
790         // Don't count edges that are already counted for their type checks.
791         if (!(edge.isProved() || edge.willNotHaveCheck()))
792             return;
793         countNode(edge.node());
794     }
795     
796     Graph& m_graph;
797     Vector<Node*, 128> m_worklist;
798 };
799
800 } // anonymous namespace
801
802 void Graph::computeRefCounts()
803 {
804     RefCountCalculator calculator(*this);
805     calculator.calculate();
806 }
807
808 void Graph::killBlockAndItsContents(BasicBlock* block)
809 {
810     if (auto& ssaData = block->ssa)
811         ssaData->invalidate();
812     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
813         deleteNode(block->phis[phiIndex]);
814     for (Node* node : *block)
815         deleteNode(node);
816     
817     killBlock(block);
818 }
819
820 void Graph::killUnreachableBlocks()
821 {
822     invalidateNodeLiveness();
823
824     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
825         BasicBlock* block = this->block(blockIndex);
826         if (!block)
827             continue;
828         if (block->isReachable)
829             continue;
830
831         dataLogIf(Options::verboseDFGBytecodeParsing(), "Basic block #", blockIndex, " was killed because it was unreachable\n");
832         killBlockAndItsContents(block);
833     }
834 }
835
836 void Graph::invalidateCFG()
837 {
838     m_cpsDominators = nullptr;
839     m_ssaDominators = nullptr;
840     m_cpsNaturalLoops = nullptr;
841     m_ssaNaturalLoops = nullptr;
842     m_controlEquivalenceAnalysis = nullptr;
843     m_backwardsDominators = nullptr;
844     m_backwardsCFG = nullptr;
845     m_cpsCFG = nullptr;
846 }
847
848 void Graph::invalidateNodeLiveness()
849 {
850     if (m_form != SSA)
851         return;
852
853     for (BasicBlock* block : blocksInNaturalOrder())
854         block->ssa->invalidate();
855 }
856
// Replaces uses of a redundant GetLocal of the given variable with
// newGetLocal, scanning forward from startIndexInBlock. The scan stops at the
// first SetLocal of the same local (a new definition ends the range where the
// substitution is valid) or after one GetLocal has been substituted.
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            // A store to the same local terminates the region we may rewrite.
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            // Keep variablesAtTail consistent: if the node we just replaced was
            // the tail value for this local, the replacement now is.
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}
887
// Returns the graph's blocks in a pre-order traversal from the roots over
// successor edges: each block appears before the blocks it dominates (the
// validation below asserts exactly that property).
BlockList Graph::blocksInPreOrder()
{
    BlockList result;
    BlockWorklist worklist;
    for (BasicBlock* entrypoint : m_roots)
        worklist.push(entrypoint);
    while (BasicBlock* block = worklist.pop()) {
        result.append(block);
        for (unsigned i = block->numSuccessors(); i--;)
            worklist.push(block->successor(i));
    }

    if (validationEnabled()) {
        // When iterating over pre order, we should see dominators
        // before things they dominate.
        auto validateResults = [&] (auto& dominators) {
            // O(n^2) pairwise check; only runs when validation is enabled.
            for (unsigned i = 0; i < result.size(); ++i) {
                BasicBlock* a = result[i];
                if (!a)
                    continue;
                for (unsigned j = 0; j < result.size(); ++j) {
                    BasicBlock* b = result[j];
                    if (!b || a == b)
                        continue;
                    if (dominators.dominates(a, b))
                        RELEASE_ASSERT(i < j);
                }
            }
        };

        // Pick the dominator analysis that matches the graph's current form.
        if (m_form == SSA || m_isInSSAConversion)
            validateResults(ensureSSADominators());
        else
            validateResults(ensureCPSDominators());
    }
    return result;
}
925
// Returns the graph's blocks in post order (a block appears after all of the
// blocks it dominates; iterate in reverse for reverse post order). The
// two-phase worklist emits a block only after all of its successors have been
// pushed and visited.
BlockList Graph::blocksInPostOrder(bool isSafeToValidate)
{
    BlockList result;
    PostOrderBlockWorklist worklist;
    for (BasicBlock* entrypoint : m_roots)
        worklist.push(entrypoint);
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case VisitOrder::Pre:
            // Re-queue the block for post-processing, then visit successors.
            worklist.pushPost(item.node);
            for (unsigned i = item.node->numSuccessors(); i--;)
                worklist.push(item.node->successor(i));
            break;
        case VisitOrder::Post:
            result.append(item.node);
            break;
        }
    }

    if (isSafeToValidate && validationEnabled()) { // There are users of this where we haven't yet built up the CFG enough to be able to run dominators.
        auto validateResults = [&] (auto& dominators) {
            // When iterating over reverse post order, we should see dominators
            // before things they dominate.
            for (unsigned i = 0; i < result.size(); ++i) {
                BasicBlock* a = result[i];
                if (!a)
                    continue;
                for (unsigned j = 0; j < result.size(); ++j) {
                    BasicBlock* b = result[j];
                    if (!b || a == b)
                        continue;
                    if (dominators.dominates(a, b))
                        RELEASE_ASSERT(i > j);
                }
            }
        };

        // Pick the dominator analysis that matches the graph's current form.
        if (m_form == SSA || m_isInSSAConversion)
            validateResults(ensureSSADominators());
        else
            validateResults(ensureCPSDominators());
    }

    return result;
}
971
972 void Graph::clearReplacements()
973 {
974     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
975         BasicBlock* block = m_blocks[blockIndex].get();
976         if (!block)
977             continue;
978         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
979             block->phis[phiIndex]->setReplacement(nullptr);
980         for (unsigned nodeIndex = block->size(); nodeIndex--;)
981             block->at(nodeIndex)->setReplacement(nullptr);
982     }
983 }
984
985 void Graph::clearEpochs()
986 {
987     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
988         BasicBlock* block = m_blocks[blockIndex].get();
989         if (!block)
990             continue;
991         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
992             block->phis[phiIndex]->setEpoch(Epoch());
993         for (unsigned nodeIndex = block->size(); nodeIndex--;)
994             block->at(nodeIndex)->setEpoch(Epoch());
995     }
996 }
997
998 void Graph::initializeNodeOwners()
999 {
1000     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
1001         BasicBlock* block = m_blocks[blockIndex].get();
1002         if (!block)
1003             continue;
1004         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
1005             block->phis[phiIndex]->owner = block;
1006         for (unsigned nodeIndex = block->size(); nodeIndex--;)
1007             block->at(nodeIndex)->owner = block;
1008     }
1009 }
1010
1011 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
1012 {
1013     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
1014         BasicBlock* block = m_blocks[blockIndex].get();
1015         if (!block)
1016             continue;
1017         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
1018             block->phis[phiIndex]->clearFlags(flags);
1019         for (unsigned nodeIndex = block->size(); nodeIndex--;)
1020             block->at(nodeIndex)->clearFlags(flags);
1021     }
1022 }
1023
// Tries to install watchpoints for the given ObjectPropertyCondition. On
// success, the plan keeps weak references to every cell the condition
// mentions and registers the condition with the plan's watchpoint set.
// Returns false (with no side effects) if the condition isn't watchable.
bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    if (!key.isWatchable())
        return false;

    // The watchpoint is only meaningful while the cells it refers to are
    // alive, so register them as desired weak references.
    DesiredWeakReferences& weakReferences = m_plan.weakReferences();
    weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        weakReferences.addLazily(key.requiredValue());

    m_plan.watchpoints().addLazily(key);

    // A watched Presence condition guarantees the property is there, so loads
    // from this (object, offset) pair are safe; record that for isSafeToLoad().
    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
    
    return true;
}
1043
1044 bool Graph::watchConditions(const ObjectPropertyConditionSet& keys)
1045 {
1046     if (!keys.isValid())
1047         return false;
1048
1049     for (const ObjectPropertyCondition& key : keys) {
1050         if (!watchCondition(key))
1051             return false;
1052     }
1053     return true;
1054 }
1055
1056 bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
1057 {
1058     return m_safeToLoad.contains(std::make_pair(base, offset));
1059 }
1060
1061 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
1062 {
1063     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
1064     if (iter != m_bytecodeLiveness.end())
1065         return *iter->value;
1066     
1067     std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
1068     codeBlock->livenessAnalysis().computeFullLiveness(codeBlock, *liveness);
1069     FullBytecodeLiveness& result = *liveness;
1070     m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
1071     return result;
1072 }
1073
1074 FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
1075 {
1076     return livenessFor(baselineCodeBlockFor(inlineCallFrame));
1077 }
1078
1079 BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
1080 {
1081     HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
1082     if (iter != m_bytecodeKills.end())
1083         return *iter->value;
1084     
1085     std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
1086     codeBlock->livenessAnalysis().computeKills(codeBlock, *kills);
1087     BytecodeKills& result = *kills;
1088     m_bytecodeKills.add(codeBlock, WTFMove(kills));
1089     return result;
1090 }
1091
1092 BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
1093 {
1094     return killsFor(baselineCodeBlockFor(inlineCallFrame));
1095 }
1096
1097 bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
1098 {
1099     static const bool verbose = false;
1100     
1101     if (verbose)
1102         dataLog("Checking of operand is live: ", operand, "\n");
1103     CodeOrigin* codeOriginPtr = &codeOrigin;
1104     for (;;) {
1105         VirtualRegister reg = VirtualRegister(
1106             operand.offset() - codeOriginPtr->stackOffset());
1107         
1108         if (verbose)
1109             dataLog("reg = ", reg, "\n");
1110         
1111         if (operand.offset() < codeOriginPtr->stackOffset() + CallFrame::headerSizeInRegisters) {
1112             if (reg.isArgument()) {
1113                 RELEASE_ASSERT(reg.offset() < CallFrame::headerSizeInRegisters);
1114                 
1115                 if (codeOriginPtr->inlineCallFrame->isClosureCall
1116                     && reg.offset() == CallFrameSlot::callee) {
1117                     if (verbose)
1118                         dataLog("Looks like a callee.\n");
1119                     return true;
1120                 }
1121                 
1122                 if (codeOriginPtr->inlineCallFrame->isVarargs()
1123                     && reg.offset() == CallFrameSlot::argumentCount) {
1124                     if (verbose)
1125                         dataLog("Looks like the argument count.\n");
1126                     return true;
1127                 }
1128                 
1129                 return false;
1130             }
1131
1132             if (verbose)
1133                 dataLog("Asking the bytecode liveness.\n");
1134             return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
1135                 reg.offset(), codeOriginPtr->bytecodeIndex);
1136         }
1137         
1138         InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
1139         if (!inlineCallFrame) {
1140             if (verbose)
1141                 dataLog("Ran out of stack, returning true.\n");
1142             return true;
1143         }
1144
1145         // Arguments are always live. This would be redundant if it wasn't for our
1146         // op_call_varargs inlining.
1147         if (reg.isArgument()
1148             && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->argumentsWithFixup.size()) {
1149             if (verbose)
1150                 dataLog("Argument is live.\n");
1151             return true;
1152         }
1153         
1154         codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();
1155
1156         // The first inline call frame could be an inline tail call
1157         if (!codeOriginPtr) {
1158             if (verbose)
1159                 dataLog("Dead because of tail inlining.\n");
1160             return false;
1161         }
1162     }
1163     
1164     RELEASE_ASSERT_NOT_REACHED();
1165 }
1166
1167 BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
1168 {
1169     BitVector result;
1170     result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
1171     forAllLocalsLiveInBytecode(
1172         codeOrigin,
1173         [&] (VirtualRegister reg) {
1174             ASSERT(reg.isLocal());
1175             result.quickSet(reg.toLocal());
1176         });
1177     return result;
1178 }
1179
1180 unsigned Graph::parameterSlotsForArgCount(unsigned argCount)
1181 {
1182     size_t frameSize = CallFrame::headerSizeInRegisters + argCount;
1183     size_t alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
1184     return alignedFrameSize - CallerFrameAndPC::sizeInRegisters;
1185 }
1186
1187 unsigned Graph::frameRegisterCount()
1188 {
1189     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
1190     return roundLocalRegisterCountForFramePointerOffset(result);
1191 }
1192
1193 unsigned Graph::stackPointerOffset()
1194 {
1195     return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
1196 }
1197
1198 unsigned Graph::requiredRegisterCountForExit()
1199 {
1200     unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
1201     for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames()->begin(); !!iter; ++iter) {
1202         InlineCallFrame* inlineCallFrame = *iter;
1203         CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
1204         unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
1205         count = std::max(count, requiredCount);
1206     }
1207     return count;
1208 }
1209
1210 unsigned Graph::requiredRegisterCountForExecutionAndExit()
1211 {
1212     // FIXME: We should make sure that frameRegisterCount() and requiredRegisterCountForExit()
1213     // never overflows. https://bugs.webkit.org/show_bug.cgi?id=173852
1214     return std::max(frameRegisterCount(), requiredRegisterCountForExit());
1215 }
1216
// Attempts to constant-fold a property load from a constant object base,
// given the proven structure set. Returns the empty JSValue if folding is not
// safe; on success, watchpoints are registered so the fold is invalidated if
// any structure's property at this offset is replaced.
JSValue Graph::tryGetConstantProperty(
    JSValue base, const RegisteredStructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    // Every structure in the proven set must have a valid, still-watchable
    // replacement watchpoint for this offset; otherwise the property could be
    // (or already has been) replaced and we can't fold.
    for (unsigned i = structureSet.size(); i--;) {
        RegisteredStructure structure = structureSet[i];

        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We only shrink the propertyStorage while
    // holding the Structure's lock. So, for this to fail, you'd need an access on a constant
    // object pointer such that the inline caches told us that the object had a structure that it
    // did not *yet* have, and then later, the object transitioned to that structure that the inline
    // caches had already seen. And then the processor reordered the stores. Seems unlikely and
    // difficult to test. I believe that this is worth revisiting but it isn't worth losing sleep
    // over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.

    Structure* structure = object->structure(m_vm);
    if (!structureSet.toStructureSet().contains(structure))
        return JSValue();

    return object->getDirectConcurrently(structure, offset);
}
1265
1266 JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
1267 {
1268     return tryGetConstantProperty(base, RegisteredStructureSet(registerStructure(structure)), offset);
1269 }
1270
1271 JSValue Graph::tryGetConstantProperty(
1272     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1273 {
1274     if (structure.isInfinite()) {
1275         // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
1276         // watching to constant-fold the property.
1277         // https://bugs.webkit.org/show_bug.cgi?id=147271
1278         return JSValue();
1279     }
1280     
1281     return tryGetConstantProperty(base, structure.set(), offset);
1282 }
1283
1284 JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
1285 {
1286     return tryGetConstantProperty(base.m_value, base.m_structure, offset);
1287 }
1288
1289 AbstractValue Graph::inferredValueForProperty(
1290     const AbstractValue& base, PropertyOffset offset,
1291     StructureClobberState clobberState)
1292 {
1293     if (JSValue value = tryGetConstantProperty(base, offset)) {
1294         AbstractValue result;
1295         result.set(*this, *freeze(value), clobberState);
1296         return result;
1297     }
1298
1299     return AbstractValue::heapTop();
1300 }
1301
// Attempts to constant-fold a closure-variable read from a constant scope.
// Returns the empty JSValue when folding isn't safe; on success, the entry's
// watchpoint set is registered so a later store invalidates the compilation.
JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(m_vm, base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        // The entry and its watchpoint state must be read under the symbol
        // table's lock so they stay consistent with the value we read.
        ConcurrentJSLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        // Only an IsWatched (not yet fired, not invalidated) set lets us treat
        // the current value as a constant.
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    // Registered outside the lock; addLazily only records the desire to watch.
    watchpoints().addLazily(set);
    
    return value;
}
1340
1341 JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
1342 {
1343     return tryGetConstantClosureVar(value.m_value, offset);
1344 }
1345
1346 JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1347 {
1348     if (!node->hasConstant())
1349         return JSValue();
1350     return tryGetConstantClosureVar(node->asJSValue(), offset);
1351 }
1352
// Returns the typed-array view for the given value if it's foldable: non-empty,
// actually a JSArrayBufferView, and with a nonzero length. The view is
// registered with the plan's watchpoints so the compilation can be invalidated
// if the view changes.
JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
{
    if (!value)
        return nullptr;
    JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(m_vm, value);
    if (!view)
        return nullptr;
    if (!view->length())
        return nullptr;
    // NOTE(review): the fence presumably orders the length read before later
    // reads of the view's other fields on weakly-ordered CPUs — confirm against
    // the JSArrayBufferView construction protocol.
    WTF::loadLoadFence();
    watchpoints().addLazily(view);
    return view;
}
1366
1367 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1368 {
1369     if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
1370         return nullptr;
1371     return tryGetFoldableView(value);
1372 }
1373
// Rebuilds the CodeBlock's constant pool from the set of frozen values. Weak
// values become lazily-added weak references; strong values are appended to
// the constant pool, which keeps them alive.
void Graph::registerFrozenValues()
{
    m_codeBlock->constants().shrink(0);
    m_codeBlock->constantsSourceCodeRepresentation().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        // Values that don't point into the heap (e.g. numbers) need no GC
        // registration.
        if (!value->pointsToHeap())
            continue;
        
        ASSERT(value->structure());
        ASSERT(m_plan.weakReferences().contains(value->structure()));

        switch (value->strength()) {
        case WeakValue: {
            m_plan.weakReferences().addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            // We already have a barrier on the code block.
            m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
    m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
}
1400
1401 void Graph::visitChildren(SlotVisitor& visitor)
1402 {
1403     for (FrozenValue* value : m_frozenValues) {
1404         visitor.appendUnbarriered(value->value());
1405         visitor.appendUnbarriered(value->structure());
1406     }
1407 }
1408
// Returns the unique FrozenValue for the given JSValue, creating one on first
// use. Also registers the value's structure (if any) and records uint32
// values in use.
FrozenValue* Graph::freeze(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();

    // There are weird relationships in how optimized CodeBlocks
    // point to other CodeBlocks. We don't want to have them be
    // part of the weak pointer set. For example, an optimized CodeBlock
    // having a weak pointer to itself will cause it to get collected.
    RELEASE_ASSERT(!jsDynamicCast<CodeBlock*>(m_vm, value));
    
    // add() reserves a slot keyed by the encoded value; if the slot already
    // existed we just return the previously-frozen value.
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());
    
    FrozenValue frozenValue = FrozenValue::freeze(value);
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);
    
    // Commit the frozen value into the reserved map slot and return it.
    return result.iterator->value = m_frozenValues.add(frozenValue);
}
1433
1434 FrozenValue* Graph::freezeStrong(JSValue value)
1435 {
1436     FrozenValue* result = freeze(value);
1437     result->strengthenTo(StrongValue);
1438     return result;
1439 }
1440
1441 void Graph::convertToConstant(Node* node, FrozenValue* value)
1442 {
1443     if (value->structure())
1444         assertIsRegistered(value->structure());
1445     node->convertToConstant(value);
1446 }
1447
1448 void Graph::convertToConstant(Node* node, JSValue value)
1449 {
1450     convertToConstant(node, freeze(value));
1451 }
1452
1453 void Graph::convertToStrongConstant(Node* node, JSValue value)
1454 {
1455     convertToConstant(node, freezeStrong(value));
1456 }
1457
1458 RegisteredStructure Graph::registerStructure(Structure* structure, StructureRegistrationResult& result)
1459 {
1460     m_plan.weakReferences().addLazily(structure);
1461     if (m_plan.watchpoints().consider(structure))
1462         result = StructureRegisteredAndWatched;
1463     else
1464         result = StructureRegisteredNormally;
1465     return RegisteredStructure::createPrivate(structure);
1466 }
1467
1468 void Graph::registerAndWatchStructureTransition(Structure* structure)
1469 {
1470     m_plan.weakReferences().addLazily(structure);
1471     m_plan.watchpoints().addLazily(structure->transitionWatchpointSet());
1472 }
1473
// Asserts that the structure has been registered with this compilation (weak
// reference present, and its transition watched if it should be). Crashes the
// compilation with a graph dump otherwise.
void Graph::assertIsRegistered(Structure* structure)
{
    // It's convenient to be able to call this with a maybe-null structure.
    if (!structure)
        return;

    DFG_ASSERT(*this, nullptr, m_plan.weakReferences().contains(structure));

    // Structures that shouldn't be watched don't need a watchpoint.
    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;
    
    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}
1489
// Shared implementation behind the logAssertionFailure overloads: prints the
// failed assertion and its location, then the context string and a full graph
// dump, and repeats the assertion afterwards so it remains visible even when
// the dump is very long.
static void logDFGAssertionFailure(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
}
1505
1506 void Graph::logAssertionFailure(
1507     std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
1508 {
1509     logDFGAssertionFailure(*this, "", file, line, function, assertion);
1510 }
1511
1512 void Graph::logAssertionFailure(
1513     Node* node, const char* file, int line, const char* function, const char* assertion)
1514 {
1515     logDFGAssertionFailure(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
1516 }
1517
1518 void Graph::logAssertionFailure(
1519     BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
1520 {
1521     logDFGAssertionFailure(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
1522 }
1523
1524 CPSCFG& Graph::ensureCPSCFG()
1525 {
1526     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1527     if (!m_cpsCFG)
1528         m_cpsCFG = std::make_unique<CPSCFG>(*this);
1529     return *m_cpsCFG;
1530 }
1531
1532 CPSDominators& Graph::ensureCPSDominators()
1533 {
1534     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1535     if (!m_cpsDominators)
1536         m_cpsDominators = std::make_unique<CPSDominators>(*this);
1537     return *m_cpsDominators;
1538 }
1539
1540 SSADominators& Graph::ensureSSADominators()
1541 {
1542     RELEASE_ASSERT(m_form == SSA || m_isInSSAConversion);
1543     if (!m_ssaDominators)
1544         m_ssaDominators = std::make_unique<SSADominators>(*this);
1545     return *m_ssaDominators;
1546 }
1547
1548 CPSNaturalLoops& Graph::ensureCPSNaturalLoops()
1549 {
1550     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1551     ensureCPSDominators();
1552     if (!m_cpsNaturalLoops)
1553         m_cpsNaturalLoops = std::make_unique<CPSNaturalLoops>(*this);
1554     return *m_cpsNaturalLoops;
1555 }
1556
1557 SSANaturalLoops& Graph::ensureSSANaturalLoops()
1558 {
1559     RELEASE_ASSERT(m_form == SSA);
1560     ensureSSADominators();
1561     if (!m_ssaNaturalLoops)
1562         m_ssaNaturalLoops = std::make_unique<SSANaturalLoops>(*this);
1563     return *m_ssaNaturalLoops;
1564 }
1565
1566 BackwardsCFG& Graph::ensureBackwardsCFG()
1567 {
1568     // We could easily relax this in the future to work over CPS, but today, it's only used in SSA.
1569     RELEASE_ASSERT(m_form == SSA); 
1570     if (!m_backwardsCFG)
1571         m_backwardsCFG = std::make_unique<BackwardsCFG>(*this);
1572     return *m_backwardsCFG;
1573 }
1574
1575 BackwardsDominators& Graph::ensureBackwardsDominators()
1576 {
1577     RELEASE_ASSERT(m_form == SSA);
1578     if (!m_backwardsDominators)
1579         m_backwardsDominators = std::make_unique<BackwardsDominators>(*this);
1580     return *m_backwardsDominators;
1581 }
1582
1583 ControlEquivalenceAnalysis& Graph::ensureControlEquivalenceAnalysis()
1584 {
1585     RELEASE_ASSERT(m_form == SSA);
1586     if (!m_controlEquivalenceAnalysis)
1587         m_controlEquivalenceAnalysis = std::make_unique<ControlEquivalenceAnalysis>(*this);
1588     return *m_controlEquivalenceAnalysis;
1589 }
1590
// Finds the baseline value profile (argument profile, lazy-operand profile,
// bytecode value profile, or arith profile) that describes the value produced
// by operandNode as consumed by currentNode. Returns an empty
// MethodOfGettingAValueProfile if no suitable profile is found.
MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* currentNode, Node* operandNode)
{
    // This represents IR like `CurrentNode(@operandNode)`. For example: `GetByVal(..., Int32:@GetLocal)`.

    // Walk backwards through value-preserving nodes (see the switch below)
    // until we find one we can attach a profile to, or run out.
    for (Node* node = operandNode; node;) {
        // currentNode is null when we're doing speculation checks for checkArgumentTypes().
        // Only consult profiles when the operand comes from a different bytecode
        // instruction than currentNode (or currentNode produces no result);
        // otherwise the profile would describe currentNode's own result.
        if (!currentNode || node->origin.semantic != currentNode->origin.semantic || !currentNode->hasResult()) {
            CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);

            if (node->accessesStack(*this)) {
                // Arguments at the primary entrypoint have dedicated argument profiles.
                if (m_form != SSA && node->local().isArgument()) {
                    int argument = node->local().toArgument();
                    Node* argumentNode = m_rootToArguments.find(block(0))->value[argument];
                    // FIXME: We should match SetArgument nodes at other entrypoints as well:
                    // https://bugs.webkit.org/show_bug.cgi?id=175841
                    if (argumentNode && node->variableAccessData() == argumentNode->variableAccessData())
                        return &profiledBlock->valueProfileForArgument(argument);
                }

                // Other locals are profiled lazily, keyed by (bytecode index, operand).
                if (node->op() == GetLocal) {
                    return MethodOfGettingAValueProfile::fromLazyOperand(
                        profiledBlock,
                        LazyOperandValueProfileKey(
                            node->origin.semantic.bytecodeIndex, node->local()));
                }
            }

            // Nodes with a heap prediction map to the bytecode's value profile.
            if (node->hasHeapPrediction())
                return &profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);

            // Last resort: an arith profile from the baseline JIT, if one exists.
            if (profiledBlock->hasBaselineJITProfiling()) {
                if (ArithProfile* result = profiledBlock->arithProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex))
                    return result;
            }
        }

        // These ops forward their child's value (possibly re-representing it),
        // so the child's profile describes the same logical value; keep walking.
        switch (node->op()) {
        case BooleanToNumber:
        case Identity:
        case ValueRep:
        case DoubleRep:
        case Int52Rep:
            node = node->child1().node();
            break;
        default:
            node = nullptr;
        }
    }
    
    return MethodOfGettingAValueProfile();
}
1642
1643 bool Graph::getRegExpPrototypeProperty(JSObject* regExpPrototype, Structure* regExpPrototypeStructure, UniquedStringImpl* uid, JSValue& returnJSValue)
1644 {
1645     unsigned attributesUnused;
1646     PropertyOffset offset = regExpPrototypeStructure->getConcurrently(uid, attributesUnused);
1647     if (!isValidOffset(offset))
1648         return false;
1649
1650     JSValue value = tryGetConstantProperty(regExpPrototype, regExpPrototypeStructure, offset);
1651     if (!value)
1652         return false;
1653
1654     // We only care about functions and getters at this point. If you want to access other properties
1655     // you'll have to add code for those types.
1656     JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, value);
1657     if (!function) {
1658         GetterSetter* getterSetter = jsDynamicCast<GetterSetter*>(m_vm, value);
1659
1660         if (!getterSetter)
1661             return false;
1662
1663         returnJSValue = JSValue(getterSetter);
1664         return true;
1665     }
1666
1667     returnJSValue = value;
1668     return true;
1669 }
1670
1671 bool Graph::isStringPrototypeMethodSane(JSGlobalObject* globalObject, UniquedStringImpl* uid)
1672 {
1673     ObjectPropertyConditionSet conditions = generateConditionsForPrototypeEquivalenceConcurrently(m_vm, globalObject, globalObject->stringObjectStructure(), globalObject->stringPrototype(), uid);
1674
1675     if (!conditions.isValid())
1676         return false;
1677
1678     ObjectPropertyCondition equivalenceCondition = conditions.slotBaseCondition();
1679     RELEASE_ASSERT(equivalenceCondition.hasRequiredValue());
1680     JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, equivalenceCondition.condition().requiredValue());
1681     if (!function)
1682         return false;
1683
1684     if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
1685         return false;
1686     
1687     return watchConditions(conditions);
1688 }
1689
1690
// Returns true if it is safe at this code origin for the DFG to treat a
// StringObject like its underlying String: no prior NotStringObject exit, no
// custom Symbol.toPrimitive, and both valueOf/toString still the sane
// builtins. Watchpoints installed along the way guard against later mutation.
bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
{
    if (hasExitSite(codeOrigin, NotStringObject))
        return false;

    JSGlobalObject* globalObject = globalObjectFor(codeOrigin);
    Structure* stringObjectStructure = globalObjectFor(codeOrigin)->stringObjectStructure();
    registerStructure(stringObjectStructure);
    ASSERT(stringObjectStructure->storedPrototype().isObject());
    ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo(*stringObjectStructure->storedPrototype().asCell()->vm()) == StringPrototype::info());

    // A Symbol.toPrimitive property could observe/alter the conversion, so we
    // require it to be absent and watch for its appearance.
    if (!watchConditions(generateConditionsForPropertyMissConcurrently(m_vm, globalObject, stringObjectStructure, m_vm.propertyNames->toPrimitiveSymbol.impl())))
        return false;

    // We're being conservative here. We want DFG's ToString on StringObject to be
    // used in both numeric contexts (that would call valueOf()) and string contexts
    // (that would call toString()). We don't want the DFG to have to distinguish
    // between the two, just because that seems like it would get confusing. So we
    // just require both methods to be sane.
    if (!isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->valueOf.impl()))
        return false;
    return isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->toString.impl());
}
1714
// Determines whether an exception thrown at codeOrigin would be caught within
// this machine frame. Walks outward through the inline-call-frame stack; on a
// hit, reports the catch's code origin and handler through the out-params.
// Returns false when the search falls off the machine frame without a handler.
bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
{
    // Fast path: if compilation saw no exception handlers at all, nothing can catch.
    if (!m_hasExceptionHandlers)
        return false;

    unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
    while (1) {
        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
        CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
        if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
            opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
            catchHandlerOut = handler;
            return true;
        }

        // A null inlineCallFrame means we just checked the machine (outermost)
        // code block; there is nowhere further to look within this frame.
        if (!inlineCallFrame)
            return false;

        // Step out one inlining level: continue the search at the call site
        // in the caller of this inline frame.
        bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
        codeOrigin = codeOrigin.inlineCallFrame->directCaller;
    }

    RELEASE_ASSERT_NOT_REACHED();
}
1739
// Decides whether a Spread node can use the fast path that reads array
// elements directly instead of running the iterator protocol. True only when
// every possible structure of the spread operand provably cannot observe the
// iteration (normal Array prototype, no own Symbol.iterator, no indexed hooks).
bool Graph::canDoFastSpread(Node* node, const AbstractValue& value)
{
    // The parameter 'value' is the AbstractValue for child1 (the thing being spread).
    ASSERT(node->op() == Spread);

    if (node->child1().useKind() != ArrayUse) {
        // Note: we only speculate on ArrayUse when we've set up the necessary watchpoints
        // to prove that the iteration protocol is non-observable starting from ArrayPrototype.
        return false;
    }

    // FIXME: We should add profiling of the incoming operand to Spread
    // so we can speculate in such a way that we guarantee that this
    // function would return true:
    // https://bugs.webkit.org/show_bug.cgi?id=171198

    // An infinite structure set means we can't enumerate the possibilities below.
    if (!value.m_structure.isFinite())
        return false;

    ArrayPrototype* arrayPrototype = globalObjectFor(node->child1()->origin.semantic)->arrayPrototype();
    bool allGood = true;
    // Every structure in the set must pass every check; `&=` accumulates
    // failure from any structure (forEach offers no early exit).
    value.m_structure.forEach([&] (RegisteredStructure structure) {
        allGood &= structure->hasMonoProto()
            && structure->storedPrototype() == arrayPrototype
            && !structure->isDictionary()
            && structure->getConcurrently(m_vm.propertyNames->iteratorSymbol.impl()) == invalidOffset
            && !structure->mayInterceptIndexedAccesses();
    });

    return allGood;
}
1771
1772 void Graph::clearCPSCFGData()
1773 {
1774     m_cpsNaturalLoops = nullptr;
1775     m_cpsDominators = nullptr;
1776     m_cpsCFG = nullptr;
1777 }
1778
1779 } } // namespace JSC::DFG
1780
1781 #endif // ENABLE(DFG_JIT)