Add a new pattern matching rule to Graph::methodOfGettingAValueProfileFor for SetLoca...
[WebKit-https.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
1 /*
2  * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGGraph.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "BytecodeKills.h"
32 #include "BytecodeLivenessAnalysisInlines.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGBackwardsCFG.h"
36 #include "DFGBackwardsDominators.h"
37 #include "DFGBlockWorklist.h"
38 #include "DFGCFG.h"
39 #include "DFGClobberSet.h"
40 #include "DFGClobbersExitState.h"
41 #include "DFGControlEquivalenceAnalysis.h"
42 #include "DFGDominators.h"
43 #include "DFGFlowIndexing.h"
44 #include "DFGFlowMap.h"
45 #include "DFGJITCode.h"
46 #include "DFGMayExit.h"
47 #include "DFGNaturalLoops.h"
48 #include "DFGVariableAccessDataDump.h"
49 #include "FullBytecodeLiveness.h"
50 #include "FunctionExecutableDump.h"
51 #include "GetterSetter.h"
52 #include "JIT.h"
53 #include "JSLexicalEnvironment.h"
54 #include "MaxFrameExtentForSlowPathCall.h"
55 #include "OperandsInlines.h"
56 #include "JSCInlines.h"
57 #include "StackAlignment.h"
58 #include <wtf/CommaPrinter.h>
59 #include <wtf/ListDump.h>
60
61 namespace JSC { namespace DFG {
62
// Table of stringized DFG opcode names, indexed by NodeType. Generated from
// the FOR_EACH_DFG_OP x-macro so it always stays in sync with the opcode list.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};
69
// Constructs a DFG graph for the given compilation plan. The graph starts out
// in LoadStore form, before the fixpoint, with the conservative
// "everything is live" ref-count state; later phases advance these states.
Graph::Graph(VM& vm, Plan& plan)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock)
    , m_profiledBlock(m_codeBlock->alternative()) // The baseline code block that carries the profiling data.
    , m_ssaCFG(std::make_unique<SSACFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);
    
    m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();
    
    m_indexingCache = std::make_unique<FlowIndexing>(*this);
    m_abstractValuesCache = std::make_unique<FlowMap<AbstractValue>>(*this);

    // Pre-register structures that are referred to pervasively, and cache the
    // registered string/symbol structures for quick access.
    registerStructure(vm.structureStructure.get());
    this->stringStructure = registerStructure(vm.stringStructure.get());
    this->symbolStructure = registerStructure(vm.symbolStructure.get());
}
94
// Out-of-line destructor; all members release their resources through their
// own destructors.
Graph::~Graph()
{
}
98
99 const char *Graph::opName(NodeType op)
100 {
101     return dfgOpNames[op];
102 }
103
104 static void printWhiteSpace(PrintStream& out, unsigned amount)
105 {
106     while (amount-- > 0)
107         out.print(" ");
108 }
109
// Prints, as extra dump lines, the inline-call-frame transitions ("pops" and
// "pushes") between the previously dumped node and currentNode. Returns true
// if anything was printed. previousNodeRef is updated to currentNode so the
// next call diffs against this node.
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;
    
    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    if (!previousNode)
        return false;
    
    // Same inline call frame means no transition to report.
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    // Find the deepest frame the two inline stacks have in common; everything
    // past that point was either exited (pop) or entered (push).
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops, deepest frame first.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes, shallowest frame first.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}
155
156 int Graph::amountOfNodeWhiteSpace(Node* node)
157 {
158     return (node->origin.semantic.inlineDepth() - 1) * 2;
159 }
160
161 void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
162 {
163     printWhiteSpace(out, amountOfNodeWhiteSpace(node));
164 }
165
// Dumps a one-line description of a single node: index, ref count, virtual
// register, opcode and children, followed by every piece of meta-data the
// node carries (array mode, identifiers, structures, branch targets, ...),
// the abstract-heap read/write sets, exit information, and the prediction.
void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount; // Show the "real" count, excluding the mustGenerate ref (see legend below).

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count it prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. The may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    // Children: either a var-args range into m_varArgChildren, or up to three
    // fixed edges. A fixed child slot is printed whenever any later slot is
    // non-empty, so gaps in the child list show up explicitly.
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    // Per-node meta-data: each field is printed only if this node kind
    // actually carries it (guarded by the corresponding has* predicate).
    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithUnaryType())
        out.print(comma, "Type:", node->arithUnaryType());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasArithRoundingMode())
        out.print(comma, "Rounding:", node->arithRoundingMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasArgumentIndex())
        out.print(comma, node->argumentIndex());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasClassInfo())
        out.print(comma, *node->classInfo());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet().toStructureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure().get(), context));
    if (node->op() == CPUIntrinsic)
        out.print(comma, intrinsicName(node->intrinsic()));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
        // Structure IDs are only meaningful on 64-bit; elsewhere print the pointer.
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next.get()));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                // If the cell is callable, also dump its executable for readability.
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(m_vm, executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasSpeculatedTypeForQuery())
        out.print(comma, SpeculationDump(node->speculatedTypeForQuery()));
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->accessesStack(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal()) 
        out.print(comma, node->unlinkedLocal());
    if (node->hasVectorLengthHint())
        out.print(comma, "vectorLengthHint = ", node->vectorLengthHint());
    if (node->hasLazyJSValue())
        out.print(comma, node->lazyJSValue());
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->hasCallDOMGetterData()) {
        CallDOMGetterData* data = node->callDOMGetterData();
        out.print(comma, "id", data->identifierNumber, "{", identifiers()[data->identifierNumber], "}");
        out.print(", domJIT = ", RawPointer(data->domJIT));
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    // Control-flow meta-data: jump/branch/switch targets.
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    if (node->isEntrySwitch()) {
        EntrySwitchData* data = node->entrySwitchData();
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, BranchTarget(data->cases[i]));
    }
    // Abstract-heap effects as computed by the clobberize machinery.
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        // Only show the exit origin when it differs from the semantic origin.
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    out.print(comma, node->origin.exitOK ? "ExitValid" : "ExitInvalid");
    if (node->origin.wasHoisted)
        out.print(comma, "WasHoisted");
    out.print(")");

    // Trailing prediction: variable prediction for stack accesses, otherwise
    // the heap prediction if the node has one.
    if (node->accessesStack(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}
396
397 bool Graph::terminalsAreValid()
398 {
399     for (BasicBlock* block : blocksInNaturalOrder()) {
400         if (!block->terminal())
401             return false;
402     }
403     return true;
404 }
405
// Overloads that let dumpBlockHeader treat CPS CFG loop nodes and plain
// BasicBlock pointers uniformly when printing natural-loop membership.
static BasicBlock* unboxLoopNode(const CPSCFG::Node& node) { return node.node(); }
static BasicBlock* unboxLoopNode(BasicBlock* block) { return block; }
408
// Dumps the header for a basic block: its identity and flags, execution
// count, predecessors/successors, dominator information (when available and
// the CFG is well-formed), natural-loop membership, and phi nodes.
void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):",
        block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", block->isCatchEntrypoint ? " (Catch Entrypoint)" : "", "\n");
    // Self-comparison is a NaN check: (x == x) is false only for a
    // floating-point NaN, so the count prints only when one was recorded.
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    // Successors are only meaningful when the block has a terminal.
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
        }
    } else
        out.print(" <invalid>");
    out.print("\n");

    // Generic lambda so the same printing works for SSA and CPS dominators,
    // whose List types differ.
    auto printDominators = [&] (auto& dominators) {
        out.print(prefix, "  Dominated by: ", dominators.dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", dominators.blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", dominators.dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ",
            dominators.iteratedDominanceFrontierOf(typename std::remove_reference<decltype(dominators)>::type::List { block }), "\n");
    };

    // Dominator queries require a well-formed CFG (all terminals valid).
    if (terminalsAreValid()) {
        if (m_ssaDominators)
            printDominators(*m_ssaDominators);
        else if (m_cpsDominators)
            printDominators(*m_cpsDominators);
    }

    if (m_backwardsDominators && terminalsAreValid()) {
        out.print(prefix, "  Backwards dominates by: ", m_backwardsDominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Backwards dominates: ", m_backwardsDominators->blocksDominatedBy(block), "\n");
    }
    if (m_controlEquivalenceAnalysis && terminalsAreValid()) {
        out.print(prefix, "  Control equivalent to:");
        for (BasicBlock* otherBlock : blocksInNaturalOrder()) {
            if (m_controlEquivalenceAnalysis->areEquivalent(block, otherBlock))
                out.print(" ", *otherBlock);
        }
        out.print("\n");
    }

    // Works over either SSA or CPS natural loops via unboxLoopNode.
    auto printNaturalLoops = [&] (auto& naturalLoops) {
        if (const auto* loop = naturalLoops->headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            // Sort block indices so the dump is stable and easy to scan.
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(unboxLoopNode(loop->at(i))->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        auto containingLoops = naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *unboxLoopNode(containingLoops[i]->header()));
            out.print("\n");
        }
    };

    if (m_ssaNaturalLoops)
        printNaturalLoops(m_ssaNaturalLoops);
    else if (m_cpsNaturalLoops)
        printNaturalLoops(m_cpsNaturalLoops);

    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            // In DumpLivePhisOnly mode, skip phis that won't be generated.
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}
502
// Dumps the entire graph: global state, per-block headers, abstract values
// at block heads/tails (form-dependent), every node, the frozen GC values,
// and the watchpoints, followed by any context the dump accumulated.
void Graph::dump(PrintStream& out, DumpContext* context)
{
    // Use a local context if the caller didn't supply one, so deferred
    // context output (dumped at the end) still works.
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    // Argument descriptions differ by form: SSA tracks formats per
    // entrypoint, CPS tracks argument nodes per root block.
    if (m_form == SSA) {
        for (unsigned entrypointIndex = 0; entrypointIndex < m_argumentFormats.size(); ++entrypointIndex)
            out.print("  Argument formats for entrypoint index: ", entrypointIndex, " : ", listDump(m_argumentFormats[entrypointIndex]), "\n");
    }
    else {
        for (auto pair : m_rootToArguments)
            out.print("  Arguments for block#", pair.key->index, ": ", listDump(pair.value), "\n");
    }
    out.print("\n");
    
    // lastNode threads through dumpCodeOrigin so inline-frame transitions are
    // printed between consecutive nodes.
    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        // Head state: variable-keyed in CPS forms, node-keyed in SSA.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        // Tail state, mirroring the head-state dump above.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    // Frozen values that the GC must know about.
    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));
    
    // Flush anything the dump context accumulated (structures, values, ...).
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}
605
606 void Graph::deleteNode(Node* node)
607 {
608     if (validationEnabled() && m_form == SSA) {
609         for (BasicBlock* block : blocksInNaturalOrder()) {
610             DFG_ASSERT(*this, node, !block->ssa->liveAtHead.contains(node));
611             DFG_ASSERT(*this, node, !block->ssa->liveAtTail.contains(node));
612         }
613     }
614
615     m_nodes.remove(node);
616 }
617
// Compacts node indices after deletions so they form a dense range again.
void Graph::packNodeIndices()
{
    m_nodes.packIndices();
}
622
623 void Graph::dethread()
624 {
625     if (m_form == LoadStore || m_form == SSA)
626         return;
627     
628     if (logCompilationChanges())
629         dataLog("Dethreading DFG graph.\n");
630     
631     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
632         BasicBlock* block = m_blocks[blockIndex].get();
633         if (!block)
634             continue;
635         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
636             Node* phi = block->phis[phiIndex];
637             phi->children.reset();
638         }
639     }
640     
641     m_form = LoadStore;
642 }
643
644 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
645 {
646     if (!successor->isReachable) {
647         successor->isReachable = true;
648         worklist.append(successor);
649     }
650     
651     successor->predecessors.append(block);
652 }
653
654 void Graph::determineReachability()
655 {
656     Vector<BasicBlock*, 16> worklist;
657     for (BasicBlock* entrypoint : m_roots) {
658         entrypoint->isReachable = true;
659         worklist.append(entrypoint);
660     }
661     while (!worklist.isEmpty()) {
662         BasicBlock* block = worklist.takeLast();
663         for (unsigned i = block->numSuccessors(); i--;)
664             handleSuccessor(worklist, block, block->successor(i));
665     }
666 }
667
668 void Graph::resetReachability()
669 {
670     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
671         BasicBlock* block = m_blocks[blockIndex].get();
672         if (!block)
673             continue;
674         block->isReachable = false;
675         block->predecessors.clear();
676     }
677     
678     determineReachability();
679 }
680
681 namespace {
682
// Worklist-based mark phase that computes node reference counts. A node is
// live if it is must-generate, is the target of an unproven type check, or is
// reachable from a live node through its edges; in SSA, Upsilons of live Phis
// are also live. Dead nodes end up with refCount == 0.
class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
        // Outer loop: drain the worklist, then (in SSA) sweep for Upsilons
        // whose Phi became live; repeat until a fixpoint is reached.
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    // Roots the node behind an edge that still needs a runtime type check.
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    // Bumps the node's ref count; queues it the first time it becomes live.
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};
779
780 } // anonymous namespace
781
782 void Graph::computeRefCounts()
783 {
784     RefCountCalculator calculator(*this);
785     calculator.calculate();
786 }
787
788 void Graph::killBlockAndItsContents(BasicBlock* block)
789 {
790     if (auto& ssaData = block->ssa)
791         ssaData->invalidate();
792     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
793         deleteNode(block->phis[phiIndex]);
794     for (Node* node : *block)
795         deleteNode(node);
796     
797     killBlock(block);
798 }
799
800 void Graph::killUnreachableBlocks()
801 {
802     invalidateNodeLiveness();
803
804     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
805         BasicBlock* block = this->block(blockIndex);
806         if (!block)
807             continue;
808         if (block->isReachable)
809             continue;
810
811         dataLogIf(Options::verboseDFGBytecodeParsing(), "Basic block #", blockIndex, " was killed because it was unreachable\n");
812         killBlockAndItsContents(block);
813     }
814 }
815
816 void Graph::invalidateCFG()
817 {
818     m_cpsDominators = nullptr;
819     m_ssaDominators = nullptr;
820     m_cpsNaturalLoops = nullptr;
821     m_ssaNaturalLoops = nullptr;
822     m_controlEquivalenceAnalysis = nullptr;
823     m_backwardsDominators = nullptr;
824     m_backwardsCFG = nullptr;
825     m_cpsCFG = nullptr;
826 }
827
828 void Graph::invalidateNodeLiveness()
829 {
830     if (m_form != SSA)
831         return;
832
833     for (BasicBlock* block : blocksInNaturalOrder())
834         block->ssa->invalidate();
835 }
836
// Scans |block| from |startIndexInBlock|, replacing the first GetLocal of
// |variableAccessData| with |newGetLocal| (also updating variablesAtTail if
// that GetLocal was the tail entry for the local). The scan stops at the
// first SetLocal of the same local, since that starts a new value.
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            // A store to the same local ends the range we are rewriting.
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            // Keep variablesAtTail consistent if the node we just replaced was
            // the last mention of this local in the block.
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}
867
// Returns the blocks in depth-first pre order, starting from every root.
// Under validation, also checks the invariant that a dominator appears in
// the list before every block it dominates.
BlockList Graph::blocksInPreOrder()
{
    BlockList result;
    BlockWorklist worklist;
    for (BasicBlock* entrypoint : m_roots)
        worklist.push(entrypoint);
    while (BasicBlock* block = worklist.pop()) {
        result.append(block);
        for (unsigned i = block->numSuccessors(); i--;)
            worklist.push(block->successor(i));
    }

    if (validationEnabled()) {
        // When iterating over pre order, we should see dominators
        // before things they dominate.
        auto validateResults = [&] (auto& dominators) {
            for (unsigned i = 0; i < result.size(); ++i) {
                BasicBlock* a = result[i];
                if (!a)
                    continue;
                for (unsigned j = 0; j < result.size(); ++j) {
                    BasicBlock* b = result[j];
                    if (!b || a == b)
                        continue;
                    if (dominators.dominates(a, b))
                        RELEASE_ASSERT(i < j);
                }
            }
        };

        // Use whichever dominator analysis matches the current graph form.
        if (m_form == SSA || m_isInSSAConversion)
            validateResults(ensureSSADominators());
        else
            validateResults(ensureCPSDominators());
    }
    return result;
}
905
// Returns the blocks in depth-first post order, starting from every root.
// Under validation, also checks that in the reversal of this order a
// dominator appears before every block it dominates (i.e. dominators come
// later in post order).
BlockList Graph::blocksInPostOrder()
{
    BlockList result;
    PostOrderBlockWorklist worklist;
    for (BasicBlock* entrypoint : m_roots)
        worklist.push(entrypoint);
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case VisitOrder::Pre:
            // Re-queue the block for its post visit, then descend into
            // successors first.
            worklist.pushPost(item.node);
            for (unsigned i = item.node->numSuccessors(); i--;)
                worklist.push(item.node->successor(i));
            break;
        case VisitOrder::Post:
            result.append(item.node);
            break;
        }
    }

    if (validationEnabled()) {
        auto validateResults = [&] (auto& dominators) {
            // When iterating over reverse post order, we should see dominators
            // before things they dominate.
            for (unsigned i = 0; i < result.size(); ++i) {
                BasicBlock* a = result[i];
                if (!a)
                    continue;
                for (unsigned j = 0; j < result.size(); ++j) {
                    BasicBlock* b = result[j];
                    if (!b || a == b)
                        continue;
                    if (dominators.dominates(a, b))
                        RELEASE_ASSERT(i > j);
                }
            }
        };

        // Use whichever dominator analysis matches the current graph form.
        if (m_form == SSA || m_isInSSAConversion)
            validateResults(ensureSSADominators());
        else
            validateResults(ensureCPSDominators());
    }

    return result;
}
951
952 void Graph::clearReplacements()
953 {
954     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
955         BasicBlock* block = m_blocks[blockIndex].get();
956         if (!block)
957             continue;
958         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
959             block->phis[phiIndex]->setReplacement(nullptr);
960         for (unsigned nodeIndex = block->size(); nodeIndex--;)
961             block->at(nodeIndex)->setReplacement(nullptr);
962     }
963 }
964
965 void Graph::clearEpochs()
966 {
967     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
968         BasicBlock* block = m_blocks[blockIndex].get();
969         if (!block)
970             continue;
971         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
972             block->phis[phiIndex]->setEpoch(Epoch());
973         for (unsigned nodeIndex = block->size(); nodeIndex--;)
974             block->at(nodeIndex)->setEpoch(Epoch());
975     }
976 }
977
978 void Graph::initializeNodeOwners()
979 {
980     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
981         BasicBlock* block = m_blocks[blockIndex].get();
982         if (!block)
983             continue;
984         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
985             block->phis[phiIndex]->owner = block;
986         for (unsigned nodeIndex = block->size(); nodeIndex--;)
987             block->at(nodeIndex)->owner = block;
988     }
989 }
990
991 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
992 {
993     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
994         BasicBlock* block = m_blocks[blockIndex].get();
995         if (!block)
996             continue;
997         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
998             block->phis[phiIndex]->clearFlags(flags);
999         for (unsigned nodeIndex = block->size(); nodeIndex--;)
1000             block->at(nodeIndex)->clearFlags(flags);
1001     }
1002 }
1003
// Attempts to watch an ObjectPropertyCondition for this compilation. On
// success, every cell the condition names is kept alive weakly and a lazy
// watchpoint on the condition is registered. Returns false if the condition
// is not watchable.
bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    if (!key.isWatchable())
        return false;
    
    // The watchpoint is only meaningful while the cells it names are alive,
    // so keep them alive for as long as this code might run.
    m_plan.weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        m_plan.weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        m_plan.weakReferences.addLazily(key.requiredValue());
    
    m_plan.watchpoints.addLazily(key);

    // A watched Presence condition means loading from this (object, offset)
    // pair is safe; remember that for isSafeToLoad().
    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
    
    return true;
}
1022
1023 bool Graph::watchConditions(const ObjectPropertyConditionSet& keys)
1024 {
1025     if (!keys.isValid())
1026         return false;
1027
1028     for (const ObjectPropertyCondition& key : keys) {
1029         if (!watchCondition(key))
1030             return false;
1031     }
1032     return true;
1033 }
1034
1035 bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
1036 {
1037     return m_safeToLoad.contains(std::make_pair(base, offset));
1038 }
1039
// Returns the inferred type for a (structure, uid) property key, caching the
// answer in m_inferredTypes. A non-Top answer is made reliable by keeping the
// InferredType object alive weakly and registering a lazy watchpoint on it.
InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    assertIsRegistered(key.structure());
    
    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        // No type object means nothing is known; cache Top so we don't ask again.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        // Top needs no watchpoint: it cannot be invalidated by new writes.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }
    
    m_inferredTypes.add(key, typeDescriptor);

    m_plan.weakReferences.addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}
1071
1072 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
1073 {
1074     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
1075     if (iter != m_bytecodeLiveness.end())
1076         return *iter->value;
1077     
1078     std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
1079     codeBlock->livenessAnalysis().computeFullLiveness(codeBlock, *liveness);
1080     FullBytecodeLiveness& result = *liveness;
1081     m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
1082     return result;
1083 }
1084
1085 FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
1086 {
1087     return livenessFor(baselineCodeBlockFor(inlineCallFrame));
1088 }
1089
1090 BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
1091 {
1092     HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
1093     if (iter != m_bytecodeKills.end())
1094         return *iter->value;
1095     
1096     std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
1097     codeBlock->livenessAnalysis().computeKills(codeBlock, *kills);
1098     BytecodeKills& result = *kills;
1099     m_bytecodeKills.add(codeBlock, WTFMove(kills));
1100     return result;
1101 }
1102
1103 BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
1104 {
1105     return killsFor(baselineCodeBlockFor(inlineCallFrame));
1106 }
1107
// Determines whether the machine |operand| is live at |codeOrigin| according
// to baseline bytecode liveness, walking up the inline call frame chain from
// the innermost frame outward.
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    static const bool verbose = false;
    
    if (verbose)
        dataLog("Checking of operand is live: ", operand, "\n");
    CodeOrigin* codeOriginPtr = &codeOrigin;
    for (;;) {
        // Translate the machine operand into the current frame's coordinates.
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());
        
        if (verbose)
            dataLog("reg = ", reg, "\n");
        
        if (operand.offset() < codeOriginPtr->stackOffset() + CallFrame::headerSizeInRegisters) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < CallFrame::headerSizeInRegisters);
                
                // The callee slot counts as live for closure calls.
                if (codeOriginPtr->inlineCallFrame->isClosureCall
                    && reg.offset() == CallFrameSlot::callee) {
                    if (verbose)
                        dataLog("Looks like a callee.\n");
                    return true;
                }
                
                // Likewise the argument-count slot for varargs calls.
                if (codeOriginPtr->inlineCallFrame->isVarargs()
                    && reg.offset() == CallFrameSlot::argumentCount) {
                    if (verbose)
                        dataLog("Looks like the argument count.\n");
                    return true;
                }
                
                return false;
            }

            // A slot belonging to this frame: defer to the baseline analysis.
            if (verbose)
                dataLog("Asking the bytecode liveness.\n");
            return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
                reg.offset(), codeOriginPtr->bytecodeIndex);
        }
        
        InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
        if (!inlineCallFrame) {
            if (verbose)
                dataLog("Ran out of stack, returning true.\n");
            return true;
        }

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->argumentsWithFixup.size()) {
            if (verbose)
                dataLog("Argument is live.\n");
            return true;
        }
        
        // Move one level up the inline stack and try again.
        codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();

        // The first inline call frame could be an inline tail call
        if (!codeOriginPtr) {
            if (verbose)
                dataLog("Dead because of tail inlining.\n");
            return false;
        }
    }
    
    RELEASE_ASSERT_NOT_REACHED();
}
1177
1178 BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
1179 {
1180     BitVector result;
1181     result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
1182     forAllLocalsLiveInBytecode(
1183         codeOrigin,
1184         [&] (VirtualRegister reg) {
1185             ASSERT(reg.isLocal());
1186             result.quickSet(reg.toLocal());
1187         });
1188     return result;
1189 }
1190
1191 unsigned Graph::parameterSlotsForArgCount(unsigned argCount)
1192 {
1193     size_t frameSize = CallFrame::headerSizeInRegisters + argCount;
1194     size_t alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
1195     return alignedFrameSize - CallerFrameAndPC::sizeInRegisters;
1196 }
1197
1198 unsigned Graph::frameRegisterCount()
1199 {
1200     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
1201     return roundLocalRegisterCountForFramePointerOffset(result);
1202 }
1203
1204 unsigned Graph::stackPointerOffset()
1205 {
1206     return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
1207 }
1208
1209 unsigned Graph::requiredRegisterCountForExit()
1210 {
1211     unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
1212     for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
1213         InlineCallFrame* inlineCallFrame = *iter;
1214         CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
1215         unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
1216         count = std::max(count, requiredCount);
1217     }
1218     return count;
1219 }
1220
1221 unsigned Graph::requiredRegisterCountForExecutionAndExit()
1222 {
1223     // FIXME: We should make sure that frameRegisterCount() and requiredRegisterCountForExit()
1224     // never overflows. https://bugs.webkit.org/show_bug.cgi?id=173852
1225     return std::max(frameRegisterCount(), requiredRegisterCountForExit());
1226 }
1227
// Attempts to constant-fold a property load: if |base| is a known object,
// every structure in the proven set has a valid replacement watchpoint for
// |offset|, and the object's current structure is in that set, returns the
// property's current value (registering the watchpoints so the code is
// invalidated on any replacement). Otherwise returns an empty JSValue.
JSValue Graph::tryGetConstantProperty(
    JSValue base, const RegisteredStructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        RegisteredStructure structure = structureSet[i];
        
        // Without a still-valid replacement watchpoint we cannot trust the
        // loaded value to remain constant.
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.toStructureSet().contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}
1274
1275 JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
1276 {
1277     return tryGetConstantProperty(base, RegisteredStructureSet(registerStructure(structure)), offset);
1278 }
1279
1280 JSValue Graph::tryGetConstantProperty(
1281     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1282 {
1283     if (structure.isInfinite()) {
1284         // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
1285         // watching to constant-fold the property.
1286         // https://bugs.webkit.org/show_bug.cgi?id=147271
1287         return JSValue();
1288     }
1289     
1290     return tryGetConstantProperty(base, structure.set(), offset);
1291 }
1292
1293 JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
1294 {
1295     return tryGetConstantProperty(base.m_value, base.m_structure, offset);
1296 }
1297
1298 AbstractValue Graph::inferredValueForProperty(
1299     const RegisteredStructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
1300 {
1301     AbstractValue result;
1302     base.forEach(
1303         [&] (RegisteredStructure structure) {
1304             AbstractValue value;
1305             value.set(*this, inferredTypeForProperty(structure.get(), uid));
1306             result.merge(value);
1307         });
1308     if (clobberState == StructuresAreClobbered)
1309         result.clobberStructures();
1310     return result;
1311 }
1312
1313 AbstractValue Graph::inferredValueForProperty(
1314     const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
1315     StructureClobberState clobberState)
1316 {
1317     if (JSValue value = tryGetConstantProperty(base, offset)) {
1318         AbstractValue result;
1319         result.set(*this, *freeze(value), clobberState);
1320         return result;
1321     }
1322
1323     if (base.m_structure.isFinite())
1324         return inferredValueForProperty(base.m_structure.set(), uid, clobberState);
1325
1326     return AbstractValue::heapTop();
1327 }
1328
// Attempts to constant-fold a closure-variable load: returns the slot's
// value if |base| is a known JSLexicalEnvironment whose entry at |offset|
// has a watchpoint set in the IsWatched state; otherwise an empty JSValue.
JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(m_vm, base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        // Read the entry and its value while holding the symbol table's lock.
        ConcurrentJSLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        // Only an IsWatched set lets us rely on the value staying put.
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    // Register the watchpoint (outside the lock) so this code is invalidated
    // if the variable is ever written again.
    watchpoints().addLazily(set);
    
    return value;
}
1367
1368 JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
1369 {
1370     return tryGetConstantClosureVar(value.m_value, offset);
1371 }
1372
1373 JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1374 {
1375     if (!node->hasConstant())
1376         return JSValue();
1377     return tryGetConstantClosureVar(node->asJSValue(), offset);
1378 }
1379
1380 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
1381 {
1382     if (!value)
1383         return nullptr;
1384     JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(m_vm, value);
1385     if (!value)
1386         return nullptr;
1387     if (!view->length())
1388         return nullptr;
1389     WTF::loadLoadFence();
1390     watchpoints().addLazily(view);
1391     return view;
1392 }
1393
1394 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1395 {
1396     if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
1397         return nullptr;
1398     return tryGetFoldableView(value);
1399 }
1400
// Rebuilds the CodeBlock's constant pool from the frozen values collected
// during compilation: weak values become weak references, strong values get
// real constant-pool slots.
void Graph::registerFrozenValues()
{
    m_codeBlock->constants().shrink(0);
    m_codeBlock->constantsSourceCodeRepresentation().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        // Non-heap values need no GC bookkeeping.
        if (!value->pointsToHeap())
            continue;
        
        ASSERT(value->structure());
        ASSERT(m_plan.weakReferences.contains(value->structure()));
        
        switch (value->strength()) {
        case WeakValue: {
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            // We already have a barrier on the code block.
            m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
    m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
}
1427
1428 void Graph::visitChildren(SlotVisitor& visitor)
1429 {
1430     for (FrozenValue* value : m_frozenValues) {
1431         visitor.appendUnbarriered(value->value());
1432         visitor.appendUnbarriered(value->structure());
1433     }
1434 }
1435
// Returns the unique FrozenValue for |value|, creating one (and registering
// any associated structure) the first time a value is seen. Deduplicated via
// m_frozenValueMap keyed on the encoded JSValue.
FrozenValue* Graph::freeze(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();

    // There are weird relationships in how optimized CodeBlocks
    // point to other CodeBlocks. We don't want to have them be
    // part of the weak pointer set. For example, an optimized CodeBlock
    // having a weak pointer to itself will cause it to get collected.
    RELEASE_ASSERT(!jsDynamicCast<CodeBlock*>(m_vm, value));
    
    // add() returns the existing entry if the value was frozen before.
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());
    
    FrozenValue frozenValue = FrozenValue::freeze(value);
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);
    
    return result.iterator->value = m_frozenValues.add(frozenValue);
}
1460
1461 FrozenValue* Graph::freezeStrong(JSValue value)
1462 {
1463     FrozenValue* result = freeze(value);
1464     result->strengthenTo(StrongValue);
1465     return result;
1466 }
1467
1468 void Graph::convertToConstant(Node* node, FrozenValue* value)
1469 {
1470     if (value->structure())
1471         assertIsRegistered(value->structure());
1472     node->convertToConstant(value);
1473 }
1474
1475 void Graph::convertToConstant(Node* node, JSValue value)
1476 {
1477     convertToConstant(node, freeze(value));
1478 }
1479
1480 void Graph::convertToStrongConstant(Node* node, JSValue value)
1481 {
1482     convertToConstant(node, freezeStrong(value));
1483 }
1484
1485 RegisteredStructure Graph::registerStructure(Structure* structure, StructureRegistrationResult& result)
1486 {
1487     m_plan.weakReferences.addLazily(structure);
1488     if (m_plan.watchpoints.consider(structure))
1489         result = StructureRegisteredAndWatched;
1490     else
1491         result = StructureRegisteredNormally;
1492     return RegisteredStructure::createPrivate(structure);
1493 }
1494
// Keeps |structure| alive weakly and lazily watches its transition
// watchpoint set, baking in the assumption that the structure does not
// transition while this code is live.
void Graph::registerAndWatchStructureTransition(Structure* structure)
{
    m_plan.weakReferences.addLazily(structure);
    m_plan.watchpoints.addLazily(structure->transitionWatchpointSet());
}
1500
// Checks that |structure| was registered with this compilation: it must be
// in the weak reference set, and if it should be watched its transition
// watchpoint must actually be watched. Crashes the compiler otherwise.
void Graph::assertIsRegistered(Structure* structure)
{
    // It's convenient to be able to call this with a maybe-null structure.
    if (!structure)
        return;
    
    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
    
    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;
    
    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}
1516
// Shared implementation behind Graph::logAssertionFailure: prints the failed
// assertion and location, optional context, and a full graph dump. The
// header is repeated at the end because the dump can be very long.
static void logDFGAssertionFailure(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
}
1532
// Context-free variant, used when neither a node nor a block is at hand.
void Graph::logAssertionFailure(
    std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
{
    logDFGAssertionFailure(*this, "", file, line, function, assertion);
}
1538
1539 void Graph::logAssertionFailure(
1540     Node* node, const char* file, int line, const char* function, const char* assertion)
1541 {
1542     logDFGAssertionFailure(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
1543 }
1544
1545 void Graph::logAssertionFailure(
1546     BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
1547 {
1548     logDFGAssertionFailure(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
1549 }
1550
1551 CPSCFG& Graph::ensureCPSCFG()
1552 {
1553     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1554     if (!m_cpsCFG)
1555         m_cpsCFG = std::make_unique<CPSCFG>(*this);
1556     return *m_cpsCFG;
1557 }
1558
1559 CPSDominators& Graph::ensureCPSDominators()
1560 {
1561     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1562     if (!m_cpsDominators)
1563         m_cpsDominators = std::make_unique<CPSDominators>(*this);
1564     return *m_cpsDominators;
1565 }
1566
1567 SSADominators& Graph::ensureSSADominators()
1568 {
1569     RELEASE_ASSERT(m_form == SSA || m_isInSSAConversion);
1570     if (!m_ssaDominators)
1571         m_ssaDominators = std::make_unique<SSADominators>(*this);
1572     return *m_ssaDominators;
1573 }
1574
1575 CPSNaturalLoops& Graph::ensureCPSNaturalLoops()
1576 {
1577     RELEASE_ASSERT(m_form != SSA && !m_isInSSAConversion);
1578     ensureCPSDominators();
1579     if (!m_cpsNaturalLoops)
1580         m_cpsNaturalLoops = std::make_unique<CPSNaturalLoops>(*this);
1581     return *m_cpsNaturalLoops;
1582 }
1583
1584 SSANaturalLoops& Graph::ensureSSANaturalLoops()
1585 {
1586     RELEASE_ASSERT(m_form == SSA);
1587     ensureSSADominators();
1588     if (!m_ssaNaturalLoops)
1589         m_ssaNaturalLoops = std::make_unique<SSANaturalLoops>(*this);
1590     return *m_ssaNaturalLoops;
1591 }
1592
1593 BackwardsCFG& Graph::ensureBackwardsCFG()
1594 {
1595     // We could easily relax this in the future to work over CPS, but today, it's only used in SSA.
1596     RELEASE_ASSERT(m_form == SSA); 
1597     if (!m_backwardsCFG)
1598         m_backwardsCFG = std::make_unique<BackwardsCFG>(*this);
1599     return *m_backwardsCFG;
1600 }
1601
1602 BackwardsDominators& Graph::ensureBackwardsDominators()
1603 {
1604     RELEASE_ASSERT(m_form == SSA);
1605     if (!m_backwardsDominators)
1606         m_backwardsDominators = std::make_unique<BackwardsDominators>(*this);
1607     return *m_backwardsDominators;
1608 }
1609
1610 ControlEquivalenceAnalysis& Graph::ensureControlEquivalenceAnalysis()
1611 {
1612     RELEASE_ASSERT(m_form == SSA);
1613     if (!m_controlEquivalenceAnalysis)
1614         m_controlEquivalenceAnalysis = std::make_unique<ControlEquivalenceAnalysis>(*this);
1615     return *m_controlEquivalenceAnalysis;
1616 }
1617
// Finds a baseline value profile describing @operandNode, which is an input to
// @currentNode. Returns an empty MethodOfGettingAValueProfile if none is found.
MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* currentNode, Node* operandNode)
{
    // This represents IR like `CurrentNode(@operandNode)`. For example: `GetByVal(..., Int32:@GetLocal)`.

    for (Node* node = operandNode; node;) {
        // currentNode is null when we're doing speculation checks for checkArgumentTypes().
        // We also skip profiles at the same semantic origin as a result-producing
        // currentNode, since those would reflect currentNode's own bytecode.
        if (!currentNode || node->origin.semantic != currentNode->origin.semantic || !currentNode->hasResult()) {
            CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);

            if (node->accessesStack(*this)) {
                // In CPS form, a node reading an argument slot can use the baseline
                // argument value profile, provided the slot still corresponds to the
                // entrypoint's argument node (same VariableAccessData).
                if (m_form != SSA && node->local().isArgument()) {
                    int argument = node->local().toArgument();
                    Node* argumentNode = m_rootToArguments.find(block(0))->value[argument];
                    // FIXME: We should match SetArgument nodes at other entrypoints as well:
                    // https://bugs.webkit.org/show_bug.cgi?id=175841
                    if (argumentNode && node->variableAccessData() == argumentNode->variableAccessData())
                        return &profiledBlock->valueProfileForArgument(argument);
                }

                // A GetLocal is profiled lazily, keyed by (bytecode index, operand).
                if (node->op() == GetLocal) {
                    return MethodOfGettingAValueProfile::fromLazyOperand(
                        profiledBlock,
                        LazyOperandValueProfileKey(
                            node->origin.semantic.bytecodeIndex, node->local()));
                }
            }

            // Nodes carrying a heap prediction map directly to the baseline
            // bytecode's value profile.
            if (node->hasHeapPrediction())
                return &profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);

            // Otherwise, fall back to the arithmetic profile, if the baseline JIT
            // recorded one for this bytecode.
            if (profiledBlock->hasBaselineJITProfiling()) {
                if (ArithProfile* result = profiledBlock->arithProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex))
                    return result;
            }
        }

        // No profile at this node: look through conversion/identity-like nodes to
        // the value they forward and retry; anything else terminates the search.
        switch (node->op()) {
        case BooleanToNumber:
        case Identity:
        case ValueRep:
        case DoubleRep:
        case Int52Rep:
            node = node->child1().node();
            break;
        default:
            node = nullptr;
        }
    }
    
    return MethodOfGettingAValueProfile();
}
1669
1670 bool Graph::getRegExpPrototypeProperty(JSObject* regExpPrototype, Structure* regExpPrototypeStructure, UniquedStringImpl* uid, JSValue& returnJSValue)
1671 {
1672     unsigned attributesUnused;
1673     PropertyOffset offset = regExpPrototypeStructure->getConcurrently(uid, attributesUnused);
1674     if (!isValidOffset(offset))
1675         return false;
1676
1677     JSValue value = tryGetConstantProperty(regExpPrototype, regExpPrototypeStructure, offset);
1678     if (!value)
1679         return false;
1680
1681     // We only care about functions and getters at this point. If you want to access other properties
1682     // you'll have to add code for those types.
1683     JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, value);
1684     if (!function) {
1685         GetterSetter* getterSetter = jsDynamicCast<GetterSetter*>(m_vm, value);
1686
1687         if (!getterSetter)
1688             return false;
1689
1690         returnJSValue = JSValue(getterSetter);
1691         return true;
1692     }
1693
1694     returnJSValue = value;
1695     return true;
1696 }
1697
1698 bool Graph::isStringPrototypeMethodSane(JSGlobalObject* globalObject, UniquedStringImpl* uid)
1699 {
1700     ObjectPropertyConditionSet conditions = generateConditionsForPrototypeEquivalenceConcurrently(m_vm, globalObject, globalObject->stringObjectStructure(), globalObject->stringPrototype(), uid);
1701
1702     if (!conditions.isValid())
1703         return false;
1704
1705     ObjectPropertyCondition equivalenceCondition = conditions.slotBaseCondition();
1706     RELEASE_ASSERT(equivalenceCondition.hasRequiredValue());
1707     JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, equivalenceCondition.condition().requiredValue());
1708     if (!function)
1709         return false;
1710
1711     if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
1712         return false;
1713     
1714     return watchConditions(conditions);
1715 }
1716
1717
1718 bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
1719 {
1720     if (hasExitSite(codeOrigin, NotStringObject))
1721         return false;
1722
1723     JSGlobalObject* globalObject = globalObjectFor(codeOrigin);
1724     Structure* stringObjectStructure = globalObjectFor(codeOrigin)->stringObjectStructure();
1725     registerStructure(stringObjectStructure);
1726     ASSERT(stringObjectStructure->storedPrototype().isObject());
1727     ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo(*stringObjectStructure->storedPrototype().asCell()->vm()) == StringPrototype::info());
1728
1729     if (!watchConditions(generateConditionsForPropertyMissConcurrently(m_vm, globalObject, stringObjectStructure, m_vm.propertyNames->toPrimitiveSymbol.impl())))
1730         return false;
1731
1732     // We're being conservative here. We want DFG's ToString on StringObject to be
1733     // used in both numeric contexts (that would call valueOf()) and string contexts
1734     // (that would call toString()). We don't want the DFG to have to distinguish
1735     // between the two, just because that seems like it would get confusing. So we
1736     // just require both methods to be sane.
1737     if (!isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->valueOf.impl()))
1738         return false;
1739     return isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->toString.impl());
1740 }
1741
1742 bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
1743 {
1744     if (!m_hasExceptionHandlers)
1745         return false;
1746
1747     unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
1748     while (1) {
1749         InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
1750         CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
1751         if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
1752             opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
1753             catchHandlerOut = handler;
1754             return true;
1755         }
1756
1757         if (!inlineCallFrame)
1758             return false;
1759
1760         bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
1761         codeOrigin = codeOrigin.inlineCallFrame->directCaller;
1762     }
1763
1764     RELEASE_ASSERT_NOT_REACHED();
1765 }
1766
1767 bool Graph::canDoFastSpread(Node* node, const AbstractValue& value)
1768 {
1769     // The parameter 'value' is the AbstractValue for child1 (the thing being spread).
1770     ASSERT(node->op() == Spread);
1771
1772     if (node->child1().useKind() != ArrayUse) {
1773         // Note: we only speculate on ArrayUse when we've set up the necessary watchpoints
1774         // to prove that the iteration protocol is non-observable starting from ArrayPrototype.
1775         return false;
1776     }
1777
1778     // FIXME: We should add profiling of the incoming operand to Spread
1779     // so we can speculate in such a way that we guarantee that this
1780     // function would return true:
1781     // https://bugs.webkit.org/show_bug.cgi?id=171198
1782
1783     if (!value.m_structure.isFinite())
1784         return false;
1785
1786     ArrayPrototype* arrayPrototype = globalObjectFor(node->child1()->origin.semantic)->arrayPrototype();
1787     bool allGood = true;
1788     value.m_structure.forEach([&] (RegisteredStructure structure) {
1789         allGood &= structure->hasMonoProto()
1790             && structure->storedPrototype() == arrayPrototype
1791             && !structure->isDictionary()
1792             && structure->getConcurrently(m_vm.propertyNames->iteratorSymbol.impl()) == invalidOffset
1793             && !structure->mayInterceptIndexedAccesses();
1794     });
1795
1796     return allGood;
1797 }
1798
1799 void Graph::clearCPSCFGData()
1800 {
1801     m_cpsNaturalLoops = nullptr;
1802     m_cpsDominators = nullptr;
1803     m_cpsCFG = nullptr;
1804 }
1805
1806 } } // namespace JSC::DFG
1807
1808 #endif // ENABLE(DFG_JIT)