/*
 * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGGraph.h"

#if ENABLE(DFG_JIT)

#include "BytecodeLivenessAnalysisInlines.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGBlockWorklist.h"
#include "DFGClobberSet.h"
#include "DFGJITCode.h"
#include "DFGVariableAccessDataDump.h"
#include "FullBytecodeLiveness.h"
#include "FunctionExecutableDump.h"
#include "JIT.h"
#include "JSLexicalEnvironment.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "OperandsInlines.h"
#include "JSCInlines.h"
#include "StackAlignment.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>

namespace JSC { namespace DFG {

// Creates an array of stringized names.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};

Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock.get())
    , m_profiledBlock(m_codeBlock->alternative())
    , m_allocator(longLivedState.m_allocator)
    , m_mustHandleValues(OperandsLike, plan.mustHandleValues)
    , m_hasArguments(false)
    , m_nextMachineLocal(0)
    , m_machineCaptureStart(std::numeric_limits<int>::max())
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);
    
    for (unsigned i = m_mustHandleValues.size(); i--;)
        m_mustHandleValues[i] = freezeFragile(plan.mustHandleValues[i]);
    
    for (unsigned i = m_codeBlock->m_numVars; i--;) {
        if (m_codeBlock->isCaptured(virtualRegisterForLocal(i)))
            m_outermostCapturedVars.set(i);
    }
}

Graph::~Graph()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;

        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            m_allocator.free(block->phis[phiIndex]);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            m_allocator.free(block->at(nodeIndex));
    }
    m_allocator.freeAll();
}

const char *Graph::opName(NodeType op)
{
    return dfgOpNames[op];
}

static void printWhiteSpace(PrintStream& out, unsigned amount)
{
    while (amount-- > 0)
        out.print(" ");
}

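// Prints the change in the inline call stack between the previously dumped node and the
// current one: a "<--" line for each inline call frame being popped and a "-->" line for
// each one being pushed. Returns true if anything was printed.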
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node* previousNode, Node* currentNode, DumpContext* context)
{
    if (!previousNode)
        return false;
    
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}

int Graph::amountOfNodeWhiteSpace(Node* node)
{
    return (node->origin.semantic.inlineDepth() - 1) * 2;
}

void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
{
    printWhiteSpace(out, amountOfNodeWhiteSpace(node));
}

void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count is prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. They may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/PutGlobalVar operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasVarNumber())
        out.print(comma, node->varNumber());
    if (node->hasRegisterPointer())
        out.print(comma, "global", globalObjectFor(node->origin.semantic)->findRegisterIndex(node->registerPointer()), "(", RawPointer(node->registerPointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition())
        out.print(comma, pointerDumpInContext(node->transition(), context));
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasFunctionDeclIndex()) {
        FunctionExecutable* executable = m_codeBlock->functionDecl(node->functionDeclIndex());
        out.print(comma, FunctionExecutableDump(executable));
    }
    if (node->hasFunctionExprIndex()) {
        FunctionExecutable* executable = m_codeBlock->functionExpr(node->functionExprIndex());
        out.print(comma, FunctionExecutableDump(executable));
    }
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasVariableWatchpointSet())
        out.print(comma, RawPointer(node->variableWatchpointSet()));
    if (node->hasTypedArray())
        out.print(comma, inContext(JSValue(node->typedArray()), context));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        if (node->origin.semantic != node->origin.forExit)
            out.print(comma, "exit: ", node->origin.forExit);
    }
    
    out.print(")");

    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}

void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
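    // Note: the self-comparison below is a NaN check. x == x is false only when x is NaN,
    // so the execution count is printed only when it actually holds a number.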
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    for (BasicBlock* successor : block->successors()) {
        out.print(" ", *successor);
        if (m_prePostNumbering.isValid())
            out.print(" (", m_prePostNumbering.edgeKind(block, successor), ")");
    }
    out.print("\n");
    if (m_dominators.isValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators.dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators.blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators.dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators.iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_prePostNumbering.isValid())
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering.preNumber(block), "/", m_prePostNumbering.postNumber(block), "\n");
    if (m_naturalLoops.isValid()) {
        if (const NaturalLoop* loop = m_naturalLoops.headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops.loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}

void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    out.print("\n");
    
    Node* lastNode = 0;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
            lastNode = block->at(i);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}

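// Dethreading takes the graph from ThreadedCPS back to LoadStore form by clearing the
// children of every Phi node; the Phis remain in each block's phi list but no longer
// carry links between blocks.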
void Graph::dethread()
{
    if (m_form == LoadStore || m_form == SSA)
        return;
    
    if (logCompilationChanges())
        dataLog("Dethreading DFG graph.\n");
    
    SamplingRegion samplingRegion("DFG Dethreading");
    
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
            Node* phi = block->phis[phiIndex];
            phi->children.reset();
        }
    }
    
    m_form = LoadStore;
}

void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
{
    if (!successor->isReachable) {
        successor->isReachable = true;
        worklist.append(successor);
    }
    
    successor->predecessors.append(block);
}

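// Flood-fills reachability from the root block: each block popped off the worklist pushes
// its not-yet-reached successors, and handleSuccessor() above records the predecessor edge
// as a side effect.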
void Graph::determineReachability()
{
    Vector<BasicBlock*, 16> worklist;
    worklist.append(block(0));
    block(0)->isReachable = true;
    while (!worklist.isEmpty()) {
        BasicBlock* block = worklist.takeLast();
        for (unsigned i = block->numSuccessors(); i--;)
            handleSuccessor(worklist, block, block->successor(i));
    }
}

void Graph::resetReachability()
{
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        block->isReachable = false;
        block->predecessors.clear();
    }
    
    determineReachability();
}

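// Flags the values that OSR exit hints refer to: the child of a MovHint, and the value
// child (child2) of PutStructureHint/PutByOffsetHint, get NodeRelevantToOSR merged into
// their flags.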
void Graph::mergeRelevantToOSR()
{
    for (BasicBlock* block : blocksInNaturalOrder()) {
        for (Node* node : *block) {
            switch (node->op()) {
            case MovHint:
                node->child1()->mergeFlags(NodeRelevantToOSR);
                break;
                
            case PutStructureHint:
            case PutByOffsetHint:
                node->child2()->mergeFlags(NodeRelevantToOSR);
                break;
                
            default:
                break;
            }
        }
    }
}

namespace {

class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        //
        // Also take this opportunity to pretend that Check nodes are not NodeMustGenerate. Check
        // nodes are MustGenerate because they are executed for effect, but they follow the same
        // DCE rules as nodes that aren't MustGenerate: they only contribute to the ref count of
        // their children if the edges require checks. Non-checking edges are removed. Note that
        // for any Checks left over, this phase will turn them back into NodeMustGenerate.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (node->op() == Check) {
                    // We don't treat Check nodes as MustGenerate. We will gladly
                    // kill them off in this phase.
                    continue;
                }
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};

} // anonymous namespace

void Graph::computeRefCounts()
{
    RefCountCalculator calculator(*this);
    calculator.calculate();
}

void Graph::killBlockAndItsContents(BasicBlock* block)
{
    for (unsigned phiIndex = block->phis.size(); phiIndex--;)
        m_allocator.free(block->phis[phiIndex]);
    for (unsigned nodeIndex = block->size(); nodeIndex--;)
        m_allocator.free(block->at(nodeIndex));
    
    killBlock(block);
}

void Graph::killUnreachableBlocks()
{
    for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        if (block->isReachable)
            continue;
        
        killBlockAndItsContents(block);
    }
}

void Graph::invalidateCFG()
{
    m_dominators.invalidate();
    m_naturalLoops.invalidate();
    m_prePostNumbering.invalidate();
}

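// Scans forward from startIndexInBlock; if it finds another GetLocal of the same
// (uncaptured) variable before the local is redefined by a SetLocal, it redirects uses of
// that GetLocal to newGetLocal and updates variablesAtTail accordingly.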
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    if (variableAccessData->isCaptured()) {
        // Let CSE worry about this one.
        return;
    }
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}

BlockList Graph::blocksInPreOrder()
{
    BlockList result;
    BlockWorklist worklist;
    worklist.push(block(0));
    while (BasicBlock* block = worklist.pop()) {
        result.append(block);
        for (unsigned i = block->numSuccessors(); i--;)
            worklist.push(block->successor(i));
    }
    return result;
}

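// Produces a depth-first post-order over the reachable blocks: each block comes off the
// worklist first in PreOrder, when its successors get pushed, and is appended to the
// result only when its PostOrder entry comes back around.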
BlockList Graph::blocksInPostOrder()
{
    BlockList result;
    PostOrderBlockWorklist worklist;
    worklist.push(block(0));
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case PreOrder:
            worklist.pushPost(item.block);
            for (unsigned i = item.block->numSuccessors(); i--;)
                worklist.push(item.block->successor(i));
            break;
        case PostOrder:
            result.append(item.block);
            break;
        }
    }
    return result;
}

void Graph::clearReplacements()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->replacement = 0;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->replacement = 0;
    }
}

void Graph::initializeNodeOwners()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->owner = block;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->owner = block;
    }
}

void Graph::clearFlagsOnAllNodes(NodeFlags flags)
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->clearFlags(flags);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->clearFlags(flags);
    }
}

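// Full bytecode liveness is computed lazily, once per baseline code block, and cached in
// m_bytecodeLiveness so repeated queries reuse the same analysis.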
FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
    if (iter != m_bytecodeLiveness.end())
        return *iter->value;
    
    std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
    codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
    FullBytecodeLiveness& result = *liveness;
    m_bytecodeLiveness.add(codeBlock, WTF::move(liveness));
    return result;
}

FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}

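// Walks outward through the inline call frame chain. If the operand falls within the
// current frame (its locals or call frame header), the question is answered here: header
// slots are only considered live for the Callee of a closure call, and locals are looked
// up in that frame's baseline bytecode liveness. Otherwise the operand belongs to a
// caller, so the loop moves up, treating inlined arguments as always live, as the comment
// in the loop explains.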
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    for (;;) {
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOrigin.stackOffset());
        
        if (operand.offset() < codeOrigin.stackOffset() + JSStack::CallFrameHeaderSize) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);
                
                if (!codeOrigin.inlineCallFrame->isClosureCall)
                    return false;
                
                if (reg.offset() == JSStack::Callee)
                    return true;
                
                return false;
            }
            
            return livenessFor(codeOrigin.inlineCallFrame).operandIsLive(
                reg.offset(), codeOrigin.bytecodeIndex);
        }
        
        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
        if (!inlineCallFrame)
            break;

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        // FIXME: 'this' might not be live, but we don't have a way of knowing.
        // https://bugs.webkit.org/show_bug.cgi?id=128519
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
            return true;
        
        codeOrigin = inlineCallFrame->caller;
    }
    
    return true;
}

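// The frame needs room for every machine local plus whichever is larger: the outgoing
// parameter slots or the scratch area needed for slow path calls. The sum is then rounded
// by roundLocalRegisterCountForFramePointerOffset() to keep the frame suitably aligned.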
unsigned Graph::frameRegisterCount()
{
    unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
    return roundLocalRegisterCountForFramePointerOffset(result);
}

unsigned Graph::stackPointerOffset()
{
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}

unsigned Graph::requiredRegisterCountForExit()
{
    unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
    for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
        InlineCallFrame* inlineCallFrame = *iter;
        CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
        unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
        count = std::max(count, requiredCount);
    }
    return count;
}

unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}

JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
{
    return tryGetConstantProperty(base, StructureSet(structure), offset);
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
{
    if (structure.isTop() || structure.isClobbered())
        return JSValue();
    
    return tryGetConstantProperty(base, structure.set(), offset);
}

JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}

JSLexicalEnvironment* Graph::tryGetActivation(Node* node)
{
    return node->dynamicCastConstant<JSLexicalEnvironment*>();
}

WriteBarrierBase<Unknown>* Graph::tryGetRegisters(Node* node)
{
    JSLexicalEnvironment* lexicalEnvironment = tryGetActivation(node);
    if (!lexicalEnvironment)
        return 0;
    return lexicalEnvironment->registers();
}

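// A typed array view constant is only treated as foldable once it has a non-zero length;
// the loadLoadFence() orders that length read before any reads of the view that callers
// perform afterwards.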
JSArrayBufferView* Graph::tryGetFoldableView(Node* node)
{
    JSArrayBufferView* view = node->dynamicCastConstant<JSArrayBufferView*>();
    if (!view)
        return nullptr;
    if (!view->length())
        return nullptr;
    WTF::loadLoadFence();
    return view;
}

JSArrayBufferView* Graph::tryGetFoldableView(Node* node, ArrayMode arrayMode)
{
    if (arrayMode.typedArrayType() == NotTypedArray)
        return 0;
    return tryGetFoldableView(node);
}

JSArrayBufferView* Graph::tryGetFoldableViewForChild1(Node* node)
{
    return tryGetFoldableView(child(node, 0).node(), node->arrayMode());
}

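// Rebuilds the code block's constant pool from the frozen values. FragileValue entries are
// not registered anywhere, WeakValue entries become lazily added weak references, and
// StrongValue entries get a constant pool slot initialized through a lazy write barrier.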
void Graph::registerFrozenValues()
{
    m_codeBlock->constants().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        if (value->structure())
            ASSERT(m_plan.weakReferences.contains(value->structure()));
        
        switch (value->strength()) {
        case FragileValue: {
            break;
        }
        case WeakValue: {
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            initializeLazyWriteBarrierForConstant(
                m_plan.writeBarriers,
                m_codeBlock->constants()[constantIndex],
                m_codeBlock,
                constantIndex,
                m_codeBlock->ownerExecutable(),
                value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
}

void Graph::visitChildren(SlotVisitor& visitor)
{
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarrieredReadOnlyValue(value->value());
        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
    }
    
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        
        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);
            
            switch (node->op()) {
            case CheckStructure:
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
                break;
                
            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
                break;
                
            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->previous);
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->next);
                break;
                
            case MultiGetByOffset:
                for (unsigned i = node->multiGetByOffsetData().variants.size(); i--;) {
                    GetByIdVariant& variant = node->multiGetByOffsetData().variants[i];
                    const StructureSet& set = variant.structureSet();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);

                    // Don't need to mark anything in the structure chain because that would
                    // have been decomposed into CheckStructure's. Don't need to mark the
                    // callLinkStatus because we wouldn't use MultiGetByOffset if any of the
                    // variants did that.
                    ASSERT(!variant.callLinkStatus());
                }
                break;
                    
            case MultiPutByOffset:
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
                }
                break;
                
            default:
                break;
            }
        }
    }
}

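// freezeFragile() interns a JSValue: values are deduplicated by their encoded bits in
// m_frozenValueMap, so freezing the same value twice yields the same FrozenValue. The
// freeze() and freezeStrong() variants below additionally strengthen the entry to
// WeakValue or StrongValue.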
FrozenValue* Graph::freezeFragile(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();
    
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;
    
    return result.iterator->value = m_frozenValues.add(FrozenValue::freeze(value));
}

FrozenValue* Graph::freeze(JSValue value)
{
    FrozenValue* result = freezeFragile(value);
    result->strengthenTo(WeakValue);
    return result;
}

FrozenValue* Graph::freezeStrong(JSValue value)
{
    FrozenValue* result = freezeFragile(value);
    result->strengthenTo(StrongValue);
    return result;
}

void Graph::convertToConstant(Node* node, FrozenValue* value)
{
    if (value->structure())
        assertIsRegistered(value->structure());
    if (m_form == ThreadedCPS) {
        if (node->op() == GetLocal)
            dethread();
        else
            ASSERT(!node->hasVariableAccessData(*this));
    }
    node->convertToConstant(value);
}

void Graph::convertToConstant(Node* node, JSValue value)
{
    convertToConstant(node, freeze(value));
}

void Graph::convertToStrongConstant(Node* node, JSValue value)
{
    convertToConstant(node, freezeStrong(value));
}

StructureRegistrationResult Graph::registerStructure(Structure* structure)
{
    m_plan.weakReferences.addLazily(structure);
    if (m_plan.watchpoints.consider(structure))
        return StructureRegisteredAndWatched;
    return StructureRegisteredNormally;
}

void Graph::assertIsRegistered(Structure* structure)
{
    if (m_structureRegistrationState == HaveNotStartedRegistering)
        return;
    
    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
    
    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;
    
    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}

NO_RETURN_DUE_TO_CRASH static void crash(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    CRASH_WITH_SECURITY_IMPLICATION();
}

void Graph::handleAssertionFailure(
    std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, "", file, line, function, assertion);
}

void Graph::handleAssertionFailure(
    Node* node, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
}

void Graph::handleAssertionFailure(
    BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)