FTL should sink object allocations
[WebKit.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
1 /*
2  * Copyright (C) 2011, 2013, 2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGGraph.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "BytecodeLivenessAnalysisInlines.h"
32 #include "CodeBlock.h"
33 #include "CodeBlockWithJITType.h"
34 #include "DFGBlockWorklist.h"
35 #include "DFGClobberSet.h"
36 #include "DFGJITCode.h"
37 #include "DFGVariableAccessDataDump.h"
38 #include "FullBytecodeLiveness.h"
39 #include "FunctionExecutableDump.h"
40 #include "JIT.h"
41 #include "JSLexicalEnvironment.h"
42 #include "MaxFrameExtentForSlowPathCall.h"
43 #include "OperandsInlines.h"
44 #include "JSCInlines.h"
45 #include "StackAlignment.h"
46 #include <wtf/CommaPrinter.h>
47 #include <wtf/ListDump.h>
48
49 namespace JSC { namespace DFG {
50
// Table of human-readable opcode names, indexed by NodeType. It is built
// by stringizing each opcode listed in FOR_EACH_DFG_OP, so it stays in
// sync with the NodeType enum automatically.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};
57
// Constructs a Graph for one compilation plan. The graph borrows the node
// allocator from the long-lived state so node storage can be reused across
// compilations, and starts out in LoadStore form, before the fixpoint.
Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock.get())
    , m_profiledBlock(m_codeBlock->alternative())
    , m_allocator(longLivedState.m_allocator)
    , m_mustHandleValues(OperandsLike, plan.mustHandleValues)
    , m_hasArguments(false)
    , m_nextMachineLocal(0)
    , m_machineCaptureStart(std::numeric_limits<int>::max())
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);
    
    // Freeze every must-handle value so the GC keeps it alive for the
    // duration of the compilation. freezeFragile is used because these
    // values may not be cells.
    for (unsigned i = m_mustHandleValues.size(); i--;)
        m_mustHandleValues[i] = freezeFragile(plan.mustHandleValues[i]);
}
79
80 Graph::~Graph()
81 {
82     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
83         BasicBlock* block = this->block(blockIndex);
84         if (!block)
85             continue;
86
87         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
88             m_allocator.free(block->phis[phiIndex]);
89         for (unsigned nodeIndex = block->size(); nodeIndex--;)
90             m_allocator.free(block->at(nodeIndex));
91     }
92     m_allocator.freeAll();
93 }
94
95 const char *Graph::opName(NodeType op)
96 {
97     return dfgOpNames[op];
98 }
99
100 static void printWhiteSpace(PrintStream& out, unsigned amount)
101 {
102     while (amount-- > 0)
103         out.print(" ");
104 }
105
// Prints the change in the inline call stack between two consecutively
// dumped nodes, as a series of "<--" (returns from inlined frames) and
// "-->" (entries into inlined frames) lines. Returns true if anything was
// printed, false if the two nodes share the same inline call frame.
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node* previousNode, Node* currentNode, DumpContext* context)
{
    if (!previousNode)
        return false;
    
    // Fast path: same innermost frame means the whole stack is the same.
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    // Find the first index at which the two inline stacks diverge; frames
    // below that index are common to both.
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}
145
146 int Graph::amountOfNodeWhiteSpace(Node* node)
147 {
148     return (node->origin.semantic.inlineDepth() - 1) * 2;
149 }
150
// Emits the inline-depth indentation for a node before its dump line.
void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
{
    printWhiteSpace(out, amountOfNodeWhiteSpace(node));
}
155
// Dumps a single node as one line: index, ref count, assigned virtual
// register, opcode, and a comma-separated list of every piece of meta-data
// the node carries (children, flags, predictions, structures, operands,
// clobber sets, code origin, ...). The exact output order below is the
// established dump format; do not reorder prints.
void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    // The printed ref count excludes the implicit 'mustGenerate' ref; the
    // '!' marker conveys that instead.
    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count it prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. The may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/PutGlobalVar operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    // Children: either a var-args span into m_varArgChildren, or up to
    // three fixed edges. The child1/child2/child3 conditions ensure a
    // trailing unset child is not printed but an interior one is.
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    // Optional meta-data, each guarded by the corresponding has*() query.
    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasVarNumber())
        out.print(comma, node->varNumber());
    if (node->hasRegisterPointer())
        out.print(comma, "global", globalObjectFor(node->origin.semantic)->findRegisterIndex(node->registerPointer()), "(", RawPointer(node->registerPointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition())
        out.print(comma, pointerDumpInContext(node->transition(), context));
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            // NOTE(review): this isCell() check is redundant — the
            // enclosing else branch already guarantees the value is a cell.
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasFunctionDeclIndex()) {
        FunctionExecutable* executable = m_codeBlock->functionDecl(node->functionDeclIndex());
        out.print(comma, FunctionExecutableDump(executable));
    }
    if (node->hasFunctionExprIndex()) {
        FunctionExecutable* executable = m_codeBlock->functionExpr(node->functionExprIndex());
        out.print(comma, FunctionExecutableDump(executable));
    }
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            // Print the bytecode-level operand, then (if assigned) the
            // machine-level register it was mapped to.
            VirtualRegister operand = variableAccessData->local();
            if (operand.isArgument())
                out.print(comma, "arg", operand.toArgument(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            else
                out.print(comma, "loc", operand.toLocal(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            
            operand = variableAccessData->machineLocal();
            if (operand.isValid()) {
                if (operand.isArgument())
                    out.print(comma, "machine:arg", operand.toArgument());
                else
                    out.print(comma, "machine:loc", operand.toLocal());
            }
        }
    }
    if (node->hasUnlinkedLocal()) {
        VirtualRegister operand = node->unlinkedLocal();
        if (operand.isArgument())
            out.print(comma, "arg", operand.toArgument());
        else
            out.print(comma, "loc", operand.toLocal());
    }
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid()) {
            if (operand.isArgument())
                out.print(comma, "machine:arg", operand.toArgument());
            else
                out.print(comma, "machine:loc", operand.toLocal());
        }
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasVariableWatchpointSet())
        out.print(comma, RawPointer(node->variableWatchpointSet()));
    if (node->hasTypedArray())
        out.print(comma, inContext(JSValue(node->typedArray()), context));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    // Control-flow terminators: jump target, branch targets, switch cases.
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    // Abstract-heap reads ("R:") and writes ("W:") computed by the
    // clobberize machinery.
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        // Only show the exit origin when it differs from the semantic one.
        if (node->origin.semantic != node->origin.forExit)
            out.print(comma, "exit: ", node->origin.forExit);
    }
    
    out.print(")");

    // Trailing prediction: prefer the variable's prediction, else the
    // node's own heap prediction.
    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}
363
// Dumps the header of a basic block: identity, execution count,
// predecessor/successor edges, and — when the corresponding analyses are
// valid — dominator sets, pre/post numbering, natural-loop membership,
// and the block's phi nodes (all of them, or only live ones, depending on
// phiNodeDumpMode).
void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
    // Self-comparison is a NaN check: a double compares unequal to itself
    // only when it is NaN, so this skips printing an unset count.
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    for (BasicBlock* successor : block->successors()) {
        out.print(" ", *successor);
        if (m_prePostNumbering.isValid())
            out.print(" (", m_prePostNumbering.edgeKind(block, successor), ")");
    }
    out.print("\n");
    if (m_dominators.isValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators.dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators.blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators.dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators.iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_prePostNumbering.isValid())
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering.preNumber(block), "/", m_prePostNumbering.postNumber(block), "\n");
    if (m_naturalLoops.isValid()) {
        // If this block heads a loop, list the loop body in sorted block
        // index order for readability.
        if (const NaturalLoop* loop = m_naturalLoops.headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops.loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->refCount(), ">->(");
            // Children are printed only as far as they are set; child2
            // implies child1, child3 implies child2.
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}
429
// Dumps the entire graph: global state, then each block's header,
// CFA state at the head, form-specific variable/availability info,
// every node, and the corresponding state at the tail. Finally dumps any
// definitions the DumpContext accumulated, if we own the context.
void Graph::dump(PrintStream& out, DumpContext* context)
{
    // If the caller did not supply a context, use a local one so
    // in-context dumps still work; we then flush it at the end.
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    out.print("\n");
    
    // Track the previously dumped node so dumpCodeOrigin can show inline
    // stack transitions between consecutive nodes.
    Node* lastNode = 0;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        // Head state depends on the graph form: CPS forms carry
        // per-variable values and links; SSA carries availability and
        // live/value maps.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
            lastNode = block->at(i);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        // Tail state, mirroring the head-state dump above.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    // Flush definitions only when we own the context; a caller-supplied
    // context is flushed by the caller.
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}
517
518 void Graph::dethread()
519 {
520     if (m_form == LoadStore || m_form == SSA)
521         return;
522     
523     if (logCompilationChanges())
524         dataLog("Dethreading DFG graph.\n");
525     
526     SamplingRegion samplingRegion("DFG Dethreading");
527     
528     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
529         BasicBlock* block = m_blocks[blockIndex].get();
530         if (!block)
531             continue;
532         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
533             Node* phi = block->phis[phiIndex];
534             phi->children.reset();
535         }
536     }
537     
538     m_form = LoadStore;
539 }
540
541 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
542 {
543     if (!successor->isReachable) {
544         successor->isReachable = true;
545         worklist.append(successor);
546     }
547     
548     successor->predecessors.append(block);
549 }
550
551 void Graph::determineReachability()
552 {
553     Vector<BasicBlock*, 16> worklist;
554     worklist.append(block(0));
555     block(0)->isReachable = true;
556     while (!worklist.isEmpty()) {
557         BasicBlock* block = worklist.takeLast();
558         for (unsigned i = block->numSuccessors(); i--;)
559             handleSuccessor(worklist, block, block->successor(i));
560     }
561 }
562
563 void Graph::resetReachability()
564 {
565     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
566         BasicBlock* block = m_blocks[blockIndex].get();
567         if (!block)
568             continue;
569         block->isReachable = false;
570         block->predecessors.clear();
571     }
572     
573     determineReachability();
574 }
575
576 void Graph::mergeRelevantToOSR()
577 {
578     for (BasicBlock* block : blocksInNaturalOrder()) {
579         for (Node* node : *block) {
580             switch (node->op()) {
581             case MovHint:
582                 node->child1()->mergeFlags(NodeRelevantToOSR);
583                 break;
584                 
585             case PutStructureHint:
586             case PutByOffsetHint:
587                 node->child2()->mergeFlags(NodeRelevantToOSR);
588                 break;
589                 
590             default:
591                 break;
592             }
593         }
594     }
595 }
596
namespace {

// Computes ref counts for every node in the graph via a worklist
// fix-point: roots (must-generate nodes and targets of checking edges)
// seed the worklist, then counted edges propagate liveness transitively.
// In SSA form, Phi->Upsilon liveness (meta-data, not edges) is handled in
// an outer loop until no new Upsilons become live.
class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        //
        // Also take this opportunity to pretend that Check nodes are not NodeMustGenerate. Check
        // nodes are MustGenerate because they are executed for effect, but they follow the same
        // DCE rules as nodes that aren't MustGenerate: they only contribute to the ref count of
        // their children if the edges require checks. Non-checking edges are removed. Note that
        // for any Checks left over, this phase will turn them back into NodeMustGenerate.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (node->op() == Check) {
                    // We don't treat Check nodes as MustGenerate. We will gladly
                    // kill them off in this phase.
                    continue;
                }
                // postfixRef() returns the previous count, so a zero result
                // means this is the first ref and the node must be visited.
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
        while (!m_worklist.isEmpty()) {
            // Inner loop: drain the worklist, ref'ing children through
            // counted edges.
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        // A live Phi keeps its feeding Upsilons alive.
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    // Roots from type checks: any edge that still requires a check keeps
    // its target alive, even if nothing else references it.
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    // Refs a node; queues it for child traversal on its first ref.
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};

} // anonymous namespace
708
709 void Graph::computeRefCounts()
710 {
711     RefCountCalculator calculator(*this);
712     calculator.calculate();
713 }
714
715 void Graph::killBlockAndItsContents(BasicBlock* block)
716 {
717     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
718         m_allocator.free(block->phis[phiIndex]);
719     for (unsigned nodeIndex = block->size(); nodeIndex--;)
720         m_allocator.free(block->at(nodeIndex));
721     
722     killBlock(block);
723 }
724
725 void Graph::killUnreachableBlocks()
726 {
727     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
728         BasicBlock* block = this->block(blockIndex);
729         if (!block)
730             continue;
731         if (block->isReachable)
732             continue;
733         
734         killBlockAndItsContents(block);
735     }
736 }
737
// Invalidates every analysis that depends on the CFG shape. Must be called
// after any transformation that adds, removes, or rewires blocks.
void Graph::invalidateCFG()
{
    m_dominators.invalidate();
    m_naturalLoops.invalidate();
    m_prePostNumbering.invalidate();
}
744
// Replaces uses of redundant GetLocals of the given variable with
// newGetLocal, scanning forward from startIndexInBlock. The scan stops at
// the first SetLocal of the same local (the value changes there) or after
// performing one substitution.
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    if (variableAccessData->isCaptured()) {
        // Let CSE worry about this one.
        return;
    }
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            // A store to the same local ends the region where the
            // substitution is valid.
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            // Note: this `continue` applies to the enclosing for loop
            // (skips to the next node), not to the switch.
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            // Keep the tail variable link consistent if it pointed at the
            // node we just replaced.
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}
779
780 BlockList Graph::blocksInPreOrder()
781 {
782     BlockList result;
783     BlockWorklist worklist;
784     worklist.push(block(0));
785     while (BasicBlock* block = worklist.pop()) {
786         result.append(block);
787         for (unsigned i = block->numSuccessors(); i--;)
788             worklist.push(block->successor(i));
789     }
790     return result;
791 }
792
793 BlockList Graph::blocksInPostOrder()
794 {
795     BlockList result;
796     PostOrderBlockWorklist worklist;
797     worklist.push(block(0));
798     while (BlockWithOrder item = worklist.pop()) {
799         switch (item.order) {
800         case PreOrder:
801             worklist.pushPost(item.block);
802             for (unsigned i = item.block->numSuccessors(); i--;)
803                 worklist.push(item.block->successor(i));
804             break;
805         case PostOrder:
806             result.append(item.block);
807             break;
808         }
809     }
810     return result;
811 }
812
813 void Graph::clearReplacements()
814 {
815     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
816         BasicBlock* block = m_blocks[blockIndex].get();
817         if (!block)
818             continue;
819         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
820             block->phis[phiIndex]->replacement = 0;
821         for (unsigned nodeIndex = block->size(); nodeIndex--;)
822             block->at(nodeIndex)->replacement = 0;
823     }
824 }
825
826 void Graph::initializeNodeOwners()
827 {
828     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
829         BasicBlock* block = m_blocks[blockIndex].get();
830         if (!block)
831             continue;
832         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
833             block->phis[phiIndex]->owner = block;
834         for (unsigned nodeIndex = block->size(); nodeIndex--;)
835             block->at(nodeIndex)->owner = block;
836     }
837 }
838
839 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
840 {
841     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
842         BasicBlock* block = m_blocks[blockIndex].get();
843         if (!block)
844             continue;
845         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
846             block->phis[phiIndex]->clearFlags(flags);
847         for (unsigned nodeIndex = block->size(); nodeIndex--;)
848             block->at(nodeIndex)->clearFlags(flags);
849     }
850 }
851
852 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
853 {
854     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
855     if (iter != m_bytecodeLiveness.end())
856         return *iter->value;
857     
858     std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
859     codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
860     FullBytecodeLiveness& result = *liveness;
861     m_bytecodeLiveness.add(codeBlock, WTF::move(liveness));
862     return result;
863 }
864
865 FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
866 {
867     return livenessFor(baselineCodeBlockFor(inlineCallFrame));
868 }
869
// Walk the inline stack from codeOrigin outward until we find the frame that
// the operand belongs to, then consult that frame's bytecode liveness.
// Conservatively answers "live" when we cannot prove otherwise.
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    for (;;) {
        // Rebase the machine operand into the register space of the frame
        // identified by codeOrigin.
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOrigin.stackOffset());
        
        if (operand.offset() < codeOrigin.stackOffset() + JSStack::CallFrameHeaderSize) {
            if (reg.isArgument()) {
                // Non-negative offset below the header size: a call frame
                // header slot rather than a real argument.
                RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);
                
                // Only closure calls keep header slots alive, and then only
                // the callee and the scope chain.
                if (!codeOrigin.inlineCallFrame->isClosureCall)
                    return false;
                
                if (reg.offset() == JSStack::Callee)
                    return true;
                if (reg.offset() == JSStack::ScopeChain)
                    return true;
                
                return false;
            }
            
            // A local of this frame: defer to the baseline liveness analysis.
            return livenessFor(codeOrigin.inlineCallFrame).operandIsLive(
                reg.offset(), codeOrigin.bytecodeIndex);
        }
        
        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
        if (!inlineCallFrame)
            break;

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        // FIXME: 'this' might not be live, but we don't have a way of knowing.
        // https://bugs.webkit.org/show_bug.cgi?id=128519
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
            return true;
        
        // Keep searching in the caller's frame.
        codeOrigin = inlineCallFrame->caller;
    }
    
    // Ran off the top of the inline stack: conservatively report live.
    return true;
}
912
913 unsigned Graph::frameRegisterCount()
914 {
915     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
916     return roundLocalRegisterCountForFramePointerOffset(result);
917 }
918
919 unsigned Graph::stackPointerOffset()
920 {
921     return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
922 }
923
924 unsigned Graph::requiredRegisterCountForExit()
925 {
926     unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
927     for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
928         InlineCallFrame* inlineCallFrame = *iter;
929         CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
930         unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
931         count = std::max(count, requiredCount);
932     }
933     return count;
934 }
935
936 unsigned Graph::requiredRegisterCountForExecutionAndExit()
937 {
938     return std::max(frameRegisterCount(), requiredRegisterCountForExit());
939 }
940
// Try to constant-fold a property load from a constant object: succeeds only
// when the base is a constant object and every structure in the proven set
// has a still-valid property-replacement watchpoint for the offset, which we
// then register with the plan's watchpoints.
JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}
986
987 JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
988 {
989     return tryGetConstantProperty(base, StructureSet(structure), offset);
990 }
991
992 JSValue Graph::tryGetConstantProperty(
993     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
994 {
995     if (structure.isTop() || structure.isClobbered())
996         return JSValue();
997     
998     return tryGetConstantProperty(base, structure.set(), offset);
999 }
1000
1001 JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
1002 {
1003     return tryGetConstantProperty(base.m_value, base.m_structure, offset);
1004 }
1005
1006 JSLexicalEnvironment* Graph::tryGetActivation(Node* node)
1007 {
1008     return node->dynamicCastConstant<JSLexicalEnvironment*>();
1009 }
1010
1011 WriteBarrierBase<Unknown>* Graph::tryGetRegisters(Node* node)
1012 {
1013     JSLexicalEnvironment* lexicalEnvironment = tryGetActivation(node);
1014     if (!lexicalEnvironment)
1015         return 0;
1016     if (!lexicalEnvironment->isTornOff())
1017         return 0;
1018     return lexicalEnvironment->registers();
1019 }
1020
1021 JSArrayBufferView* Graph::tryGetFoldableView(Node* node)
1022 {
1023     JSArrayBufferView* view = node->dynamicCastConstant<JSArrayBufferView*>();
1024     if (!view)
1025         return nullptr;
1026     if (!view->length())
1027         return nullptr;
1028     WTF::loadLoadFence();
1029     return view;
1030 }
1031
1032 JSArrayBufferView* Graph::tryGetFoldableView(Node* node, ArrayMode arrayMode)
1033 {
1034     if (arrayMode.typedArrayType() == NotTypedArray)
1035         return 0;
1036     return tryGetFoldableView(node);
1037 }
1038
1039 JSArrayBufferView* Graph::tryGetFoldableViewForChild1(Node* node)
1040 {
1041     return tryGetFoldableView(child(node, 0).node(), node->arrayMode());
1042 }
1043
// Rebuild the code block's constant pool from the graph's frozen values and
// register each value with the plan according to its strength.
void Graph::registerFrozenValues()
{
    m_codeBlock->constants().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        if (value->structure())
            ASSERT(m_plan.weakReferences.contains(value->structure()));
        
        switch (value->strength()) {
        case FragileValue: {
            // Fragile values get no protection from us here.
            break;
        }
        case WeakValue: {
            // Register the cell as a weak reference held by the plan.
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            // Strong values become real code block constants; the write
            // barrier is set up lazily through the plan.
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            initializeLazyWriteBarrierForConstant(
                m_plan.writeBarriers,
                m_codeBlock->constants()[constantIndex],
                m_codeBlock,
                constantIndex,
                m_codeBlock->ownerExecutable(),
                value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
}
1073
// Marking hook: report to the visitor every cell the graph refers to, both
// from the frozen value table and from structure-carrying node metadata.
void Graph::visitChildren(SlotVisitor& visitor)
{
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarrieredReadOnlyValue(value->value());
        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
    }
    
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        
        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);
            
            switch (node->op()) {
            case CheckStructure:
                // The node bakes in a whole structure set.
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
                break;
                
            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                // These ops carry a single structure.
                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
                break;
                
            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                // Transition ops reference both endpoints of the transition.
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->previous);
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->next);
                break;
                
            case MultiGetByOffset:
                for (unsigned i = node->multiGetByOffsetData().variants.size(); i--;) {
                    GetByIdVariant& variant = node->multiGetByOffsetData().variants[i];
                    const StructureSet& set = variant.structureSet();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);

                    // Don't need to mark anything in the structure chain because that would
                    // have been decomposed into CheckStructure's. Don't need to mark the
                    // callLinkStatus because we wouldn't use MultiGetByOffset if any of the
                    // variants did that.
                    ASSERT(!variant.callLinkStatus());
                }
                break;
                    
            case MultiPutByOffset:
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
                    // Transition variants additionally reference the
                    // structure being transitioned to.
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
                }
                break;
                
            default:
                break;
            }
        }
    }
}
1142
1143 FrozenValue* Graph::freezeFragile(JSValue value)
1144 {
1145     if (UNLIKELY(!value))
1146         return FrozenValue::emptySingleton();
1147     
1148     auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
1149     if (LIKELY(!result.isNewEntry))
1150         return result.iterator->value;
1151     
1152     return result.iterator->value = m_frozenValues.add(FrozenValue::freeze(value));
1153 }
1154
1155 FrozenValue* Graph::freeze(JSValue value)
1156 {
1157     FrozenValue* result = freezeFragile(value);
1158     result->strengthenTo(WeakValue);
1159     return result;
1160 }
1161
1162 FrozenValue* Graph::freezeStrong(JSValue value)
1163 {
1164     FrozenValue* result = freezeFragile(value);
1165     result->strengthenTo(StrongValue);
1166     return result;
1167 }
1168
1169 void Graph::convertToConstant(Node* node, FrozenValue* value)
1170 {
1171     if (value->structure())
1172         assertIsRegistered(value->structure());
1173     if (m_form == ThreadedCPS) {
1174         if (node->op() == GetLocal)
1175             dethread();
1176         else
1177             ASSERT(!node->hasVariableAccessData(*this));
1178     }
1179     node->convertToConstant(value);
1180 }
1181
1182 void Graph::convertToConstant(Node* node, JSValue value)
1183 {
1184     convertToConstant(node, freeze(value));
1185 }
1186
1187 void Graph::convertToStrongConstant(Node* node, JSValue value)
1188 {
1189     convertToConstant(node, freezeStrong(value));
1190 }
1191
1192 StructureRegistrationResult Graph::registerStructure(Structure* structure)
1193 {
1194     m_plan.weakReferences.addLazily(structure);
1195     if (m_plan.watchpoints.consider(structure))
1196         return StructureRegisteredAndWatched;
1197     return StructureRegisteredNormally;
1198 }
1199
1200 void Graph::assertIsRegistered(Structure* structure)
1201 {
1202     if (m_structureRegistrationState == HaveNotStartedRegistering)
1203         return;
1204     
1205     DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
1206     
1207     if (!structure->dfgShouldWatch())
1208         return;
1209     if (watchpoints().isWatched(structure->transitionWatchpointSet()))
1210         return;
1211     
1212     DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
1213 }
1214
1215 void Graph::handleAssertionFailure(
1216     Node* node, const char* file, int line, const char* function, const char* assertion)
1217 {
1218     startCrashing();
1219     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1220     dataLog(file, "(", line, ") : ", function, "\n");
1221     dataLog("\n");
1222     if (node) {
1223         dataLog("While handling node ", node, "\n");
1224         dataLog("\n");
1225     }
1226     dataLog("Graph at time of failure:\n");
1227     dump();
1228     dataLog("\n");
1229     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1230     dataLog(file, "(", line, ") : ", function, "\n");
1231     CRASH();
1232 }
1233
1234 } } // namespace JSC::DFG
1235
1236 #endif // ENABLE(DFG_JIT)