DFG SSA stack accesses shouldn't speak of VariableAccessDatas
Source/JavaScriptCore/dfg/DFGGraph.cpp
/*
 * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGGraph.h"

#if ENABLE(DFG_JIT)

#include "BytecodeLivenessAnalysisInlines.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGBlockWorklist.h"
#include "DFGClobberSet.h"
#include "DFGJITCode.h"
#include "DFGVariableAccessDataDump.h"
#include "FullBytecodeLiveness.h"
#include "FunctionExecutableDump.h"
#include "JIT.h"
#include "JSLexicalEnvironment.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "OperandsInlines.h"
#include "JSCInlines.h"
#include "StackAlignment.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>

namespace JSC { namespace DFG {

// Creates an array of stringized names.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};

Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock.get())
    , m_profiledBlock(m_codeBlock->alternative())
    , m_allocator(longLivedState.m_allocator)
    , m_mustHandleValues(OperandsLike, plan.mustHandleValues)
    , m_hasArguments(false)
    , m_nextMachineLocal(0)
    , m_machineCaptureStart(std::numeric_limits<int>::max())
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);

    for (unsigned i = m_mustHandleValues.size(); i--;)
        m_mustHandleValues[i] = freezeFragile(plan.mustHandleValues[i]);

    for (unsigned i = m_codeBlock->m_numVars; i--;) {
        if (m_codeBlock->isCaptured(virtualRegisterForLocal(i)))
            m_outermostCapturedVars.set(i);
    }
}

Graph::~Graph()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;

        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            m_allocator.free(block->phis[phiIndex]);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            m_allocator.free(block->at(nodeIndex));
    }
    m_allocator.freeAll();
}

const char *Graph::opName(NodeType op)
{
    return dfgOpNames[op];
}

static void printWhiteSpace(PrintStream& out, unsigned amount)
{
    while (amount-- > 0)
        out.print(" ");
}

bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node* previousNode, Node* currentNode, DumpContext* context)
{
    if (!previousNode)
        return false;

    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;

    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }

    bool hasPrinted = false;

    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }

    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }

    return hasPrinted;
}

int Graph::amountOfNodeWhiteSpace(Node* node)
{
    return (node->origin.semantic.inlineDepth() - 1) * 2;
}

void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
{
    printWhiteSpace(out, amountOfNodeWhiteSpace(node));
}

void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count is prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. These may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/PutGlobalVar operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasVarNumber())
        out.print(comma, node->varNumber());
    if (node->hasRegisterPointer())
        out.print(comma, "global", globalObjectFor(node->origin.semantic)->findRegisterIndex(node->registerPointer()), "(", RawPointer(node->registerPointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition())
        out.print(comma, pointerDumpInContext(node->transition(), context));
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasFunctionDeclIndex()) {
        FunctionExecutable* executable = m_codeBlock->functionDecl(node->functionDeclIndex());
        out.print(comma, FunctionExecutableDump(executable));
    }
    if (node->hasFunctionExprIndex()) {
        FunctionExecutable* executable = m_codeBlock->functionExpr(node->functionExprIndex());
        out.print(comma, FunctionExecutableDump(executable));
    }
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasVariableWatchpointSet())
        out.print(comma, RawPointer(node->variableWatchpointSet()));
    if (node->hasTypedArray())
        out.print(comma, inContext(JSValue(node->typedArray()), context));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        if (node->origin.semantic != node->origin.forExit)
            out.print(comma, "exit: ", node->origin.forExit);
    }

    out.print(")");

    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));

    out.print("\n");
}

void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
    // Self-comparison is false only for NaN, so this skips printing an execution count that is unavailable.
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    for (BasicBlock* successor : block->successors()) {
        out.print(" ", *successor);
        if (m_prePostNumbering.isValid())
            out.print(" (", m_prePostNumbering.edgeKind(block, successor), ")");
    }
    out.print("\n");
    if (m_dominators.isValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators.dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators.blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators.dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators.iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_prePostNumbering.isValid())
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering.preNumber(block), "/", m_prePostNumbering.postNumber(block), "\n");
    if (m_naturalLoops.isValid()) {
        if (const NaturalLoop* loop = m_naturalLoops.headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }

        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops.loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}

void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;

    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA)
        out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
    else
        out.print("  Arguments: ", listDump(m_arguments), "\n");
    out.print("\n");

    Node* lastNode = 0;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }

        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
            lastNode = block->at(i);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }

        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }

    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}

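// Revert a ThreadedCPS graph to LoadStore form by clearing the variable links carried by Phi nodes.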
void Graph::dethread()
{
    if (m_form == LoadStore || m_form == SSA)
        return;

    if (logCompilationChanges())
        dataLog("Dethreading DFG graph.\n");

    SamplingRegion samplingRegion("DFG Dethreading");

    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
            Node* phi = block->phis[phiIndex];
            phi->children.reset();
        }
    }

    m_form = LoadStore;
}

void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
{
    if (!successor->isReachable) {
        successor->isReachable = true;
        worklist.append(successor);
    }

    successor->predecessors.append(block);
}

void Graph::determineReachability()
{
    Vector<BasicBlock*, 16> worklist;
    worklist.append(block(0));
    block(0)->isReachable = true;
    while (!worklist.isEmpty()) {
        BasicBlock* block = worklist.takeLast();
        for (unsigned i = block->numSuccessors(); i--;)
            handleSuccessor(worklist, block, block->successor(i));
    }
}

void Graph::resetReachability()
{
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        block->isReachable = false;
        block->predecessors.clear();
    }

    determineReachability();
}

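// Mark the inputs of OSR hint nodes (MovHint, PutStructureHint, PutByOffsetHint) with the NodeRelevantToOSR flag.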
void Graph::mergeRelevantToOSR()
{
    for (BasicBlock* block : blocksInNaturalOrder()) {
        for (Node* node : *block) {
            switch (node->op()) {
            case MovHint:
                node->child1()->mergeFlags(NodeRelevantToOSR);
                break;

            case PutStructureHint:
            case PutByOffsetHint:
                node->child2()->mergeFlags(NodeRelevantToOSR);
                break;

            default:
                break;
            }
        }
    }
}

namespace {

class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }

    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        //
        // Also take this opportunity to pretend that Check nodes are not NodeMustGenerate. Check
        // nodes are MustGenerate because they are executed for effect, but they follow the same
        // DCE rules as nodes that aren't MustGenerate: they only contribute to the ref count of
        // their children if the edges require checks. Non-checking edges are removed. Note that
        // for any Checks left over, this phase will turn them back into NodeMustGenerate.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }

        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (node->op() == Check) {
                    // We don't treat Check nodes as MustGenerate. We will gladly
                    // kill them off in this phase.
                    continue;
                }
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }

        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }

            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }

private:
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }

    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }

    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }

    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};

} // anonymous namespace

void Graph::computeRefCounts()
{
    RefCountCalculator calculator(*this);
    calculator.calculate();
}

void Graph::killBlockAndItsContents(BasicBlock* block)
{
    for (unsigned phiIndex = block->phis.size(); phiIndex--;)
        m_allocator.free(block->phis[phiIndex]);
    for (unsigned nodeIndex = block->size(); nodeIndex--;)
        m_allocator.free(block->at(nodeIndex));

    killBlock(block);
}

void Graph::killUnreachableBlocks()
{
    for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        if (block->isReachable)
            continue;

        killBlockAndItsContents(block);
    }
}

void Graph::invalidateCFG()
{
    m_dominators.invalidate();
    m_naturalLoops.invalidate();
    m_prePostNumbering.invalidate();
}

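// Replaces the next GetLocal of this variable in the block (at or after startIndexInBlock) with newGetLocal, unless a SetLocal redefines the variable first; variablesAtTail is updated if it pointed at the replaced node.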
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    if (variableAccessData->isCaptured()) {
        // Let CSE worry about this one.
        return;
    }
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }

        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }

        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}

BlockList Graph::blocksInPreOrder()
{
    BlockList result;
    BlockWorklist worklist;
    worklist.push(block(0));
    while (BasicBlock* block = worklist.pop()) {
        result.append(block);
        for (unsigned i = block->numSuccessors(); i--;)
            worklist.push(block->successor(i));
    }
    return result;
}

BlockList Graph::blocksInPostOrder()
{
    BlockList result;
    PostOrderBlockWorklist worklist;
    worklist.push(block(0));
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case PreOrder:
            worklist.pushPost(item.block);
            for (unsigned i = item.block->numSuccessors(); i--;)
                worklist.push(item.block->successor(i));
            break;
        case PostOrder:
            result.append(item.block);
            break;
        }
    }
    return result;
}

void Graph::clearReplacements()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->replacement = 0;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->replacement = 0;
    }
}

void Graph::initializeNodeOwners()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->owner = block;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->owner = block;
    }
}

void Graph::clearFlagsOnAllNodes(NodeFlags flags)
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->clearFlags(flags);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->clearFlags(flags);
    }
}

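// Returns full bytecode liveness for the given baseline CodeBlock, computing and caching it on first use.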
FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
    if (iter != m_bytecodeLiveness.end())
        return *iter->value;

    std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
    codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
    FullBytecodeLiveness& result = *liveness;
    m_bytecodeLiveness.add(codeBlock, WTF::move(liveness));
    return result;
}

FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}

bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    for (;;) {
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOrigin.stackOffset());

        if (operand.offset() < codeOrigin.stackOffset() + JSStack::CallFrameHeaderSize) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);

                if (codeOrigin.inlineCallFrame->isClosureCall
                    && reg.offset() == JSStack::Callee)
                    return true;

                if (codeOrigin.inlineCallFrame->isVarargs()
                    && reg.offset() == JSStack::ArgumentCount)
                    return true;

                return false;
            }

            return livenessFor(codeOrigin.inlineCallFrame).operandIsLive(
                reg.offset(), codeOrigin.bytecodeIndex);
        }

        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
        if (!inlineCallFrame)
            break;

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        // FIXME: 'this' might not be live, but we don't have a way of knowing.
        // https://bugs.webkit.org/show_bug.cgi?id=128519
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
            return true;

        codeOrigin = inlineCallFrame->caller;
    }

    return true;
}

unsigned Graph::frameRegisterCount()
{
    unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
    return roundLocalRegisterCountForFramePointerOffset(result);
}

unsigned Graph::stackPointerOffset()
{
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}

unsigned Graph::requiredRegisterCountForExit()
{
    unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
    for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
        InlineCallFrame* inlineCallFrame = *iter;
        CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
        unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
        count = std::max(count, requiredCount);
    }
    return count;
}

unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();

    JSObject* object = asObject(base);

    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();

        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());

        watchpoints().addLazily(set);
    }

    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.

    if (!structureSet.contains(object->structure()))
        return JSValue();

    return object->getDirect(offset);
}

JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
{
    return tryGetConstantProperty(base, StructureSet(structure), offset);
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
{
    if (structure.isTop() || structure.isClobbered())
        return JSValue();

    return tryGetConstantProperty(base, structure.set(), offset);
}

JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}

JSLexicalEnvironment* Graph::tryGetActivation(Node* node)
{
    return node->dynamicCastConstant<JSLexicalEnvironment*>();
}

WriteBarrierBase<Unknown>* Graph::tryGetRegisters(Node* node)
{
    JSLexicalEnvironment* lexicalEnvironment = tryGetActivation(node);
    if (!lexicalEnvironment)
        return 0;
    return lexicalEnvironment->registers();
}

JSArrayBufferView* Graph::tryGetFoldableView(Node* node)
{
    JSArrayBufferView* view = node->dynamicCastConstant<JSArrayBufferView*>();
    if (!view)
        return nullptr;
    if (!view->length())
        return nullptr;
    WTF::loadLoadFence();
    return view;
}

JSArrayBufferView* Graph::tryGetFoldableView(Node* node, ArrayMode arrayMode)
{
    if (arrayMode.typedArrayType() == NotTypedArray)
        return 0;
    return tryGetFoldableView(node);
}

JSArrayBufferView* Graph::tryGetFoldableViewForChild1(Node* node)
{
    return tryGetFoldableView(child(node, 0).node(), node->arrayMode());
}

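// Rebuilds the CodeBlock's constant pool from the frozen values: weak values are registered as weak references, and strong values get lazily initialized write-barriered constants.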
void Graph::registerFrozenValues()
{
    m_codeBlock->constants().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        if (value->structure())
            ASSERT(m_plan.weakReferences.contains(value->structure()));

        switch (value->strength()) {
        case FragileValue: {
            break;
        }
        case WeakValue: {
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            initializeLazyWriteBarrierForConstant(
                m_plan.writeBarriers,
                m_codeBlock->constants()[constantIndex],
                m_codeBlock,
                constantIndex,
                m_codeBlock->ownerExecutable(),
                value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
}

void Graph::visitChildren(SlotVisitor& visitor)
{
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarrieredReadOnlyValue(value->value());
        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
    }

    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;

        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);

            switch (node->op()) {
            case CheckStructure:
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
                break;

            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
                break;

            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->previous);
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->next);
                break;

            case MultiGetByOffset:
                for (unsigned i = node->multiGetByOffsetData().variants.size(); i--;) {
                    GetByIdVariant& variant = node->multiGetByOffsetData().variants[i];
                    const StructureSet& set = variant.structureSet();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);

                    // Don't need to mark anything in the structure chain because that would
                    // have been decomposed into CheckStructure's. Don't need to mark the
                    // callLinkStatus because we wouldn't use MultiGetByOffset if any of the
                    // variants did that.
                    ASSERT(!variant.callLinkStatus());
                }
                break;

            case MultiPutByOffset:
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
                }
                break;

            default:
                break;
            }
        }
    }
}

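// freezeFragile() interns a value with no lifetime guarantee; freeze() and freezeStrong() below upgrade the interned entry to weak or strong strength.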
FrozenValue* Graph::freezeFragile(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();

    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    return result.iterator->value = m_frozenValues.add(FrozenValue::freeze(value));
}

FrozenValue* Graph::freeze(JSValue value)
{
    FrozenValue* result = freezeFragile(value);
    result->strengthenTo(WeakValue);
    return result;
}

FrozenValue* Graph::freezeStrong(JSValue value)
{
    FrozenValue* result = freezeFragile(value);
    result->strengthenTo(StrongValue);
    return result;
}

void Graph::convertToConstant(Node* node, FrozenValue* value)
{
    if (value->structure())
        assertIsRegistered(value->structure());
    if (m_form == ThreadedCPS) {
        if (node->op() == GetLocal)
            dethread();
        else
            ASSERT(!node->hasVariableAccessData(*this));
    }
    node->convertToConstant(value);
}

void Graph::convertToConstant(Node* node, JSValue value)
{
    convertToConstant(node, freeze(value));
}

void Graph::convertToStrongConstant(Node* node, JSValue value)
{
    convertToConstant(node, freezeStrong(value));
}

StructureRegistrationResult Graph::registerStructure(Structure* structure)
{
    m_plan.weakReferences.addLazily(structure);
    if (m_plan.watchpoints.consider(structure))
        return StructureRegisteredAndWatched;
    return StructureRegisteredNormally;
}

void Graph::assertIsRegistered(Structure* structure)
{
    if (m_structureRegistrationState == HaveNotStartedRegistering)
        return;

    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));

    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;

    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}

NO_RETURN_DUE_TO_CRASH static void crash(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    CRASH_WITH_SECURITY_IMPLICATION();
}

void Graph::handleAssertionFailure(
    std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, "", file, line, function, assertion);
}

void Graph::handleAssertionFailure(
    Node* node, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
}

void Graph::handleAssertionFailure(
    BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
}

ValueProfile* Graph::valueProfileFor(Node* node)
{
    if (!node)
        return nullptr;

    CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);

    if (node->hasLocal(*this)) {
        if (!node->local().isArgument())
            return nullptr;
        int argument = node->local().toArgument();
        Node* argumentNode = m_arguments[argument];
        if (!argumentNode)
            return nullptr;
        if (node->variableAccessData() != argumentNode->variableAccessData())
            return nullptr;
        return profiledBlock->valueProfileForArgument(argument);
    }

    if (node->hasHeapPrediction())
        return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);

    return nullptr;
}

MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* node)
{
    if (!node)
        return MethodOfGettingAValueProfile();

    if (ValueProfile* valueProfile = valueProfileFor(node))
        return MethodOfGettingAValueProfile(valueProfile);

    if (node->op() == GetLocal) {
        CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);

        return MethodOfGettingAValueProfile::fromLazyOperand(
            profiledBlock,
            LazyOperandValueProfileKey(
                node->origin.semantic.bytecodeIndex, node->local()));
    }

    return MethodOfGettingAValueProfile();
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)