Node::origin should be able to tell you if it's OK to exit
[WebKit.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
1 /*
2  * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGGraph.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "BytecodeKills.h"
32 #include "BytecodeLivenessAnalysisInlines.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGBlockWorklist.h"
36 #include "DFGClobberSet.h"
37 #include "DFGClobbersExitState.h"
38 #include "DFGJITCode.h"
39 #include "DFGMayExit.h"
40 #include "DFGVariableAccessDataDump.h"
41 #include "FullBytecodeLiveness.h"
42 #include "FunctionExecutableDump.h"
43 #include "JIT.h"
44 #include "JSLexicalEnvironment.h"
45 #include "MaxFrameExtentForSlowPathCall.h"
46 #include "OperandsInlines.h"
47 #include "JSCInlines.h"
48 #include "StackAlignment.h"
49 #include <wtf/CommaPrinter.h>
50 #include <wtf/ListDump.h>
51
52 namespace JSC { namespace DFG {
53
// Creates an array of stringized opcode names, indexed by NodeType, by
// expanding FOR_EACH_DFG_OP with a stringizing macro. Consumed by
// Graph::opName() below.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};
60
// Constructs a Graph for the given compilation plan. The graph starts in
// LoadStore form, before the fixpoint, with every node considered live
// (EverythingIsLive) and no structures registered yet. Node memory comes
// from the long-lived allocator shared across compiles.
Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock.get())
    , m_profiledBlock(m_codeBlock->alternative())
    , m_allocator(longLivedState.m_allocator)
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);
    
    // Debugger-aware codegen is required either when a debugger is actually
    // attached to the global object or when it is forced via Options.
    m_hasDebuggerEnabled = m_profiledBlock->globalObject()->hasDebugger()
        || Options::forceDebuggerBytecodeGeneration();
}
79
80 Graph::~Graph()
81 {
82     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
83         BasicBlock* block = this->block(blockIndex);
84         if (!block)
85             continue;
86
87         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
88             m_allocator.free(block->phis[phiIndex]);
89         for (unsigned nodeIndex = block->size(); nodeIndex--;)
90             m_allocator.free(block->at(nodeIndex));
91     }
92     m_allocator.freeAll();
93 }
94
95 const char *Graph::opName(NodeType op)
96 {
97     return dfgOpNames[op];
98 }
99
100 static void printWhiteSpace(PrintStream& out, unsigned amount)
101 {
102     while (amount-- > 0)
103         out.print(" ");
104 }
105
// Prints the change in inline-call-frame context between the previously
// dumped node (tracked through previousNodeRef) and currentNode, as
// "<--" lines for frames popped and "-->" lines for frames pushed.
// Updates previousNodeRef to currentNode. Returns true iff anything was
// printed.
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    // Nodes without a semantic origin contribute no context; skip them
    // without disturbing the previous-node tracking.
    if (!currentNode->origin.semantic)
        return false;
    
    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    if (!previousNode)
        return false;
    
    // Same innermost inline frame means no transition to report.
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    // Find where the two inline stacks diverge so we only print the frames
    // that actually changed.
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}
151
152 int Graph::amountOfNodeWhiteSpace(Node* node)
153 {
154     return (node->origin.semantic.inlineDepth() - 1) * 2;
155 }
156
157 void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
158 {
159     printWhiteSpace(out, amountOfNodeWhiteSpace(node));
160 }
161
// Dumps one node as a single line: index, ref count, virtual register,
// op name, children, then every piece of op-specific metadata the node
// carries (each guarded by a hasX() predicate), the clobber/exit summary,
// origin info, and finally the prediction, if any. The textual format is
// relied upon by people reading compiler dumps, so it must stay stable.
void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    // Display the ref count without the implicit 'mustGenerate' ref; that
    // ref is instead shown as a '!' prefix below.
    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count it prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. The may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/PutGlobalVar operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    // Children: var-args nodes store them in m_varArgChildren; everyone else
    // has up to three direct edges, printed left-justified (a set child is
    // printed if any later child is set, so gaps show up as blanks).
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    // Op-specific metadata. Each hasX() predicate guards data that only some
    // opcodes carry; the order below fixes the order in the dump line.
    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasRegisterPointer())
        out.print(comma, "global", globalObjectFor(node->origin.semantic)->findVariableIndex(node->variablePointer()), "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                // If the cell is callable, also describe its executable so
                // the dump shows which function is being called/referenced.
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    // Control-flow nodes: print their successor(s).
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    // Summarize the node's abstract-heap reads/writes and exit behavior.
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        // Only print the exit origin when it differs from the semantic one.
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    if (!node->origin.exitOK)
        out.print(comma, "ExitInvalid");
    out.print(")");

    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}
377
378 bool Graph::terminalsAreValid()
379 {
380     for (BasicBlock* block : blocksInNaturalOrder()) {
381         if (!block->terminal())
382             return false;
383     }
384     return true;
385 }
386
// Prints a multi-line summary of `block`: its first node's origin,
// execution count, predecessor/successor lists, and whatever analysis
// results (dominators, pre/post numbering, natural loops) are currently
// valid, plus its phi nodes per phiNodeDumpMode.
void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
    // Self-comparison is false only for NaN; this skips printing an
    // execution count that was never recorded (left as NaN).
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    // Successors are derived from the terminal; without one they are
    // undefined, which we flag rather than crash on.
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
            if (m_prePostNumbering.isValid())
                out.print(" (", m_prePostNumbering.edgeKind(block, successor), ")");
        }
    } else
        out.print(" <invalid>");
    out.print("\n");
    if (m_dominators.isValid() && terminalsAreValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators.dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators.blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators.dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators.iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_prePostNumbering.isValid())
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering.preNumber(block), "/", m_prePostNumbering.postNumber(block), "\n");
    if (m_naturalLoops.isValid()) {
        if (const NaturalLoop* loop = m_naturalLoops.headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            // Sort member block indices so the dump is stable and readable.
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops.loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            // Dead phis are hidden unless the caller asked for all of them.
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}
455
// Dumps the whole graph: a header with compilation state, then each block's
// header, CFA state at head, every node (with inline-frame transitions),
// CFA state at tail, and finally the frozen GC values and watchpoints.
// A local DumpContext collects deferred detail to print at the end when the
// caller did not supply one.
void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA)
        out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
    else
        out.print("  Arguments: ", listDump(m_arguments), "\n");
    out.print("\n");
    
    // lastNode threads through dumpCodeOrigin so inline-frame pushes/pops
    // are printed between consecutive nodes, across block boundaries too.
    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        // The per-variable state representation differs between CPS forms
        // (operand-indexed) and SSA (node-keyed).
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    // Frozen values that point into the GC heap are listed so the reader
    // can see what this compilation keeps alive.
    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));
    
    // Flush any detail the local context accumulated during the dump.
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}
554
555 void Graph::dethread()
556 {
557     if (m_form == LoadStore || m_form == SSA)
558         return;
559     
560     if (logCompilationChanges())
561         dataLog("Dethreading DFG graph.\n");
562     
563     SamplingRegion samplingRegion("DFG Dethreading");
564     
565     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
566         BasicBlock* block = m_blocks[blockIndex].get();
567         if (!block)
568             continue;
569         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
570             Node* phi = block->phis[phiIndex];
571             phi->children.reset();
572         }
573     }
574     
575     m_form = LoadStore;
576 }
577
578 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
579 {
580     if (!successor->isReachable) {
581         successor->isReachable = true;
582         worklist.append(successor);
583     }
584     
585     successor->predecessors.append(block);
586 }
587
588 void Graph::determineReachability()
589 {
590     Vector<BasicBlock*, 16> worklist;
591     worklist.append(block(0));
592     block(0)->isReachable = true;
593     while (!worklist.isEmpty()) {
594         BasicBlock* block = worklist.takeLast();
595         for (unsigned i = block->numSuccessors(); i--;)
596             handleSuccessor(worklist, block, block->successor(i));
597     }
598 }
599
600 void Graph::resetReachability()
601 {
602     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
603         BasicBlock* block = m_blocks[blockIndex].get();
604         if (!block)
605             continue;
606         block->isReachable = false;
607         block->predecessors.clear();
608     }
609     
610     determineReachability();
611 }
612
613 namespace {
614
// Computes real reference counts for every node: a node is live if it is
// must-generate, needed by an unproven type check, or transitively
// referenced by a live node (including, in SSA, Upsilons whose Phi is
// live). Runs a worklist to a fixpoint.
class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
        // Fixpoint: draining the worklist may make more Upsilons live (via
        // their Phis becoming live), which seeds another drain.
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    // Roots any edge that still needs a runtime type check.
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    // Bumps the node's ref count; enqueues it only on the 0 -> 1 transition
    // so each node is processed once.
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};
711
712 } // anonymous namespace
713
714 void Graph::computeRefCounts()
715 {
716     RefCountCalculator calculator(*this);
717     calculator.calculate();
718 }
719
720 void Graph::killBlockAndItsContents(BasicBlock* block)
721 {
722     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
723         m_allocator.free(block->phis[phiIndex]);
724     for (unsigned nodeIndex = block->size(); nodeIndex--;)
725         m_allocator.free(block->at(nodeIndex));
726     
727     killBlock(block);
728 }
729
730 void Graph::killUnreachableBlocks()
731 {
732     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
733         BasicBlock* block = this->block(blockIndex);
734         if (!block)
735             continue;
736         if (block->isReachable)
737             continue;
738         
739         killBlockAndItsContents(block);
740     }
741 }
742
// Marks all CFG-derived analyses stale. Call after any structural change
// to the control flow graph; the analyses recompute lazily on next use.
void Graph::invalidateCFG()
{
    m_dominators.invalidate();
    m_naturalLoops.invalidate();
    m_prePostNumbering.invalidate();
}
749
// Redirects uses of the variable's GetLocal (at or after startIndexInBlock)
// to newGetLocal via substitute(). Scanning stops once the first matching
// GetLocal has been handled, or earlier if a SetLocal overwrites the same
// local (the old value is dead past that point).
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            // A store to the same local ends the range we are rewriting.
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            // GetLocals of other variables are irrelevant; skip them.
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            // Keep the tail variable map consistent if this GetLocal was the
            // node recorded there for this operand.
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}
780
781 BlockList Graph::blocksInPreOrder()
782 {
783     BlockList result;
784     BlockWorklist worklist;
785     worklist.push(block(0));
786     while (BasicBlock* block = worklist.pop()) {
787         result.append(block);
788         for (unsigned i = block->numSuccessors(); i--;)
789             worklist.push(block->successor(i));
790     }
791     return result;
792 }
793
794 BlockList Graph::blocksInPostOrder()
795 {
796     BlockList result;
797     PostOrderBlockWorklist worklist;
798     worklist.push(block(0));
799     while (BlockWithOrder item = worklist.pop()) {
800         switch (item.order) {
801         case PreOrder:
802             worklist.pushPost(item.block);
803             for (unsigned i = item.block->numSuccessors(); i--;)
804                 worklist.push(item.block->successor(i));
805             break;
806         case PostOrder:
807             result.append(item.block);
808             break;
809         }
810     }
811     return result;
812 }
813
814 void Graph::clearReplacements()
815 {
816     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
817         BasicBlock* block = m_blocks[blockIndex].get();
818         if (!block)
819             continue;
820         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
821             block->phis[phiIndex]->setReplacement(nullptr);
822         for (unsigned nodeIndex = block->size(); nodeIndex--;)
823             block->at(nodeIndex)->setReplacement(nullptr);
824     }
825 }
826
827 void Graph::clearEpochs()
828 {
829     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
830         BasicBlock* block = m_blocks[blockIndex].get();
831         if (!block)
832             continue;
833         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
834             block->phis[phiIndex]->setEpoch(Epoch());
835         for (unsigned nodeIndex = block->size(); nodeIndex--;)
836             block->at(nodeIndex)->setEpoch(Epoch());
837     }
838 }
839
840 void Graph::initializeNodeOwners()
841 {
842     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
843         BasicBlock* block = m_blocks[blockIndex].get();
844         if (!block)
845             continue;
846         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
847             block->phis[phiIndex]->owner = block;
848         for (unsigned nodeIndex = block->size(); nodeIndex--;)
849             block->at(nodeIndex)->owner = block;
850     }
851 }
852
853 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
854 {
855     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
856         BasicBlock* block = m_blocks[blockIndex].get();
857         if (!block)
858             continue;
859         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
860             block->phis[phiIndex]->clearFlags(flags);
861         for (unsigned nodeIndex = block->size(); nodeIndex--;)
862             block->at(nodeIndex)->clearFlags(flags);
863     }
864 }
865
bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    // Returns false if the condition cannot be watched. Otherwise registers
    // everything needed to keep the condition valid for the lifetime of the
    // compiled code and returns true.
    if (!key.isWatchable())
        return false;
    
    // Keep every cell the condition mentions alive (weakly) for as long as
    // the compiled code lives.
    m_plan.weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        m_plan.weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        m_plan.weakReferences.addLazily(key.requiredValue());
    
    m_plan.watchpoints.addLazily(key);

    // A watched Presence condition proves that loading from this offset on
    // this object is safe; remember that for isSafeToLoad().
    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
    
    return true;
}
884
bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
{
    // True only if watchCondition() previously proved a Presence condition for
    // exactly this (object, offset) pair.
    return m_safeToLoad.contains(std::make_pair(base, offset));
}
889
890 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
891 {
892     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
893     if (iter != m_bytecodeLiveness.end())
894         return *iter->value;
895     
896     std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
897     codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
898     FullBytecodeLiveness& result = *liveness;
899     m_bytecodeLiveness.add(codeBlock, WTF::move(liveness));
900     return result;
901 }
902
FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    // Convenience overload: liveness is computed on the baseline code block
    // corresponding to the inline call frame.
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}
907
908 BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
909 {
910     HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
911     if (iter != m_bytecodeKills.end())
912         return *iter->value;
913     
914     std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
915     codeBlock->livenessAnalysis().computeKills(*kills);
916     BytecodeKills& result = *kills;
917     m_bytecodeKills.add(codeBlock, WTF::move(kills));
918     return result;
919 }
920
BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
{
    // Convenience overload: kills are computed on the baseline code block
    // corresponding to the inline call frame.
    return killsFor(baselineCodeBlockFor(inlineCallFrame));
}
925
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    // Decides whether the given machine-frame operand is live in bytecode at
    // the given code origin, walking the inline call stack outward one frame
    // per iteration.
    for (;;) {
        // Reindex the operand relative to the frame of the current code origin.
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOrigin.stackOffset());
        
        if (operand.offset() < codeOrigin.stackOffset() + JSStack::CallFrameHeaderSize) {
            // The operand falls within this inlined frame's header or locals.
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);
                
                // The callee slot is live for inlined closure calls, since the
                // callee had to be materialized.
                if (codeOrigin.inlineCallFrame->isClosureCall
                    && reg.offset() == JSStack::Callee)
                    return true;
                
                // The argument-count slot is live for inlined varargs calls.
                if (codeOrigin.inlineCallFrame->isVarargs()
                    && reg.offset() == JSStack::ArgumentCount)
                    return true;
                
                return false;
            }
            
            // A local of the inlined frame: defer to the baseline bytecode
            // liveness analysis for that frame.
            return livenessFor(codeOrigin.inlineCallFrame).operandIsLive(
                reg.offset(), codeOrigin.bytecodeIndex);
        }
        
        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
        if (!inlineCallFrame)
            break;

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
            return true;
        
        codeOrigin = inlineCallFrame->caller;
    }
    
    // Reached the machine (outermost) frame without proving deadness: report live.
    return true;
}
966
BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
{
    // Returns one bit per machine local, set iff that local is live in
    // bytecode at the given code origin.
    BitVector result;
    result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
    forAllLocalsLiveInBytecode(
        codeOrigin,
        [&] (VirtualRegister reg) {
            ASSERT(reg.isLocal());
            result.quickSet(reg.toLocal());
        });
    return result;
}
979
980 unsigned Graph::frameRegisterCount()
981 {
982     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
983     return roundLocalRegisterCountForFramePointerOffset(result);
984 }
985
unsigned Graph::stackPointerOffset()
{
    // The stack pointer sits at the last local slot of the frame.
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}
990
unsigned Graph::requiredRegisterCountForExit()
{
    // Start with what the baseline version of the outermost code block needs.
    unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
    // Every inlined frame's baseline code block must also fit, at its stack
    // offset within our frame, in case we OSR exit into it.
    for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
        InlineCallFrame* inlineCallFrame = *iter;
        CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
        unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
        count = std::max(count, requiredCount);
    }
    return count;
}
1002
unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    // The frame must be big enough both to run this code and to OSR exit from it.
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}
1007
JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    // Attempts to constant-fold a load of the property at the given offset on
    // a constant base object, provided every structure in the proven set has a
    // still-valid replacement watchpoint for that offset. Returns the empty
    // JSValue when folding is not possible.
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        assertIsRegistered(structure);
        
        // If the property could be replaced without us noticing, folding the
        // current value would be wrong.
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}
1055
JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
{
    // Single-structure convenience overload.
    return tryGetConstantProperty(base, StructureSet(structure), offset);
}
1060
JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
{
    // Overload taking an abstract structure value: folding only proceeds when
    // the structure set is finite and unclobbered.
    if (structure.isTop() || structure.isClobbered()) {
        // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
        // watching to constant-fold the property.
        // https://bugs.webkit.org/show_bug.cgi?id=147271
        return JSValue();
    }
    
    return tryGetConstantProperty(base, structure.set(), offset);
}
1073
JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    // Convenience overload over a whole abstract value (constant + structures).
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}
1078
JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // Attempts to constant-fold a closure-variable load from a constant scope.
    // Returns the empty JSValue when folding is not possible.
    //
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        // Read the entry's watchpoint state and the variable's value under the
        // symbol table's lock so the two are observed consistently.
        ConcurrentJITLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        // Only a still-watched variable can be folded; otherwise it may mutate.
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    // Register the watchpoint outside the lock; it invalidates the fold if the
    // variable is ever written.
    watchpoints().addLazily(set);
    
    return value;
}
1117
JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
{
    // Convenience overload over the abstract value's constant (may be empty).
    return tryGetConstantClosureVar(value.m_value, offset);
}
1122
JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
{
    // Convenience overload: only constant nodes can possibly fold.
    if (!node->hasConstant())
        return JSValue();
    return tryGetConstantClosureVar(node->asJSValue(), offset);
}
1129
1130 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
1131 {
1132     if (!value)
1133         return nullptr;
1134     JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(value);
1135     if (!value)
1136         return nullptr;
1137     if (!view->length())
1138         return nullptr;
1139     WTF::loadLoadFence();
1140     watchpoints().addLazily(view);
1141     return view;
1142 }
1143
JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
{
    // Folding against a view only makes sense when the access is known to be
    // a typed-array access.
    if (arrayMode.typedArrayType() == NotTypedArray)
        return nullptr;
    return tryGetFoldableView(value);
}
1150
void Graph::registerFrozenValues()
{
    // Rebuild the code block's constant pool from the frozen values. Only
    // heap-pointing values need registration; strongly-held ones get a
    // constant-pool slot, weakly-held ones only a weak reference.
    m_codeBlock->constants().resize(0);
    m_codeBlock->constantsSourceCodeRepresentation().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        if (!value->pointsToHeap())
            continue;
        
        ASSERT(value->structure());
        ASSERT(m_plan.weakReferences.contains(value->structure()));
        
        switch (value->strength()) {
        case WeakValue: {
            // Weak: record a weak reference only; no constant-pool slot.
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            // We already have a barrier on the code block.
            m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
    m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
}
1177
void Graph::visitChildren(SlotVisitor& visitor)
{
    // GC visiting for an in-flight compilation: report every heap cell the
    // graph refers to so it cannot be collected out from under us.
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarrieredReadOnlyValue(value->value());
        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
    }
    
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        
        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);
            
            // Nodes that embed structures or transitions in their meta-data
            // must report them explicitly.
            switch (node->op()) {
            case CheckStructure:
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
                break;
                
            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
                break;
                
            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                // Both ends of the structure transition must stay alive.
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->previous);
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->next);
                break;
                
            case MultiGetByOffset:
                for (const MultiGetByOffsetCase& getCase : node->multiGetByOffsetData().cases) {
                    for (Structure* structure : getCase.set())
                        visitor.appendUnbarrieredReadOnlyPointer(structure);
                }
                break;
                    
            case MultiPutByOffset:
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
                }
                break;
                
            default:
                break;
            }
        }
    }
}
1238
FrozenValue* Graph::freeze(JSValue value)
{
    // Returns the canonical FrozenValue for the given JSValue, creating and
    // registering it on first use.
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();
    
    // Add the map entry first; most calls hit the existing-entry fast path.
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());
    
    FrozenValue frozenValue = FrozenValue::freeze(value);
    // The value's structure (if any) must be registered with the plan.
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);
    
    return result.iterator->value = m_frozenValues.add(frozenValue);
}
1257
FrozenValue* Graph::freezeStrong(JSValue value)
{
    // Like freeze(), but ensures the value is held strongly — i.e. it will get
    // a slot in the code block's constant pool (see registerFrozenValues()).
    FrozenValue* result = freeze(value);
    result->strengthenTo(StrongValue);
    return result;
}
1264
void Graph::convertToConstant(Node* node, FrozenValue* value)
{
    // Any structure carried by the frozen value must already be registered.
    if (value->structure())
        assertIsRegistered(value->structure());
    node->convertToConstant(value);
}
1271
void Graph::convertToConstant(Node* node, JSValue value)
{
    // Freezes the value (weakly) and converts the node to a constant of it.
    convertToConstant(node, freeze(value));
}
1276
void Graph::convertToStrongConstant(Node* node, JSValue value)
{
    // Freezes the value strongly and converts the node to a constant of it.
    convertToConstant(node, freezeStrong(value));
}
1281
1282 StructureRegistrationResult Graph::registerStructure(Structure* structure)
1283 {
1284     m_plan.weakReferences.addLazily(structure);
1285     if (m_plan.watchpoints.consider(structure))
1286         return StructureRegisteredAndWatched;
1287     return StructureRegisteredNormally;
1288 }
1289
void Graph::assertIsRegistered(Structure* structure)
{
    // It's convenient to be able to call this with a maybe-null structure.
    if (!structure)
        return;
    
    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
    
    // A structure the DFG should watch must actually be watched; anything else
    // indicates a missed registerStructure() call.
    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;
    
    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}
1305
// Shared failure path for Graph::handleAssertionFailure(): dump the assertion,
// the failure site, and the whole graph, then crash. The assertion text is
// printed both before and after the (potentially huge) graph dump so it is
// easy to find in the log.
NO_RETURN_DUE_TO_CRASH static void crash(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    CRASH_WITH_SECURITY_IMPLICATION();
}
1322
void Graph::handleAssertionFailure(
    std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
{
    // No failing entity available; crash with no extra context.
    crash(*this, "", file, line, function, assertion);
}
1328
void Graph::handleAssertionFailure(
    Node* node, const char* file, int line, const char* function, const char* assertion)
{
    // Crash with the failing node included in the dump.
    crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
}
1334
void Graph::handleAssertionFailure(
    BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
{
    // Crash with the failing block included in the dump.
    crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
}
1340
ValueProfile* Graph::valueProfileFor(Node* node)
{
    // Finds the baseline value profile corresponding to this node, if any:
    // per-argument profiles for argument GetLocals, per-bytecode-offset
    // profiles for nodes with heap predictions. Returns null otherwise.
    if (!node)
        return nullptr;
        
    CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
        
    if (node->hasLocal(*this)) {
        // Only arguments have dedicated per-variable profiles.
        if (!node->local().isArgument())
            return nullptr;
        int argument = node->local().toArgument();
        Node* argumentNode = m_arguments[argument];
        if (!argumentNode)
            return nullptr;
        // The profile only applies if this node observes the same variable as
        // the incoming argument node.
        if (node->variableAccessData() != argumentNode->variableAccessData())
            return nullptr;
        return profiledBlock->valueProfileForArgument(argument);
    }
        
    if (node->hasHeapPrediction())
        return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
        
    return nullptr;
}
1365
MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* node)
{
    // Like valueProfileFor(), but can also describe profiles that must be
    // looked up lazily (GetLocal's lazy-operand profiles). Returns the empty
    // method when no profile applies.
    if (!node)
        return MethodOfGettingAValueProfile();
    
    if (ValueProfile* valueProfile = valueProfileFor(node))
        return MethodOfGettingAValueProfile(valueProfile);
    
    if (node->op() == GetLocal) {
        CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
        
        return MethodOfGettingAValueProfile::fromLazyOperand(
            profiledBlock,
            LazyOperandValueProfileKey(
                node->origin.semantic.bytecodeIndex, node->local()));
    }
    
    return MethodOfGettingAValueProfile();
}
1385
1386 } } // namespace JSC::DFG
1387
1388 #endif // ENABLE(DFG_JIT)