DFG should allow Phantoms after terminals
Source/JavaScriptCore/dfg/DFGGraph.cpp
1 /*
2  * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGGraph.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "BytecodeKills.h"
32 #include "BytecodeLivenessAnalysisInlines.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGBlockWorklist.h"
36 #include "DFGClobberSet.h"
37 #include "DFGJITCode.h"
38 #include "DFGVariableAccessDataDump.h"
39 #include "FullBytecodeLiveness.h"
40 #include "FunctionExecutableDump.h"
41 #include "JIT.h"
42 #include "JSLexicalEnvironment.h"
43 #include "MaxFrameExtentForSlowPathCall.h"
44 #include "OperandsInlines.h"
45 #include "JSCInlines.h"
46 #include "StackAlignment.h"
47 #include <wtf/CommaPrinter.h>
48 #include <wtf/ListDump.h>
49
50 namespace JSC { namespace DFG {
51
52 // Creates an array of stringized names.
53 static const char* dfgOpNames[] = {
54 #define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
55     FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
56 #undef STRINGIZE_DFG_OP_ENUM
57 };
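// FOR_EACH_DFG_OP expands STRINGIZE_DFG_OP_ENUM once per opcode, so this table holds the
// opcode names indexed by NodeType; opName() below simply indexes into it.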
58
59 Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
60     : m_vm(vm)
61     , m_plan(plan)
62     , m_codeBlock(m_plan.codeBlock.get())
63     , m_profiledBlock(m_codeBlock->alternative())
64     , m_allocator(longLivedState.m_allocator)
65     , m_mustHandleValues(OperandsLike, plan.mustHandleValues)
66     , m_nextMachineLocal(0)
67     , m_fixpointState(BeforeFixpoint)
68     , m_structureRegistrationState(HaveNotStartedRegistering)
69     , m_form(LoadStore)
70     , m_unificationState(LocallyUnified)
71     , m_refCountState(EverythingIsLive)
72 {
73     ASSERT(m_profiledBlock);
74     
75     for (unsigned i = m_mustHandleValues.size(); i--;)
76         m_mustHandleValues[i] = freezeFragile(plan.mustHandleValues[i]);
77
78     m_hasDebuggerEnabled = m_profiledBlock->globalObject()->hasDebugger()
79         || Options::forceDebuggerBytecodeGeneration();
80 }
81
82 Graph::~Graph()
83 {
84     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
85         BasicBlock* block = this->block(blockIndex);
86         if (!block)
87             continue;
88
89         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
90             m_allocator.free(block->phis[phiIndex]);
91         for (unsigned nodeIndex = block->size(); nodeIndex--;)
92             m_allocator.free(block->at(nodeIndex));
93     }
94     m_allocator.freeAll();
95 }
96
97 const char* Graph::opName(NodeType op)
98 {
99     return dfgOpNames[op];
100 }
101
102 static void printWhiteSpace(PrintStream& out, unsigned amount)
103 {
104     while (amount-- > 0)
105         out.print(" ");
106 }
107
108 bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node* previousNode, Node* currentNode, DumpContext* context)
109 {
110     if (!previousNode)
111         return false;
112     
113     if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
114         return false;
115     
116     Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
117     Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
118     unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
119     unsigned indexOfDivergence = commonSize;
120     for (unsigned i = 0; i < commonSize; ++i) {
121         if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
122             indexOfDivergence = i;
123             break;
124         }
125     }
126     
127     bool hasPrinted = false;
128     
129     // Print the pops.
130     for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
131         out.print(prefix);
132         printWhiteSpace(out, i * 2);
133         out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
134         hasPrinted = true;
135     }
136     
137     // Print the pushes.
138     for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
139         out.print(prefix);
140         printWhiteSpace(out, i * 2);
141         out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
142         hasPrinted = true;
143     }
144     
145     return hasPrinted;
146 }
147
148 int Graph::amountOfNodeWhiteSpace(Node* node)
149 {
150     return (node->origin.semantic.inlineDepth() - 1) * 2;
151 }
152
153 void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
154 {
155     printWhiteSpace(out, amountOfNodeWhiteSpace(node));
156 }
157
158 void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
159 {
160     NodeType op = node->op();
161
162     unsigned refCount = node->refCount();
163     bool mustGenerate = node->mustGenerate();
164     if (mustGenerate)
165         --refCount;
166
167     out.print(prefix);
168     printNodeWhiteSpace(out, node);
169
170     // Example/explanation of dataflow dump output
171     //
172     //   14:   <!2:7>  GetByVal(@3, @13)
173     //   ^1     ^2 ^3     ^4       ^5
174     //
175     // (1) The nodeIndex of this operation.
176     // (2) The reference count. The number printed is the 'real' count,
177     //     not including the 'mustGenerate' ref. If the node is
178     //     'mustGenerate' then the count is prefixed with '!'.
179     // (3) The virtual register slot assigned to this node.
180     // (4) The name of the operation.
181     // (5) The arguments to the operation. They may be of the form:
182     //         @#   - a NodeIndex referencing a prior node in the graph.
183     //         arg# - an argument number.
184     //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
185     //         var# - the index of a var on the global object, used by GetGlobalVar/PutGlobalVar operations.
186     out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
187     if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
188         out.print(node->virtualRegister());
189     else
190         out.print("-");
191     out.print(">\t", opName(op), "(");
192     CommaPrinter comma;
193     if (node->flags() & NodeHasVarArgs) {
194         for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
195             if (!m_varArgChildren[childIdx])
196                 continue;
197             out.print(comma, m_varArgChildren[childIdx]);
198         }
199     } else {
200         if (!!node->child1() || !!node->child2() || !!node->child3())
201             out.print(comma, node->child1());
202         if (!!node->child2() || !!node->child3())
203             out.print(comma, node->child2());
204         if (!!node->child3())
205             out.print(comma, node->child3());
206     }
207
208     if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
209         out.print(comma, NodeFlagsDump(node->flags()));
210     if (node->prediction())
211         out.print(comma, SpeculationDump(node->prediction()));
212     if (node->hasArrayMode())
213         out.print(comma, node->arrayMode());
214     if (node->hasArithMode())
215         out.print(comma, node->arithMode());
216     if (node->hasScopeOffset())
217         out.print(comma, node->scopeOffset());
218     if (node->hasDirectArgumentsOffset())
219         out.print(comma, node->capturedArgumentsOffset());
220     if (node->hasRegisterPointer())
221         out.print(comma, "global", globalObjectFor(node->origin.semantic)->findVariableIndex(node->variablePointer()), "(", RawPointer(node->variablePointer()), ")");
222     if (node->hasIdentifier())
223         out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
224     if (node->hasPromotedLocationDescriptor())
225         out.print(comma, node->promotedLocationDescriptor());
226     if (node->hasStructureSet())
227         out.print(comma, inContext(node->structureSet(), context));
228     if (node->hasStructure())
229         out.print(comma, inContext(*node->structure(), context));
230     if (node->hasTransition()) {
231         out.print(comma, pointerDumpInContext(node->transition(), context));
232 #if USE(JSVALUE64)
233         out.print(", ID:", node->transition()->next->id());
234 #else
235         out.print(", ID:", RawPointer(node->transition()->next));
236 #endif
237     }
238     if (node->hasCellOperand()) {
239         if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
240             out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
241         else {
242             out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
243             if (node->cellOperand()->value().isCell()) {
244                 CallVariant variant(node->cellOperand()->value().asCell());
245                 if (ExecutableBase* executable = variant.executable()) {
246                     if (executable->isHostFunction())
247                         out.print(comma, "<host function>");
248                     else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
249                         out.print(comma, FunctionExecutableDump(functionExecutable));
250                     else
251                         out.print(comma, "<non-function executable>");
252                 }
253             }
254         }
255     }
256     if (node->hasStorageAccessData()) {
257         StorageAccessData& storageAccessData = node->storageAccessData();
258         out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
259         out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
260     }
261     if (node->hasMultiGetByOffsetData()) {
262         MultiGetByOffsetData& data = node->multiGetByOffsetData();
263         out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
264         for (unsigned i = 0; i < data.variants.size(); ++i)
265             out.print(comma, inContext(data.variants[i], context));
266     }
267     if (node->hasMultiPutByOffsetData()) {
268         MultiPutByOffsetData& data = node->multiPutByOffsetData();
269         out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
270         for (unsigned i = 0; i < data.variants.size(); ++i)
271             out.print(comma, inContext(data.variants[i], context));
272     }
273     ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
274     if (node->hasVariableAccessData(*this)) {
275         VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
276         if (variableAccessData) {
277             VirtualRegister operand = variableAccessData->local();
278             out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
279             operand = variableAccessData->machineLocal();
280             if (operand.isValid())
281                 out.print(comma, "machine:", operand);
282         }
283     }
284     if (node->hasStackAccessData()) {
285         StackAccessData* data = node->stackAccessData();
286         out.print(comma, data->local);
287         if (data->machineLocal.isValid())
288             out.print(comma, "machine:", data->machineLocal);
289         out.print(comma, data->format);
290     }
291     if (node->hasUnlinkedLocal()) 
292         out.print(comma, node->unlinkedLocal());
293     if (node->hasUnlinkedMachineLocal()) {
294         VirtualRegister operand = node->unlinkedMachineLocal();
295         if (operand.isValid())
296             out.print(comma, "machine:", operand);
297     }
298     if (node->hasConstantBuffer()) {
299         out.print(comma);
300         out.print(node->startConstant(), ":[");
301         CommaPrinter anotherComma;
302         for (unsigned i = 0; i < node->numConstants(); ++i)
303             out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
304         out.print("]");
305     }
306     if (node->hasIndexingType())
307         out.print(comma, IndexingTypeDump(node->indexingType()));
308     if (node->hasTypedArrayType())
309         out.print(comma, node->typedArrayType());
310     if (node->hasPhi())
311         out.print(comma, "^", node->phi()->index());
312     if (node->hasExecutionCounter())
313         out.print(comma, RawPointer(node->executionCounter()));
314     if (node->hasWatchpointSet())
315         out.print(comma, RawPointer(node->watchpointSet()));
316     if (node->hasStoragePointer())
317         out.print(comma, RawPointer(node->storagePointer()));
318     if (node->hasObjectMaterializationData())
319         out.print(comma, node->objectMaterializationData());
320     if (node->hasCallVarargsData())
321         out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
322     if (node->hasLoadVarargsData()) {
323         LoadVarargsData* data = node->loadVarargsData();
324         out.print(comma, "start = ", data->start, ", count = ", data->count);
325         if (data->machineStart.isValid())
326             out.print(", machineStart = ", data->machineStart);
327         if (data->machineCount.isValid())
328             out.print(", machineCount = ", data->machineCount);
329         out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
330         out.print(", limit = ", data->limit);
331     }
332     if (node->isConstant())
333         out.print(comma, pointerDumpInContext(node->constant(), context));
334     if (node->isJump())
335         out.print(comma, "T:", *node->targetBlock());
336     if (node->isBranch())
337         out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
338     if (node->isSwitch()) {
339         SwitchData* data = node->switchData();
340         out.print(comma, data->kind);
341         for (unsigned i = 0; i < data->cases.size(); ++i)
342             out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
343         out.print(comma, "default:", data->fallThrough);
344     }
345     ClobberSet reads;
346     ClobberSet writes;
347     addReadsAndWrites(*this, node, reads, writes);
348     if (!reads.isEmpty())
349         out.print(comma, "R:", sortedListDump(reads.direct(), ","));
350     if (!writes.isEmpty())
351         out.print(comma, "W:", sortedListDump(writes.direct(), ","));
352     if (node->origin.isSet()) {
353         out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
354         if (node->origin.semantic != node->origin.forExit)
355             out.print(comma, "exit: ", node->origin.forExit);
356     }
357     
358     out.print(")");
359
360     if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
361         out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
362     else if (node->hasHeapPrediction())
363         out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
364     
365     out.print("\n");
366 }
367
368 bool Graph::terminalsAreValid()
369 {
370     for (BasicBlock* block : blocksInNaturalOrder()) {
371         if (!block->terminal())
372             return false;
373     }
374     return true;
375 }
376
377 void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
378 {
379     out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
380     if (block->executionCount == block->executionCount) // A double compares unequal to itself only when it is NaN, i.e. when no execution count was recorded.
381         out.print(prefix, "  Execution count: ", block->executionCount, "\n");
382     out.print(prefix, "  Predecessors:");
383     for (size_t i = 0; i < block->predecessors.size(); ++i)
384         out.print(" ", *block->predecessors[i]);
385     out.print("\n");
386     out.print(prefix, "  Successors:");
387     if (block->terminal()) {
388         for (BasicBlock* successor : block->successors()) {
389             out.print(" ", *successor);
390             if (m_prePostNumbering.isValid())
391                 out.print(" (", m_prePostNumbering.edgeKind(block, successor), ")");
392         }
393     } else
394         out.print(" <invalid>");
395     out.print("\n");
396     if (m_dominators.isValid() && terminalsAreValid()) {
397         out.print(prefix, "  Dominated by: ", m_dominators.dominatorsOf(block), "\n");
398         out.print(prefix, "  Dominates: ", m_dominators.blocksDominatedBy(block), "\n");
399         out.print(prefix, "  Dominance Frontier: ", m_dominators.dominanceFrontierOf(block), "\n");
400         out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators.iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
401     }
402     if (m_prePostNumbering.isValid())
403         out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering.preNumber(block), "/", m_prePostNumbering.postNumber(block), "\n");
404     if (m_naturalLoops.isValid()) {
405         if (const NaturalLoop* loop = m_naturalLoops.headerOf(block)) {
406             out.print(prefix, "  Loop header, contains:");
407             Vector<BlockIndex> sortedBlockList;
408             for (unsigned i = 0; i < loop->size(); ++i)
409                 sortedBlockList.append(loop->at(i)->index);
410             std::sort(sortedBlockList.begin(), sortedBlockList.end());
411             for (unsigned i = 0; i < sortedBlockList.size(); ++i)
412                 out.print(" #", sortedBlockList[i]);
413             out.print("\n");
414         }
415         
416         Vector<const NaturalLoop*> containingLoops =
417             m_naturalLoops.loopsOf(block);
418         if (!containingLoops.isEmpty()) {
419             out.print(prefix, "  Containing loop headers:");
420             for (unsigned i = 0; i < containingLoops.size(); ++i)
421                 out.print(" ", *containingLoops[i]->header());
422             out.print("\n");
423         }
424     }
425     if (!block->phis.isEmpty()) {
426         out.print(prefix, "  Phi Nodes:");
427         for (size_t i = 0; i < block->phis.size(); ++i) {
428             Node* phiNode = block->phis[i];
429             if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
430                 continue;
431             out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
432             if (phiNode->child1()) {
433                 out.print("@", phiNode->child1()->index());
434                 if (phiNode->child2()) {
435                     out.print(", @", phiNode->child2()->index());
436                     if (phiNode->child3())
437                         out.print(", @", phiNode->child3()->index());
438                 }
439             }
440             out.print(")", i + 1 < block->phis.size() ? "," : "");
441         }
442         out.print("\n");
443     }
444 }
445
446 void Graph::dump(PrintStream& out, DumpContext* context)
447 {
448     DumpContext myContext;
449     myContext.graph = this;
450     if (!context)
451         context = &myContext;
452     
453     out.print("\n");
454     out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
455     out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
456     if (m_form == SSA)
457         out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
458     else
459         out.print("  Arguments: ", listDump(m_arguments), "\n");
460     out.print("\n");
461     
462     Node* lastNode = 0;
463     for (size_t b = 0; b < m_blocks.size(); ++b) {
464         BasicBlock* block = m_blocks[b].get();
465         if (!block)
466             continue;
467         dumpBlockHeader(out, "", block, DumpAllPhis, context);
468         out.print("  States: ", block->cfaStructureClobberStateAtHead);
469         if (!block->cfaHasVisited)
470             out.print(", CurrentlyCFAUnreachable");
471         if (!block->intersectionOfCFAHasVisited)
472             out.print(", CFAUnreachable");
473         out.print("\n");
474         switch (m_form) {
475         case LoadStore:
476         case ThreadedCPS: {
477             out.print("  Vars Before: ");
478             if (block->cfaHasVisited)
479                 out.print(inContext(block->valuesAtHead, context));
480             else
481                 out.print("<empty>");
482             out.print("\n");
483             out.print("  Intersected Vars Before: ");
484             if (block->intersectionOfCFAHasVisited)
485                 out.print(inContext(block->intersectionOfPastValuesAtHead, context));
486             else
487                 out.print("<empty>");
488             out.print("\n");
489             out.print("  Var Links: ", block->variablesAtHead, "\n");
490             break;
491         }
492             
493         case SSA: {
494             RELEASE_ASSERT(block->ssa);
495             out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
496             out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
497             out.print("  Values: ", nodeMapDump(block->ssa->valuesAtHead, context), "\n");
498             break;
499         } }
500         for (size_t i = 0; i < block->size(); ++i) {
501             dumpCodeOrigin(out, "", lastNode, block->at(i), context);
502             dump(out, "", block->at(i), context);
503             lastNode = block->at(i);
504         }
505         out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
506         if (!block->cfaDidFinish)
507             out.print(", CFAInvalidated");
508         out.print("\n");
509         switch (m_form) {
510         case LoadStore:
511         case ThreadedCPS: {
512             out.print("  Vars After: ");
513             if (block->cfaHasVisited)
514                 out.print(inContext(block->valuesAtTail, context));
515             else
516                 out.print("<empty>");
517             out.print("\n");
518             out.print("  Var Links: ", block->variablesAtTail, "\n");
519             break;
520         }
521             
522         case SSA: {
523             RELEASE_ASSERT(block->ssa);
524             out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
525             out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
526             out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
527             break;
528         } }
529         out.print("\n");
530     }
531     
532     if (!myContext.isEmpty()) {
533         myContext.dump(out);
534         out.print("\n");
535     }
536 }
537
538 void Graph::dethread()
539 {
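    // Dethreading drops a ThreadedCPS graph back to LoadStore form by severing the Phi
    // children that thread variable use/def information between blocks. It is a no-op in
    // LoadStore and SSA form.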
540     if (m_form == LoadStore || m_form == SSA)
541         return;
542     
543     if (logCompilationChanges())
544         dataLog("Dethreading DFG graph.\n");
545     
546     SamplingRegion samplingRegion("DFG Dethreading");
547     
548     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
549         BasicBlock* block = m_blocks[blockIndex].get();
550         if (!block)
551             continue;
552         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
553             Node* phi = block->phis[phiIndex];
554             phi->children.reset();
555         }
556     }
557     
558     m_form = LoadStore;
559 }
560
561 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
562 {
563     if (!successor->isReachable) {
564         successor->isReachable = true;
565         worklist.append(successor);
566     }
567     
568     successor->predecessors.append(block);
569 }
570
571 void Graph::determineReachability()
572 {
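    // Worklist traversal from the root block: mark every block we can reach and record each
    // traversed edge in the successor's predecessor list (see handleSuccessor above).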
573     Vector<BasicBlock*, 16> worklist;
574     worklist.append(block(0));
575     block(0)->isReachable = true;
576     while (!worklist.isEmpty()) {
577         BasicBlock* block = worklist.takeLast();
578         for (unsigned i = block->numSuccessors(); i--;)
579             handleSuccessor(worklist, block, block->successor(i));
580     }
581 }
582
583 void Graph::resetReachability()
584 {
585     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
586         BasicBlock* block = m_blocks[blockIndex].get();
587         if (!block)
588             continue;
589         block->isReachable = false;
590         block->predecessors.clear();
591     }
592     
593     determineReachability();
594 }
595
596 void Graph::mergeRelevantToOSR()
597 {
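    // Flag the values observed by MovHint and PutHint as NodeRelevantToOSR so that later
    // phases know OSR exit may need them.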
598     for (BasicBlock* block : blocksInNaturalOrder()) {
599         for (Node* node : *block) {
600             switch (node->op()) {
601             case MovHint:
602                 node->child1()->mergeFlags(NodeRelevantToOSR);
603                 break;
604                 
605             case PutHint:
606                 node->child2()->mergeFlags(NodeRelevantToOSR);
607                 break;
608                 
609             default:
610                 break;
611             }
612         }
613     }
614 }
615
616 namespace {
617
618 class RefCountCalculator {
619 public:
620     RefCountCalculator(Graph& graph)
621         : m_graph(graph)
622     {
623     }
624     
625     void calculate()
626     {
627         // First reset the counts to 0 for all nodes.
628         //
629         // Also take this opportunity to pretend that Check nodes are not NodeMustGenerate. Check
630         // nodes are MustGenerate because they are executed for effect, but they follow the same
631         // DCE rules as nodes that aren't MustGenerate: they only contribute to the ref count of
632         // their children if the edges require checks. Non-checking edges are removed. Note that
633         // for any Checks left over, this phase will turn them back into NodeMustGenerate.
634         for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
635             BasicBlock* block = m_graph.block(blockIndex);
636             if (!block)
637                 continue;
638             for (unsigned indexInBlock = block->size(); indexInBlock--;)
639                 block->at(indexInBlock)->setRefCount(0);
640             for (unsigned phiIndex = block->phis.size(); phiIndex--;)
641                 block->phis[phiIndex]->setRefCount(0);
642         }
643     
644         // Now find the roots:
645         // - Nodes that are must-generate.
646         // - Nodes that are reachable from type checks.
647         // Set their ref counts to 1 and put them on the worklist.
648         for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
649             BasicBlock* block = m_graph.block(blockIndex);
650             if (!block)
651                 continue;
652             for (unsigned indexInBlock = block->size(); indexInBlock--;) {
653                 Node* node = block->at(indexInBlock);
654                 DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
655                 if (!(node->flags() & NodeMustGenerate))
656                     continue;
657                 if (node->op() == Check) {
658                     // We don't treat Check nodes as MustGenerate. We will gladly
659                     // kill them off in this phase.
660                     continue;
661                 }
662                 if (!node->postfixRef())
663                     m_worklist.append(node);
664             }
665         }
666         
667         while (!m_worklist.isEmpty()) {
668             while (!m_worklist.isEmpty()) {
669                 Node* node = m_worklist.last();
670                 m_worklist.removeLast();
671                 ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
672                 DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
673             }
674             
675             if (m_graph.m_form == SSA) {
676                 // Find Phi->Upsilon edges, which are represented as meta-data in the
677                 // Upsilon.
678                 for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
679                     BasicBlock* block = m_graph.block(blockIndex);
680                     if (!block)
681                         continue;
682                     for (unsigned nodeIndex = block->size(); nodeIndex--;) {
683                         Node* node = block->at(nodeIndex);
684                         if (node->op() != Upsilon)
685                             continue;
686                         if (node->shouldGenerate())
687                             continue;
688                         if (node->phi()->shouldGenerate())
689                             countNode(node);
690                     }
691                 }
692             }
693         }
694     }
695     
696 private:
697     void findTypeCheckRoot(Node*, Edge edge)
698     {
699         // We may have an "unproved" untyped use for code that is unreachable. The CFA
700         // will just not have gotten around to it.
701         if (edge.isProved() || edge.willNotHaveCheck())
702             return;
703         if (!edge->postfixRef())
704             m_worklist.append(edge.node());
705     }
706     
707     void countNode(Node* node)
708     {
709         if (node->postfixRef())
710             return;
711         m_worklist.append(node);
712     }
713     
714     void countEdge(Node*, Edge edge)
715     {
716         // Don't count edges that are already counted for their type checks.
717         if (!(edge.isProved() || edge.willNotHaveCheck()))
718             return;
719         countNode(edge.node());
720     }
721     
722     Graph& m_graph;
723     Vector<Node*, 128> m_worklist;
724 };
725
726 } // anonymous namespace
727
728 void Graph::computeRefCounts()
729 {
730     RefCountCalculator calculator(*this);
731     calculator.calculate();
732 }
733
734 void Graph::killBlockAndItsContents(BasicBlock* block)
735 {
736     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
737         m_allocator.free(block->phis[phiIndex]);
738     for (unsigned nodeIndex = block->size(); nodeIndex--;)
739         m_allocator.free(block->at(nodeIndex));
740     
741     killBlock(block);
742 }
743
744 void Graph::killUnreachableBlocks()
745 {
746     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
747         BasicBlock* block = this->block(blockIndex);
748         if (!block)
749             continue;
750         if (block->isReachable)
751             continue;
752         
753         killBlockAndItsContents(block);
754     }
755 }
756
757 void Graph::invalidateCFG()
758 {
759     m_dominators.invalidate();
760     m_naturalLoops.invalidate();
761     m_prePostNumbering.invalidate();
762 }
763
764 void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
765 {
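    // Starting at startIndexInBlock, redirect uses of this variable's GetLocal to newGetLocal,
    // keeping variablesAtTail consistent, and stop at the next SetLocal or GetLocal of the
    // same variable.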
766     for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
767         Node* node = block[indexInBlock];
768         bool shouldContinue = true;
769         switch (node->op()) {
770         case SetLocal: {
771             if (node->local() == variableAccessData->local())
772                 shouldContinue = false;
773             break;
774         }
775                 
776         case GetLocal: {
777             if (node->variableAccessData() != variableAccessData)
778                 continue;
779             substitute(block, indexInBlock, node, newGetLocal);
780             Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
781             if (oldTailNode == node)
782                 block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
783             shouldContinue = false;
784             break;
785         }
786                 
787         default:
788             break;
789         }
790         if (!shouldContinue)
791             break;
792     }
793 }
794
795 BlockList Graph::blocksInPreOrder()
796 {
797     BlockList result;
798     BlockWorklist worklist;
799     worklist.push(block(0));
800     while (BasicBlock* block = worklist.pop()) {
801         result.append(block);
802         for (unsigned i = block->numSuccessors(); i--;)
803             worklist.push(block->successor(i));
804     }
805     return result;
806 }
807
808 BlockList Graph::blocksInPostOrder()
809 {
810     BlockList result;
811     PostOrderBlockWorklist worklist;
812     worklist.push(block(0));
813     while (BlockWithOrder item = worklist.pop()) {
814         switch (item.order) {
815         case PreOrder:
816             worklist.pushPost(item.block);
817             for (unsigned i = item.block->numSuccessors(); i--;)
818                 worklist.push(item.block->successor(i));
819             break;
820         case PostOrder:
821             result.append(item.block);
822             break;
823         }
824     }
825     return result;
826 }
827
828 void Graph::clearReplacements()
829 {
830     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
831         BasicBlock* block = m_blocks[blockIndex].get();
832         if (!block)
833             continue;
834         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
835             block->phis[phiIndex]->replacement = 0;
836         for (unsigned nodeIndex = block->size(); nodeIndex--;)
837             block->at(nodeIndex)->replacement = 0;
838     }
839 }
840
841 void Graph::initializeNodeOwners()
842 {
843     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
844         BasicBlock* block = m_blocks[blockIndex].get();
845         if (!block)
846             continue;
847         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
848             block->phis[phiIndex]->owner = block;
849         for (unsigned nodeIndex = block->size(); nodeIndex--;)
850             block->at(nodeIndex)->owner = block;
851     }
852 }
853
854 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
855 {
856     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
857         BasicBlock* block = m_blocks[blockIndex].get();
858         if (!block)
859             continue;
860         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
861             block->phis[phiIndex]->clearFlags(flags);
862         for (unsigned nodeIndex = block->size(); nodeIndex--;)
863             block->at(nodeIndex)->clearFlags(flags);
864     }
865 }
866
867 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
868 {
869     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
870     if (iter != m_bytecodeLiveness.end())
871         return *iter->value;
872     
873     std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
874     codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
875     FullBytecodeLiveness& result = *liveness;
876     m_bytecodeLiveness.add(codeBlock, WTF::move(liveness));
877     return result;
878 }
879
880 FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
881 {
882     return livenessFor(baselineCodeBlockFor(inlineCallFrame));
883 }
884
885 BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
886 {
887     HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
888     if (iter != m_bytecodeKills.end())
889         return *iter->value;
890     
891     std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
892     codeBlock->livenessAnalysis().computeKills(*kills);
893     BytecodeKills& result = *kills;
894     m_bytecodeKills.add(codeBlock, WTF::move(kills));
895     return result;
896 }
897
898 BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
899 {
900     return killsFor(baselineCodeBlockFor(inlineCallFrame));
901 }
902
903 bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
904 {
905     for (;;) {
906         VirtualRegister reg = VirtualRegister(
907             operand.offset() - codeOrigin.stackOffset());
908         
909         if (operand.offset() < codeOrigin.stackOffset() + JSStack::CallFrameHeaderSize) {
910             if (reg.isArgument()) {
911                 RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);
912                 
913                 if (codeOrigin.inlineCallFrame->isClosureCall
914                     && reg.offset() == JSStack::Callee)
915                     return true;
916                 
917                 if (codeOrigin.inlineCallFrame->isVarargs()
918                     && reg.offset() == JSStack::ArgumentCount)
919                     return true;
920                 
921                 return false;
922             }
923             
924             return livenessFor(codeOrigin.inlineCallFrame).operandIsLive(
925                 reg.offset(), codeOrigin.bytecodeIndex);
926         }
927         
928         InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
929         if (!inlineCallFrame)
930             break;
931
932     // Arguments are always live. This would be redundant if it weren't for our
933         // op_call_varargs inlining.
934         // FIXME: 'this' might not be live, but we don't have a way of knowing.
935         // https://bugs.webkit.org/show_bug.cgi?id=128519
936         if (reg.isArgument()
937             && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
938             return true;
939         
940         codeOrigin = inlineCallFrame->caller;
941     }
942     
943     return true;
944 }
945
946 unsigned Graph::frameRegisterCount()
947 {
948     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
949     return roundLocalRegisterCountForFramePointerOffset(result);
950 }
951
952 unsigned Graph::stackPointerOffset()
953 {
954     return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
955 }
956
957 unsigned Graph::requiredRegisterCountForExit()
958 {
959     unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
960     for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
961         InlineCallFrame* inlineCallFrame = *iter;
962         CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
963         unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
964         count = std::max(count, requiredCount);
965     }
966     return count;
967 }
968
969 unsigned Graph::requiredRegisterCountForExecutionAndExit()
970 {
971     return std::max(frameRegisterCount(), requiredRegisterCountForExit());
972 }
973
974 JSValue Graph::tryGetConstantProperty(
975     JSValue base, const StructureSet& structureSet, PropertyOffset offset)
976 {
977     if (!base || !base.isObject())
978         return JSValue();
979     
980     JSObject* object = asObject(base);
981     
982     for (unsigned i = structureSet.size(); i--;) {
983         Structure* structure = structureSet[i];
984         WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
985         if (!set || !set->isStillValid())
986             return JSValue();
987         
988         ASSERT(structure->isValidOffset(offset));
989         ASSERT(!structure->isUncacheableDictionary());
990         
991         watchpoints().addLazily(set);
992     }
993     
994     // What follows may require some extra thought. We need this load to load a valid JSValue. If
995     // our profiling makes sense and we're still on track to generate code that won't be
996     // invalidated, then we have nothing to worry about. We do, however, have to worry about
997     // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
998     // is doomed.
999     //
1000     // One argument in favor of this code is that it should definitely work because the butterfly
1001     // is always set before the structure. However, we don't currently have a fence between those
1002     // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
1003     // So, for this to fail, you'd need an access on a constant object pointer such that the inline
1004     // caches told us that the object had a structure that it did not *yet* have, and then later,
1005     // the object transitioned to that structure that the inline caches had already seen. And then
1006     // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
1007     // this is worth revisiting but it isn't worth losing sleep over. Filed:
1008     // https://bugs.webkit.org/show_bug.cgi?id=134641
1009     //
1010     // For now, we just do the minimal thing: defend against the structure right now being
1011     // incompatible with the getDirect we're trying to do. The easiest way to do that is to
1012     // determine if the structure belongs to the proven set.
1013     
1014     if (!structureSet.contains(object->structure()))
1015         return JSValue();
1016     
1017     return object->getDirect(offset);
1018 }
1019
1020 JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
1021 {
1022     return tryGetConstantProperty(base, StructureSet(structure), offset);
1023 }
1024
1025 JSValue Graph::tryGetConstantProperty(
1026     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1027 {
1028     if (structure.isTop() || structure.isClobbered())
1029         return JSValue();
1030     
1031     return tryGetConstantProperty(base, structure.set(), offset);
1032 }
1033
1034 JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
1035 {
1036     return tryGetConstantProperty(base.m_value, base.m_structure, offset);
1037 }
1038
1039 JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
1040 {
1041     // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
1042     
1043     if (!base)
1044         return JSValue();
1045     
1046     JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(base);
1047     if (!activation)
1048         return JSValue();
1049     
1050     SymbolTable* symbolTable = activation->symbolTable();
1051     JSValue value;
1052     WatchpointSet* set;
1053     {
1054         ConcurrentJITLocker locker(symbolTable->m_lock);
1055         
1056         SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
1057         if (!entry)
1058             return JSValue();
1059         
1060         set = entry->watchpointSet();
1061         if (!set)
1062             return JSValue();
1063         
1064         if (set->state() != IsWatched)
1065             return JSValue();
1066         
1067         ASSERT(entry->scopeOffset() == offset);
1068         value = activation->variableAt(offset).get();
1069         if (!value)
1070             return JSValue();
1071     }
1072     
1073     watchpoints().addLazily(set);
1074     
1075     return value;
1076 }
1077
1078 JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
1079 {
1080     return tryGetConstantClosureVar(value.m_value, offset);
1081 }
1082
1083 JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1084 {
1085     if (!node->hasConstant())
1086         return JSValue();
1087     return tryGetConstantClosureVar(node->asJSValue(), offset);
1088 }
1089
1090 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
1091 {
1092     if (!value)
1093         return nullptr;
1094     JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(value);
1095     if (!view)
1096         return nullptr;
1097     if (!view->length())
1098         return nullptr;
1099     WTF::loadLoadFence();
1100     watchpoints().addLazily(view);
1101     return view;
1102 }
1103
1104 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1105 {
1106     if (arrayMode.typedArrayType() == NotTypedArray)
1107         return nullptr;
1108     return tryGetFoldableView(value);
1109 }
1110
1111 void Graph::registerFrozenValues()
1112 {
1113     m_codeBlock->constants().resize(0);
1114     m_codeBlock->constantsSourceCodeRepresentation().resize(0);
1115     for (FrozenValue* value : m_frozenValues) {
1116         if (value->structure())
1117             ASSERT(m_plan.weakReferences.contains(value->structure()));
1118         
1119         switch (value->strength()) {
1120         case FragileValue: {
1121             break;
1122         }
1123         case WeakValue: {
1124             m_plan.weakReferences.addLazily(value->value().asCell());
1125             break;
1126         }
1127         case StrongValue: {
1128             unsigned constantIndex = m_codeBlock->addConstantLazily();
1129             initializeLazyWriteBarrierForConstant(
1130                 m_plan.writeBarriers,
1131                 m_codeBlock->constants()[constantIndex],
1132                 m_codeBlock,
1133                 constantIndex,
1134                 m_codeBlock->ownerExecutable(),
1135                 value->value());
1136             break;
1137         } }
1138     }
1139     m_codeBlock->constants().shrinkToFit();
1140     m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
1141 }
1142
1143 void Graph::visitChildren(SlotVisitor& visitor)
1144 {
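    // Report the cells this graph hangs on to - the frozen values plus the structures and
    // transitions referenced by individual nodes - so that the GC can keep them alive while
    // the compilation is in flight.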
1145     for (FrozenValue* value : m_frozenValues) {
1146         visitor.appendUnbarrieredReadOnlyValue(value->value());
1147         visitor.appendUnbarrieredReadOnlyPointer(value->structure());
1148     }
1149     
1150     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
1151         BasicBlock* block = this->block(blockIndex);
1152         if (!block)
1153             continue;
1154         
1155         for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
1156             Node* node = block->at(nodeIndex);
1157             
1158             switch (node->op()) {
1159             case CheckStructure:
1160                 for (unsigned i = node->structureSet().size(); i--;)
1161                     visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
1162                 break;
1163                 
1164             case NewObject:
1165             case ArrayifyToStructure:
1166             case NewStringObject:
1167                 visitor.appendUnbarrieredReadOnlyPointer(node->structure());
1168                 break;
1169                 
1170             case PutStructure:
1171             case AllocatePropertyStorage:
1172             case ReallocatePropertyStorage:
1173                 visitor.appendUnbarrieredReadOnlyPointer(
1174                     node->transition()->previous);
1175                 visitor.appendUnbarrieredReadOnlyPointer(
1176                     node->transition()->next);
1177                 break;
1178                 
1179             case MultiGetByOffset:
1180                 for (unsigned i = node->multiGetByOffsetData().variants.size(); i--;) {
1181                     GetByIdVariant& variant = node->multiGetByOffsetData().variants[i];
1182                     const StructureSet& set = variant.structureSet();
1183                     for (unsigned j = set.size(); j--;)
1184                         visitor.appendUnbarrieredReadOnlyPointer(set[j]);
1185
1186                     // Don't need to mark anything in the structure chain because that would
1187                     // have been decomposed into CheckStructure's. Don't need to mark the
1188                     // callLinkStatus because we wouldn't use MultiGetByOffset if any of the
1189                     // variants did that.
1190                     ASSERT(!variant.callLinkStatus());
1191                 }
1192                 break;
1193                     
1194             case MultiPutByOffset:
1195                 for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
1196                     PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
1197                     const StructureSet& set = variant.oldStructure();
1198                     for (unsigned j = set.size(); j--;)
1199                         visitor.appendUnbarrieredReadOnlyPointer(set[j]);
1200                     if (variant.kind() == PutByIdVariant::Transition)
1201                         visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
1202                 }
1203                 break;
1204                 
1205             default:
1206                 break;
1207             }
1208         }
1209     }
1210 }
1211
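// Frozen values come in three strengths (see registerFrozenValues() above): FragileValue makes
// no liveness promise, WeakValue is registered as a weak reference, and StrongValue gets a
// barriered slot in the CodeBlock constant pool. freeze() and freezeStrong() upgrade an
// existing entry to at least that strength.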
1212 FrozenValue* Graph::freezeFragile(JSValue value)
1213 {
1214     if (UNLIKELY(!value))
1215         return FrozenValue::emptySingleton();
1216     
1217     auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
1218     if (LIKELY(!result.isNewEntry))
1219         return result.iterator->value;
1220     
1221     return result.iterator->value = m_frozenValues.add(FrozenValue::freeze(value));
1222 }
1223
1224 FrozenValue* Graph::freeze(JSValue value)
1225 {
1226     FrozenValue* result = freezeFragile(value);
1227     result->strengthenTo(WeakValue);
1228     return result;
1229 }
1230
1231 FrozenValue* Graph::freezeStrong(JSValue value)
1232 {
1233     FrozenValue* result = freezeFragile(value);
1234     result->strengthenTo(StrongValue);
1235     return result;
1236 }
1237
1238 void Graph::convertToConstant(Node* node, FrozenValue* value)
1239 {
1240     if (value->structure())
1241         assertIsRegistered(value->structure());
1242     if (m_form == ThreadedCPS) {
1243         if (node->op() == GetLocal)
1244             dethread();
1245         else
1246             ASSERT(!node->hasVariableAccessData(*this));
1247     }
1248     node->convertToConstant(value);
1249 }
1250
1251 void Graph::convertToConstant(Node* node, JSValue value)
1252 {
1253     convertToConstant(node, freeze(value));
1254 }
1255
1256 void Graph::convertToStrongConstant(Node* node, JSValue value)
1257 {
1258     convertToConstant(node, freezeStrong(value));
1259 }
1260
1261 StructureRegistrationResult Graph::registerStructure(Structure* structure)
1262 {
1263     m_plan.weakReferences.addLazily(structure);
1264     if (m_plan.watchpoints.consider(structure))
1265         return StructureRegisteredAndWatched;
1266     return StructureRegisteredNormally;
1267 }
1268
1269 void Graph::assertIsRegistered(Structure* structure)
1270 {
1271     if (m_structureRegistrationState == HaveNotStartedRegistering)
1272         return;
1273     
1274     DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
1275     
1276     if (!structure->dfgShouldWatch())
1277         return;
1278     if (watchpoints().isWatched(structure->transitionWatchpointSet()))
1279         return;
1280     
1281     DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
1282 }
1283
1284 NO_RETURN_DUE_TO_CRASH static void crash(
1285     Graph& graph, const CString& whileText, const char* file, int line, const char* function,
1286     const char* assertion)
1287 {
1288     startCrashing();
1289     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1290     dataLog(file, "(", line, ") : ", function, "\n");
1291     dataLog("\n");
1292     dataLog(whileText);
1293     dataLog("Graph at time of failure:\n");
1294     graph.dump();
1295     dataLog("\n");
1296     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1297     dataLog(file, "(", line, ") : ", function, "\n");
1298     CRASH_WITH_SECURITY_IMPLICATION();
1299 }
1300
1301 void Graph::handleAssertionFailure(
1302     std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
1303 {
1304     crash(*this, "", file, line, function, assertion);
1305 }
1306
1307 void Graph::handleAssertionFailure(
1308     Node* node, const char* file, int line, const char* function, const char* assertion)
1309 {
1310     crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
1311 }
1312
1313 void Graph::handleAssertionFailure(
1314     BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
1315 {
1316     crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
1317 }
1318
1319 ValueProfile* Graph::valueProfileFor(Node* node)
1320 {
1321     if (!node)
1322         return nullptr;
1323         
1324     CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
1325         
1326     if (node->hasLocal(*this)) {
1327         if (!node->local().isArgument())
1328             return nullptr;
1329         int argument = node->local().toArgument();
1330         Node* argumentNode = m_arguments[argument];
1331         if (!argumentNode)
1332             return nullptr;
1333         if (node->variableAccessData() != argumentNode->variableAccessData())
1334             return nullptr;
1335         return profiledBlock->valueProfileForArgument(argument);
1336     }
1337         
1338     if (node->hasHeapPrediction())
1339         return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
1340         
1341     return nullptr;
1342 }
1343
1344 MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* node)
1345 {
1346     if (!node)
1347         return MethodOfGettingAValueProfile();
1348     
1349     if (ValueProfile* valueProfile = valueProfileFor(node))
1350         return MethodOfGettingAValueProfile(valueProfile);
1351     
1352     if (node->op() == GetLocal) {
1353         CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
1354         
1355         return MethodOfGettingAValueProfile::fromLazyOperand(
1356             profiledBlock,
1357             LazyOperandValueProfileKey(
1358                 node->origin.semantic.bytecodeIndex, node->local()));
1359     }
1360     
1361     return MethodOfGettingAValueProfile();
1362 }
1363
1364 } } // namespace JSC::DFG
1365
1366 #endif // ENABLE(DFG_JIT)