2015-10-09 Geoffrey Garen <ggaren@apple.com>
[WebKit.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
1 /*
2  * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGGraph.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "BytecodeKills.h"
32 #include "BytecodeLivenessAnalysisInlines.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGBlockWorklist.h"
36 #include "DFGClobberSet.h"
37 #include "DFGClobbersExitState.h"
38 #include "DFGJITCode.h"
39 #include "DFGMayExit.h"
40 #include "DFGVariableAccessDataDump.h"
41 #include "FullBytecodeLiveness.h"
42 #include "FunctionExecutableDump.h"
43 #include "JIT.h"
44 #include "JSLexicalEnvironment.h"
45 #include "MaxFrameExtentForSlowPathCall.h"
46 #include "OperandsInlines.h"
47 #include "JSCInlines.h"
48 #include "StackAlignment.h"
49 #include <wtf/CommaPrinter.h>
50 #include <wtf/ListDump.h>
51
52 namespace JSC { namespace DFG {
53
// Table of human-readable opcode names, indexed by NodeType. Built by
// stringizing every opcode in FOR_EACH_DFG_OP, so the array order matches
// the NodeType enum exactly; opName() relies on this correspondence.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};
60
// Constructs an empty graph for the given compilation plan. Nodes are
// allocated from longLivedState's allocator so the whole graph can be
// bulk-freed in the destructor.
Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock)
    , m_profiledBlock(m_codeBlock->alternative())
    , m_allocator(longLivedState.m_allocator)
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    // DFG compilation requires a profiled (baseline) alternative code block.
    ASSERT(m_profiledBlock);
    
    // Snapshot the debugger state at graph-construction time, including the
    // option that forces debugger-style bytecode generation.
    m_hasDebuggerEnabled = m_profiledBlock->globalObject()->hasDebugger()
        || Options::forceDebuggerBytecodeGeneration();
}
79
80 Graph::~Graph()
81 {
82     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
83         BasicBlock* block = this->block(blockIndex);
84         if (!block)
85             continue;
86
87         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
88             m_allocator.free(block->phis[phiIndex]);
89         for (unsigned nodeIndex = block->size(); nodeIndex--;)
90             m_allocator.free(block->at(nodeIndex));
91     }
92     m_allocator.freeAll();
93 }
94
95 const char *Graph::opName(NodeType op)
96 {
97     return dfgOpNames[op];
98 }
99
100 static void printWhiteSpace(PrintStream& out, unsigned amount)
101 {
102     while (amount-- > 0)
103         out.print(" ");
104 }
105
// Prints the change in inlining context between the previously-dumped node
// (*previousNodeRef) and currentNode: one "<--" line per inline frame that
// was exited and one "-->" line per frame entered, each indented by inline
// depth. Also advances previousNodeRef to currentNode. Returns true iff
// anything was printed.
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;
    
    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    if (!previousNode)
        return false;
    
    // Same inline frame means nothing to report.
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    // Find the first index at which the two inline stacks diverge; frames
    // below it are shared and need no printing.
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}
151
152 int Graph::amountOfNodeWhiteSpace(Node* node)
153 {
154     return (node->origin.semantic.inlineDepth() - 1) * 2;
155 }
156
157 void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
158 {
159     printWhiteSpace(out, amountOfNodeWhiteSpace(node));
160 }
161
// Dumps one node on a single line in the standard DFG format documented in
// the comment block below: index, ref count, virtual register, op name,
// children, then every piece of op-specific metadata the node carries.
void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    // Show the 'real' ref count; the implicit mustGenerate ref is dropped
    // from the number and indicated with '!' instead.
    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count it prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. The may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    // Children: either a contiguous varargs range or up to three fixed edges.
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    // Op-specific metadata; each item is printed only if the node has it.
    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                // If the cell is callable, describe its executable too.
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i) {
            out.print(comma, inContext(data.cases[i], context));
            if (data.cases[i].method().kind() == GetByOffsetMethod::Load)
                out.print(" (inferred value = ", inContext(inferredValueForProperty(data.cases[i].set(), identifiers()[data.identifierNumber]), context), ")");
        }
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    // Summarize the node's abstract-heap reads/writes and exit behavior.
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        // Only show the exit origin when it differs from the semantic one.
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    if (!node->origin.exitOK)
        out.print(comma, "ExitInvalid");
    out.print(")");

    // Finally, the speculated type prediction, if any.
    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}
381
382 bool Graph::terminalsAreValid()
383 {
384     for (BasicBlock* block : blocksInNaturalOrder()) {
385         if (!block->terminal())
386             return false;
387     }
388     return true;
389 }
390
// Prints a basic block's header: its identity and origin, execution count,
// predecessor/successor edges, the results of any valid CFG analyses
// (dominators, pre/post numbering, natural loops), and its Phi nodes
// (all of them, or just live ones, per phiNodeDumpMode).
void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
    // Self-comparison is false only for NaN: skip printing when the
    // execution count was never recorded.
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    // Successors can only be enumerated when the block has a terminal.
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
            if (m_prePostNumbering.isValid())
                out.print(" (", m_prePostNumbering.edgeKind(block, successor), ")");
        }
    } else
        out.print(" <invalid>");
    out.print("\n");
    if (m_dominators.isValid() && terminalsAreValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators.dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators.blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators.dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators.iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_prePostNumbering.isValid())
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering.preNumber(block), "/", m_prePostNumbering.postNumber(block), "\n");
    if (m_naturalLoops.isValid()) {
        if (const NaturalLoop* loop = m_naturalLoops.headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            // Sort member block indices so the dump is stable.
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops.loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}
459
// Dumps the whole graph: graph-level state, then every block (header, CFA
// state at head, nodes, CFA state at tail), then the frozen GC values and
// watchpoints. If no DumpContext is supplied, a local one is used and its
// accumulated side-table is flushed at the end.
void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA)
        out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
    else
        out.print("  Arguments: ", listDump(m_arguments), "\n");
    out.print("\n");
    
    // lastNode tracks the previously dumped node so dumpCodeOrigin can print
    // inline-stack transitions between consecutive nodes.
    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        // At-head state: representation differs between CPS and SSA forms.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        // At-tail state, again form-dependent.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    // Frozen values that point into the GC heap.
    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));
    
    // Flush anything the local context accumulated during the dump.
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}
558
559 void Graph::dethread()
560 {
561     if (m_form == LoadStore || m_form == SSA)
562         return;
563     
564     if (logCompilationChanges())
565         dataLog("Dethreading DFG graph.\n");
566     
567     SamplingRegion samplingRegion("DFG Dethreading");
568     
569     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
570         BasicBlock* block = m_blocks[blockIndex].get();
571         if (!block)
572             continue;
573         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
574             Node* phi = block->phis[phiIndex];
575             phi->children.reset();
576         }
577     }
578     
579     m_form = LoadStore;
580 }
581
582 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
583 {
584     if (!successor->isReachable) {
585         successor->isReachable = true;
586         worklist.append(successor);
587     }
588     
589     successor->predecessors.append(block);
590 }
591
592 void Graph::determineReachability()
593 {
594     Vector<BasicBlock*, 16> worklist;
595     worklist.append(block(0));
596     block(0)->isReachable = true;
597     while (!worklist.isEmpty()) {
598         BasicBlock* block = worklist.takeLast();
599         for (unsigned i = block->numSuccessors(); i--;)
600             handleSuccessor(worklist, block, block->successor(i));
601     }
602 }
603
604 void Graph::resetReachability()
605 {
606     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
607         BasicBlock* block = m_blocks[blockIndex].get();
608         if (!block)
609             continue;
610         block->isReachable = false;
611         block->predecessors.clear();
612     }
613     
614     determineReachability();
615 }
616
617 namespace {
618
// Computes ref counts for every node in the graph. Roots are (a) nodes
// flagged NodeMustGenerate and (b) nodes reachable through edges that still
// carry a type check; counts then propagate through children until a
// fixpoint. In SSA form, Phi->Upsilon liveness (stored as metadata on the
// Upsilon, not as an edge) is folded in by an outer fixpoint loop.
class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
        // Outer loop: needed because marking an Upsilon live can make more
        // nodes reachable, which can in turn make more Phis (and hence more
        // Upsilons) live.
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    // Ref any node used by an edge that still needs a runtime type check.
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    // Ref the node, enqueuing it only on the 0 -> 1 transition.
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};
715
716 } // anonymous namespace
717
718 void Graph::computeRefCounts()
719 {
720     RefCountCalculator calculator(*this);
721     calculator.calculate();
722 }
723
724 void Graph::killBlockAndItsContents(BasicBlock* block)
725 {
726     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
727         m_allocator.free(block->phis[phiIndex]);
728     for (unsigned nodeIndex = block->size(); nodeIndex--;)
729         m_allocator.free(block->at(nodeIndex));
730     
731     killBlock(block);
732 }
733
734 void Graph::killUnreachableBlocks()
735 {
736     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
737         BasicBlock* block = this->block(blockIndex);
738         if (!block)
739             continue;
740         if (block->isReachable)
741             continue;
742         
743         killBlockAndItsContents(block);
744     }
745 }
746
747 void Graph::invalidateCFG()
748 {
749     m_dominators.invalidate();
750     m_naturalLoops.invalidate();
751     m_prePostNumbering.invalidate();
752 }
753
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    // Scans forward from startIndexInBlock and redirects the next GetLocal
    // of the given variable to newGetLocal. The scan stops at the first
    // SetLocal of the same local (the old value is dead past that point) or
    // after one GetLocal has been substituted.
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            // A store to the same local ends the range we may rewrite.
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            // Redirect all uses of this GetLocal to the replacement node.
            substitute(block, indexInBlock, node, newGetLocal);
            // Keep variablesAtTail consistent if this node was the recorded
            // tail value for the local.
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}
784
785 BlockList Graph::blocksInPreOrder()
786 {
787     BlockList result;
788     BlockWorklist worklist;
789     worklist.push(block(0));
790     while (BasicBlock* block = worklist.pop()) {
791         result.append(block);
792         for (unsigned i = block->numSuccessors(); i--;)
793             worklist.push(block->successor(i));
794     }
795     return result;
796 }
797
798 BlockList Graph::blocksInPostOrder()
799 {
800     BlockList result;
801     PostOrderBlockWorklist worklist;
802     worklist.push(block(0));
803     while (BlockWithOrder item = worklist.pop()) {
804         switch (item.order) {
805         case PreOrder:
806             worklist.pushPost(item.block);
807             for (unsigned i = item.block->numSuccessors(); i--;)
808                 worklist.push(item.block->successor(i));
809             break;
810         case PostOrder:
811             result.append(item.block);
812             break;
813         }
814     }
815     return result;
816 }
817
818 void Graph::clearReplacements()
819 {
820     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
821         BasicBlock* block = m_blocks[blockIndex].get();
822         if (!block)
823             continue;
824         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
825             block->phis[phiIndex]->setReplacement(nullptr);
826         for (unsigned nodeIndex = block->size(); nodeIndex--;)
827             block->at(nodeIndex)->setReplacement(nullptr);
828     }
829 }
830
831 void Graph::clearEpochs()
832 {
833     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
834         BasicBlock* block = m_blocks[blockIndex].get();
835         if (!block)
836             continue;
837         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
838             block->phis[phiIndex]->setEpoch(Epoch());
839         for (unsigned nodeIndex = block->size(); nodeIndex--;)
840             block->at(nodeIndex)->setEpoch(Epoch());
841     }
842 }
843
844 void Graph::initializeNodeOwners()
845 {
846     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
847         BasicBlock* block = m_blocks[blockIndex].get();
848         if (!block)
849             continue;
850         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
851             block->phis[phiIndex]->owner = block;
852         for (unsigned nodeIndex = block->size(); nodeIndex--;)
853             block->at(nodeIndex)->owner = block;
854     }
855 }
856
857 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
858 {
859     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
860         BasicBlock* block = m_blocks[blockIndex].get();
861         if (!block)
862             continue;
863         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
864             block->phis[phiIndex]->clearFlags(flags);
865         for (unsigned nodeIndex = block->size(); nodeIndex--;)
866             block->at(nodeIndex)->clearFlags(flags);
867     }
868 }
869
bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    // Returns false if the condition cannot be kept valid by watchpoints;
    // otherwise registers everything needed to rely on it and returns true.
    if (!key.isWatchable())
        return false;
    
    // Keep alive every cell the condition refers to.
    m_plan.weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        m_plan.weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        m_plan.weakReferences.addLazily(key.requiredValue());
    
    m_plan.watchpoints.addLazily(key);

    // A watched Presence condition pins the property at this offset, so
    // loads from that (object, offset) pair are safe; see isSafeToLoad().
    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
    
    return true;
}
888
889 bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
890 {
891     return m_safeToLoad.contains(std::make_pair(base, offset));
892 }
893
InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    // Returns the inferred type descriptor for a (structure, property) key.
    // The answer is memoized in m_inferredTypes, and any watchpoints needed
    // to keep it valid are registered on first computation.
    assertIsRegistered(key.structure());
    
    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        // No inferred-type object exists, so there is nothing to watch; the
        // best we can claim is Top.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        // Top cannot be refuted later, so no watchpoint or weak reference
        // is needed.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }
    
    m_inferredTypes.add(key, typeDescriptor);

    m_plan.weakReferences.addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}
925
926 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
927 {
928     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
929     if (iter != m_bytecodeLiveness.end())
930         return *iter->value;
931     
932     std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
933     codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
934     FullBytecodeLiveness& result = *liveness;
935     m_bytecodeLiveness.add(codeBlock, WTF::move(liveness));
936     return result;
937 }
938
939 FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
940 {
941     return livenessFor(baselineCodeBlockFor(inlineCallFrame));
942 }
943
944 BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
945 {
946     HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
947     if (iter != m_bytecodeKills.end())
948         return *iter->value;
949     
950     std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
951     codeBlock->livenessAnalysis().computeKills(*kills);
952     BytecodeKills& result = *kills;
953     m_bytecodeKills.add(codeBlock, WTF::move(kills));
954     return result;
955 }
956
957 BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
958 {
959     return killsFor(baselineCodeBlockFor(inlineCallFrame));
960 }
961
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    // Determines whether the given machine-frame-relative operand is live
    // in bytecode at codeOrigin, walking the inline stack from the
    // innermost frame outward.
    CodeOrigin* codeOriginPtr = &codeOrigin;
    for (;;) {
        // Translate the operand into the current frame's coordinates.
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());
        
        if (operand.offset() < codeOriginPtr->stackOffset() + JSStack::CallFrameHeaderSize) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);
                
                // NOTE(review): this branch dereferences inlineCallFrame
                // without a null check, so it appears to assume header-slot
                // queries only happen for inlined frames — confirm callers
                // never ask about the machine frame's header slots.
                // The callee slot is needed for closure calls, and the
                // argument-count slot for varargs calls.
                if (codeOriginPtr->inlineCallFrame->isClosureCall
                    && reg.offset() == JSStack::Callee)
                    return true;
                
                if (codeOriginPtr->inlineCallFrame->isVarargs()
                    && reg.offset() == JSStack::ArgumentCount)
                    return true;
                
                return false;
            }
            
            // A local belonging to this frame: consult its bytecode
            // liveness analysis.
            return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
                reg.offset(), codeOriginPtr->bytecodeIndex);
        }
        
        InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
        if (!inlineCallFrame)
            break;

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
            return true;
        
        codeOriginPtr = inlineCallFrame->getCallerSkippingDeadFrames();

        // The first inline call frame could be an inline tail call
        if (!codeOriginPtr)
            break;
    }
    
    // Anything we couldn't attribute to an inlined frame is treated as live.
    return true;
}
1007
1008 BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
1009 {
1010     BitVector result;
1011     result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
1012     forAllLocalsLiveInBytecode(
1013         codeOrigin,
1014         [&] (VirtualRegister reg) {
1015             ASSERT(reg.isLocal());
1016             result.quickSet(reg.toLocal());
1017         });
1018     return result;
1019 }
1020
1021 unsigned Graph::frameRegisterCount()
1022 {
1023     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
1024     return roundLocalRegisterCountForFramePointerOffset(result);
1025 }
1026
1027 unsigned Graph::stackPointerOffset()
1028 {
1029     return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
1030 }
1031
1032 unsigned Graph::requiredRegisterCountForExit()
1033 {
1034     unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
1035     for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
1036         InlineCallFrame* inlineCallFrame = *iter;
1037         CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
1038         unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
1039         count = std::max(count, requiredCount);
1040     }
1041     return count;
1042 }
1043
1044 unsigned Graph::requiredRegisterCountForExecutionAndExit()
1045 {
1046     return std::max(frameRegisterCount(), requiredRegisterCountForExit());
1047 }
1048
JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    // Attempts to constant-fold a property load from a known object base.
    // Returns the empty JSValue when folding is not provably sound;
    // otherwise registers replacement watchpoints so the fold stays valid.
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        assertIsRegistered(structure);
        
        // If the property at this offset could be replaced without us
        // noticing (no watchpoint set, or one that already fired), folding
        // is unsound.
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had alraedy seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}
1096
1097 JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
1098 {
1099     return tryGetConstantProperty(base, StructureSet(structure), offset);
1100 }
1101
1102 JSValue Graph::tryGetConstantProperty(
1103     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1104 {
1105     if (structure.isInfinite()) {
1106         // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
1107         // watching to constant-fold the property.
1108         // https://bugs.webkit.org/show_bug.cgi?id=147271
1109         return JSValue();
1110     }
1111     
1112     return tryGetConstantProperty(base, structure.set(), offset);
1113 }
1114
1115 JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
1116 {
1117     return tryGetConstantProperty(base.m_value, base.m_structure, offset);
1118 }
1119
1120 AbstractValue Graph::inferredValueForProperty(
1121     const StructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
1122 {
1123     AbstractValue result;
1124     base.forEach(
1125         [&] (Structure* structure) {
1126             AbstractValue value;
1127             value.set(*this, inferredTypeForProperty(structure, uid));
1128             result.merge(value);
1129         });
1130     if (clobberState == StructuresAreClobbered)
1131         result.clobberStructures();
1132     return result;
1133 }
1134
1135 AbstractValue Graph::inferredValueForProperty(
1136     const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
1137     StructureClobberState clobberState)
1138 {
1139     if (JSValue value = tryGetConstantProperty(base, offset)) {
1140         AbstractValue result;
1141         result.set(*this, *freeze(value), clobberState);
1142         return result;
1143     }
1144
1145     if (base.m_structure.isFinite())
1146         return inferredValueForProperty(base.m_structure.set(), uid, clobberState);
1147
1148     return AbstractValue::heapTop();
1149 }
1150
JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // Attempts to treat a closure variable of a known activation as a
    // constant. Succeeds only when the variable's watchpoint is still in
    // the IsWatched state.
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        // Hold the symbol table's concurrent-JIT lock while inspecting the
        // entry and reading the variable's current value.
        ConcurrentJITLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        // Only a variable that is still being watched can be folded.
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    // Register the watchpoint (outside the lock) — presumably this is what
    // invalidates the compiled code if the variable is later reassigned.
    watchpoints().addLazily(set);
    
    return value;
}
1189
1190 JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
1191 {
1192     return tryGetConstantClosureVar(value.m_value, offset);
1193 }
1194
1195 JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1196 {
1197     if (!node->hasConstant())
1198         return JSValue();
1199     return tryGetConstantClosureVar(node->asJSValue(), offset);
1200 }
1201
1202 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
1203 {
1204     if (!value)
1205         return nullptr;
1206     JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(value);
1207     if (!value)
1208         return nullptr;
1209     if (!view->length())
1210         return nullptr;
1211     WTF::loadLoadFence();
1212     watchpoints().addLazily(view);
1213     return view;
1214 }
1215
1216 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1217 {
1218     if (arrayMode.typedArrayType() == NotTypedArray)
1219         return nullptr;
1220     return tryGetFoldableView(value);
1221 }
1222
void Graph::registerFrozenValues()
{
    // Publishes the frozen values accumulated during compilation: strong
    // values become code-block constants; weak values become weak
    // references in the plan. The constant pool is rebuilt from scratch.
    m_codeBlock->constants().resize(0);
    m_codeBlock->constantsSourceCodeRepresentation().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        if (!value->pointsToHeap())
            continue;
        
        // freeze() registered the structure of every heap-pointing value.
        ASSERT(value->structure());
        ASSERT(m_plan.weakReferences.contains(value->structure()));
        
        switch (value->strength()) {
        case WeakValue: {
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            // We already have a barrier on the code block.
            m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
    m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
}
1249
void Graph::visitChildren(SlotVisitor& visitor)
{
    // GC visitation: report every heap cell the graph's data structures
    // reference, so they survive collection while compilation is underway.
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarrieredReadOnlyValue(value->value());
        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
    }
    
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        
        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);
            
            // Only the node kinds below embed direct structure references.
            switch (node->op()) {
            case CheckStructure:
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
                break;
                
            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
                break;
                
            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                // Transitions reference both endpoints of the structure
                // change.
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->previous);
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->next);
                break;
                
            case MultiGetByOffset:
                for (const MultiGetByOffsetCase& getCase : node->multiGetByOffsetData().cases) {
                    for (Structure* structure : getCase.set())
                        visitor.appendUnbarrieredReadOnlyPointer(structure);
                }
                break;
                    
            case MultiPutByOffset:
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
                }
                break;
                
            default:
                break;
            }
        }
    }
}
1310
FrozenValue* Graph::freeze(JSValue value)
{
    // Returns the unique FrozenValue interned for this JSValue, creating
    // one on first use.
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();
    
    // Add-then-fill: the map slot is created first so repeated freezes of
    // the same value hit the fast path above.
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());
    
    FrozenValue frozenValue = FrozenValue::freeze(value);
    // If the frozen value carries a structure, register it so the rest of
    // the compiler may rely on it (see assertIsRegistered()).
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);
    
    return result.iterator->value = m_frozenValues.add(frozenValue);
}
1329
1330 FrozenValue* Graph::freezeStrong(JSValue value)
1331 {
1332     FrozenValue* result = freeze(value);
1333     result->strengthenTo(StrongValue);
1334     return result;
1335 }
1336
1337 void Graph::convertToConstant(Node* node, FrozenValue* value)
1338 {
1339     if (value->structure())
1340         assertIsRegistered(value->structure());
1341     node->convertToConstant(value);
1342 }
1343
1344 void Graph::convertToConstant(Node* node, JSValue value)
1345 {
1346     convertToConstant(node, freeze(value));
1347 }
1348
1349 void Graph::convertToStrongConstant(Node* node, JSValue value)
1350 {
1351     convertToConstant(node, freezeStrong(value));
1352 }
1353
1354 StructureRegistrationResult Graph::registerStructure(Structure* structure)
1355 {
1356     m_plan.weakReferences.addLazily(structure);
1357     if (m_plan.watchpoints.consider(structure))
1358         return StructureRegisteredAndWatched;
1359     return StructureRegisteredNormally;
1360 }
1361
void Graph::assertIsRegistered(Structure* structure)
{
    // It's convenient to be able to call this with a maybe-null structure.
    if (!structure)
        return;
    
    // Every structure the graph relies on must have been registered as a
    // weak reference; see registerStructure().
    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
    
    // If the structure should be watched, its transition watchpoint must
    // actually have been adopted.
    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;
    
    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}
1377
1378 NO_RETURN_DUE_TO_CRASH static void crash(
1379     Graph& graph, const CString& whileText, const char* file, int line, const char* function,
1380     const char* assertion)
1381 {
1382     startCrashing();
1383     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1384     dataLog(file, "(", line, ") : ", function, "\n");
1385     dataLog("\n");
1386     dataLog(whileText);
1387     dataLog("Graph at time of failure:\n");
1388     graph.dump();
1389     dataLog("\n");
1390     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1391     dataLog(file, "(", line, ") : ", function, "\n");
1392     CRASH_WITH_SECURITY_IMPLICATION();
1393 }
1394
1395 void Graph::handleAssertionFailure(
1396     std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
1397 {
1398     crash(*this, "", file, line, function, assertion);
1399 }
1400
1401 void Graph::handleAssertionFailure(
1402     Node* node, const char* file, int line, const char* function, const char* assertion)
1403 {
1404     crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
1405 }
1406
1407 void Graph::handleAssertionFailure(
1408     BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
1409 {
1410     crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
1411 }
1412
ValueProfile* Graph::valueProfileFor(Node* node)
{
    // Finds the baseline value profile corresponding to this node, or
    // nullptr when none applies.
    if (!node)
        return nullptr;
        
    CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
        
    if (node->hasLocal(*this)) {
        // For locals, only argument profiles exist, and only if this node
        // still speaks for the original argument value (same
        // VariableAccessData as the root argument node).
        if (!node->local().isArgument())
            return nullptr;
        int argument = node->local().toArgument();
        Node* argumentNode = m_arguments[argument];
        if (!argumentNode)
            return nullptr;
        if (node->variableAccessData() != argumentNode->variableAccessData())
            return nullptr;
        return profiledBlock->valueProfileForArgument(argument);
    }
        
    // Heap-predicted nodes map to the per-bytecode-offset profile.
    if (node->hasHeapPrediction())
        return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
        
    return nullptr;
}
1437
MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* node)
{
    // Like valueProfileFor(), but can additionally describe lazily
    // materialized per-operand profiles for GetLocal nodes.
    if (!node)
        return MethodOfGettingAValueProfile();
    
    if (ValueProfile* valueProfile = valueProfileFor(node))
        return MethodOfGettingAValueProfile(valueProfile);
    
    if (node->op() == GetLocal) {
        CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
        
        return MethodOfGettingAValueProfile::fromLazyOperand(
            profiledBlock,
            LazyOperandValueProfileKey(
                node->origin.semantic.bytecodeIndex, node->local()));
    }
    
    // No profile of any kind is available for this node.
    return MethodOfGettingAValueProfile();
}
1457
1458 } } // namespace JSC::DFG
1459
1460 #endif // ENABLE(DFG_JIT)