541c84ebb1117430c892b6f39b60e4ebf6d1b624
[WebKit-https.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
1 /*
2  * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGGraph.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "BytecodeKills.h"
32 #include "BytecodeLivenessAnalysisInlines.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGBlockWorklist.h"
36 #include "DFGClobberSet.h"
37 #include "DFGClobbersExitState.h"
38 #include "DFGCFG.h"
39 #include "DFGDominators.h"
40 #include "DFGJITCode.h"
41 #include "DFGMayExit.h"
42 #include "DFGNaturalLoops.h"
43 #include "DFGPrePostNumbering.h"
44 #include "DFGVariableAccessDataDump.h"
45 #include "FullBytecodeLiveness.h"
46 #include "FunctionExecutableDump.h"
47 #include "JIT.h"
48 #include "JSLexicalEnvironment.h"
49 #include "MaxFrameExtentForSlowPathCall.h"
50 #include "OperandsInlines.h"
51 #include "JSCInlines.h"
52 #include "StackAlignment.h"
53 #include <wtf/CommaPrinter.h>
54 #include <wtf/ListDump.h>
55
56 namespace JSC { namespace DFG {
57
// Creates an array of stringized names, one per DFG opcode, indexed by
// NodeType. Used by Graph::opName() when dumping the IR.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};
64
// A fresh Graph starts in LoadStore form, before any fixpoint, with only
// local unification done and every node considered live; the phases that
// run later advance these states.
Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock)
    , m_profiledBlock(m_codeBlock->alternative())
    , m_allocator(longLivedState.m_allocator)
    , m_cfg(std::make_unique<CFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    // The baseline alternative must exist; it is the source of profiling data.
    ASSERT(m_profiledBlock);
    
    m_hasDebuggerEnabled = m_profiledBlock->globalObject()->hasDebugger()
        || Options::forceDebuggerBytecodeGeneration();
}
84
85 Graph::~Graph()
86 {
87     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
88         BasicBlock* block = this->block(blockIndex);
89         if (!block)
90             continue;
91
92         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
93             m_allocator.free(block->phis[phiIndex]);
94         for (unsigned nodeIndex = block->size(); nodeIndex--;)
95             m_allocator.free(block->at(nodeIndex));
96     }
97     m_allocator.freeAll();
98 }
99
100 const char *Graph::opName(NodeType op)
101 {
102     return dfgOpNames[op];
103 }
104
105 static void printWhiteSpace(PrintStream& out, unsigned amount)
106 {
107     while (amount-- > 0)
108         out.print(" ");
109 }
110
// Prints the inline-stack transition between the previously dumped node and
// currentNode: "<--" lines for frames popped, "-->" lines for frames pushed.
// Updates previousNodeRef to currentNode so successive calls diff against
// the last node that had a semantic origin. Returns true if anything was
// printed.
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;
    
    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    if (!previousNode)
        return false;
    
    // Same inline call frame means the inline stacks are identical; nothing
    // to print.
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    // Find the shallowest depth at which the two inline stacks diverge.
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops (deepest first).
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes (shallowest first).
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}
156
157 int Graph::amountOfNodeWhiteSpace(Node* node)
158 {
159     return (node->origin.semantic.inlineDepth() - 1) * 2;
160 }
161
162 void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
163 {
164     printWhiteSpace(out, amountOfNodeWhiteSpace(node));
165 }
166
// Dumps a single node on one line: header (index, ref count, virtual
// register), opcode, children, then any op-specific metadata, clobber
// summary, exit info, and finally the node's speculated type.
void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    // Show the "real" ref count, excluding the implicit mustGenerate ref;
    // mustGenerate itself is conveyed by the '!' marker below.
    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count is prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. They may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    // Children: var-args nodes index into m_varArgChildren; everything else
    // uses the fixed child1..child3 slots. Earlier (possibly empty) fixed
    // children are printed whenever a later child exists, so that argument
    // positions stay aligned.
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    // Op-specific metadata; each has*() predicate guards its accessor.
    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasArithRoundingMode())
        out.print(comma, "Rounding:", node->arithRoundingMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            // NOTE(review): isCell() is already guaranteed by the else
            // branch above; this inner check looks redundant.
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    // Control-flow nodes: print their targets.
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    // Summarize abstract-heap reads/writes and exit behavior.
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        // Only print the exit origin when it differs from the semantic one.
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    if (!node->origin.exitOK)
        out.print(comma, "ExitInvalid");
    out.print(")");

    // Trailing prediction: per-variable if available, otherwise heap.
    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}
385
386 bool Graph::terminalsAreValid()
387 {
388     for (BasicBlock* block : blocksInNaturalOrder()) {
389         if (!block->terminal())
390             return false;
391     }
392     return true;
393 }
394
// Prints the per-block banner: name/origin, execution count, CFG edges,
// whatever analyses are currently computed (dominators, pre/post numbering,
// natural loops), and the block's phi nodes.
void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
    // Self-comparison is the floating-point NaN filter: the count is only
    // printed when it holds a real (non-NaN) value.
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    // Successors are only well-defined if the block has a terminal.
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
            if (m_prePostNumbering)
                out.print(" (", m_prePostNumbering->edgeKind(block, successor), ")");
        }
    } else
        out.print(" <invalid>");
    out.print("\n");
    if (m_dominators && terminalsAreValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators->blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators->dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators->iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_prePostNumbering)
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering->preNumber(block), "/", m_prePostNumbering->postNumber(block), "\n");
    if (m_naturalLoops) {
        if (const NaturalLoop* loop = m_naturalLoops->headerOf(block)) {
            // Sort the loop body by block index for a stable, readable dump.
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            // Dead phis are skipped unless the caller asked for all of them.
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}
463
// Dumps the entire graph: a header with compilation state, then each block's
// header, head state, nodes, and tail state, followed by frozen GC values,
// watchpoints, and anything the dump context accumulated.
void Graph::dump(PrintStream& out, DumpContext* context)
{
    // Use a local context if the caller did not supply one, so queued
    // context output is flushed at the end of this dump.
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA)
        out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
    else
        out.print("  Arguments: ", listDump(m_arguments), "\n");
    out.print("\n");
    
    // lastNode threads through dumpCodeOrigin so inline-stack transitions
    // are printed between consecutive nodes across the whole graph.
    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        // Head-state dump depends on graph form: the CPS forms track
        // per-variable values and links, SSA tracks availability/liveness.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    // Frozen values that point into the GC heap.
    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));
    
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}
562
563 void Graph::dethread()
564 {
565     if (m_form == LoadStore || m_form == SSA)
566         return;
567     
568     if (logCompilationChanges())
569         dataLog("Dethreading DFG graph.\n");
570     
571     SamplingRegion samplingRegion("DFG Dethreading");
572     
573     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
574         BasicBlock* block = m_blocks[blockIndex].get();
575         if (!block)
576             continue;
577         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
578             Node* phi = block->phis[phiIndex];
579             phi->children.reset();
580         }
581     }
582     
583     m_form = LoadStore;
584 }
585
586 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
587 {
588     if (!successor->isReachable) {
589         successor->isReachable = true;
590         worklist.append(successor);
591     }
592     
593     successor->predecessors.append(block);
594 }
595
596 void Graph::determineReachability()
597 {
598     Vector<BasicBlock*, 16> worklist;
599     worklist.append(block(0));
600     block(0)->isReachable = true;
601     while (!worklist.isEmpty()) {
602         BasicBlock* block = worklist.takeLast();
603         for (unsigned i = block->numSuccessors(); i--;)
604             handleSuccessor(worklist, block, block->successor(i));
605     }
606 }
607
608 void Graph::resetReachability()
609 {
610     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
611         BasicBlock* block = m_blocks[blockIndex].get();
612         if (!block)
613             continue;
614         block->isReachable = false;
615         block->predecessors.clear();
616     }
617     
618     determineReachability();
619 }
620
621 namespace {
622
// Computes exact ref counts for all nodes: resets every count to zero,
// seeds a worklist with the "roots" (must-generate nodes and nodes reached
// through unproven type checks), then floods ref counts outward along use
// edges to a fixpoint. In SSA, Phi->Upsilon liveness is propagated as well.
class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                // postfixRef() returns the pre-increment count; zero means
                // this is the first ref, so the node needs processing.
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
        // Outer loop: the SSA Upsilon pass below can revive nodes, which
        // refills the worklist, so iterate until nothing changes.
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        // A live Phi keeps its Upsilons alive.
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    // Roots: any edge that still needs a type check keeps its target alive.
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    // Bumps a node's ref count; enqueues it only on the 0 -> 1 transition.
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};
719
720 } // anonymous namespace
721
722 void Graph::computeRefCounts()
723 {
724     RefCountCalculator calculator(*this);
725     calculator.calculate();
726 }
727
728 void Graph::killBlockAndItsContents(BasicBlock* block)
729 {
730     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
731         m_allocator.free(block->phis[phiIndex]);
732     for (unsigned nodeIndex = block->size(); nodeIndex--;)
733         m_allocator.free(block->at(nodeIndex));
734     
735     killBlock(block);
736 }
737
738 void Graph::killUnreachableBlocks()
739 {
740     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
741         BasicBlock* block = this->block(blockIndex);
742         if (!block)
743             continue;
744         if (block->isReachable)
745             continue;
746         
747         killBlockAndItsContents(block);
748     }
749 }
750
751 void Graph::invalidateCFG()
752 {
753     m_dominators = nullptr;
754     m_naturalLoops = nullptr;
755     m_prePostNumbering = nullptr;
756 }
757
// Within |block|, starting at |startIndexInBlock|, replaces the first GetLocal
// of |variableAccessData| with |newGetLocal| (updating variablesAtTail if that
// node was the tail value). Scanning stops at a SetLocal of the same local,
// since that starts a new definition of the variable.
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            // A store to the same local ends the range we may substitute in.
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            // If the replaced node was the block's tail value for this local,
            // the replacement takes over that role.
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}
788
789 BlockList Graph::blocksInPreOrder()
790 {
791     BlockList result;
792     BlockWorklist worklist;
793     worklist.push(block(0));
794     while (BasicBlock* block = worklist.pop()) {
795         result.append(block);
796         for (unsigned i = block->numSuccessors(); i--;)
797             worklist.push(block->successor(i));
798     }
799     return result;
800 }
801
802 BlockList Graph::blocksInPostOrder()
803 {
804     BlockList result;
805     PostOrderBlockWorklist worklist;
806     worklist.push(block(0));
807     while (BlockWithOrder item = worklist.pop()) {
808         switch (item.order) {
809         case VisitOrder::Pre:
810             worklist.pushPost(item.node);
811             for (unsigned i = item.node->numSuccessors(); i--;)
812                 worklist.push(item.node->successor(i));
813             break;
814         case VisitOrder::Post:
815             result.append(item.node);
816             break;
817         }
818     }
819     return result;
820 }
821
822 void Graph::clearReplacements()
823 {
824     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
825         BasicBlock* block = m_blocks[blockIndex].get();
826         if (!block)
827             continue;
828         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
829             block->phis[phiIndex]->setReplacement(nullptr);
830         for (unsigned nodeIndex = block->size(); nodeIndex--;)
831             block->at(nodeIndex)->setReplacement(nullptr);
832     }
833 }
834
835 void Graph::clearEpochs()
836 {
837     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
838         BasicBlock* block = m_blocks[blockIndex].get();
839         if (!block)
840             continue;
841         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
842             block->phis[phiIndex]->setEpoch(Epoch());
843         for (unsigned nodeIndex = block->size(); nodeIndex--;)
844             block->at(nodeIndex)->setEpoch(Epoch());
845     }
846 }
847
848 void Graph::initializeNodeOwners()
849 {
850     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
851         BasicBlock* block = m_blocks[blockIndex].get();
852         if (!block)
853             continue;
854         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
855             block->phis[phiIndex]->owner = block;
856         for (unsigned nodeIndex = block->size(); nodeIndex--;)
857             block->at(nodeIndex)->owner = block;
858     }
859 }
860
861 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
862 {
863     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
864         BasicBlock* block = m_blocks[blockIndex].get();
865         if (!block)
866             continue;
867         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
868             block->phis[phiIndex]->clearFlags(flags);
869         for (unsigned nodeIndex = block->size(); nodeIndex--;)
870             block->at(nodeIndex)->clearFlags(flags);
871     }
872 }
873
874 bool Graph::watchCondition(const ObjectPropertyCondition& key)
875 {
876     if (!key.isWatchable())
877         return false;
878     
879     m_plan.weakReferences.addLazily(key.object());
880     if (key.hasPrototype())
881         m_plan.weakReferences.addLazily(key.prototype());
882     if (key.hasRequiredValue())
883         m_plan.weakReferences.addLazily(key.requiredValue());
884     
885     m_plan.watchpoints.addLazily(key);
886
887     if (key.kind() == PropertyCondition::Presence)
888         m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
889     
890     return true;
891 }
892
893 bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
894 {
895     return m_safeToLoad.contains(std::make_pair(base, offset));
896 }
897
// Returns the inferred type descriptor for (structure, uid), memoizing the
// answer in m_inferredTypes and registering whatever weak references and
// watchpoints are needed to keep the answer valid.
InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    assertIsRegistered(key.structure());
    
    // Fast path: we already computed (and are watching) this key.
    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        // No inferred type object at all: cache Top so we never ask again.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        // Top gives us nothing to exploit, so cache it without taking any
        // watchpoints.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }
    
    m_inferredTypes.add(key, typeDescriptor);

    m_plan.weakReferences.addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}
929
930 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
931 {
932     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
933     if (iter != m_bytecodeLiveness.end())
934         return *iter->value;
935     
936     std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
937     codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
938     FullBytecodeLiveness& result = *liveness;
939     m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
940     return result;
941 }
942
// Convenience overload: liveness for the baseline code block backing the
// given inline call frame.
FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}
947
948 BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
949 {
950     HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
951     if (iter != m_bytecodeKills.end())
952         return *iter->value;
953     
954     std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
955     codeBlock->livenessAnalysis().computeKills(*kills);
956     BytecodeKills& result = *kills;
957     m_bytecodeKills.add(codeBlock, WTFMove(kills));
958     return result;
959 }
960
// Convenience overload: kills for the baseline code block backing the given
// inline call frame.
BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
{
    return killsFor(baselineCodeBlockFor(inlineCallFrame));
}
965
// Returns whether the machine-frame register |operand| is live in bytecode at
// |codeOrigin|, walking outward through the inline call frame chain.
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    CodeOrigin* codeOriginPtr = &codeOrigin;
    for (;;) {
        // Translate the machine register into the current frame's own view.
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());
        
        if (operand.offset() < codeOriginPtr->stackOffset() + JSStack::CallFrameHeaderSize) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);
                
                // The Callee header slot stays live for closure calls...
                if (codeOriginPtr->inlineCallFrame->isClosureCall
                    && reg.offset() == JSStack::Callee)
                    return true;
                
                // ...and the ArgumentCount header slot for varargs calls.
                if (codeOriginPtr->inlineCallFrame->isVarargs()
                    && reg.offset() == JSStack::ArgumentCount)
                    return true;
                
                // Any other header slot of an inlined frame is dead.
                return false;
            }
            
            // A local of this frame: defer to the baseline liveness analysis.
            return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
                reg.offset(), codeOriginPtr->bytecodeIndex);
        }
        
        InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
        if (!inlineCallFrame)
            break;

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
            return true;
        
        codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();

        // The first inline call frame could be an inline tail call
        if (!codeOriginPtr)
            break;
    }
    
    // Conservatively treat anything we could not attribute to a frame as live.
    return true;
}
1011
1012 BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
1013 {
1014     BitVector result;
1015     result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
1016     forAllLocalsLiveInBytecode(
1017         codeOrigin,
1018         [&] (VirtualRegister reg) {
1019             ASSERT(reg.isLocal());
1020             result.quickSet(reg.toLocal());
1021         });
1022     return result;
1023 }
1024
1025 unsigned Graph::frameRegisterCount()
1026 {
1027     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
1028     return roundLocalRegisterCountForFramePointerOffset(result);
1029 }
1030
// Returns the stack pointer's offset (as a virtual-register offset) based on
// the full frame size computed by frameRegisterCount().
unsigned Graph::stackPointerOffset()
{
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}
1035
1036 unsigned Graph::requiredRegisterCountForExit()
1037 {
1038     unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
1039     for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
1040         InlineCallFrame* inlineCallFrame = *iter;
1041         CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
1042         unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
1043         count = std::max(count, requiredCount);
1044     }
1045     return count;
1046 }
1047
// The frame must be big enough both to run the DFG code and to materialize
// baseline frames on OSR exit.
unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}
1052
// Attempts to constant-fold a load of |offset| from |base|, assuming its
// structure is one of |structureSet|. Succeeds only when every structure's
// property-replacement watchpoint is still intact; registers those
// watchpoints so a later replacement invalidates the compilation.
JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        assertIsRegistered(structure);
        
        // If the property could be (or has been) replaced, we cannot fold it.
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}
1100
// Convenience overload for a single known structure.
JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
{
    return tryGetConstantProperty(base, StructureSet(structure), offset);
}
1105
1106 JSValue Graph::tryGetConstantProperty(
1107     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1108 {
1109     if (structure.isInfinite()) {
1110         // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
1111         // watching to constant-fold the property.
1112         // https://bugs.webkit.org/show_bug.cgi?id=147271
1113         return JSValue();
1114     }
1115     
1116     return tryGetConstantProperty(base, structure.set(), offset);
1117 }
1118
// Convenience overload that unpacks an AbstractValue's constant and structure.
JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}
1123
1124 AbstractValue Graph::inferredValueForProperty(
1125     const StructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
1126 {
1127     AbstractValue result;
1128     base.forEach(
1129         [&] (Structure* structure) {
1130             AbstractValue value;
1131             value.set(*this, inferredTypeForProperty(structure, uid));
1132             result.merge(value);
1133         });
1134     if (clobberState == StructuresAreClobbered)
1135         result.clobberStructures();
1136     return result;
1137 }
1138
1139 AbstractValue Graph::inferredValueForProperty(
1140     const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
1141     StructureClobberState clobberState)
1142 {
1143     if (JSValue value = tryGetConstantProperty(base, offset)) {
1144         AbstractValue result;
1145         result.set(*this, *freeze(value), clobberState);
1146         return result;
1147     }
1148
1149     if (base.m_structure.isFinite())
1150         return inferredValueForProperty(base.m_structure.set(), uid, clobberState);
1151
1152     return AbstractValue::heapTop();
1153 }
1154
// Attempts to constant-fold a closure-variable load: succeeds only when the
// base is a known lexical environment whose variable has a still-watched
// watchpoint set and a non-empty value.
JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        // Read the entry and the variable's value under the symbol table's
        // lock so the two observations are mutually consistent.
        ConcurrentJITLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        // Only a still-watched set guarantees the variable is unchanged.
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    // Record our dependence on the watchpoint set (done outside the lock).
    watchpoints().addLazily(set);
    
    return value;
}
1193
// Convenience overload that unpacks an AbstractValue's constant.
JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
{
    return tryGetConstantClosureVar(value.m_value, offset);
}
1198
1199 JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1200 {
1201     if (!node->hasConstant())
1202         return JSValue();
1203     return tryGetConstantClosureVar(node->asJSValue(), offset);
1204 }
1205
1206 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
1207 {
1208     if (!value)
1209         return nullptr;
1210     JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(value);
1211     if (!value)
1212         return nullptr;
1213     if (!view->length())
1214         return nullptr;
1215     WTF::loadLoadFence();
1216     watchpoints().addLazily(view);
1217     return view;
1218 }
1219
1220 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1221 {
1222     if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
1223         return nullptr;
1224     return tryGetFoldableView(value);
1225 }
1226
// Rebuilds the code block's constant pool from the frozen-value set: weak
// values become weak references; strong values become pool constants.
void Graph::registerFrozenValues()
{
    m_codeBlock->constants().resize(0);
    m_codeBlock->constantsSourceCodeRepresentation().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        // Values that don't point into the heap need no GC registration.
        if (!value->pointsToHeap())
            continue;
        
        ASSERT(value->structure());
        ASSERT(m_plan.weakReferences.contains(value->structure()));
        
        switch (value->strength()) {
        case WeakValue: {
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            // We already have a barrier on the code block.
            m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
    m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
}
1253
// Reports to the visitor every cell and structure the graph depends on, so
// they are kept alive while the compilation is in flight.
void Graph::visitChildren(SlotVisitor& visitor)
{
    // Frozen values (and their structures) are baked into the graph.
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarrieredReadOnlyValue(value->value());
        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
    }
    
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        
        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);
            
            // Report structures referenced from each node's meta-data.
            switch (node->op()) {
            case CheckStructure:
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
                break;
                
            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
                break;
                
            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                // A transition references both of its endpoints.
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->previous);
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->next);
                break;
                
            case MultiGetByOffset:
                for (const MultiGetByOffsetCase& getCase : node->multiGetByOffsetData().cases) {
                    for (Structure* structure : getCase.set())
                        visitor.appendUnbarrieredReadOnlyPointer(structure);
                }
                break;
                    
            case MultiPutByOffset:
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
                }
                break;
                
            default:
                break;
            }
        }
    }
}
1314
// Returns the unique FrozenValue for |value|, creating it (and registering
// any structure the value carries) the first time the value is seen.
FrozenValue* Graph::freeze(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();
    
    // One FrozenValue per encoded JSValue; hit the cache first. The map slot
    // is claimed up front and filled in at the end.
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());
    
    FrozenValue frozenValue = FrozenValue::freeze(value);
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);
    
    return result.iterator->value = m_frozenValues.add(frozenValue);
}
1333
1334 FrozenValue* Graph::freezeStrong(JSValue value)
1335 {
1336     FrozenValue* result = freeze(value);
1337     result->strengthenTo(StrongValue);
1338     return result;
1339 }
1340
1341 void Graph::convertToConstant(Node* node, FrozenValue* value)
1342 {
1343     if (value->structure())
1344         assertIsRegistered(value->structure());
1345     node->convertToConstant(value);
1346 }
1347
// Freezes |value| (weakly by default) and converts |node| into that constant.
void Graph::convertToConstant(Node* node, JSValue value)
{
    convertToConstant(node, freeze(value));
}
1352
// Freezes |value| with StrongValue strength and converts |node| into it.
void Graph::convertToStrongConstant(Node* node, JSValue value)
{
    convertToConstant(node, freezeStrong(value));
}
1357
1358 StructureRegistrationResult Graph::registerStructure(Structure* structure)
1359 {
1360     m_plan.weakReferences.addLazily(structure);
1361     if (m_plan.watchpoints.consider(structure))
1362         return StructureRegisteredAndWatched;
1363     return StructureRegisteredNormally;
1364 }
1365
// Checks that |structure| was registered with this compilation: it must be
// weakly referenced, and — if it should be watched — actually watched.
void Graph::assertIsRegistered(Structure* structure)
{
    // It's convenient to be able to call this with a maybe-null structure.
    if (!structure)
        return;
    
    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
    
    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;
    
    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}
1381
// Shared assertion-failure path: logs the failing assertion, dumps the graph,
// and deliberately crashes. The assertion is printed both before and after
// the (possibly very long) graph dump so it is visible at either end of the log.
NO_RETURN_DUE_TO_CRASH static void crash(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    CRASH_WITH_SECURITY_IMPLICATION();
}
1398
// Assertion-failure entry point with no particular node or block context.
void Graph::handleAssertionFailure(
    std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, "", file, line, function, assertion);
}
1404
// Assertion-failure entry point that records the node being handled.
void Graph::handleAssertionFailure(
    Node* node, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
}
1410
// Assertion-failure entry point that records the block being handled.
void Graph::handleAssertionFailure(
    BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
}
1416
1417 void Graph::ensureDominators()
1418 {
1419     if (!m_dominators)
1420         m_dominators = std::make_unique<Dominators>(*this);
1421 }
1422
1423 void Graph::ensurePrePostNumbering()
1424 {
1425     if (!m_prePostNumbering)
1426         m_prePostNumbering = std::make_unique<PrePostNumbering>(*this);
1427 }
1428
1429 void Graph::ensureNaturalLoops()
1430 {
1431     ensureDominators();
1432     if (!m_naturalLoops)
1433         m_naturalLoops = std::make_unique<NaturalLoops>(*this);
1434 }
1435
// Returns the baseline value profile covering |node|'s value, or null if no
// trustworthy profile exists for it.
ValueProfile* Graph::valueProfileFor(Node* node)
{
    if (!node)
        return nullptr;
        
    CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
        
    if (node->hasLocal(*this)) {
        // Of the locals, only arguments have per-slot profiles here.
        if (!node->local().isArgument())
            return nullptr;
        int argument = node->local().toArgument();
        Node* argumentNode = m_arguments[argument];
        if (!argumentNode)
            return nullptr;
        // The profile only applies if this node really is that argument, not
        // some other value that later occupied the argument's slot.
        if (node->variableAccessData() != argumentNode->variableAccessData())
            return nullptr;
        return profiledBlock->valueProfileForArgument(argument);
    }
        
    if (node->hasHeapPrediction())
        return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
        
    return nullptr;
}
1460
// Like valueProfileFor(), but can also describe a lazy-operand profile for
// GetLocal nodes; returns an empty MethodOfGettingAValueProfile otherwise.
MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* node)
{
    if (!node)
        return MethodOfGettingAValueProfile();
    
    // Prefer a direct value profile when one exists.
    if (ValueProfile* valueProfile = valueProfileFor(node))
        return MethodOfGettingAValueProfile(valueProfile);
    
    if (node->op() == GetLocal) {
        CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
        
        return MethodOfGettingAValueProfile::fromLazyOperand(
            profiledBlock,
            LazyOperandValueProfileKey(
                node->origin.semantic.bytecodeIndex, node->local()));
    }
    
    return MethodOfGettingAValueProfile();
}
1480
1481 bool Graph::isStringPrototypeMethodSane(JSObject* stringPrototype, Structure* stringPrototypeStructure, UniquedStringImpl* uid)
1482 {
1483     unsigned attributesUnused;
1484     PropertyOffset offset = stringPrototypeStructure->getConcurrently(uid, attributesUnused);
1485     if (!isValidOffset(offset))
1486         return false;
1487
1488     JSValue value = tryGetConstantProperty(stringPrototype, stringPrototypeStructure, offset);
1489     if (!value)
1490         return false;
1491
1492     JSFunction* function = jsDynamicCast<JSFunction*>(value);
1493     if (!function)
1494         return false;
1495
1496     if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
1497         return false;
1498     
1499     return true;
1500 }
1501
// Decides whether ToString on a StringObject can be optimized at this code
// origin: requires no prior NotStringObject exits, a watchable (non-dictionary)
// StringPrototype structure, and unmodified valueOf/toString methods.
bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
{
    if (hasExitSite(codeOrigin, NotStringObject))
        return false;

    Structure* stringObjectStructure = globalObjectFor(codeOrigin)->stringObjectStructure();
    registerStructure(stringObjectStructure);
    ASSERT(stringObjectStructure->storedPrototype().isObject());
    ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo() == StringPrototype::info());

    FrozenValue* stringPrototypeObjectValue = freeze(stringObjectStructure->storedPrototype());
    StringPrototype* stringPrototypeObject = stringPrototypeObjectValue->dynamicCast<StringPrototype*>();
    Structure* stringPrototypeStructure = stringPrototypeObjectValue->structure();
    // We need the prototype's structure transitions watched; otherwise a later
    // mutation could invalidate the sanity checks below.
    if (registerStructure(stringPrototypeStructure) != StructureRegisteredAndWatched)
        return false;

    if (stringPrototypeStructure->isDictionary())
        return false;

    // We're being conservative here. We want DFG's ToString on StringObject to be
    // used in both numeric contexts (that would call valueOf()) and string contexts
    // (that would call toString()). We don't want the DFG to have to distinguish
    // between the two, just because that seems like it would get confusing. So we
    // just require both methods to be sane.
    if (!isStringPrototypeMethodSane(stringPrototypeObject, stringPrototypeStructure, m_vm.propertyNames->valueOf.impl()))
        return false;
    if (!isStringPrototypeMethodSane(stringPrototypeObject, stringPrototypeStructure, m_vm.propertyNames->toString.impl()))
        return false;

    return true;
}
1533
// Determines whether an exception thrown at |codeOrigin| would be caught
// within this machine frame (including its inlined frames). On success, the
// catch origin and handler are reported through the out-parameters.
bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
{
    if (!m_hasExceptionHandlers)
        return false;

    unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
    while (1) {
        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
        CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
        if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
            opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
            catchHandlerOut = handler;
            return true;
        }

        // No handler at this level; if we're not inlined, there is nowhere
        // further out in this machine frame to look.
        if (!inlineCallFrame)
            return false;

        // Continue the search at the call site in the caller's frame.
        bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
        codeOrigin = codeOrigin.inlineCallFrame->directCaller;
    }

    RELEASE_ASSERT_NOT_REACHED();
}
1558
1559 } } // namespace JSC::DFG
1560
1561 #endif // ENABLE(DFG_JIT)