Hook up ShadowChicken to the debugger to show tail deleted frames
[WebKit-https.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
1 /*
2  * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGGraph.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "BytecodeKills.h"
32 #include "BytecodeLivenessAnalysisInlines.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGBlockWorklist.h"
36 #include "DFGClobberSet.h"
37 #include "DFGClobbersExitState.h"
38 #include "DFGCFG.h"
39 #include "DFGDominators.h"
40 #include "DFGJITCode.h"
41 #include "DFGMayExit.h"
42 #include "DFGNaturalLoops.h"
43 #include "DFGPrePostNumbering.h"
44 #include "DFGVariableAccessDataDump.h"
45 #include "FullBytecodeLiveness.h"
46 #include "FunctionExecutableDump.h"
47 #include "GetterSetter.h"
48 #include "JIT.h"
49 #include "JSLexicalEnvironment.h"
50 #include "MaxFrameExtentForSlowPathCall.h"
51 #include "OperandsInlines.h"
52 #include "JSCInlines.h"
53 #include "StackAlignment.h"
54 #include <wtf/CommaPrinter.h>
55 #include <wtf/ListDump.h>
56
57 namespace JSC { namespace DFG {
58
// Table of human-readable opcode names, indexed by NodeType. It is built by
// stringizing each opcode in FOR_EACH_DFG_OP, so it stays in sync with the
// NodeType enum automatically.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};
65
// Constructs a Graph for the given compilation plan. The graph starts out in
// LoadStore form, before the fixpoint, with every node considered live.
Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock)
    , m_profiledBlock(m_codeBlock->alternative()) // The baseline code block that carries the profiling data.
    , m_allocator(longLivedState.m_allocator) // Node allocator shared across compilations.
    , m_cfg(std::make_unique<CFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);
    
    // Debugger support is on if the profiled block was compiled with debugging
    // opcodes, or if it is forced on via Options.
    m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();
}
84
85 Graph::~Graph()
86 {
87     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
88         BasicBlock* block = this->block(blockIndex);
89         if (!block)
90             continue;
91
92         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
93             m_allocator.free(block->phis[phiIndex]);
94         for (unsigned nodeIndex = block->size(); nodeIndex--;)
95             m_allocator.free(block->at(nodeIndex));
96     }
97     m_allocator.freeAll();
98 }
99
100 const char *Graph::opName(NodeType op)
101 {
102     return dfgOpNames[op];
103 }
104
105 static void printWhiteSpace(PrintStream& out, unsigned amount)
106 {
107     while (amount-- > 0)
108         out.print(" ");
109 }
110
// Prints the change in inlining context between the previously dumped node and
// currentNode, as a series of "<--" (inline frame exited) and "-->" (inline
// frame entered) lines. Updates previousNodeRef to currentNode. Returns true
// if anything was printed.
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;
    
    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    // The very first node has no predecessor to diff against.
    if (!previousNode)
        return false;
    
    // Same inline call frame means the inline stack did not change.
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    // Find the first position at which the two inline stacks diverge; if one
    // is a prefix of the other, divergence is at the shorter stack's length.
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops: frames from the previous stack that we have left,
    // deepest first.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes: frames in the current stack that we have entered,
    // shallowest first.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}
156
157 int Graph::amountOfNodeWhiteSpace(Node* node)
158 {
159     return (node->origin.semantic.inlineDepth() - 1) * 2;
160 }
161
162 void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
163 {
164     printWhiteSpace(out, amountOfNodeWhiteSpace(node));
165 }
166
// Dumps a single node on one line in the standard DFG textual format (see the
// worked example below): index, ref count, virtual register, opcode, children,
// opcode-specific meta-data, clobber/exit information, and finally the node's
// prediction if it has one.
void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount; // Show the 'real' count, excluding the mustGenerate ref.

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count it prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. The may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        // Var-arg children live in m_varArgChildren; skip the empty slots.
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        // Print earlier children even when empty, so that child positions stay
        // aligned whenever a later child is present.
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    // Opcode-specific meta-data: each clause prints only if the node's opcode
    // carries that kind of payload.
    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasArithRoundingMode())
        out.print(comma, "Rounding:", node->arithRoundingMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            // Note: this re-check is always true here; the else branch above
            // already established that value() is a cell.
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            // Also show the machine register assignment, if one exists.
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        // Print the buffer's starting index followed by its frozen contents.
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasLazyJSValue())
        out.print(comma, node->lazyJSValue());
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    // Control-flow nodes: show their branch/jump targets.
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    // Abstract-heap clobber info: what this node reads (R:) and writes (W:).
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        // Only show the exit origin when it differs from the semantic origin.
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    if (!node->origin.exitOK)
        out.print(comma, "ExitInvalid");
    out.print(")");

    // Append the prediction: variable-based if available, else the node's own
    // heap prediction.
    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}
387
388 bool Graph::terminalsAreValid()
389 {
390     for (BasicBlock* block : blocksInNaturalOrder()) {
391         if (!block->terminal())
392             return false;
393     }
394     return true;
395 }
396
// Prints the header for a basic block: its origin, execution count,
// predecessor/successor lists, dominance, pre/post numbering and loop
// information (when the corresponding analyses are available), and - depending
// on phiNodeDumpMode - its Phi nodes.
void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
    // The self-comparison is a deliberate NaN test: x == x is false only for
    // NaN, so the count is printed only when it is a real number (presumably
    // NaN means "not recorded" - confirm against BasicBlock's initialization).
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
            if (m_prePostNumbering)
                out.print(" (", m_prePostNumbering->edgeKind(block, successor), ")");
        }
    } else
        out.print(" <invalid>"); // Successors are only meaningful with a terminal.
    out.print("\n");
    if (m_dominators && terminalsAreValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators->blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators->dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators->iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_prePostNumbering)
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering->preNumber(block), "/", m_prePostNumbering->postNumber(block), "\n");
    if (m_naturalLoops) {
        if (const NaturalLoop* loop = m_naturalLoops->headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            // Sort the loop body by block index for stable, readable output.
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            // In DumpLivePhisOnly mode, skip phis that are dead.
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}
465
// Dumps the entire graph: a header describing the compilation, then every
// block (header, CFA state, nodes, and the form-specific variable or SSA
// availability information at head and tail), and finally the frozen GC values
// and watchpoints.
void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    // If the caller did not supply a context, use a local one that we flush at
    // the end of this dump.
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA)
        out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
    else
        out.print("  Arguments: ", listDump(m_arguments), "\n");
    out.print("\n");
    
    // lastNode threads through dumpCodeOrigin() so we can show inline-stack
    // transitions between consecutive nodes.
    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        // At-head information depends on the graph's current form.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        // At-tail information, again form-dependent.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    // List frozen values that point into the GC heap.
    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));
    
    // If we used the local context, dump anything it accumulated.
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}
564
565 void Graph::dethread()
566 {
567     if (m_form == LoadStore || m_form == SSA)
568         return;
569     
570     if (logCompilationChanges())
571         dataLog("Dethreading DFG graph.\n");
572     
573     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
574         BasicBlock* block = m_blocks[blockIndex].get();
575         if (!block)
576             continue;
577         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
578             Node* phi = block->phis[phiIndex];
579             phi->children.reset();
580         }
581     }
582     
583     m_form = LoadStore;
584 }
585
586 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
587 {
588     if (!successor->isReachable) {
589         successor->isReachable = true;
590         worklist.append(successor);
591     }
592     
593     successor->predecessors.append(block);
594 }
595
596 void Graph::determineReachability()
597 {
598     Vector<BasicBlock*, 16> worklist;
599     worklist.append(block(0));
600     block(0)->isReachable = true;
601     while (!worklist.isEmpty()) {
602         BasicBlock* block = worklist.takeLast();
603         for (unsigned i = block->numSuccessors(); i--;)
604             handleSuccessor(worklist, block, block->successor(i));
605     }
606 }
607
608 void Graph::resetReachability()
609 {
610     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
611         BasicBlock* block = m_blocks[blockIndex].get();
612         if (!block)
613             continue;
614         block->isReachable = false;
615         block->predecessors.clear();
616     }
617     
618     determineReachability();
619 }
620
namespace {

// Recomputes every node's reference count from scratch. A node is live
// (refCount > 0) if it is must-generate, is reached through an unproven
// type-check edge, or is transitively referenced by a live node. In SSA form,
// Upsilon nodes are additionally kept alive when the Phi they feed is alive.
class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
        // Propagate liveness to a fixpoint. The outer loop is needed because
        // the SSA Upsilon pass below can make more nodes live, which in turn
        // refills the worklist.
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    // Roots liveness at nodes that still need a type check on this edge.
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    // Bumps a node's ref count, enqueuing it the first time it becomes live.
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};

} // anonymous namespace
721
722 void Graph::computeRefCounts()
723 {
724     RefCountCalculator calculator(*this);
725     calculator.calculate();
726 }
727
728 void Graph::killBlockAndItsContents(BasicBlock* block)
729 {
730     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
731         m_allocator.free(block->phis[phiIndex]);
732     for (unsigned nodeIndex = block->size(); nodeIndex--;)
733         m_allocator.free(block->at(nodeIndex));
734     
735     killBlock(block);
736 }
737
738 void Graph::killUnreachableBlocks()
739 {
740     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
741         BasicBlock* block = this->block(blockIndex);
742         if (!block)
743             continue;
744         if (block->isReachable)
745             continue;
746         
747         killBlockAndItsContents(block);
748     }
749 }
750
751 void Graph::invalidateCFG()
752 {
753     m_dominators = nullptr;
754     m_naturalLoops = nullptr;
755     m_prePostNumbering = nullptr;
756 }
757
// Replaces, within one block starting at startIndexInBlock, uses of a GetLocal
// of the given variable with newGetLocal. Scanning stops at the first SetLocal
// of the same local (the old value dies there) or after the first matching
// GetLocal has been substituted.
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            // A store to the same local ends the live range we may substitute in.
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }

        case GetLocal: {
            // GetLocals of other variables are irrelevant; keep scanning.
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            // If this node was the tail value for the local, the tail must now
            // point at the replacement.
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }

        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}
788
789 BlockList Graph::blocksInPreOrder()
790 {
791     BlockList result;
792     BlockWorklist worklist;
793     worklist.push(block(0));
794     while (BasicBlock* block = worklist.pop()) {
795         result.append(block);
796         for (unsigned i = block->numSuccessors(); i--;)
797             worklist.push(block->successor(i));
798     }
799     return result;
800 }
801
802 BlockList Graph::blocksInPostOrder()
803 {
804     BlockList result;
805     PostOrderBlockWorklist worklist;
806     worklist.push(block(0));
807     while (BlockWithOrder item = worklist.pop()) {
808         switch (item.order) {
809         case VisitOrder::Pre:
810             worklist.pushPost(item.node);
811             for (unsigned i = item.node->numSuccessors(); i--;)
812                 worklist.push(item.node->successor(i));
813             break;
814         case VisitOrder::Post:
815             result.append(item.node);
816             break;
817         }
818     }
819     return result;
820 }
821
822 void Graph::clearReplacements()
823 {
824     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
825         BasicBlock* block = m_blocks[blockIndex].get();
826         if (!block)
827             continue;
828         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
829             block->phis[phiIndex]->setReplacement(nullptr);
830         for (unsigned nodeIndex = block->size(); nodeIndex--;)
831             block->at(nodeIndex)->setReplacement(nullptr);
832     }
833 }
834
835 void Graph::clearEpochs()
836 {
837     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
838         BasicBlock* block = m_blocks[blockIndex].get();
839         if (!block)
840             continue;
841         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
842             block->phis[phiIndex]->setEpoch(Epoch());
843         for (unsigned nodeIndex = block->size(); nodeIndex--;)
844             block->at(nodeIndex)->setEpoch(Epoch());
845     }
846 }
847
848 void Graph::initializeNodeOwners()
849 {
850     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
851         BasicBlock* block = m_blocks[blockIndex].get();
852         if (!block)
853             continue;
854         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
855             block->phis[phiIndex]->owner = block;
856         for (unsigned nodeIndex = block->size(); nodeIndex--;)
857             block->at(nodeIndex)->owner = block;
858     }
859 }
860
861 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
862 {
863     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
864         BasicBlock* block = m_blocks[blockIndex].get();
865         if (!block)
866             continue;
867         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
868             block->phis[phiIndex]->clearFlags(flags);
869         for (unsigned nodeIndex = block->size(); nodeIndex--;)
870             block->at(nodeIndex)->clearFlags(flags);
871     }
872 }
873
// Commits to the given property condition holding for the lifetime of the
// compiled code by registering watchpoints and weak references with the plan.
// Returns false if the condition is not watchable.
bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    if (!key.isWatchable())
        return false;
    
    // Everything the condition mentions must be kept alive as long as this
    // code is, so register weak references for the cells it touches.
    m_plan.weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        m_plan.weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        m_plan.weakReferences.addLazily(key.requiredValue());
    
    // The plan installs the actual watchpoint when compilation finishes.
    m_plan.watchpoints.addLazily(key);

    // A watched Presence condition proves that loading (object, offset) cannot
    // go wrong; remember that for isSafeToLoad().
    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
    
    return true;
}
892
893 bool Graph::watchConditions(const ObjectPropertyConditionSet& keys)
894 {
895     if (!keys.isValid())
896         return false;
897
898     for (const ObjectPropertyCondition& key : keys) {
899         if (!watchCondition(key))
900             return false;
901     }
902     return true;
903 }
904
905 bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
906 {
907     return m_safeToLoad.contains(std::make_pair(base, offset));
908 }
909
// Returns the inferred type for the (structure, uid) key, caching the decision
// and registering the watchpoints/weak references needed to rely on it.
InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    assertIsRegistered(key.structure());
    
    // Fast path: we already decided what to believe about this key.
    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        // No inferred-type object means we know nothing; cache Top so we stop asking.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        // Top carries no information, so no dependencies are set up for it.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }
    
    m_inferredTypes.add(key, typeDescriptor);

    // Keep the type object alive and register anything the descriptor mentions.
    m_plan.weakReferences.addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}
941
942 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
943 {
944     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
945     if (iter != m_bytecodeLiveness.end())
946         return *iter->value;
947     
948     std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
949     codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
950     FullBytecodeLiveness& result = *liveness;
951     m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
952     return result;
953 }
954
// Convenience overload: liveness for the baseline code block that backs the
// given inline call frame.
FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}
959
960 BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
961 {
962     HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
963     if (iter != m_bytecodeKills.end())
964         return *iter->value;
965     
966     std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
967     codeBlock->livenessAnalysis().computeKills(*kills);
968     BytecodeKills& result = *kills;
969     m_bytecodeKills.add(codeBlock, WTFMove(kills));
970     return result;
971 }
972
// Convenience overload: kill sets for the baseline code block that backs the
// given inline call frame.
BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
{
    return killsFor(baselineCodeBlockFor(inlineCallFrame));
}
977
// Returns whether the machine-frame register |operand| is live at |codeOrigin|,
// walking the inline stack from the origin outward.
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    CodeOrigin* codeOriginPtr = &codeOrigin;
    for (;;) {
        // Translate the machine register into this frame's coordinate system.
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());
        
        if (operand.offset() < codeOriginPtr->stackOffset() + JSStack::CallFrameHeaderSize) {
            if (reg.isArgument()) {
                // The operand falls in this frame's header.
                RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);
                
                // The callee slot matters only for closure calls.
                if (codeOriginPtr->inlineCallFrame->isClosureCall
                    && reg.offset() == JSStack::Callee)
                    return true;
                
                // The argument-count slot matters only for varargs calls.
                if (codeOriginPtr->inlineCallFrame->isVarargs()
                    && reg.offset() == JSStack::ArgumentCount)
                    return true;
                
                return false;
            }
            
            // A local of this frame: defer to the bytecode liveness analysis.
            return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
                reg.offset(), codeOriginPtr->bytecodeIndex);
        }
        
        InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
        if (!inlineCallFrame)
            break;

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
            return true;
        
        codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();

        // The first inline call frame could be an inline tail call
        if (!codeOriginPtr)
            break;
    }
    
    // Conservatively treat everything outside the frames we inspected as live.
    return true;
}
1023
1024 BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
1025 {
1026     BitVector result;
1027     result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
1028     forAllLocalsLiveInBytecode(
1029         codeOrigin,
1030         [&] (VirtualRegister reg) {
1031             ASSERT(reg.isLocal());
1032             result.quickSet(reg.toLocal());
1033         });
1034     return result;
1035 }
1036
1037 unsigned Graph::frameRegisterCount()
1038 {
1039     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
1040     return roundLocalRegisterCountForFramePointerOffset(result);
1041 }
1042
// The stack pointer sits just past the last local of the frame.
unsigned Graph::stackPointerOffset()
{
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}
1047
1048 unsigned Graph::requiredRegisterCountForExit()
1049 {
1050     unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
1051     for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
1052         InlineCallFrame* inlineCallFrame = *iter;
1053         CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
1054         unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
1055         count = std::max(count, requiredCount);
1056     }
1057     return count;
1058 }
1059
// The frame must satisfy both normal execution and OSR-exit reconstruction.
unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}
1064
// Attempts to constant-fold a property load: returns base[offset] if every
// structure in the proven set has a valid replacement watchpoint for that
// offset (registering those watchpoints), or the empty JSValue otherwise.
JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        assertIsRegistered(structure);
        
        // Without a still-valid replacement watchpoint the property could be
        // overwritten after we fold it.
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}
1112
// Single-structure convenience overload.
JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
{
    return tryGetConstantProperty(base, StructureSet(structure), offset);
}
1117
1118 JSValue Graph::tryGetConstantProperty(
1119     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1120 {
1121     if (structure.isInfinite()) {
1122         // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
1123         // watching to constant-fold the property.
1124         // https://bugs.webkit.org/show_bug.cgi?id=147271
1125         return JSValue();
1126     }
1127     
1128     return tryGetConstantProperty(base, structure.set(), offset);
1129 }
1130
// Convenience overload that unpacks an abstract value's constant and structures.
JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}
1135
1136 AbstractValue Graph::inferredValueForProperty(
1137     const StructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
1138 {
1139     AbstractValue result;
1140     base.forEach(
1141         [&] (Structure* structure) {
1142             AbstractValue value;
1143             value.set(*this, inferredTypeForProperty(structure, uid));
1144             result.merge(value);
1145         });
1146     if (clobberState == StructuresAreClobbered)
1147         result.clobberStructures();
1148     return result;
1149 }
1150
1151 AbstractValue Graph::inferredValueForProperty(
1152     const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
1153     StructureClobberState clobberState)
1154 {
1155     if (JSValue value = tryGetConstantProperty(base, offset)) {
1156         AbstractValue result;
1157         result.set(*this, *freeze(value), clobberState);
1158         return result;
1159     }
1160
1161     if (base.m_structure.isFinite())
1162         return inferredValueForProperty(base.m_structure.set(), uid, clobberState);
1163
1164     return AbstractValue::heapTop();
1165 }
1166
// Attempts to constant-fold a closure-variable load from a lexical environment,
// registering the variable's watchpoint set so the fold is invalidated on write.
// Returns the empty JSValue if the fold is not possible.
JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        // The symbol table entry and the variable slot are read together under
        // the symbol table's lock; the watchpoint is registered after the lock
        // is dropped.
        ConcurrentJITLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        // Only a still-watched variable can be treated as a constant.
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    watchpoints().addLazily(set);
    
    return value;
}
1205
// Convenience overload that unpacks the abstract value's constant.
JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
{
    return tryGetConstantClosureVar(value.m_value, offset);
}
1210
1211 JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1212 {
1213     if (!node->hasConstant())
1214         return JSValue();
1215     return tryGetConstantClosureVar(node->asJSValue(), offset);
1216 }
1217
1218 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
1219 {
1220     if (!value)
1221         return nullptr;
1222     JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(value);
1223     if (!value)
1224         return nullptr;
1225     if (!view->length())
1226         return nullptr;
1227     WTF::loadLoadFence();
1228     watchpoints().addLazily(view);
1229     return view;
1230 }
1231
1232 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1233 {
1234     if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
1235         return nullptr;
1236     return tryGetFoldableView(value);
1237 }
1238
1239 void Graph::registerFrozenValues()
1240 {
1241     m_codeBlock->constants().resize(0);
1242     m_codeBlock->constantsSourceCodeRepresentation().resize(0);
1243     for (FrozenValue* value : m_frozenValues) {
1244         if (!value->pointsToHeap())
1245             continue;
1246         
1247         ASSERT(value->structure());
1248         ASSERT(m_plan.weakReferences.contains(value->structure()));
1249         
1250         switch (value->strength()) {
1251         case WeakValue: {
1252             m_plan.weakReferences.addLazily(value->value().asCell());
1253             break;
1254         }
1255         case StrongValue: {
1256             unsigned constantIndex = m_codeBlock->addConstantLazily();
1257             // We already have a barrier on the code block.
1258             m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
1259             break;
1260         } }
1261     }
1262     m_codeBlock->constants().shrinkToFit();
1263     m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
1264 }
1265
// GC hook: reports every heap cell the graph holds onto — frozen values and the
// structures referenced by nodes that embed structure pointers.
void Graph::visitChildren(SlotVisitor& visitor)
{
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarrieredReadOnlyValue(value->value());
        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
    }
    
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        
        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);
            
            // Each case covers the ops whose payload embeds structure pointers.
            switch (node->op()) {
            case CheckStructure:
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
                break;
                
            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                // These carry a single structure.
                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
                break;
                
            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                // These carry a transition: both endpoints must stay alive.
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->previous);
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->next);
                break;
                
            case MultiGetByOffset:
                for (const MultiGetByOffsetCase& getCase : node->multiGetByOffsetData().cases) {
                    for (Structure* structure : getCase.set())
                        visitor.appendUnbarrieredReadOnlyPointer(structure);
                }
                break;
                    
            case MultiPutByOffset:
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
                    // A transitioning put also references the new structure.
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
                }
                break;
                
            default:
                break;
            }
        }
    }
}
1326
// Returns the unique FrozenValue for |value|, creating one on first use.
// The empty value maps to a shared singleton instead of a table entry.
FrozenValue* Graph::freeze(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();
    
    // Reserve the map slot first so we can fill it after constructing the entry.
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    // Record uint32 constants; the consumer of this list is not visible here.
    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());
    
    FrozenValue frozenValue = FrozenValue::freeze(value);
    // Freezing a cell pins its structure, which therefore must be registered.
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);
    
    return result.iterator->value = m_frozenValues.add(frozenValue);
}
1345
1346 FrozenValue* Graph::freezeStrong(JSValue value)
1347 {
1348     FrozenValue* result = freeze(value);
1349     result->strengthenTo(StrongValue);
1350     return result;
1351 }
1352
// Rewrites the node into a constant for the already-frozen value. The value's
// structure, if any, must have been registered.
void Graph::convertToConstant(Node* node, FrozenValue* value)
{
    if (value->structure())
        assertIsRegistered(value->structure());
    node->convertToConstant(value);
}
1359
1360 void Graph::convertToConstant(Node* node, JSValue value)
1361 {
1362     convertToConstant(node, freeze(value));
1363 }
1364
1365 void Graph::convertToStrongConstant(Node* node, JSValue value)
1366 {
1367     convertToConstant(node, freezeStrong(value));
1368 }
1369
1370 StructureRegistrationResult Graph::registerStructure(Structure* structure)
1371 {
1372     m_plan.weakReferences.addLazily(structure);
1373     if (m_plan.watchpoints.consider(structure))
1374         return StructureRegisteredAndWatched;
1375     return StructureRegisteredNormally;
1376 }
1377
// Debug check that the structure went through registerStructure(): it must be
// weakly referenced, and if watchable, actually watched.
void Graph::assertIsRegistered(Structure* structure)
{
    // It's convenient to be able to call this with a maybe-null structure.
    if (!structure)
        return;
    
    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
    
    // Unwatchable structures have nothing more to verify.
    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;
    
    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}
1393
// Shared backend for Graph::handleAssertionFailure(): logs the failure context,
// dumps the whole graph, and crashes. The assertion and location are printed
// both before and after the (potentially huge) dump so they are easy to find.
NO_RETURN_DUE_TO_CRASH static void crash(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    CRASH_WITH_SECURITY_IMPLICATION();
}
1410
// Assertion failure with no particular node/block context.
void Graph::handleAssertionFailure(
    std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, "", file, line, function, assertion);
}
1416
// Assertion failure while processing a specific node.
void Graph::handleAssertionFailure(
    Node* node, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
}
1422
// Assertion failure while processing a specific basic block.
void Graph::handleAssertionFailure(
    BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
}
1428
1429 void Graph::ensureDominators()
1430 {
1431     if (!m_dominators)
1432         m_dominators = std::make_unique<Dominators>(*this);
1433 }
1434
1435 void Graph::ensurePrePostNumbering()
1436 {
1437     if (!m_prePostNumbering)
1438         m_prePostNumbering = std::make_unique<PrePostNumbering>(*this);
1439 }
1440
1441 void Graph::ensureNaturalLoops()
1442 {
1443     ensureDominators();
1444     if (!m_naturalLoops)
1445         m_naturalLoops = std::make_unique<NaturalLoops>(*this);
1446 }
1447
// Finds a way to observe the baseline value profile for the value this node
// produces, chasing through identity-like nodes if the node itself has none.
// Returns an empty MethodOfGettingAValueProfile if nothing is found.
MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* node)
{
    while (node) {
        CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
        
        if (node->hasLocal(*this)) {
            // An argument local whose variable matches the argument node can use
            // the baseline argument profile directly.
            ValueProfile* result = [&] () -> ValueProfile* {
                if (!node->local().isArgument())
                    return nullptr;
                int argument = node->local().toArgument();
                Node* argumentNode = m_arguments[argument];
                if (!argumentNode)
                    return nullptr;
                if (node->variableAccessData() != argumentNode->variableAccessData())
                    return nullptr;
                return profiledBlock->valueProfileForArgument(argument);
            }();
            if (result)
                return result;
            
            // Otherwise a GetLocal can use the lazy-operand profile keyed by
            // (bytecode index, operand).
            if (node->op() == GetLocal) {
                return MethodOfGettingAValueProfile::fromLazyOperand(
                    profiledBlock,
                    LazyOperandValueProfileKey(
                        node->origin.semantic.bytecodeIndex, node->local()));
            }
        }
        
        // Nodes with a heap prediction map to the per-bytecode value profile.
        if (node->hasHeapPrediction())
            return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
        
        if (ResultProfile* result = profiledBlock->resultProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex))
            return result;
        
        // For pure representation changes, look through to the input's profile.
        switch (node->op()) {
        case Identity:
        case ValueRep:
        case DoubleRep:
        case Int52Rep:
            node = node->child1().node();
            break;
        default:
            node = nullptr;
        }
    }
    
    return MethodOfGettingAValueProfile();
}
1496
1497 bool Graph::isStringPrototypeMethodSane(JSObject* stringPrototype, Structure* stringPrototypeStructure, UniquedStringImpl* uid)
1498 {
1499     unsigned attributesUnused;
1500     PropertyOffset offset = stringPrototypeStructure->getConcurrently(uid, attributesUnused);
1501     if (!isValidOffset(offset))
1502         return false;
1503
1504     JSValue value = tryGetConstantProperty(stringPrototype, stringPrototypeStructure, offset);
1505     if (!value)
1506         return false;
1507
1508     JSFunction* function = jsDynamicCast<JSFunction*>(value);
1509     if (!function)
1510         return false;
1511
1512     if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
1513         return false;
1514     
1515     return true;
1516 }
1517
1518 bool Graph::getRegExpPrototypeProperty(JSObject* regExpPrototype, Structure* regExpPrototypeStructure, UniquedStringImpl* uid, JSValue& returnJSValue)
1519 {
1520     unsigned attributesUnused;
1521     PropertyOffset offset = regExpPrototypeStructure->getConcurrently(uid, attributesUnused);
1522     if (!isValidOffset(offset))
1523         return false;
1524
1525     JSValue value = tryGetConstantProperty(regExpPrototype, regExpPrototypeStructure, offset);
1526     if (!value)
1527         return false;
1528
1529     // We only care about functions and getters at this point. If you want to access other properties
1530     // you'll have to add code for those types.
1531     JSFunction* function = jsDynamicCast<JSFunction*>(value);
1532     if (!function) {
1533         GetterSetter* getterSetter = jsDynamicCast<GetterSetter*>(value);
1534
1535         if (!getterSetter)
1536             return false;
1537
1538         returnJSValue = JSValue(getterSetter);
1539         return true;
1540     }
1541
1542     returnJSValue = value;
1543     return true;
1544 }
1545
// Decides whether ToString on a StringObject can be optimized at this code
// origin: requires no prior NotStringObject exits, a watchable (non-dictionary)
// String.prototype structure, no Symbol.toPrimitive on the string object, and
// sane built-in valueOf/toString methods.
bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
{
    if (hasExitSite(codeOrigin, NotStringObject))
        return false;

    Structure* stringObjectStructure = globalObjectFor(codeOrigin)->stringObjectStructure();
    registerStructure(stringObjectStructure);
    ASSERT(stringObjectStructure->storedPrototype().isObject());
    ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo() == StringPrototype::info());

    // Freeze the prototype so we can reason about it as a constant.
    FrozenValue* stringPrototypeObjectValue = freeze(stringObjectStructure->storedPrototype());
    StringPrototype* stringPrototypeObject = stringPrototypeObjectValue->dynamicCast<StringPrototype*>();
    Structure* stringPrototypeStructure = stringPrototypeObjectValue->structure();
    // We need the prototype structure watched so changes invalidate this code.
    if (registerStructure(stringPrototypeStructure) != StructureRegisteredAndWatched)
        return false;

    if (stringPrototypeStructure->isDictionary())
        return false;

    // A Symbol.toPrimitive on the string object would bypass valueOf/toString.
    if (!watchConditions(generateConditionsForPropertyMissConcurrently(m_vm, globalObjectFor(codeOrigin), stringObjectStructure, m_vm.propertyNames->toPrimitiveSymbol.impl())))
        return false;

    // We're being conservative here. We want DFG's ToString on StringObject to be
    // used in both numeric contexts (that would call valueOf()) and string contexts
    // (that would call toString()). We don't want the DFG to have to distinguish
    // between the two, just because that seems like it would get confusing. So we
    // just require both methods to be sane.
    if (!isStringPrototypeMethodSane(stringPrototypeObject, stringPrototypeStructure, m_vm.propertyNames->valueOf.impl()))
        return false;
    if (!isStringPrototypeMethodSane(stringPrototypeObject, stringPrototypeStructure, m_vm.propertyNames->toString.impl()))
        return false;

    return true;
}
1580
// Determines whether an exception thrown at codeOrigin is caught within this
// machine frame, walking the inline stack outward. On success, fills in the
// catch's code origin and handler and returns true.
bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
{
    if (!m_hasExceptionHandlers)
        return false;

    unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
    while (1) {
        // Check this inline level's baseline code block for a handler covering
        // the bytecode index.
        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
        CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
        if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
            opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
            catchHandlerOut = handler;
            return true;
        }

        // No handler at the outermost level means the exception leaves the frame.
        if (!inlineCallFrame)
            return false;

        // Continue the search at the direct caller's site.
        bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
        codeOrigin = codeOrigin.inlineCallFrame->directCaller;
    }

    RELEASE_ASSERT_NOT_REACHED();
}
1605
1606 } } // namespace JSC::DFG
1607
1608 #endif // ENABLE(DFG_JIT)