DFG_ASSERT should allow stuffing registers before trapping.
Source/JavaScriptCore/dfg/DFGGraph.cpp
/*
 * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGGraph.h"

#if ENABLE(DFG_JIT)

#include "BytecodeKills.h"
#include "BytecodeLivenessAnalysisInlines.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGBackwardsCFG.h"
#include "DFGBackwardsDominators.h"
#include "DFGBlockWorklist.h"
#include "DFGCFG.h"
#include "DFGClobberSet.h"
#include "DFGClobbersExitState.h"
#include "DFGControlEquivalenceAnalysis.h"
#include "DFGDominators.h"
#include "DFGFlowIndexing.h"
#include "DFGFlowMap.h"
#include "DFGJITCode.h"
#include "DFGMayExit.h"
#include "DFGNaturalLoops.h"
#include "DFGPrePostNumbering.h"
#include "DFGVariableAccessDataDump.h"
#include "FullBytecodeLiveness.h"
#include "FunctionExecutableDump.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JSCInlines.h"
#include "JSLexicalEnvironment.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "OperandsInlines.h"
#include "StackAlignment.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>

namespace JSC { namespace DFG {

// Creates an array of stringized names.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};

Graph::Graph(VM& vm, Plan& plan)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock)
    , m_profiledBlock(m_codeBlock->alternative())
    , m_cfg(std::make_unique<CFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);
    
    m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();
    
    m_indexingCache = std::make_unique<FlowIndexing>(*this);
    m_abstractValuesCache = std::make_unique<FlowMap<AbstractValue>>(*this);

    registerStructure(vm.structureStructure.get());
    this->stringStructure = registerStructure(vm.stringStructure.get());
    this->symbolStructure = registerStructure(vm.symbolStructure.get());
}

Graph::~Graph()
{
}

const char* Graph::opName(NodeType op)
{
    return dfgOpNames[op];
}

static void printWhiteSpace(PrintStream& out, unsigned amount)
{
    while (amount-- > 0)
        out.print(" ");
}

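// Prints the inline call frame transitions (pops and pushes) between the
// previously dumped node and currentNode, so that a linear node dump still
// shows the inlining structure. Returns true if anything was printed.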
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;
    
    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    if (!previousNode)
        return false;
    
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}

int Graph::amountOfNodeWhiteSpace(Node* node)
{
    return (node->origin.semantic.inlineDepth() - 1) * 2;
}

void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
{
    printWhiteSpace(out, amountOfNodeWhiteSpace(node));
}

void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count is prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. They may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithUnaryType())
        out.print(comma, "Type:", node->arithUnaryType());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasArithRoundingMode())
        out.print(comma, "Rounding:", node->arithRoundingMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasArgumentIndex())
        out.print(comma, node->argumentIndex());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasClassInfo())
        out.print(comma, *node->classInfo());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet().toStructureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure().get(), context));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next.get()));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(m_vm, executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasSpeculatedTypeForQuery())
        out.print(comma, SpeculationDump(node->speculatedTypeForQuery()));
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->accessesStack(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasLazyJSValue())
        out.print(comma, node->lazyJSValue());
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->hasCallDOMGetterData()) {
        CallDOMGetterData* data = node->callDOMGetterData();
        out.print(comma, "id", data->identifierNumber, "{", identifiers()[data->identifierNumber], "}");
        out.print(", domJIT = ", RawPointer(data->domJIT));
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    if (!node->origin.exitOK)
        out.print(comma, "ExitInvalid");
    if (node->origin.wasHoisted)
        out.print(comma, "WasHoisted");
    out.print(")");

    if (node->accessesStack(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}

bool Graph::terminalsAreValid()
{
    for (BasicBlock* block : blocksInNaturalOrder()) {
        if (!block->terminal())
            return false;
    }
    return true;
}

void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
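    // executionCount is a double; the self-comparison below is false only for
    // NaN, so the count is printed only when one has actually been recorded.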
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
            if (m_prePostNumbering)
                out.print(" (", m_prePostNumbering->edgeKind(block, successor), ")");
        }
    } else
        out.print(" <invalid>");
    out.print("\n");
    if (m_dominators && terminalsAreValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators->blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators->dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators->iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_backwardsDominators && terminalsAreValid()) {
        out.print(prefix, "  Backwards dominated by: ", m_backwardsDominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Backwards dominates: ", m_backwardsDominators->blocksDominatedBy(block), "\n");
    }
    if (m_controlEquivalenceAnalysis && terminalsAreValid()) {
        out.print(prefix, "  Control equivalent to:");
        for (BasicBlock* otherBlock : blocksInNaturalOrder()) {
            if (m_controlEquivalenceAnalysis->areEquivalent(block, otherBlock))
                out.print(" ", *otherBlock);
        }
        out.print("\n");
    }
    if (m_prePostNumbering)
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering->preNumber(block), "/", m_prePostNumbering->postNumber(block), "\n");
    if (m_naturalLoops) {
        if (const NaturalLoop* loop = m_naturalLoops->headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}

void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA)
        out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
    else
        out.print("  Arguments: ", listDump(m_arguments), "\n");
    out.print("\n");
    
    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));
    
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}

void Graph::deleteNode(Node* node)
{
    if (validationEnabled() && m_form == SSA) {
        for (BasicBlock* block : blocksInNaturalOrder()) {
            DFG_ASSERT(*this, node, !block->ssa->liveAtHead.contains(node));
            DFG_ASSERT(*this, node, !block->ssa->liveAtTail.contains(node));
        }
    }

    m_nodes.remove(node);
}

void Graph::packNodeIndices()
{
    m_nodes.packIndices();
}

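// Dethreading discards the data flow threaded through the CPS graph: every
// Phi's children are cleared, dropping the graph from ThreadedCPS back to
// LoadStore form, where local accesses are interpreted directly.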
void Graph::dethread()
{
    if (m_form == LoadStore || m_form == SSA)
        return;
    
    if (logCompilationChanges())
        dataLog("Dethreading DFG graph.\n");
    
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
            Node* phi = block->phis[phiIndex];
            phi->children.reset();
        }
    }
    
    m_form = LoadStore;
}

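// Reachability is computed with a simple forward worklist walk from block 0:
// handleSuccessor marks a successor reachable and queues it the first time it
// is seen, and records the predecessor edge unconditionally.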
void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
{
    if (!successor->isReachable) {
        successor->isReachable = true;
        worklist.append(successor);
    }
    
    successor->predecessors.append(block);
}

void Graph::determineReachability()
{
    Vector<BasicBlock*, 16> worklist;
    worklist.append(block(0));
    block(0)->isReachable = true;
    while (!worklist.isEmpty()) {
        BasicBlock* block = worklist.takeLast();
        for (unsigned i = block->numSuccessors(); i--;)
            handleSuccessor(worklist, block, block->successor(i));
    }
}

void Graph::resetReachability()
{
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        block->isReachable = false;
        block->predecessors.clear();
    }
    
    determineReachability();
}

namespace {

class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
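        // Iterate to a fixpoint: draining the worklist can make a Phi live,
        // which in SSA form makes its Upsilons live, and those can in turn
        // reach more nodes on the next pass.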
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};

} // anonymous namespace

void Graph::computeRefCounts()
{
    RefCountCalculator calculator(*this);
    calculator.calculate();
}

void Graph::killBlockAndItsContents(BasicBlock* block)
{
    if (auto& ssaData = block->ssa)
        ssaData->invalidate();
    for (unsigned phiIndex = block->phis.size(); phiIndex--;)
        deleteNode(block->phis[phiIndex]);
    for (Node* node : *block)
        deleteNode(node);
    
    killBlock(block);
}

void Graph::killUnreachableBlocks()
{
    invalidateNodeLiveness();

    for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        if (block->isReachable)
            continue;
        
        killBlockAndItsContents(block);
    }
}

void Graph::invalidateCFG()
{
    m_dominators = nullptr;
    m_naturalLoops = nullptr;
    m_prePostNumbering = nullptr;
    m_controlEquivalenceAnalysis = nullptr;
    m_backwardsDominators = nullptr;
    m_backwardsCFG = nullptr;
}

void Graph::invalidateNodeLiveness()
{
    if (m_form != SSA)
        return;

    for (BasicBlock* block : blocksInNaturalOrder())
        block->ssa->invalidate();
}

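// Walks the block from startIndexInBlock looking for the next access to the
// given variable: a SetLocal of it ends the walk, and the first matching
// GetLocal is replaced with newGetLocal (updating variablesAtTail) before the
// walk ends.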
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}

BlockList Graph::blocksInPreOrder()
{
    BlockList result;
    BlockWorklist worklist;
    worklist.push(block(0));
    while (BasicBlock* block = worklist.pop()) {
        result.append(block);
        for (unsigned i = block->numSuccessors(); i--;)
            worklist.push(block->successor(i));
    }
    return result;
}

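// Post order uses an explicit two-phase worklist: popping a block in Pre order
// pushes a Post marker for it plus its successors, and the block is appended
// to the result only when its Post marker comes back off the worklist.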
BlockList Graph::blocksInPostOrder()
{
    BlockList result;
    PostOrderBlockWorklist worklist;
    worklist.push(block(0));
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case VisitOrder::Pre:
            worklist.pushPost(item.node);
            for (unsigned i = item.node->numSuccessors(); i--;)
                worklist.push(item.node->successor(i));
            break;
        case VisitOrder::Post:
            result.append(item.node);
            break;
        }
    }
    return result;
}

void Graph::clearReplacements()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setReplacement(nullptr);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setReplacement(nullptr);
    }
}

void Graph::clearEpochs()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setEpoch(Epoch());
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setEpoch(Epoch());
    }
}

void Graph::initializeNodeOwners()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->owner = block;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->owner = block;
    }
}

void Graph::clearFlagsOnAllNodes(NodeFlags flags)
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->clearFlags(flags);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->clearFlags(flags);
    }
}

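// Tries to watch an ObjectPropertyCondition for the duration of this
// compilation: the object (and any prototype or required value) is kept alive
// via lazily-added weak references, a watchpoint is registered lazily, and a
// Presence condition additionally marks the (object, offset) pair safe to load.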
bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    if (!key.isWatchable())
        return false;
    
    m_plan.weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        m_plan.weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        m_plan.weakReferences.addLazily(key.requiredValue());
    
    m_plan.watchpoints.addLazily(key);

    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
    
    return true;
}

bool Graph::watchConditions(const ObjectPropertyConditionSet& keys)
{
    if (!keys.isValid())
        return false;

    for (const ObjectPropertyCondition& key : keys) {
        if (!watchCondition(key))
            return false;
    }
    return true;
}

bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
{
    return m_safeToLoad.contains(std::make_pair(base, offset));
}

InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    assertIsRegistered(key.structure());
    
    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }
    
    m_inferredTypes.add(key, typeDescriptor);

    m_plan.weakReferences.addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}

FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
    if (iter != m_bytecodeLiveness.end())
        return *iter->value;
    
    std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
    codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
    FullBytecodeLiveness& result = *liveness;
    m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
    return result;
}

FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}

BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
    if (iter != m_bytecodeKills.end())
        return *iter->value;
    
    std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
    codeBlock->livenessAnalysis().computeKills(*kills);
    BytecodeKills& result = *kills;
    m_bytecodeKills.add(codeBlock, WTFMove(kills));
    return result;
}

BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
{
    return killsFor(baselineCodeBlockFor(inlineCallFrame));
}

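// Walks the inline stack outward from codeOrigin, re-expressing the operand in
// each frame's coordinate system: call frame header slots get special handling,
// inlined arguments are always considered live, and everything else defers to
// the baseline bytecode liveness of the frame that owns the register.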
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    static const bool verbose = false;
    
    if (verbose)
        dataLog("Checking if operand is live: ", operand, "\n");
    CodeOrigin* codeOriginPtr = &codeOrigin;
    for (;;) {
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());
        
        if (verbose)
            dataLog("reg = ", reg, "\n");
        
        if (operand.offset() < codeOriginPtr->stackOffset() + CallFrame::headerSizeInRegisters) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < CallFrame::headerSizeInRegisters);
                
                if (codeOriginPtr->inlineCallFrame->isClosureCall
                    && reg.offset() == CallFrameSlot::callee) {
                    if (verbose)
                        dataLog("Looks like a callee.\n");
                    return true;
                }
                
                if (codeOriginPtr->inlineCallFrame->isVarargs()
                    && reg.offset() == CallFrameSlot::argumentCount) {
                    if (verbose)
                        dataLog("Looks like the argument count.\n");
                    return true;
                }
                
                return false;
            }

            if (verbose)
                dataLog("Asking the bytecode liveness.\n");
            return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
                reg.offset(), codeOriginPtr->bytecodeIndex);
        }
        
        InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
        if (!inlineCallFrame) {
            if (verbose)
                dataLog("Ran out of stack, returning true.\n");
            return true;
        }

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size()) {
            if (verbose)
                dataLog("Argument is live.\n");
            return true;
        }
        
        codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();

        // The first inline call frame could be an inline tail call.
        if (!codeOriginPtr) {
            if (verbose)
                dataLog("Dead because of tail inlining.\n");
            return false;
        }
    }
    
    RELEASE_ASSERT_NOT_REACHED();
}

BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
{
    BitVector result;
    result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
    forAllLocalsLiveInBytecode(
        codeOrigin,
        [&] (VirtualRegister reg) {
            ASSERT(reg.isLocal());
            result.quickSet(reg.toLocal());
        });
    return result;
}

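// Computes how many stack slots a call with argCount arguments needs below the
// frame pointer. As a worked example: on a 64-bit target where
// CallFrame::headerSizeInRegisters is 5, stackAlignmentRegisters() is 2, and
// CallerFrameAndPC::sizeInRegisters is 2, argCount = 3 gives
// frameSize = 5 + 3 = 8, which is already aligned, so the result is 8 - 2 = 6.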
unsigned Graph::parameterSlotsForArgCount(unsigned argCount)
{
    size_t frameSize = CallFrame::headerSizeInRegisters + argCount;
    size_t alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
    return alignedFrameSize - CallerFrameAndPC::sizeInRegisters;
}

unsigned Graph::frameRegisterCount()
{
    unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
    return roundLocalRegisterCountForFramePointerOffset(result);
}

unsigned Graph::stackPointerOffset()
{
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}

unsigned Graph::requiredRegisterCountForExit()
{
    unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
    for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
        InlineCallFrame* inlineCallFrame = *iter;
        CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
        unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
        count = std::max(count, requiredCount);
    }
    return count;
}

unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    // FIXME: We should make sure that frameRegisterCount() and requiredRegisterCountForExit()
    // never overflow. https://bugs.webkit.org/show_bug.cgi?id=173852
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const RegisteredStructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        RegisteredStructure structure = structureSet[i];
        
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.toStructureSet().contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}

JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
{
    return tryGetConstantProperty(base, RegisteredStructureSet(registerStructure(structure)), offset);
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
{
    if (structure.isInfinite()) {
        // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
        // watching to constant-fold the property.
        // https://bugs.webkit.org/show_bug.cgi?id=147271
        return JSValue();
    }
    
    return tryGetConstantProperty(base, structure.set(), offset);
}

JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}

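// Computes the strongest abstract value provable for the property across every
// structure in the set, by merging each structure's inferred type for uid.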
AbstractValue Graph::inferredValueForProperty(
    const RegisteredStructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
{
    AbstractValue result;
    base.forEach(
        [&] (RegisteredStructure structure) {
            AbstractValue value;
            value.set(*this, inferredTypeForProperty(structure.get(), uid));
            result.merge(value);
        });
    if (clobberState == StructuresAreClobbered)
        result.clobberStructures();
    return result;
}

AbstractValue Graph::inferredValueForProperty(
    const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
    StructureClobberState clobberState)
{
    if (JSValue value = tryGetConstantProperty(base, offset)) {
        AbstractValue result;
        result.set(*this, *freeze(value), clobberState);
        return result;
    }

    if (base.m_structure.isFinite())
        return inferredValueForProperty(base.m_structure.set(), uid, clobberState);

    return AbstractValue::heapTop();
}

JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(m_vm, base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        ConcurrentJSLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    watchpoints().addLazily(set);
    
    return value;
}

JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
{
    return tryGetConstantClosureVar(value.m_value, offset);
}

JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
{
    if (!node->hasConstant())
        return JSValue();
    return tryGetConstantClosureVar(node->asJSValue(), offset);
}

JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
{
    if (!value)
        return nullptr;
    JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(m_vm, value);
    if (!view)
        return nullptr;
    if (!view->length())
        return nullptr;
    WTF::loadLoadFence();
    watchpoints().addLazily(view);
    return view;
}

JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
{
    if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
        return nullptr;
    return tryGetFoldableView(value);
}

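// Rebuilds the code block's constant pool from the frozen values: weak values
// only need a weak reference to stay alive, while strong values get a real
// constant pool slot (the code block itself provides the write barrier).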
1332 {
1333     m_codeBlock->constants().resize(0);
1334     m_codeBlock->constantsSourceCodeRepresentation().resize(0);
1335     for (FrozenValue* value : m_frozenValues) {
1336         if (!value->pointsToHeap())
1337             continue;
1338         
1339         ASSERT(value->structure());
1340         ASSERT(m_plan.weakReferences.contains(value->structure()));
1341         
1342         switch (value->strength()) {
1343         case WeakValue: {
1344             m_plan.weakReferences.addLazily(value->value().asCell());
1345             break;
1346         }
1347         case StrongValue: {
1348             unsigned constantIndex = m_codeBlock->addConstantLazily();
1349             // We already have a barrier on the code block.
1350             m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
1351             break;
1352         } }
1353     }
1354     m_codeBlock->constants().shrinkToFit();
1355     m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
1356 }
1357
1358 void Graph::visitChildren(SlotVisitor& visitor)
1359 {
1360     for (FrozenValue* value : m_frozenValues) {
1361         visitor.appendUnbarriered(value->value());
1362         visitor.appendUnbarriered(value->structure());
1363     }
1364 }
1365
1366 FrozenValue* Graph::freeze(JSValue value)
1367 {
1368     if (UNLIKELY(!value))
1369         return FrozenValue::emptySingleton();
1370
1371     // There are weird relationships in how optimized CodeBlocks
1372     // point to other CodeBlocks. We don't want to have them be
1373     // part of the weak pointer set. For example, an optimized CodeBlock
1374     // having a weak pointer to itself will cause it to get collected.
1375     RELEASE_ASSERT(!jsDynamicCast<CodeBlock*>(m_vm, value));
1376     
1377     auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
1378     if (LIKELY(!result.isNewEntry))
1379         return result.iterator->value;
1380
1381     if (value.isUInt32())
1382         m_uint32ValuesInUse.append(value.asUInt32());
1383     
1384     FrozenValue frozenValue = FrozenValue::freeze(value);
1385     if (Structure* structure = frozenValue.structure())
1386         registerStructure(structure);
1387     
1388     return result.iterator->value = m_frozenValues.add(frozenValue);
1389 }
1390
1391 FrozenValue* Graph::freezeStrong(JSValue value)
1392 {
1393     FrozenValue* result = freeze(value);
1394     result->strengthenTo(StrongValue);
1395     return result;
1396 }
1397
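// A typical client of freeze()/freezeStrong() is a constant-folding phase. A
// minimal sketch of the intended usage (hypothetical caller, not code in this
// file):
//
//     if (JSValue constant = graph.tryGetConstantClosureVar(node->child1().node(), offset))
//         graph.convertToConstant(node, constant);
//
// convertToConstant(Node*, JSValue) freezes on behalf of the caller; use
// convertToStrongConstant() when the value must be kept alive unconditionally.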
1398 void Graph::convertToConstant(Node* node, FrozenValue* value)
1399 {
1400     if (value->structure())
1401         assertIsRegistered(value->structure());
1402     node->convertToConstant(value);
1403 }
1404
1405 void Graph::convertToConstant(Node* node, JSValue value)
1406 {
1407     convertToConstant(node, freeze(value));
1408 }
1409
1410 void Graph::convertToStrongConstant(Node* node, JSValue value)
1411 {
1412     convertToConstant(node, freezeStrong(value));
1413 }
1414
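// Registering a structure makes it a weak reference of the plan and, if the
// structure's transition watchpoint set is still watchable, also arranges for
// it to be watched. The out-parameter tells the caller which of those two
// guarantees it actually got, along the lines of (hypothetical caller):
//
//     StructureRegistrationResult result;
//     RegisteredStructure registered = graph.registerStructure(structure, result);
//     if (result == StructureRegisteredAndWatched) { /* may rely on no transitions */ }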
1415 RegisteredStructure Graph::registerStructure(Structure* structure, StructureRegistrationResult& result)
1416 {
1417     m_plan.weakReferences.addLazily(structure);
1418     if (m_plan.watchpoints.consider(structure))
1419         result = StructureRegisteredAndWatched;
1420     else
1421         result = StructureRegisteredNormally;
1422     return RegisteredStructure::createPrivate(structure);
1423 }
1424
1425 void Graph::assertIsRegistered(Structure* structure)
1426 {
1427     // It's convenient to be able to call this with a maybe-null structure.
1428     if (!structure)
1429         return;
1430     
1431     DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
1432     
1433     if (!structure->dfgShouldWatch())
1434         return;
1435     if (watchpoints().isWatched(structure->transitionWatchpointSet()))
1436         return;
1437     
1438     DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
1439 }
1440
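// Prints the failing assertion both before and after the graph dump,
// presumably so that it survives no matter which end of the log gets
// truncated.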
1441 static void logForCrash(
1442     Graph& graph, const CString& whileText, const char* file, int line, const char* function,
1443     const char* assertion)
1444 {
1445     startCrashing();
1446     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1447     dataLog(file, "(", line, ") : ", function, "\n");
1448     dataLog("\n");
1449     dataLog(whileText);
1450     dataLog("Graph at time of failure:\n");
1451     graph.dump();
1452     dataLog("\n");
1453     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1454     dataLog(file, "(", line, ") : ", function, "\n");
1455     WTFReportBacktrace();
1456 }
1457
1458 void Graph::logAssertionFailure(
1459     std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
1460 {
1461     logForCrash(*this, "", file, line, function, assertion);
1462 }
1463
1464 void Graph::logAssertionFailure(
1465     Node* node, const char* file, int line, const char* function, const char* assertion)
1466 {
1467     logForCrash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
1468 }
1469
1470 void Graph::logAssertionFailure(
1471     BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
1472 {
1473     logForCrash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
1474 }
1475
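// The ensure*() accessors below lazily build and cache per-graph analyses.
// Note that ensureNaturalLoops() forces dominators first, since loop
// detection is defined in terms of the dominator tree.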
1476 Dominators& Graph::ensureDominators()
1477 {
1478     if (!m_dominators)
1479         m_dominators = std::make_unique<Dominators>(*this);
1480     return *m_dominators;
1481 }
1482
1483 PrePostNumbering& Graph::ensurePrePostNumbering()
1484 {
1485     if (!m_prePostNumbering)
1486         m_prePostNumbering = std::make_unique<PrePostNumbering>(*this);
1487     return *m_prePostNumbering;
1488 }
1489
1490 NaturalLoops& Graph::ensureNaturalLoops()
1491 {
1492     ensureDominators();
1493     if (!m_naturalLoops)
1494         m_naturalLoops = std::make_unique<NaturalLoops>(*this);
1495     return *m_naturalLoops;
1496 }
1497
1498 BackwardsCFG& Graph::ensureBackwardsCFG()
1499 {
1500     if (!m_backwardsCFG)
1501         m_backwardsCFG = std::make_unique<BackwardsCFG>(*this);
1502     return *m_backwardsCFG;
1503 }
1504
1505 BackwardsDominators& Graph::ensureBackwardsDominators()
1506 {
1507     if (!m_backwardsDominators)
1508         m_backwardsDominators = std::make_unique<BackwardsDominators>(*this);
1509     return *m_backwardsDominators;
1510 }
1511
1512 ControlEquivalenceAnalysis& Graph::ensureControlEquivalenceAnalysis()
1513 {
1514     if (!m_controlEquivalenceAnalysis)
1515         m_controlEquivalenceAnalysis = std::make_unique<ControlEquivalenceAnalysis>(*this);
1516     return *m_controlEquivalenceAnalysis;
1517 }
1518
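// Walks backwards from operandNode through value-preserving conversions
// (Identity, ValueRep, and friends) until it reaches a node whose origin
// differs from currentNode's, then maps that node to a profile in the
// baseline code block: an argument profile, a lazy-operand profile for
// GetLocal, a heap-prediction profile, or an arith profile. OSR exit uses the
// result to feed the actually-observed value back into baseline profiling.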
1519 MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* currentNode, Node* operandNode)
1520 {
1521     for (Node* node = operandNode; node;) {
1522         // currentNode is null when we're doing speculation checks for checkArgumentTypes().
1523         if (!currentNode || node->origin != currentNode->origin) {
1524             CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
1525
1526             if (node->accessesStack(*this)) {
1527                 ValueProfile* result = [&] () -> ValueProfile* {
1528                     if (!node->local().isArgument())
1529                         return nullptr;
1530                     int argument = node->local().toArgument();
1531                     Node* argumentNode = m_arguments[argument];
1532                     if (!argumentNode)
1533                         return nullptr;
1534                     if (node->variableAccessData() != argumentNode->variableAccessData())
1535                         return nullptr;
1536                     return profiledBlock->valueProfileForArgument(argument);
1537                 }();
1538                 if (result)
1539                     return result;
1540
1541                 if (node->op() == GetLocal) {
1542                     return MethodOfGettingAValueProfile::fromLazyOperand(
1543                         profiledBlock,
1544                         LazyOperandValueProfileKey(
1545                             node->origin.semantic.bytecodeIndex, node->local()));
1546                 }
1547             }
1548
1549             if (node->hasHeapPrediction())
1550                 return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
1551
1552             if (profiledBlock->hasBaselineJITProfiling()) {
1553                 if (ArithProfile* result = profiledBlock->arithProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex))
1554                     return result;
1555             }
1556         }
1557
1558         switch (node->op()) {
1559         case BooleanToNumber:
1560         case Identity:
1561         case ValueRep:
1562         case DoubleRep:
1563         case Int52Rep:
1564             node = node->child1().node();
1565             break;
1566         default:
1567             node = nullptr;
1568         }
1569     }
1570     
1571     return MethodOfGettingAValueProfile();
1572 }
1573
1574 bool Graph::getRegExpPrototypeProperty(JSObject* regExpPrototype, Structure* regExpPrototypeStructure, UniquedStringImpl* uid, JSValue& returnJSValue)
1575 {
1576     unsigned attributesUnused;
1577     PropertyOffset offset = regExpPrototypeStructure->getConcurrently(uid, attributesUnused);
1578     if (!isValidOffset(offset))
1579         return false;
1580
1581     JSValue value = tryGetConstantProperty(regExpPrototype, regExpPrototypeStructure, offset);
1582     if (!value)
1583         return false;
1584
1585     // We only care about functions and getters at this point. If you want to access other properties
1586     // you'll have to add code for those types.
1587     JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, value);
1588     if (!function) {
1589         GetterSetter* getterSetter = jsDynamicCast<GetterSetter*>(m_vm, value);
1590
1591         if (!getterSetter)
1592             return false;
1593
1594         returnJSValue = JSValue(getterSetter);
1595         return true;
1596     }
1597
1598     returnJSValue = value;
1599     return true;
1600 }
1601
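// "Sane" here means the method is still the original String.prototype
// intrinsic. We prove that with an equivalence condition over the prototype
// chain and then watch the conditions, so the code gets jettisoned if the
// method is ever replaced.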
1602 bool Graph::isStringPrototypeMethodSane(JSGlobalObject* globalObject, UniquedStringImpl* uid)
1603 {
1604     ObjectPropertyConditionSet conditions = generateConditionsForPrototypeEquivalenceConcurrently(m_vm, globalObject, globalObject->stringObjectStructure(), globalObject->stringPrototype(), uid);
1605
1606     if (!conditions.isValid())
1607         return false;
1608
1609     ObjectPropertyCondition equivalenceCondition = conditions.slotBaseCondition();
1610     RELEASE_ASSERT(equivalenceCondition.hasRequiredValue());
1611     JSFunction* function = jsDynamicCast<JSFunction*>(m_vm, equivalenceCondition.condition().requiredValue());
1612     if (!function)
1613         return false;
1614
1615     if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
1616         return false;
1617     
1618     return watchConditions(conditions);
1619 }
1620
1621
1622 bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
1623 {
1624     if (hasExitSite(codeOrigin, NotStringObject))
1625         return false;
1626
1627     JSGlobalObject* globalObject = globalObjectFor(codeOrigin);
1628     Structure* stringObjectStructure = globalObject->stringObjectStructure();
1629     registerStructure(stringObjectStructure);
1630     ASSERT(stringObjectStructure->storedPrototype().isObject());
1631     ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo(*stringObjectStructure->storedPrototype().asCell()->vm()) == StringPrototype::info());
1632
1633     if (!watchConditions(generateConditionsForPropertyMissConcurrently(m_vm, globalObject, stringObjectStructure, m_vm.propertyNames->toPrimitiveSymbol.impl())))
1634         return false;
1635
1636     // We're being conservative here. We want DFG's ToString on StringObject to be
1637     // used in both numeric contexts (that would call valueOf()) and string contexts
1638     // (that would call toString()). We don't want the DFG to have to distinguish
1639     // between the two, just because that seems like it would get confusing. So we
1640     // just require both methods to be sane.
1641     if (!isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->valueOf.impl()))
1642         return false;
1643     return isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->toString.impl());
1644 }
1645
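// Walks the inline stack outwards from codeOrigin, asking each baseline code
// block whether it has a handler covering the relevant bytecode index. The
// out-parameters are only meaningful when this returns true; a typical caller
// looks like (hypothetical):
//
//     CodeOrigin opCatchOrigin;
//     HandlerInfo* handler = nullptr;
//     if (graph.willCatchExceptionInMachineFrame(origin, opCatchOrigin, handler)) {
//         // wire the exception edge to the handler
//     }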
1646 bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
1647 {
1648     if (!m_hasExceptionHandlers)
1649         return false;
1650
1651     unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
1652     while (1) {
1653         InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
1654         CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
1655         if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
1656             opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
1657             catchHandlerOut = handler;
1658             return true;
1659         }
1660
1661         if (!inlineCallFrame)
1662             return false;
1663
1664         bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
1665         codeOrigin = inlineCallFrame->directCaller;
1666     }
1667
1668     RELEASE_ASSERT_NOT_REACHED();
1669 }
1670
1671 bool Graph::canDoFastSpread(Node* node, const AbstractValue& value)
1672 {
1673     // The parameter 'value' is the AbstractValue for child1 (the thing being spread).
1674     ASSERT(node->op() == Spread);
1675
1676     if (node->child1().useKind() != ArrayUse) {
1677         // Note: we only speculate on ArrayUse when we've set up the necessary watchpoints
1678         // to prove that the iteration protocol is non-observable starting from ArrayPrototype.
1679         return false;
1680     }
1681
1682     // FIXME: We should add profiling of the incoming operand to Spread
1683     // so we can speculate in such a way that we guarantee that this
1684     // function would return true:
1685     // https://bugs.webkit.org/show_bug.cgi?id=171198
1686
1687     if (!value.m_structure.isFinite())
1688         return false;
1689
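    // Every possible structure must be vanilla: its prototype is the (watched)
    // ArrayPrototype, it isn't a dictionary (whose property table could gain a
    // Symbol.iterator without a transition), it has no own Symbol.iterator
    // shadowing the prototype's, and it doesn't intercept indexed accesses.
    // For example, the structure of a plain [1, 2, 3] literal passes, while an
    // array that redefines Symbol.iterator fails the getConcurrently() check.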
1690     ArrayPrototype* arrayPrototype = globalObjectFor(node->child1()->origin.semantic)->arrayPrototype();
1691     bool allGood = true;
1692     value.m_structure.forEach([&] (RegisteredStructure structure) {
1693         allGood &= structure->storedPrototype() == arrayPrototype
1694             && !structure->isDictionary()
1695             && structure->getConcurrently(m_vm.propertyNames->iteratorSymbol.impl()) == invalidOffset
1696             && !structure->mayInterceptIndexedAccesses();
1697     });
1698
1699     return allGood;
1700 }
1701
1702 } } // namespace JSC::DFG
1703
1704 #endif // ENABLE(DFG_JIT)