Get rid of HeapRootVisitor and make SlotVisitor less painful to use
[WebKit-https.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
1 /*
2  * Copyright (C) 2011, 2013-2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGGraph.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "BytecodeKills.h"
32 #include "BytecodeLivenessAnalysisInlines.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGBackwardsCFG.h"
36 #include "DFGBackwardsDominators.h"
37 #include "DFGBlockWorklist.h"
38 #include "DFGCFG.h"
39 #include "DFGClobberSet.h"
40 #include "DFGClobbersExitState.h"
41 #include "DFGControlEquivalenceAnalysis.h"
42 #include "DFGDominators.h"
43 #include "DFGFlowIndexing.h"
44 #include "DFGFlowMap.h"
45 #include "DFGJITCode.h"
46 #include "DFGMayExit.h"
47 #include "DFGNaturalLoops.h"
48 #include "DFGPrePostNumbering.h"
49 #include "DFGVariableAccessDataDump.h"
50 #include "FullBytecodeLiveness.h"
51 #include "FunctionExecutableDump.h"
52 #include "GetterSetter.h"
53 #include "JIT.h"
54 #include "JSLexicalEnvironment.h"
55 #include "MaxFrameExtentForSlowPathCall.h"
56 #include "OperandsInlines.h"
57 #include "JSCInlines.h"
58 #include "StackAlignment.h"
59 #include <wtf/CommaPrinter.h>
60 #include <wtf/ListDump.h>
61
62 namespace JSC { namespace DFG {
63
// Creates an array of stringized names.
// dfgOpNames[op] yields the printable name of a DFG NodeType. The table is
// generated by expanding FOR_EACH_DFG_OP, so its order is assumed to match
// the NodeType enumeration (see opName(), which indexes it directly).
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};
70
// Constructs a Graph for compiling plan.codeBlock. The LongLivedState
// supplies the node allocator, which is shared across compilations.
Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock)
    // alternative() is the version of the code block we profiled —
    // presumably the baseline block; TODO confirm.
    , m_profiledBlock(m_codeBlock->alternative())
    , m_allocator(longLivedState.m_allocator)
    , m_cfg(std::make_unique<CFG>(*this))
    , m_nextMachineLocal(0)
    // Compilation starts in LoadStore form, before any fixpoint has run,
    // with every node assumed live and no structures registered yet.
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);
    
    m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();
    
    // Lazily-consulted caches for dataflow analyses over this graph.
    m_indexingCache = std::make_unique<FlowIndexing>(*this);
    m_abstractValuesCache = std::make_unique<FlowMap<AbstractValue>>(*this);
}
92
93 Graph::~Graph()
94 {
95     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
96         BasicBlock* block = this->block(blockIndex);
97         if (!block)
98             continue;
99
100         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
101             m_allocator.free(block->phis[phiIndex]);
102         for (unsigned nodeIndex = block->size(); nodeIndex--;)
103             m_allocator.free(block->at(nodeIndex));
104     }
105     m_allocator.freeAll();
106 }
107
108 const char *Graph::opName(NodeType op)
109 {
110     return dfgOpNames[op];
111 }
112
113 static void printWhiteSpace(PrintStream& out, unsigned amount)
114 {
115     while (amount-- > 0)
116         out.print(" ");
117 }
118
// Prints the change in the inline call frame stack between the previously
// dumped node and currentNode, rendered as "<--" lines for frames popped and
// "-->" lines for frames pushed. Updates previousNodeRef to currentNode.
// Returns true if anything was printed.
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;
    
    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    if (!previousNode)
        return false;
    
    // Fast path: both nodes sit in the same inline frame, so the stacks are
    // identical and there is nothing to report.
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    // Find the shallowest depth at which the two inline stacks diverge; any
    // frames at or below that depth were popped and/or pushed.
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}
164
165 int Graph::amountOfNodeWhiteSpace(Node* node)
166 {
167     return (node->origin.semantic.inlineDepth() - 1) * 2;
168 }
169
170 void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
171 {
172     printWhiteSpace(out, amountOfNodeWhiteSpace(node));
173 }
174
// Dumps a single node on one line in the textual IR format described below.
// 'prefix' is printed at the start of the line; 'context' (may be null) lets
// heavyweight values (structures, frozen values, ...) be dumped once, later,
// instead of inline.
void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    // We print the "real" ref count, excluding the implicit mustGenerate
    // ref; mustGenerate is instead shown as a '!' marker.
    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count is prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. They may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    // Print the comma-separated child (operand) list. Var-arg nodes store
    // their children in m_varArgChildren; others use the three fixed slots.
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    // From here on: one clause per kind of node meta-data payload. Each
    // clause prints only if the node actually carries that payload.
    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasArithRoundingMode())
        out.print(comma, "Rounding:", node->arithRoundingMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasArgumentIndex())
        out.print(comma, node->argumentIndex());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
// Structure IDs are only a thing on 64-bit; elsewhere print the pointer.
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            // NOTE(review): isCell() is necessarily true here given the
            // check above; the inner test looks redundant but is harmless.
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasSpeculatedTypeForQuery())
        out.print(comma, SpeculationDump(node->speculatedTypeForQuery()));
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->accessesStack(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal()) 
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasLazyJSValue())
        out.print(comma, node->lazyJSValue());
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->hasCallDOMGetterData()) {
        CallDOMGetterData* data = node->callDOMGetterData();
        out.print(comma, "id", data->identifierNumber, "{", identifiers()[data->identifierNumber], "}");
        out.print(", domJIT = ", RawPointer(data->domJIT));
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    // Control-flow nodes: print their successor targets.
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    // Clobberize summary: the abstract heap locations this node reads (R:)
    // and writes (W:), as computed by addReadsAndWrites().
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        // Only show the exit origin when it differs from the semantic one.
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    if (!node->origin.exitOK)
        out.print(comma, "ExitInvalid");
    if (node->origin.wasHoisted)
        out.print(comma, "WasHoisted");
    out.print(")");

    if (node->accessesStack(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}
406
407 bool Graph::terminalsAreValid()
408 {
409     for (BasicBlock* block : blocksInNaturalOrder()) {
410         if (!block->terminal())
411             return false;
412     }
413     return true;
414 }
415
// Prints the header for one basic block: identity, execution count,
// predecessors/successors, any analysis results that have been computed
// (dominators, natural loops, pre/post numbering, control equivalence),
// and the block's Phi nodes. phiNodeDumpMode selects whether dead Phis are
// shown.
void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
    // Self-comparison is false only for NaN (assuming executionCount is a
    // floating-point value — TODO confirm); i.e. print the count only when
    // one was actually recorded.
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    // Successors are only well-defined once the block has a terminal.
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
            if (m_prePostNumbering)
                out.print(" (", m_prePostNumbering->edgeKind(block, successor), ")");
        }
    } else
        out.print(" <invalid>");
    out.print("\n");
    // The analyses below cache pointers into the CFG, so only consult them
    // when the CFG is in a valid state.
    if (m_dominators && terminalsAreValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators->blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators->dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators->iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_backwardsDominators && terminalsAreValid()) {
        // NOTE(review): "Backwards dominates by" looks like a typo for
        // "Backwards dominated by" — left unchanged since tooling may grep
        // for the exact string.
        out.print(prefix, "  Backwards dominates by: ", m_backwardsDominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Backwards dominates: ", m_backwardsDominators->blocksDominatedBy(block), "\n");
    }
    if (m_controlEquivalenceAnalysis && terminalsAreValid()) {
        out.print(prefix, "  Control equivalent to:");
        for (BasicBlock* otherBlock : blocksInNaturalOrder()) {
            if (m_controlEquivalenceAnalysis->areEquivalent(block, otherBlock))
                out.print(" ", *otherBlock);
        }
        out.print("\n");
    }
    if (m_prePostNumbering)
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering->preNumber(block), "/", m_prePostNumbering->postNumber(block), "\n");
    if (m_naturalLoops) {
        if (const NaturalLoop* loop = m_naturalLoops->headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            // Sort member block indices so the dump is stable.
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            // Format: @index<local,refCount>->(@child1, @child2, @child3)
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}
496
// Dumps the whole graph: a header with the compilation state, every block's
// header, CFA state, and nodes, the frozen values the GC must see, the
// watchpoints, and finally any values whose dumping was deferred into the
// context.
void Graph::dump(PrintStream& out, DumpContext* context)
{
    // If the caller did not supply a context, use a local one and flush it
    // at the end of this dump.
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA)
        out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
    else
        out.print("  Arguments: ", listDump(m_arguments), "\n");
    out.print("\n");
    
    // lastNode is threaded across blocks so dumpCodeOrigin() can show
    // inline-stack transitions between consecutive nodes.
    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        // The at-head state is form-specific: CPS tracks variables, SSA
        // tracks availability/liveness/value maps.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        // Mirror of the at-head dump, for the at-tail state.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    // Frozen values: only those that point into the heap matter to the GC.
    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));
    
    // Flush anything (structures, etc.) that was deferred into our local
    // context during this dump.
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}
595
596 void Graph::addNodeToMapByIndex(Node* node)
597 {
598     if (m_nodeIndexFreeList.isEmpty()) {
599         node->m_index = m_nodesByIndex.size();
600         m_nodesByIndex.append(node);
601         return;
602     }
603     unsigned index = m_nodeIndexFreeList.takeLast();
604     node->m_index = index;
605     ASSERT(!m_nodesByIndex[index]);
606     m_nodesByIndex[index] = node;
607 }
608
// Removes 'node' from the index map, recycles its index, and returns its
// storage to the allocator. The caller must ensure nothing still references
// the node.
void Graph::deleteNode(Node* node)
{
    // In SSA form, deleting a node that is still in some block's liveness
    // set would leave dangling pointers; catch that under validation.
    if (validationEnabled() && m_form == SSA) {
        for (BasicBlock* block : blocksInNaturalOrder()) {
            DFG_ASSERT(*this, node, !block->ssa->liveAtHead.contains(node));
            DFG_ASSERT(*this, node, !block->ssa->liveAtTail.contains(node));
        }
    }

    RELEASE_ASSERT(m_nodesByIndex[node->m_index] == node);
    unsigned nodeIndex = node->m_index;
    m_nodesByIndex[nodeIndex] = nullptr;
    // Recycle the index so addNodeToMapByIndex() can reuse it.
    m_nodeIndexFreeList.append(nodeIndex);

    m_allocator.free(node);
}
625
// Compacts m_nodesByIndex so that node indices are dense again: nodes from
// the tail of the table are moved into the holes left by deleteNode(), their
// m_index fields are updated, and the table is shrunk. Afterwards the free
// list is empty.
void Graph::packNodeIndices()
{
    if (m_nodeIndexFreeList.isEmpty())
        return;

    // Classic two-pointer compaction: holeIndex scans forward for empty
    // slots, endIndex scans backward for live nodes to relocate.
    unsigned holeIndex = 0;
    unsigned endIndex = m_nodesByIndex.size();

    while (true) {
        // Advance holeIndex to the next empty slot (if any).
        while (holeIndex < endIndex && m_nodesByIndex[holeIndex])
            ++holeIndex;

        if (holeIndex == endIndex)
            break;
        ASSERT(holeIndex < m_nodesByIndex.size());
        ASSERT(!m_nodesByIndex[holeIndex]);

        // Retreat endIndex to the last live node (if any remain past the
        // hole).
        do {
            --endIndex;
        } while (!m_nodesByIndex[endIndex] && endIndex > holeIndex);

        if (holeIndex == endIndex)
            break;
        ASSERT(endIndex > holeIndex);
        ASSERT(m_nodesByIndex[endIndex]);

        // Move the tail node into the hole and renumber it.
        auto& value = m_nodesByIndex[endIndex];
        value->m_index = holeIndex;
        m_nodesByIndex[holeIndex] = WTFMove(value);
        ++holeIndex;
    }

    m_nodeIndexFreeList.resize(0);
    m_nodesByIndex.resize(endIndex);
}
661
662 void Graph::dethread()
663 {
664     if (m_form == LoadStore || m_form == SSA)
665         return;
666     
667     if (logCompilationChanges())
668         dataLog("Dethreading DFG graph.\n");
669     
670     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
671         BasicBlock* block = m_blocks[blockIndex].get();
672         if (!block)
673             continue;
674         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
675             Node* phi = block->phis[phiIndex];
676             phi->children.reset();
677         }
678     }
679     
680     m_form = LoadStore;
681 }
682
683 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
684 {
685     if (!successor->isReachable) {
686         successor->isReachable = true;
687         worklist.append(successor);
688     }
689     
690     successor->predecessors.append(block);
691 }
692
693 void Graph::determineReachability()
694 {
695     Vector<BasicBlock*, 16> worklist;
696     worklist.append(block(0));
697     block(0)->isReachable = true;
698     while (!worklist.isEmpty()) {
699         BasicBlock* block = worklist.takeLast();
700         for (unsigned i = block->numSuccessors(); i--;)
701             handleSuccessor(worklist, block, block->successor(i));
702     }
703 }
704
705 void Graph::resetReachability()
706 {
707     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
708         BasicBlock* block = m_blocks[blockIndex].get();
709         if (!block)
710             continue;
711         block->isReachable = false;
712         block->predecessors.clear();
713     }
714     
715     determineReachability();
716 }
717
718 namespace {
719
// Computes refCounts for every node in the graph. Roots are nodes flagged
// NodeMustGenerate and nodes reachable from unproved type checks; counts are
// then propagated to transitive children via a worklist until a fixpoint.
class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
        // Propagate counts to children until the worklist drains. The outer loop
        // exists because in SSA form, processing Upsilons below may enqueue more work.
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        // A live Phi keeps its Upsilons alive; enqueue the Upsilon so
                        // its children get counted on the next outer iteration.
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    // Roots any node whose use has an unproved type check.
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    // Refs the node; enqueues it only on the 0 -> 1 transition.
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};
816
817 } // anonymous namespace
818
819 void Graph::computeRefCounts()
820 {
821     RefCountCalculator calculator(*this);
822     calculator.calculate();
823 }
824
825 void Graph::killBlockAndItsContents(BasicBlock* block)
826 {
827     if (auto& ssaData = block->ssa)
828         ssaData->invalidate();
829     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
830         deleteNode(block->phis[phiIndex]);
831     for (Node* node : *block)
832         deleteNode(node);
833     
834     killBlock(block);
835 }
836
837 void Graph::killUnreachableBlocks()
838 {
839     invalidateNodeLiveness();
840
841     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
842         BasicBlock* block = this->block(blockIndex);
843         if (!block)
844             continue;
845         if (block->isReachable)
846             continue;
847         
848         killBlockAndItsContents(block);
849     }
850 }
851
// Drops every cached CFG-derived analysis. Call after changing the block graph;
// the corresponding ensureXXX() accessors rebuild each analysis lazily.
void Graph::invalidateCFG()
{
    m_dominators = nullptr;
    m_naturalLoops = nullptr;
    m_prePostNumbering = nullptr;
    m_controlEquivalenceAnalysis = nullptr;
    m_backwardsDominators = nullptr;
    m_backwardsCFG = nullptr;
}
861
862 void Graph::invalidateNodeLiveness()
863 {
864     if (m_form != SSA)
865         return;
866
867     for (BasicBlock* block : blocksInNaturalOrder())
868         block->ssa->invalidate();
869 }
870
// Rewrites uses of GetLocal nodes for the given variable to use newGetLocal,
// scanning forward from startIndexInBlock. The scan stops at the first SetLocal
// of the same local, or after the first matching GetLocal has been substituted.
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            // A store to the same local ends the region we may rewrite.
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            // Keep variablesAtTail consistent if the replaced node was the tail value.
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}
901
902 BlockList Graph::blocksInPreOrder()
903 {
904     BlockList result;
905     BlockWorklist worklist;
906     worklist.push(block(0));
907     while (BasicBlock* block = worklist.pop()) {
908         result.append(block);
909         for (unsigned i = block->numSuccessors(); i--;)
910             worklist.push(block->successor(i));
911     }
912     return result;
913 }
914
915 BlockList Graph::blocksInPostOrder()
916 {
917     BlockList result;
918     PostOrderBlockWorklist worklist;
919     worklist.push(block(0));
920     while (BlockWithOrder item = worklist.pop()) {
921         switch (item.order) {
922         case VisitOrder::Pre:
923             worklist.pushPost(item.node);
924             for (unsigned i = item.node->numSuccessors(); i--;)
925                 worklist.push(item.node->successor(i));
926             break;
927         case VisitOrder::Post:
928             result.append(item.node);
929             break;
930         }
931     }
932     return result;
933 }
934
935 void Graph::clearReplacements()
936 {
937     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
938         BasicBlock* block = m_blocks[blockIndex].get();
939         if (!block)
940             continue;
941         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
942             block->phis[phiIndex]->setReplacement(nullptr);
943         for (unsigned nodeIndex = block->size(); nodeIndex--;)
944             block->at(nodeIndex)->setReplacement(nullptr);
945     }
946 }
947
948 void Graph::clearEpochs()
949 {
950     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
951         BasicBlock* block = m_blocks[blockIndex].get();
952         if (!block)
953             continue;
954         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
955             block->phis[phiIndex]->setEpoch(Epoch());
956         for (unsigned nodeIndex = block->size(); nodeIndex--;)
957             block->at(nodeIndex)->setEpoch(Epoch());
958     }
959 }
960
961 void Graph::initializeNodeOwners()
962 {
963     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
964         BasicBlock* block = m_blocks[blockIndex].get();
965         if (!block)
966             continue;
967         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
968             block->phis[phiIndex]->owner = block;
969         for (unsigned nodeIndex = block->size(); nodeIndex--;)
970             block->at(nodeIndex)->owner = block;
971     }
972 }
973
974 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
975 {
976     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
977         BasicBlock* block = m_blocks[blockIndex].get();
978         if (!block)
979             continue;
980         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
981             block->phis[phiIndex]->clearFlags(flags);
982         for (unsigned nodeIndex = block->size(); nodeIndex--;)
983             block->at(nodeIndex)->clearFlags(flags);
984     }
985 }
986
// Attempts to watch the given object property condition. Returns false if it is
// not watchable; otherwise registers the weak references and lazy watchpoints
// needed to keep the condition valid for this compilation, and returns true.
bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    if (!key.isWatchable())
        return false;
    
    // Keep every cell the condition mentions alive weakly for the compilation.
    m_plan.weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        m_plan.weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        m_plan.weakReferences.addLazily(key.requiredValue());
    
    m_plan.watchpoints.addLazily(key);

    // A watched Presence condition makes loads from that (object, offset) safe;
    // isSafeToLoad() consults this set.
    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
    
    return true;
}
1005
1006 bool Graph::watchConditions(const ObjectPropertyConditionSet& keys)
1007 {
1008     if (!keys.isValid())
1009         return false;
1010
1011     for (const ObjectPropertyCondition& key : keys) {
1012         if (!watchCondition(key))
1013             return false;
1014     }
1015     return true;
1016 }
1017
1018 bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
1019 {
1020     return m_safeToLoad.contains(std::make_pair(base, offset));
1021 }
1022
// Returns (and memoizes in m_inferredTypes) the inferred type descriptor for
// the given (structure, uid) property key, registering the weak reference and
// watchpoint that keep the answer valid for this compilation.
InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    assertIsRegistered(key.structure());
    
    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        // No inferred-type metadata for this property: nothing to watch, so Top.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        // Top gives us no information, so there is no point setting up a watchpoint.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }
    
    m_inferredTypes.add(key, typeDescriptor);

    m_plan.weakReferences.addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}
1054
1055 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
1056 {
1057     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
1058     if (iter != m_bytecodeLiveness.end())
1059         return *iter->value;
1060     
1061     std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
1062     codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
1063     FullBytecodeLiveness& result = *liveness;
1064     m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
1065     return result;
1066 }
1067
1068 FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
1069 {
1070     return livenessFor(baselineCodeBlockFor(inlineCallFrame));
1071 }
1072
1073 BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
1074 {
1075     HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
1076     if (iter != m_bytecodeKills.end())
1077         return *iter->value;
1078     
1079     std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
1080     codeBlock->livenessAnalysis().computeKills(*kills);
1081     BytecodeKills& result = *kills;
1082     m_bytecodeKills.add(codeBlock, WTFMove(kills));
1083     return result;
1084 }
1085
1086 BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
1087 {
1088     return killsFor(baselineCodeBlockFor(inlineCallFrame));
1089 }
1090
1091 bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
1092 {
1093     static const bool verbose = false;
1094     
1095     if (verbose)
1096         dataLog("Checking of operand is live: ", operand, "\n");
1097     CodeOrigin* codeOriginPtr = &codeOrigin;
1098     for (;;) {
1099         VirtualRegister reg = VirtualRegister(
1100             operand.offset() - codeOriginPtr->stackOffset());
1101         
1102         if (verbose)
1103             dataLog("reg = ", reg, "\n");
1104         
1105         if (operand.offset() < codeOriginPtr->stackOffset() + CallFrame::headerSizeInRegisters) {
1106             if (reg.isArgument()) {
1107                 RELEASE_ASSERT(reg.offset() < CallFrame::headerSizeInRegisters);
1108                 
1109                 if (codeOriginPtr->inlineCallFrame->isClosureCall
1110                     && reg.offset() == CallFrameSlot::callee) {
1111                     if (verbose)
1112                         dataLog("Looks like a callee.\n");
1113                     return true;
1114                 }
1115                 
1116                 if (codeOriginPtr->inlineCallFrame->isVarargs()
1117                     && reg.offset() == CallFrameSlot::argumentCount) {
1118                     if (verbose)
1119                         dataLog("Looks like the argument count.\n");
1120                     return true;
1121                 }
1122                 
1123                 return false;
1124             }
1125
1126             if (verbose)
1127                 dataLog("Asking the bytecode liveness.\n");
1128             return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
1129                 reg.offset(), codeOriginPtr->bytecodeIndex);
1130         }
1131         
1132         InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
1133         if (!inlineCallFrame) {
1134             if (verbose)
1135                 dataLog("Ran out of stack, returning true.\n");
1136             return true;
1137         }
1138
1139         // Arguments are always live. This would be redundant if it wasn't for our
1140         // op_call_varargs inlining.
1141         if (reg.isArgument()
1142             && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size()) {
1143             if (verbose)
1144                 dataLog("Argument is live.\n");
1145             return true;
1146         }
1147         
1148         codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();
1149
1150         // The first inline call frame could be an inline tail call
1151         if (!codeOriginPtr) {
1152             if (verbose)
1153                 dataLog("Dead because of tail inlining.\n");
1154             return false;
1155         }
1156     }
1157     
1158     RELEASE_ASSERT_NOT_REACHED();
1159 }
1160
1161 BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
1162 {
1163     BitVector result;
1164     result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
1165     forAllLocalsLiveInBytecode(
1166         codeOrigin,
1167         [&] (VirtualRegister reg) {
1168             ASSERT(reg.isLocal());
1169             result.quickSet(reg.toLocal());
1170         });
1171     return result;
1172 }
1173
1174 unsigned Graph::parameterSlotsForArgCount(unsigned argCount)
1175 {
1176     size_t frameSize = CallFrame::headerSizeInRegisters + argCount;
1177     size_t alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
1178     return alignedFrameSize - CallerFrameAndPC::sizeInRegisters;
1179 }
1180
1181 unsigned Graph::frameRegisterCount()
1182 {
1183     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
1184     return roundLocalRegisterCountForFramePointerOffset(result);
1185 }
1186
1187 unsigned Graph::stackPointerOffset()
1188 {
1189     return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
1190 }
1191
1192 unsigned Graph::requiredRegisterCountForExit()
1193 {
1194     unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
1195     for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
1196         InlineCallFrame* inlineCallFrame = *iter;
1197         CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
1198         unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
1199         count = std::max(count, requiredCount);
1200     }
1201     return count;
1202 }
1203
1204 unsigned Graph::requiredRegisterCountForExecutionAndExit()
1205 {
1206     return std::max(frameRegisterCount(), requiredRegisterCountForExit());
1207 }
1208
// Attempts to constant-fold a property load at the given offset from the given
// base over the given structure set. Returns the empty JSValue on failure;
// on success, registers a replacement watchpoint on every structure in the set
// so the fold stays valid, and returns the property's current value.
JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        assertIsRegistered(structure);
        
        // If any structure's replacement watchpoint has fired (or doesn't exist),
        // the property may be mutated and cannot be folded.
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}
1256
1257 JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
1258 {
1259     return tryGetConstantProperty(base, StructureSet(structure), offset);
1260 }
1261
1262 JSValue Graph::tryGetConstantProperty(
1263     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1264 {
1265     if (structure.isInfinite()) {
1266         // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
1267         // watching to constant-fold the property.
1268         // https://bugs.webkit.org/show_bug.cgi?id=147271
1269         return JSValue();
1270     }
1271     
1272     return tryGetConstantProperty(base, structure.set(), offset);
1273 }
1274
// Convenience overload: folds using the abstract value's constant value and
// proven structure set.
JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}
1279
1280 AbstractValue Graph::inferredValueForProperty(
1281     const StructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
1282 {
1283     AbstractValue result;
1284     base.forEach(
1285         [&] (Structure* structure) {
1286             AbstractValue value;
1287             value.set(*this, inferredTypeForProperty(structure, uid));
1288             result.merge(value);
1289         });
1290     if (clobberState == StructuresAreClobbered)
1291         result.clobberStructures();
1292     return result;
1293 }
1294
1295 AbstractValue Graph::inferredValueForProperty(
1296     const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
1297     StructureClobberState clobberState)
1298 {
1299     if (JSValue value = tryGetConstantProperty(base, offset)) {
1300         AbstractValue result;
1301         result.set(*this, *freeze(value), clobberState);
1302         return result;
1303     }
1304
1305     if (base.m_structure.isFinite())
1306         return inferredValueForProperty(base.m_structure.set(), uid, clobberState);
1307
1308     return AbstractValue::heapTop();
1309 }
1310
// Attempts to constant-fold a closure-variable load from the given base at the
// given scope offset. Succeeds only when the variable's watchpoint set is in
// the IsWatched state; the set is then registered so mutation invalidates us.
JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        // The symbol table lock guards the entry lookup and the value read.
        ConcurrentJSLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        // Only a still-watched variable can be treated as constant.
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    // Registered outside the lock; addLazily defers the actual watchpoint setup.
    watchpoints().addLazily(set);
    
    return value;
}
1349
// Convenience overload: folds using the abstract value's constant value.
JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
{
    return tryGetConstantClosureVar(value.m_value, offset);
}
1354
1355 JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1356 {
1357     if (!node->hasConstant())
1358         return JSValue();
1359     return tryGetConstantClosureVar(node->asJSValue(), offset);
1360 }
1361
1362 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
1363 {
1364     if (!value)
1365         return nullptr;
1366     JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(value);
1367     if (!value)
1368         return nullptr;
1369     if (!view->length())
1370         return nullptr;
1371     WTF::loadLoadFence();
1372     watchpoints().addLazily(view);
1373     return view;
1374 }
1375
1376 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1377 {
1378     if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
1379         return nullptr;
1380     return tryGetFoldableView(value);
1381 }
1382
// Rebuilds the code block's constant pool from the frozen-value set. Weak
// values become weak references in the plan; strong values become actual
// constant-pool entries.
void Graph::registerFrozenValues()
{
    m_codeBlock->constants().resize(0);
    m_codeBlock->constantsSourceCodeRepresentation().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        // Non-heap values (numbers, etc.) need no GC bookkeeping.
        if (!value->pointsToHeap())
            continue;
        
        ASSERT(value->structure());
        ASSERT(m_plan.weakReferences.contains(value->structure()));
        
        switch (value->strength()) {
        case WeakValue: {
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            // We already have a barrier on the code block.
            m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
    m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
}
1409
// GC marking hook: reports every heap cell the graph references — frozen
// values and the structures embedded in node meta-data — to the visitor.
void Graph::visitChildren(SlotVisitor& visitor)
{
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarriered(value->value());
        visitor.appendUnbarriered(value->structure());
    }
    
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        
        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);
            
            // Each case below covers an op whose meta-data holds structures.
            switch (node->op()) {
            case CheckStructure:
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarriered(node->structureSet()[i]);
                break;
                
            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                visitor.appendUnbarriered(node->structure());
                break;
                
            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                // Transitions reference both the source and destination structure.
                visitor.appendUnbarriered(node->transition()->previous);
                visitor.appendUnbarriered(node->transition()->next);
                break;
                
            case MultiGetByOffset:
                for (const MultiGetByOffsetCase& getCase : node->multiGetByOffsetData().cases) {
                    for (Structure* structure : getCase.set())
                        visitor.appendUnbarriered(structure);
                }
                break;
                    
            case MultiPutByOffset:
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarriered(set[j]);
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarriered(variant.newStructure());
                }
                break;
                
            default:
                break;
            }
        }
    }
}
1468
// Returns the unique FrozenValue for the given JSValue, creating it (with weak
// strength) on first request. Registers any structure the value carries.
FrozenValue* Graph::freeze(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();
    
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    // Remember uint32 constants that are in use.
    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());
    
    FrozenValue frozenValue = FrozenValue::freeze(value);
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);
    
    // Fill in the map slot we reserved above so subsequent lookups hit the cache.
    return result.iterator->value = m_frozenValues.add(frozenValue);
}
1487
1488 FrozenValue* Graph::freezeStrong(JSValue value)
1489 {
1490     FrozenValue* result = freeze(value);
1491     result->strengthenTo(StrongValue);
1492     return result;
1493 }
1494
1495 void Graph::convertToConstant(Node* node, FrozenValue* value)
1496 {
1497     if (value->structure())
1498         assertIsRegistered(value->structure());
1499     node->convertToConstant(value);
1500 }
1501
1502 void Graph::convertToConstant(Node* node, JSValue value)
1503 {
1504     convertToConstant(node, freeze(value));
1505 }
1506
1507 void Graph::convertToStrongConstant(Node* node, JSValue value)
1508 {
1509     convertToConstant(node, freezeStrong(value));
1510 }
1511
1512 StructureRegistrationResult Graph::registerStructure(Structure* structure)
1513 {
1514     m_plan.weakReferences.addLazily(structure);
1515     if (m_plan.watchpoints.consider(structure))
1516         return StructureRegisteredAndWatched;
1517     return StructureRegisteredNormally;
1518 }
1519
// Debug check that the structure was registered via registerStructure():
// it must be weakly referenced, and if watchable, actually watched.
void Graph::assertIsRegistered(Structure* structure)
{
    // It's convenient to be able to call this with a maybe-null structure.
    if (!structure)
        return;
    
    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
    
    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;
    
    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}
1535
// Shared assertion-failure handler: prints the failure banner, the context
// description, and the whole graph, then repeats the banner (so it's visible
// below a long dump) before crashing.
NO_RETURN_DUE_TO_CRASH static void crash(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    CRASH_WITH_SECURITY_IMPLICATION();
}
1552
1553 void Graph::handleAssertionFailure(
1554     std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
1555 {
1556     crash(*this, "", file, line, function, assertion);
1557 }
1558
1559 void Graph::handleAssertionFailure(
1560     Node* node, const char* file, int line, const char* function, const char* assertion)
1561 {
1562     crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
1563 }
1564
1565 void Graph::handleAssertionFailure(
1566     BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
1567 {
1568     crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
1569 }
1570
1571 Dominators& Graph::ensureDominators()
1572 {
1573     if (!m_dominators)
1574         m_dominators = std::make_unique<Dominators>(*this);
1575     return *m_dominators;
1576 }
1577
1578 PrePostNumbering& Graph::ensurePrePostNumbering()
1579 {
1580     if (!m_prePostNumbering)
1581         m_prePostNumbering = std::make_unique<PrePostNumbering>(*this);
1582     return *m_prePostNumbering;
1583 }
1584
1585 NaturalLoops& Graph::ensureNaturalLoops()
1586 {
1587     ensureDominators();
1588     if (!m_naturalLoops)
1589         m_naturalLoops = std::make_unique<NaturalLoops>(*this);
1590     return *m_naturalLoops;
1591 }
1592
1593 BackwardsCFG& Graph::ensureBackwardsCFG()
1594 {
1595     if (!m_backwardsCFG)
1596         m_backwardsCFG = std::make_unique<BackwardsCFG>(*this);
1597     return *m_backwardsCFG;
1598 }
1599
1600 BackwardsDominators& Graph::ensureBackwardsDominators()
1601 {
1602     if (!m_backwardsDominators)
1603         m_backwardsDominators = std::make_unique<BackwardsDominators>(*this);
1604     return *m_backwardsDominators;
1605 }
1606
1607 ControlEquivalenceAnalysis& Graph::ensureControlEquivalenceAnalysis()
1608 {
1609     if (!m_controlEquivalenceAnalysis)
1610         m_controlEquivalenceAnalysis = std::make_unique<ControlEquivalenceAnalysis>(*this);
1611     return *m_controlEquivalenceAnalysis;
1612 }
1613
1614 MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* currentNode, Node* operandNode)
1615 {
1616     for (Node* node = operandNode; node;) {
1617         // currentNode is null when we're doing speculation checks for checkArgumentTypes().
1618         if (!currentNode || node->origin != currentNode->origin) {
1619             CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
1620
1621             if (node->accessesStack(*this)) {
1622                 ValueProfile* result = [&] () -> ValueProfile* {
1623                     if (!node->local().isArgument())
1624                         return nullptr;
1625                     int argument = node->local().toArgument();
1626                     Node* argumentNode = m_arguments[argument];
1627                     if (!argumentNode)
1628                         return nullptr;
1629                     if (node->variableAccessData() != argumentNode->variableAccessData())
1630                         return nullptr;
1631                     return profiledBlock->valueProfileForArgument(argument);
1632                 }();
1633                 if (result)
1634                     return result;
1635
1636                 if (node->op() == GetLocal) {
1637                     return MethodOfGettingAValueProfile::fromLazyOperand(
1638                         profiledBlock,
1639                         LazyOperandValueProfileKey(
1640                             node->origin.semantic.bytecodeIndex, node->local()));
1641                 }
1642             }
1643
1644             if (node->hasHeapPrediction())
1645                 return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
1646
1647             if (profiledBlock->hasBaselineJITProfiling()) {
1648                 if (ArithProfile* result = profiledBlock->arithProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex))
1649                     return result;
1650             }
1651         }
1652
1653         switch (node->op()) {
1654         case BooleanToNumber:
1655         case Identity:
1656         case ValueRep:
1657         case DoubleRep:
1658         case Int52Rep:
1659             node = node->child1().node();
1660             break;
1661         default:
1662             node = nullptr;
1663         }
1664     }
1665     
1666     return MethodOfGettingAValueProfile();
1667 }
1668
1669 bool Graph::getRegExpPrototypeProperty(JSObject* regExpPrototype, Structure* regExpPrototypeStructure, UniquedStringImpl* uid, JSValue& returnJSValue)
1670 {
1671     unsigned attributesUnused;
1672     PropertyOffset offset = regExpPrototypeStructure->getConcurrently(uid, attributesUnused);
1673     if (!isValidOffset(offset))
1674         return false;
1675
1676     JSValue value = tryGetConstantProperty(regExpPrototype, regExpPrototypeStructure, offset);
1677     if (!value)
1678         return false;
1679
1680     // We only care about functions and getters at this point. If you want to access other properties
1681     // you'll have to add code for those types.
1682     JSFunction* function = jsDynamicCast<JSFunction*>(value);
1683     if (!function) {
1684         GetterSetter* getterSetter = jsDynamicCast<GetterSetter*>(value);
1685
1686         if (!getterSetter)
1687             return false;
1688
1689         returnJSValue = JSValue(getterSetter);
1690         return true;
1691     }
1692
1693     returnJSValue = value;
1694     return true;
1695 }
1696
1697 bool Graph::isStringPrototypeMethodSane(JSGlobalObject* globalObject, UniquedStringImpl* uid)
1698 {
1699     ObjectPropertyConditionSet conditions = generateConditionsForPrototypeEquivalenceConcurrently(m_vm, globalObject, globalObject->stringObjectStructure(), globalObject->stringPrototype(), uid);
1700
1701     if (!conditions.isValid())
1702         return false;
1703
1704     ObjectPropertyCondition equivalenceCondition = conditions.slotBaseCondition();
1705     RELEASE_ASSERT(equivalenceCondition.hasRequiredValue());
1706     JSFunction* function = jsDynamicCast<JSFunction*>(equivalenceCondition.condition().requiredValue());
1707     if (!function)
1708         return false;
1709
1710     if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
1711         return false;
1712     
1713     return watchConditions(conditions);
1714 }
1715
1716
1717 bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
1718 {
1719     if (hasExitSite(codeOrigin, NotStringObject))
1720         return false;
1721
1722     JSGlobalObject* globalObject = globalObjectFor(codeOrigin);
1723     Structure* stringObjectStructure = globalObjectFor(codeOrigin)->stringObjectStructure();
1724     registerStructure(stringObjectStructure);
1725     ASSERT(stringObjectStructure->storedPrototype().isObject());
1726     ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo() == StringPrototype::info());
1727
1728     if (!watchConditions(generateConditionsForPropertyMissConcurrently(m_vm, globalObject, stringObjectStructure, m_vm.propertyNames->toPrimitiveSymbol.impl())))
1729         return false;
1730
1731     // We're being conservative here. We want DFG's ToString on StringObject to be
1732     // used in both numeric contexts (that would call valueOf()) and string contexts
1733     // (that would call toString()). We don't want the DFG to have to distinguish
1734     // between the two, just because that seems like it would get confusing. So we
1735     // just require both methods to be sane.
1736     if (!isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->valueOf.impl()))
1737         return false;
1738     return isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->toString.impl());
1739 }
1740
1741 bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
1742 {
1743     if (!m_hasExceptionHandlers)
1744         return false;
1745
1746     unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
1747     while (1) {
1748         InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
1749         CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
1750         if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
1751             opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
1752             catchHandlerOut = handler;
1753             return true;
1754         }
1755
1756         if (!inlineCallFrame)
1757             return false;
1758
1759         bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
1760         codeOrigin = codeOrigin.inlineCallFrame->directCaller;
1761     }
1762
1763     RELEASE_ASSERT_NOT_REACHED();
1764 }
1765
1766 } } // namespace JSC::DFG
1767
1768 #endif // ENABLE(DFG_JIT)