[JSC] DFG::Node should not have its own allocator
[WebKit.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
1 /*
2  * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGGraph.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "BytecodeKills.h"
32 #include "BytecodeLivenessAnalysisInlines.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGBackwardsCFG.h"
36 #include "DFGBackwardsDominators.h"
37 #include "DFGBlockWorklist.h"
38 #include "DFGCFG.h"
39 #include "DFGClobberSet.h"
40 #include "DFGClobbersExitState.h"
41 #include "DFGControlEquivalenceAnalysis.h"
42 #include "DFGDominators.h"
43 #include "DFGJITCode.h"
44 #include "DFGMayExit.h"
45 #include "DFGNaturalLoops.h"
46 #include "DFGPrePostNumbering.h"
47 #include "DFGVariableAccessDataDump.h"
48 #include "FullBytecodeLiveness.h"
49 #include "FunctionExecutableDump.h"
50 #include "GetterSetter.h"
51 #include "JIT.h"
52 #include "JSLexicalEnvironment.h"
53 #include "MaxFrameExtentForSlowPathCall.h"
54 #include "OperandsInlines.h"
55 #include "JSCInlines.h"
56 #include "StackAlignment.h"
57 #include <wtf/CommaPrinter.h>
58 #include <wtf/ListDump.h>
59
60 namespace JSC { namespace DFG {
61
// Table of human-readable opcode names, indexed by NodeType. Each entry is
// produced by stringizing the opcode in the FOR_EACH_DFG_OP x-macro list,
// so the table stays in sync with the NodeType enumeration by construction.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};
68
// Constructs a DFG graph for the given compilation plan. A fresh graph
// starts in LoadStore form, before any fixpoint has run, locally unified,
// and with every node considered live (ref counts not yet computed).
Graph::Graph(VM& vm, Plan& plan)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock)
    , m_profiledBlock(m_codeBlock->alternative())
    , m_cfg(std::make_unique<CFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    // The profiled (baseline) block is where type/value profiles come from;
    // DFG compilation requires it.
    ASSERT(m_profiledBlock);
    
    m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();
}
86
87 Graph::~Graph()
88 {
89 }
90
91 const char *Graph::opName(NodeType op)
92 {
93     return dfgOpNames[op];
94 }
95
96 static void printWhiteSpace(PrintStream& out, unsigned amount)
97 {
98     while (amount-- > 0)
99         out.print(" ");
100 }
101
// Prints the change in inline-call-frame context between the previously
// dumped node and currentNode as a series of "<--" pops and "-->" pushes,
// indented by inline depth. Always updates previousNodeRef to currentNode.
// Returns true if anything was printed.
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;
    
    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    // The first node dumped has nothing to diff against.
    if (!previousNode)
        return false;
    
    // Fast path: identical innermost frames imply identical inline stacks.
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    // Find the first position at which the two inline stacks diverge.
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops: frames the previous node was in that we have left,
    // innermost first.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes: frames the current node has entered, outermost first.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}
147
148 int Graph::amountOfNodeWhiteSpace(Node* node)
149 {
150     return (node->origin.semantic.inlineDepth() - 1) * 2;
151 }
152
153 void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
154 {
155     printWhiteSpace(out, amountOfNodeWhiteSpace(node));
156 }
157
// Dumps a single node on one line: index, ref count, virtual register,
// opcode, children, any opcode-specific metadata payloads, abstract-heap
// read/write sets, exit state, and code origin. The exact format is
// explained in the comment block below.
void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    // The printed ref count excludes the implicit 'mustGenerate' ref; that
    // ref is shown as a '!' prefix instead.
    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count it prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. The may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    // Children either live out-of-line in m_varArgChildren (varargs nodes)
    // or in the node's three inline edges.
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        // Print a possibly-empty slot if any later slot is occupied, so the
        // positional meaning of each child is preserved in the dump.
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    // From here on, print only the metadata payloads this opcode carries.
    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasArithRoundingMode())
        out.print(comma, "Rounding:", node->arithRoundingMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            // NOTE(review): isCell() is already guaranteed by the else branch
            // above; this re-check is redundant but harmless.
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasLazyJSValue())
        out.print(comma, node->lazyJSValue());
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    // Control-flow nodes print their targets.
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    // Abstract-heap effects: R: what the node reads, W: what it writes.
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    // Code origin: show the exit origin only when it differs from the
    // semantic origin.
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    if (!node->origin.exitOK)
        out.print(comma, "ExitInvalid");
    if (node->origin.wasHoisted)
        out.print(comma, "WasHoisted");
    out.print(")");

    // Prediction summary: prefer the variable's prediction when the node
    // accesses a variable, else the node's own heap prediction.
    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}
380
381 bool Graph::terminalsAreValid()
382 {
383     for (BasicBlock* block : blocksInNaturalOrder()) {
384         if (!block->terminal())
385             return false;
386     }
387     return true;
388 }
389
// Prints a block's header: its identity, execution count, predecessor and
// successor edges, any available CFG analyses (dominators, loops, pre/post
// numbering, control equivalence), and its Phi nodes. phiNodeDumpMode
// selects whether dead Phis are included.
void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
    // Self-comparison is false only for NaN: print the execution count only
    // when it is an actual number.
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    // Successors are only meaningful if the block has a terminal.
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
            if (m_prePostNumbering)
                out.print(" (", m_prePostNumbering->edgeKind(block, successor), ")");
        }
    } else
        out.print(" <invalid>");
    out.print("\n");
    // The CFG analyses below assume valid terminals; skip them otherwise.
    if (m_dominators && terminalsAreValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators->blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators->dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators->iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_backwardsDominators && terminalsAreValid()) {
        out.print(prefix, "  Backwards dominates by: ", m_backwardsDominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Backwards dominates: ", m_backwardsDominators->blocksDominatedBy(block), "\n");
    }
    if (m_controlEquivalenceAnalysis && terminalsAreValid()) {
        out.print(prefix, "  Control equivalent to:");
        for (BasicBlock* otherBlock : blocksInNaturalOrder()) {
            if (m_controlEquivalenceAnalysis->areEquivalent(block, otherBlock))
                out.print(" ", *otherBlock);
        }
        out.print("\n");
    }
    if (m_prePostNumbering)
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering->preNumber(block), "/", m_prePostNumbering->postNumber(block), "\n");
    if (m_naturalLoops) {
        // If this block heads a loop, list the loop body in sorted order.
        if (const NaturalLoop* loop = m_naturalLoops->headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}
470
// Dumps the entire graph: a summary line, then each block with its header,
// head state, nodes, and tail state (format depends on whether the graph is
// in CPS or SSA form), followed by the frozen GC values and watchpoints.
// If no DumpContext is supplied, a local one is used and flushed at the end.
void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA)
        out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
    else
        out.print("  Arguments: ", listDump(m_arguments), "\n");
    out.print("\n");
    
    // lastNode threads through dumpCodeOrigin() so each node's inline-stack
    // delta is printed relative to the node dumped before it.
    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        // Head state: CPS forms show per-variable values and links; SSA shows
        // availability, liveness, and node values.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        // Tail state, mirroring the head-state dump above.
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    // Frozen values that the GC must know about.
    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));
    
    // Flush anything the local context accumulated (e.g. structure dumps).
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}
569
570 void Graph::deleteNode(Node* node)
571 {
572     if (validationEnabled() && m_form == SSA) {
573         for (BasicBlock* block : blocksInNaturalOrder()) {
574             DFG_ASSERT(*this, node, !block->ssa->liveAtHead.contains(node));
575             DFG_ASSERT(*this, node, !block->ssa->liveAtTail.contains(node));
576         }
577     }
578     m_nodes.remove(node);
579 }
580
581 void Graph::dethread()
582 {
583     if (m_form == LoadStore || m_form == SSA)
584         return;
585     
586     if (logCompilationChanges())
587         dataLog("Dethreading DFG graph.\n");
588     
589     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
590         BasicBlock* block = m_blocks[blockIndex].get();
591         if (!block)
592             continue;
593         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
594             Node* phi = block->phis[phiIndex];
595             phi->children.reset();
596         }
597     }
598     
599     m_form = LoadStore;
600 }
601
602 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
603 {
604     if (!successor->isReachable) {
605         successor->isReachable = true;
606         worklist.append(successor);
607     }
608     
609     successor->predecessors.append(block);
610 }
611
612 void Graph::determineReachability()
613 {
614     Vector<BasicBlock*, 16> worklist;
615     worklist.append(block(0));
616     block(0)->isReachable = true;
617     while (!worklist.isEmpty()) {
618         BasicBlock* block = worklist.takeLast();
619         for (unsigned i = block->numSuccessors(); i--;)
620             handleSuccessor(worklist, block, block->successor(i));
621     }
622 }
623
624 void Graph::resetReachability()
625 {
626     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
627         BasicBlock* block = m_blocks[blockIndex].get();
628         if (!block)
629             continue;
630         block->isReachable = false;
631         block->predecessors.clear();
632     }
633     
634     determineReachability();
635 }
636
637 namespace {
638
// Recomputes the ref count of every node in the graph from scratch. Roots
// are must-generate nodes and nodes reachable through unproven type checks;
// counts then propagate through data-flow edges (and, in SSA, through
// Phi->Upsilon metadata) until a fixpoint is reached. A node's final ref
// count is the number of distinct reasons it must be generated.
class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                // postfixRef() returns the old count; zero means this is the
                // first ref, so the node still needs its children visited.
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
        // Outer loop: alternate between draining the edge worklist and
        // discovering newly live Upsilons (SSA only) until nothing changes.
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        // A dead Upsilon feeding a live Phi becomes live.
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    // Roots the target of any type check whose speculation is not already
    // proved: the checked node must exist for the check to run.
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    // Bumps a node's ref count; queues it the first time it becomes live.
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    // Counts a pure data-flow edge from a live node to its child.
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};
735
736 } // anonymous namespace
737
738 void Graph::computeRefCounts()
739 {
740     RefCountCalculator calculator(*this);
741     calculator.calculate();
742 }
743
744 void Graph::killBlockAndItsContents(BasicBlock* block)
745 {
746     if (auto& ssaData = block->ssa)
747         ssaData->invalidate();
748     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
749         deleteNode(block->phis[phiIndex]);
750     for (Node* node : *block)
751         deleteNode(node);
752     
753     killBlock(block);
754 }
755
756 void Graph::killUnreachableBlocks()
757 {
758     invalidateNodeLiveness();
759
760     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
761         BasicBlock* block = this->block(blockIndex);
762         if (!block)
763             continue;
764         if (block->isReachable)
765             continue;
766         
767         killBlockAndItsContents(block);
768     }
769 }
770
771 void Graph::invalidateCFG()
772 {
773     m_dominators = nullptr;
774     m_naturalLoops = nullptr;
775     m_prePostNumbering = nullptr;
776     m_controlEquivalenceAnalysis = nullptr;
777     m_backwardsDominators = nullptr;
778     m_backwardsCFG = nullptr;
779 }
780
781 void Graph::invalidateNodeLiveness()
782 {
783     if (m_form != SSA)
784         return;
785
786     for (BasicBlock* block : blocksInNaturalOrder())
787         block->ssa->invalidate();
788 }
789
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    // Starting at startIndexInBlock, rewrite uses of the first GetLocal on
    // `variableAccessData` to use `newGetLocal` instead. The scan stops at a
    // SetLocal of the same local (which redefines the value) or once a
    // matching GetLocal has been substituted.
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            // A store to the same local ends the region in which the old
            // GetLocal is the current value.
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            // Keep variablesAtTail coherent if the node we just replaced was
            // the block's tail value for this operand.
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}
820
821 BlockList Graph::blocksInPreOrder()
822 {
823     BlockList result;
824     BlockWorklist worklist;
825     worklist.push(block(0));
826     while (BasicBlock* block = worklist.pop()) {
827         result.append(block);
828         for (unsigned i = block->numSuccessors(); i--;)
829             worklist.push(block->successor(i));
830     }
831     return result;
832 }
833
834 BlockList Graph::blocksInPostOrder()
835 {
836     BlockList result;
837     PostOrderBlockWorklist worklist;
838     worklist.push(block(0));
839     while (BlockWithOrder item = worklist.pop()) {
840         switch (item.order) {
841         case VisitOrder::Pre:
842             worklist.pushPost(item.node);
843             for (unsigned i = item.node->numSuccessors(); i--;)
844                 worklist.push(item.node->successor(i));
845             break;
846         case VisitOrder::Post:
847             result.append(item.node);
848             break;
849         }
850     }
851     return result;
852 }
853
854 void Graph::clearReplacements()
855 {
856     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
857         BasicBlock* block = m_blocks[blockIndex].get();
858         if (!block)
859             continue;
860         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
861             block->phis[phiIndex]->setReplacement(nullptr);
862         for (unsigned nodeIndex = block->size(); nodeIndex--;)
863             block->at(nodeIndex)->setReplacement(nullptr);
864     }
865 }
866
867 void Graph::clearEpochs()
868 {
869     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
870         BasicBlock* block = m_blocks[blockIndex].get();
871         if (!block)
872             continue;
873         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
874             block->phis[phiIndex]->setEpoch(Epoch());
875         for (unsigned nodeIndex = block->size(); nodeIndex--;)
876             block->at(nodeIndex)->setEpoch(Epoch());
877     }
878 }
879
880 void Graph::initializeNodeOwners()
881 {
882     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
883         BasicBlock* block = m_blocks[blockIndex].get();
884         if (!block)
885             continue;
886         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
887             block->phis[phiIndex]->owner = block;
888         for (unsigned nodeIndex = block->size(); nodeIndex--;)
889             block->at(nodeIndex)->owner = block;
890     }
891 }
892
893 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
894 {
895     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
896         BasicBlock* block = m_blocks[blockIndex].get();
897         if (!block)
898             continue;
899         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
900             block->phis[phiIndex]->clearFlags(flags);
901         for (unsigned nodeIndex = block->size(); nodeIndex--;)
902             block->at(nodeIndex)->clearFlags(flags);
903     }
904 }
905
906 bool Graph::watchCondition(const ObjectPropertyCondition& key)
907 {
908     if (!key.isWatchable())
909         return false;
910     
911     m_plan.weakReferences.addLazily(key.object());
912     if (key.hasPrototype())
913         m_plan.weakReferences.addLazily(key.prototype());
914     if (key.hasRequiredValue())
915         m_plan.weakReferences.addLazily(key.requiredValue());
916     
917     m_plan.watchpoints.addLazily(key);
918
919     if (key.kind() == PropertyCondition::Presence)
920         m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
921     
922     return true;
923 }
924
925 bool Graph::watchConditions(const ObjectPropertyConditionSet& keys)
926 {
927     if (!keys.isValid())
928         return false;
929
930     for (const ObjectPropertyCondition& key : keys) {
931         if (!watchCondition(key))
932             return false;
933     }
934     return true;
935 }
936
937 bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
938 {
939     return m_safeToLoad.contains(std::make_pair(base, offset));
940 }
941
InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    // Returns the inferred type for (structure, uid), memoized in
    // m_inferredTypes. On the first query for a key, also registers the
    // watchpoint that keeps the cached answer valid.
    assertIsRegistered(key.structure());
    
    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        // No type object means we know nothing; cache Top so we don't redo
        // this lookup.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        // Top needs no watchpoint: it can never be invalidated.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }
    
    m_inferredTypes.add(key, typeDescriptor);

    // Keep the type object alive and watch it so that any later change to the
    // inferred type invalidates this compilation.
    m_plan.weakReferences.addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}
973
974 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
975 {
976     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
977     if (iter != m_bytecodeLiveness.end())
978         return *iter->value;
979     
980     std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
981     codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
982     FullBytecodeLiveness& result = *liveness;
983     m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
984     return result;
985 }
986
987 FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
988 {
989     return livenessFor(baselineCodeBlockFor(inlineCallFrame));
990 }
991
992 BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
993 {
994     HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
995     if (iter != m_bytecodeKills.end())
996         return *iter->value;
997     
998     std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
999     codeBlock->livenessAnalysis().computeKills(*kills);
1000     BytecodeKills& result = *kills;
1001     m_bytecodeKills.add(codeBlock, WTFMove(kills));
1002     return result;
1003 }
1004
1005 BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
1006 {
1007     return killsFor(baselineCodeBlockFor(inlineCallFrame));
1008 }
1009
1010 bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
1011 {
1012     static const bool verbose = false;
1013     
1014     if (verbose)
1015         dataLog("Checking of operand is live: ", operand, "\n");
1016     CodeOrigin* codeOriginPtr = &codeOrigin;
1017     for (;;) {
1018         VirtualRegister reg = VirtualRegister(
1019             operand.offset() - codeOriginPtr->stackOffset());
1020         
1021         if (verbose)
1022             dataLog("reg = ", reg, "\n");
1023         
1024         if (operand.offset() < codeOriginPtr->stackOffset() + CallFrame::headerSizeInRegisters) {
1025             if (reg.isArgument()) {
1026                 RELEASE_ASSERT(reg.offset() < CallFrame::headerSizeInRegisters);
1027                 
1028                 if (codeOriginPtr->inlineCallFrame->isClosureCall
1029                     && reg.offset() == CallFrameSlot::callee) {
1030                     if (verbose)
1031                         dataLog("Looks like a callee.\n");
1032                     return true;
1033                 }
1034                 
1035                 if (codeOriginPtr->inlineCallFrame->isVarargs()
1036                     && reg.offset() == CallFrameSlot::argumentCount) {
1037                     if (verbose)
1038                         dataLog("Looks like the argument count.\n");
1039                     return true;
1040                 }
1041                 
1042                 return false;
1043             }
1044
1045             if (verbose)
1046                 dataLog("Asking the bytecode liveness.\n");
1047             return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
1048                 reg.offset(), codeOriginPtr->bytecodeIndex);
1049         }
1050         
1051         InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
1052         if (!inlineCallFrame) {
1053             if (verbose)
1054                 dataLog("Ran out of stack, returning true.\n");
1055             return true;
1056         }
1057
1058         // Arguments are always live. This would be redundant if it wasn't for our
1059         // op_call_varargs inlining.
1060         if (reg.isArgument()
1061             && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size()) {
1062             if (verbose)
1063                 dataLog("Argument is live.\n");
1064             return true;
1065         }
1066         
1067         codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();
1068
1069         // The first inline call frame could be an inline tail call
1070         if (!codeOriginPtr) {
1071             if (verbose)
1072                 dataLog("Dead because of tail inlining.\n");
1073             return false;
1074         }
1075     }
1076     
1077     RELEASE_ASSERT_NOT_REACHED();
1078 }
1079
1080 BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
1081 {
1082     BitVector result;
1083     result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
1084     forAllLocalsLiveInBytecode(
1085         codeOrigin,
1086         [&] (VirtualRegister reg) {
1087             ASSERT(reg.isLocal());
1088             result.quickSet(reg.toLocal());
1089         });
1090     return result;
1091 }
1092
1093 unsigned Graph::frameRegisterCount()
1094 {
1095     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
1096     return roundLocalRegisterCountForFramePointerOffset(result);
1097 }
1098
1099 unsigned Graph::stackPointerOffset()
1100 {
1101     return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
1102 }
1103
1104 unsigned Graph::requiredRegisterCountForExit()
1105 {
1106     unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
1107     for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
1108         InlineCallFrame* inlineCallFrame = *iter;
1109         CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
1110         unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
1111         count = std::max(count, requiredCount);
1112     }
1113     return count;
1114 }
1115
1116 unsigned Graph::requiredRegisterCountForExecutionAndExit()
1117 {
1118     return std::max(frameRegisterCount(), requiredRegisterCountForExit());
1119 }
1120
JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    // Attempts to constant-fold a property load from a known object: succeeds
    // only if every structure in the proven set has a valid, still-watchable
    // replacement watchpoint for the offset. Returns an empty JSValue on any
    // failure.
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        assertIsRegistered(structure);
        
        // Without a valid replacement watchpoint we cannot prove the stored
        // value stays constant.
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}
1168
1169 JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
1170 {
1171     return tryGetConstantProperty(base, StructureSet(structure), offset);
1172 }
1173
1174 JSValue Graph::tryGetConstantProperty(
1175     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1176 {
1177     if (structure.isInfinite()) {
1178         // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
1179         // watching to constant-fold the property.
1180         // https://bugs.webkit.org/show_bug.cgi?id=147271
1181         return JSValue();
1182     }
1183     
1184     return tryGetConstantProperty(base, structure.set(), offset);
1185 }
1186
1187 JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
1188 {
1189     return tryGetConstantProperty(base.m_value, base.m_structure, offset);
1190 }
1191
1192 AbstractValue Graph::inferredValueForProperty(
1193     const StructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
1194 {
1195     AbstractValue result;
1196     base.forEach(
1197         [&] (Structure* structure) {
1198             AbstractValue value;
1199             value.set(*this, inferredTypeForProperty(structure, uid));
1200             result.merge(value);
1201         });
1202     if (clobberState == StructuresAreClobbered)
1203         result.clobberStructures();
1204     return result;
1205 }
1206
1207 AbstractValue Graph::inferredValueForProperty(
1208     const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
1209     StructureClobberState clobberState)
1210 {
1211     if (JSValue value = tryGetConstantProperty(base, offset)) {
1212         AbstractValue result;
1213         result.set(*this, *freeze(value), clobberState);
1214         return result;
1215     }
1216
1217     if (base.m_structure.isFinite())
1218         return inferredValueForProperty(base.m_structure.set(), uid, clobberState);
1219
1220     return AbstractValue::heapTop();
1221 }
1222
JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // Attempts to constant-fold a closure-variable read from a known
    // activation; succeeds only if the variable has a still-watched
    // watchpoint set.
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        // Read the entry and the variable under the symbol table's lock so
        // the watchpoint state and the value are observed consistently.
        ConcurrentJITLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        // Only an actively watched variable can be treated as constant.
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    // Register the watchpoint (outside the lock): if the variable is ever
    // reassigned, this compilation gets invalidated.
    watchpoints().addLazily(set);
    
    return value;
}
1261
1262 JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
1263 {
1264     return tryGetConstantClosureVar(value.m_value, offset);
1265 }
1266
1267 JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1268 {
1269     if (!node->hasConstant())
1270         return JSValue();
1271     return tryGetConstantClosureVar(node->asJSValue(), offset);
1272 }
1273
1274 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
1275 {
1276     if (!value)
1277         return nullptr;
1278     JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(value);
1279     if (!value)
1280         return nullptr;
1281     if (!view->length())
1282         return nullptr;
1283     WTF::loadLoadFence();
1284     watchpoints().addLazily(view);
1285     return view;
1286 }
1287
1288 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1289 {
1290     if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
1291         return nullptr;
1292     return tryGetFoldableView(value);
1293 }
1294
void Graph::registerFrozenValues()
{
    // Rebuilds the code block's constant pool from the graph's frozen values:
    // strong values become code block constants, weak values become weak
    // references held by the plan.
    m_codeBlock->constants().resize(0);
    m_codeBlock->constantsSourceCodeRepresentation().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        // Non-heap values (numbers etc.) need no GC registration.
        if (!value->pointsToHeap())
            continue;
        
        ASSERT(value->structure());
        ASSERT(m_plan.weakReferences.contains(value->structure()));
        
        switch (value->strength()) {
        case WeakValue: {
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            // We already have a barrier on the code block.
            m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
    m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
}
1321
void Graph::visitChildren(SlotVisitor& visitor)
{
    // GC marking hook: report every heap cell the graph refers to, both from
    // frozen values and from node metadata that embeds structures and
    // transitions.
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarrieredReadOnlyValue(value->value());
        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
    }
    
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        
        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);
            
            switch (node->op()) {
            case CheckStructure:
                // The structures the check guards against.
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
                break;
                
            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
                break;
                
            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                // Both endpoints of the structure transition.
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->previous);
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->next);
                break;
                
            case MultiGetByOffset:
                // Every structure mentioned by any of the access cases.
                for (const MultiGetByOffsetCase& getCase : node->multiGetByOffsetData().cases) {
                    for (Structure* structure : getCase.set())
                        visitor.appendUnbarrieredReadOnlyPointer(structure);
                }
                break;
                    
            case MultiPutByOffset:
                // Old structures of each variant, plus the new structure for
                // transition variants.
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
                }
                break;
                
            default:
                break;
            }
        }
    }
}
1382
FrozenValue* Graph::freeze(JSValue value)
{
    // Interns `value` into the graph's frozen-value table, returning a stable
    // FrozenValue* that is unique per encoded JSValue.
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();
    
    // Add first with a null placeholder; the common case is a hit on an
    // existing entry, and on a miss we fill the slot in below.
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());
    
    FrozenValue frozenValue = FrozenValue::freeze(value);
    // If freezing pinned a structure, it must be registered with the plan.
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);
    
    return result.iterator->value = m_frozenValues.add(frozenValue);
}
1401
1402 FrozenValue* Graph::freezeStrong(JSValue value)
1403 {
1404     FrozenValue* result = freeze(value);
1405     result->strengthenTo(StrongValue);
1406     return result;
1407 }
1408
1409 void Graph::convertToConstant(Node* node, FrozenValue* value)
1410 {
1411     if (value->structure())
1412         assertIsRegistered(value->structure());
1413     node->convertToConstant(value);
1414 }
1415
1416 void Graph::convertToConstant(Node* node, JSValue value)
1417 {
1418     convertToConstant(node, freeze(value));
1419 }
1420
1421 void Graph::convertToStrongConstant(Node* node, JSValue value)
1422 {
1423     convertToConstant(node, freezeStrong(value));
1424 }
1425
1426 StructureRegistrationResult Graph::registerStructure(Structure* structure)
1427 {
1428     m_plan.weakReferences.addLazily(structure);
1429     if (m_plan.watchpoints.consider(structure))
1430         return StructureRegisteredAndWatched;
1431     return StructureRegisteredNormally;
1432 }
1433
1434 void Graph::assertIsRegistered(Structure* structure)
1435 {
1436     // It's convenient to be able to call this with a maybe-null structure.
1437     if (!structure)
1438         return;
1439     
1440     DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
1441     
1442     if (!structure->dfgShouldWatch())
1443         return;
1444     if (watchpoints().isWatched(structure->transitionWatchpointSet()))
1445         return;
1446     
1447     DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
1448 }
1449
NO_RETURN_DUE_TO_CRASH static void crash(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    // Shared backend for Graph::handleAssertionFailure: logs the failed
    // assertion and its location, dumps the whole graph, then crashes. The
    // assertion text is printed both before and after the (potentially huge)
    // graph dump so it is visible at either end of the log.
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    CRASH_WITH_SECURITY_IMPLICATION();
}
1466
1467 void Graph::handleAssertionFailure(
1468     std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
1469 {
1470     crash(*this, "", file, line, function, assertion);
1471 }
1472
1473 void Graph::handleAssertionFailure(
1474     Node* node, const char* file, int line, const char* function, const char* assertion)
1475 {
1476     crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
1477 }
1478
1479 void Graph::handleAssertionFailure(
1480     BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
1481 {
1482     crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
1483 }
1484
1485 Dominators& Graph::ensureDominators()
1486 {
1487     if (!m_dominators)
1488         m_dominators = std::make_unique<Dominators>(*this);
1489     return *m_dominators;
1490 }
1491
1492 PrePostNumbering& Graph::ensurePrePostNumbering()
1493 {
1494     if (!m_prePostNumbering)
1495         m_prePostNumbering = std::make_unique<PrePostNumbering>(*this);
1496     return *m_prePostNumbering;
1497 }
1498
1499 NaturalLoops& Graph::ensureNaturalLoops()
1500 {
1501     ensureDominators();
1502     if (!m_naturalLoops)
1503         m_naturalLoops = std::make_unique<NaturalLoops>(*this);
1504     return *m_naturalLoops;
1505 }
1506
1507 BackwardsCFG& Graph::ensureBackwardsCFG()
1508 {
1509     if (!m_backwardsCFG)
1510         m_backwardsCFG = std::make_unique<BackwardsCFG>(*this);
1511     return *m_backwardsCFG;
1512 }
1513
1514 BackwardsDominators& Graph::ensureBackwardsDominators()
1515 {
1516     if (!m_backwardsDominators)
1517         m_backwardsDominators = std::make_unique<BackwardsDominators>(*this);
1518     return *m_backwardsDominators;
1519 }
1520
1521 ControlEquivalenceAnalysis& Graph::ensureControlEquivalenceAnalysis()
1522 {
1523     if (!m_controlEquivalenceAnalysis)
1524         m_controlEquivalenceAnalysis = std::make_unique<ControlEquivalenceAnalysis>(*this);
1525     return *m_controlEquivalenceAnalysis;
1526 }
1527
MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* node)
{
    // Finds a baseline value profile that describes this node's value, trying
    // in order: an argument profile, a lazy-operand profile for GetLocal, a
    // heap-prediction profile, and an arith profile. Identity-like nodes fall
    // through to their operand; returns an empty method if nothing matches.
    while (node) {
        CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
        
        if (node->hasLocal(*this)) {
            // Argument case: usable only if this node still refers to the
            // same VariableAccessData as the graph's argument node.
            ValueProfile* result = [&] () -> ValueProfile* {
                if (!node->local().isArgument())
                    return nullptr;
                int argument = node->local().toArgument();
                Node* argumentNode = m_arguments[argument];
                if (!argumentNode)
                    return nullptr;
                if (node->variableAccessData() != argumentNode->variableAccessData())
                    return nullptr;
                return profiledBlock->valueProfileForArgument(argument);
            }();
            if (result)
                return result;
            
            // Non-argument GetLocal: resolve the profile lazily by
            // (bytecode index, operand).
            if (node->op() == GetLocal) {
                return MethodOfGettingAValueProfile::fromLazyOperand(
                    profiledBlock,
                    LazyOperandValueProfileKey(
                        node->origin.semantic.bytecodeIndex, node->local()));
            }
        }
        
        if (node->hasHeapPrediction())
            return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
        
        {
            // Arithmetic nodes may have an ArithProfile instead.
            if (profiledBlock->hasBaselineJITProfiling()) {
                if (ArithProfile* result = profiledBlock->arithProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex))
                    return result;
            }
        }
        
        switch (node->op()) {
        case Identity:
        case ValueRep:
        case DoubleRep:
        case Int52Rep:
            // These produce the same value as their operand; profile that.
            node = node->child1().node();
            break;
        default:
            node = nullptr;
        }
    }
    
    return MethodOfGettingAValueProfile();
}
1580
1581 bool Graph::getRegExpPrototypeProperty(JSObject* regExpPrototype, Structure* regExpPrototypeStructure, UniquedStringImpl* uid, JSValue& returnJSValue)
1582 {
1583     unsigned attributesUnused;
1584     PropertyOffset offset = regExpPrototypeStructure->getConcurrently(uid, attributesUnused);
1585     if (!isValidOffset(offset))
1586         return false;
1587
1588     JSValue value = tryGetConstantProperty(regExpPrototype, regExpPrototypeStructure, offset);
1589     if (!value)
1590         return false;
1591
1592     // We only care about functions and getters at this point. If you want to access other properties
1593     // you'll have to add code for those types.
1594     JSFunction* function = jsDynamicCast<JSFunction*>(value);
1595     if (!function) {
1596         GetterSetter* getterSetter = jsDynamicCast<GetterSetter*>(value);
1597
1598         if (!getterSetter)
1599             return false;
1600
1601         returnJSValue = JSValue(getterSetter);
1602         return true;
1603     }
1604
1605     returnJSValue = value;
1606     return true;
1607 }
1608
1609 bool Graph::isStringPrototypeMethodSane(JSGlobalObject* globalObject, UniquedStringImpl* uid)
1610 {
1611     ObjectPropertyConditionSet conditions = generateConditionsForPrototypeEquivalenceConcurrently(m_vm, globalObject, globalObject->stringObjectStructure(), globalObject->stringPrototype(), uid);
1612
1613     if (!conditions.isValid())
1614         return false;
1615
1616     ObjectPropertyCondition equivalenceCondition = conditions.slotBaseCondition();
1617     RELEASE_ASSERT(equivalenceCondition.hasRequiredValue());
1618     JSFunction* function = jsDynamicCast<JSFunction*>(equivalenceCondition.condition().requiredValue());
1619     if (!function)
1620         return false;
1621
1622     if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
1623         return false;
1624     
1625     return watchConditions(conditions);
1626 }
1627
1628
1629 bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
1630 {
1631     if (hasExitSite(codeOrigin, NotStringObject))
1632         return false;
1633
1634     JSGlobalObject* globalObject = globalObjectFor(codeOrigin);
1635     Structure* stringObjectStructure = globalObjectFor(codeOrigin)->stringObjectStructure();
1636     registerStructure(stringObjectStructure);
1637     ASSERT(stringObjectStructure->storedPrototype().isObject());
1638     ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo() == StringPrototype::info());
1639
1640     if (!watchConditions(generateConditionsForPropertyMissConcurrently(m_vm, globalObject, stringObjectStructure, m_vm.propertyNames->toPrimitiveSymbol.impl())))
1641         return false;
1642
1643     // We're being conservative here. We want DFG's ToString on StringObject to be
1644     // used in both numeric contexts (that would call valueOf()) and string contexts
1645     // (that would call toString()). We don't want the DFG to have to distinguish
1646     // between the two, just because that seems like it would get confusing. So we
1647     // just require both methods to be sane.
1648     if (!isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->valueOf.impl()))
1649         return false;
1650     return isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->toString.impl());
1651 }
1652
1653 bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
1654 {
1655     if (!m_hasExceptionHandlers)
1656         return false;
1657
1658     unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
1659     while (1) {
1660         InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
1661         CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
1662         if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
1663             opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
1664             catchHandlerOut = handler;
1665             return true;
1666         }
1667
1668         if (!inlineCallFrame)
1669             return false;
1670
1671         bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
1672         codeOrigin = codeOrigin.inlineCallFrame->directCaller;
1673     }
1674
1675     RELEASE_ASSERT_NOT_REACHED();
1676 }
1677
1678 } } // namespace JSC::DFG
1679
1680 #endif // ENABLE(DFG_JIT)