/*
 * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGGraph.h"

#if ENABLE(DFG_JIT)

#include "BytecodeKills.h"
#include "BytecodeLivenessAnalysisInlines.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGBlockWorklist.h"
#include "DFGClobberSet.h"
#include "DFGClobbersExitState.h"
#include "DFGCFG.h"
#include "DFGDominators.h"
#include "DFGJITCode.h"
#include "DFGMayExit.h"
#include "DFGNaturalLoops.h"
#include "DFGPrePostNumbering.h"
#include "DFGVariableAccessDataDump.h"
#include "FullBytecodeLiveness.h"
#include "FunctionExecutableDump.h"
#include "JIT.h"
#include "JSLexicalEnvironment.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "OperandsInlines.h"
#include "JSCInlines.h"
#include "StackAlignment.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>

namespace JSC { namespace DFG {

// Creates an array of stringized names.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};

Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock)
    , m_profiledBlock(m_codeBlock->alternative())
    , m_allocator(longLivedState.m_allocator)
    , m_cfg(std::make_unique<CFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);

    m_hasDebuggerEnabled = m_profiledBlock->globalObject()->hasDebugger()
        || Options::forceDebuggerBytecodeGeneration();
}

Graph::~Graph()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;

        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            m_allocator.free(block->phis[phiIndex]);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            m_allocator.free(block->at(nodeIndex));
    }
    m_allocator.freeAll();
}

const char* Graph::opName(NodeType op)
{
    return dfgOpNames[op];
}

static void printWhiteSpace(PrintStream& out, unsigned amount)
{
    while (amount-- > 0)
        out.print(" ");
}

bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;

    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    if (!previousNode)
        return false;

    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;

    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }

    bool hasPrinted = false;

    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }

    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }

    return hasPrinted;
}

int Graph::amountOfNodeWhiteSpace(Node* node)
{
    return (node->origin.semantic.inlineDepth() - 1) * 2;
}

void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
{
    printWhiteSpace(out, amountOfNodeWhiteSpace(node));
}

void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count is prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. These may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    if (!node->origin.exitOK)
        out.print(comma, "ExitInvalid");
    out.print(")");

    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));

    out.print("\n");
}

bool Graph::terminalsAreValid()
{
    for (BasicBlock* block : blocksInNaturalOrder()) {
        if (!block->terminal())
            return false;
    }
    return true;
}

void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
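    // executionCount is a double; comparing it to itself is only false for NaN,
    // so this prints the count only when it has actually been set.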
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
            if (m_prePostNumbering)
                out.print(" (", m_prePostNumbering->edgeKind(block, successor), ")");
        }
    } else
        out.print(" <invalid>");
    out.print("\n");
    if (m_dominators && terminalsAreValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators->blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators->dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators->iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_prePostNumbering)
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering->preNumber(block), "/", m_prePostNumbering->postNumber(block), "\n");
    if (m_naturalLoops) {
        if (const NaturalLoop* loop = m_naturalLoops->headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }

        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}

void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;

    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA)
        out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
    else
        out.print("  Arguments: ", listDump(m_arguments), "\n");
    out.print("\n");

    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }

        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }

        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }

    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));

    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}

void Graph::dethread()
{
    if (m_form == LoadStore || m_form == SSA)
        return;

    if (logCompilationChanges())
        dataLog("Dethreading DFG graph.\n");

    SamplingRegion samplingRegion("DFG Dethreading");

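    // CPS threading installs children on the Phi nodes; clearing them (and
    // setting m_form below) returns the graph to LoadStore form.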
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
            Node* phi = block->phis[phiIndex];
            phi->children.reset();
        }
    }

    m_form = LoadStore;
}

void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
{
    if (!successor->isReachable) {
        successor->isReachable = true;
        worklist.append(successor);
    }

    successor->predecessors.append(block);
}

void Graph::determineReachability()
{
    Vector<BasicBlock*, 16> worklist;
    worklist.append(block(0));
    block(0)->isReachable = true;
    while (!worklist.isEmpty()) {
        BasicBlock* block = worklist.takeLast();
        for (unsigned i = block->numSuccessors(); i--;)
            handleSuccessor(worklist, block, block->successor(i));
    }
}

void Graph::resetReachability()
{
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        block->isReachable = false;
        block->predecessors.clear();
    }

    determineReachability();
}

namespace {

class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }

    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }

        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }

        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }

            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }

private:
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }

    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }

    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }

    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};

} // anonymous namespace

void Graph::computeRefCounts()
{
    RefCountCalculator calculator(*this);
    calculator.calculate();
}

void Graph::killBlockAndItsContents(BasicBlock* block)
{
    for (unsigned phiIndex = block->phis.size(); phiIndex--;)
        m_allocator.free(block->phis[phiIndex]);
    for (unsigned nodeIndex = block->size(); nodeIndex--;)
        m_allocator.free(block->at(nodeIndex));

    killBlock(block);
}

void Graph::killUnreachableBlocks()
{
    for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        if (block->isReachable)
            continue;

        killBlockAndItsContents(block);
    }
}

void Graph::invalidateCFG()
{
    m_dominators = nullptr;
    m_naturalLoops = nullptr;
    m_prePostNumbering = nullptr;
}

void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
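    // Starting at startIndexInBlock, redirect every GetLocal of this variable to
    // newGetLocal, stopping at the first redefinition (SetLocal) of the variable.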
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }

        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }

        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}

BlockList Graph::blocksInPreOrder()
{
    BlockList result;
    BlockWorklist worklist;
    worklist.push(block(0));
    while (BasicBlock* block = worklist.pop()) {
        result.append(block);
        for (unsigned i = block->numSuccessors(); i--;)
            worklist.push(block->successor(i));
    }
    return result;
}

BlockList Graph::blocksInPostOrder()
{
    BlockList result;
    PostOrderBlockWorklist worklist;
    worklist.push(block(0));
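    // Each block is visited twice: the Pre visit reschedules it as a Post visit
    // and pushes its successors; the Post visit, which runs once the successors
    // have been processed, appends the block to the result.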
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case VisitOrder::Pre:
            worklist.pushPost(item.node);
            for (unsigned i = item.node->numSuccessors(); i--;)
                worklist.push(item.node->successor(i));
            break;
        case VisitOrder::Post:
            result.append(item.node);
            break;
        }
    }
    return result;
}

void Graph::clearReplacements()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setReplacement(nullptr);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setReplacement(nullptr);
    }
}

void Graph::clearEpochs()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setEpoch(Epoch());
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setEpoch(Epoch());
    }
}

void Graph::initializeNodeOwners()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->owner = block;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->owner = block;
    }
}

void Graph::clearFlagsOnAllNodes(NodeFlags flags)
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->clearFlags(flags);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->clearFlags(flags);
    }
}

bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    if (!key.isWatchable())
        return false;

    m_plan.weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        m_plan.weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        m_plan.weakReferences.addLazily(key.requiredValue());

    m_plan.watchpoints.addLazily(key);

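    // A watched Presence condition proves the object really has the property at
    // this offset, so loads from it can be treated as safe.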
    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));

    return true;
}

bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
{
    return m_safeToLoad.contains(std::make_pair(base, offset));
}

InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    assertIsRegistered(key.structure());

    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    m_inferredTypes.add(key, typeDescriptor);

    m_plan.weakReferences.addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}

FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
    if (iter != m_bytecodeLiveness.end())
        return *iter->value;

    std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
    codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
    FullBytecodeLiveness& result = *liveness;
    m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
    return result;
}

FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}

BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
    if (iter != m_bytecodeKills.end())
        return *iter->value;

    std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
    codeBlock->livenessAnalysis().computeKills(*kills);
    BytecodeKills& result = *kills;
    m_bytecodeKills.add(codeBlock, WTFMove(kills));
    return result;
}

BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
{
    return killsFor(baselineCodeBlockFor(inlineCallFrame));
}

bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
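    // Walk the inline stack from the innermost frame outward. In each frame we
    // either answer the query from that frame's bytecode liveness, or recognize
    // the operand as a frame-header slot that is unconditionally live.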
    CodeOrigin* codeOriginPtr = &codeOrigin;
    for (;;) {
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());

        if (operand.offset() < codeOriginPtr->stackOffset() + JSStack::CallFrameHeaderSize) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);

                if (codeOriginPtr->inlineCallFrame->isClosureCall
                    && reg.offset() == JSStack::Callee)
                    return true;

                if (codeOriginPtr->inlineCallFrame->isVarargs()
                    && reg.offset() == JSStack::ArgumentCount)
                    return true;

                return false;
            }

            return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
                reg.offset(), codeOriginPtr->bytecodeIndex);
        }

        InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
        if (!inlineCallFrame)
            break;

        // Arguments are always live. This would be redundant if it weren't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
            return true;

        codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();

        // The first inline call frame could be an inline tail call.
        if (!codeOriginPtr)
            break;
    }

    return true;
}

BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
{
    BitVector result;
    result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
    forAllLocalsLiveInBytecode(
        codeOrigin,
        [&] (VirtualRegister reg) {
            ASSERT(reg.isLocal());
            result.quickSet(reg.toLocal());
        });
    return result;
}

unsigned Graph::frameRegisterCount()
{
    unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
    return roundLocalRegisterCountForFramePointerOffset(result);
}

unsigned Graph::stackPointerOffset()
{
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}

unsigned Graph::requiredRegisterCountForExit()
{
    unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
    for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
        InlineCallFrame* inlineCallFrame = *iter;
        CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
        unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
        count = std::max(count, requiredCount);
    }
    return count;
}

unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();

    JSObject* object = asObject(base);

    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        assertIsRegistered(structure);

        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();

        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());

        watchpoints().addLazily(set);
    }

    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.

    if (!structureSet.contains(object->structure()))
        return JSValue();

    return object->getDirect(offset);
}

JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
{
    return tryGetConstantProperty(base, StructureSet(structure), offset);
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
{
    if (structure.isInfinite()) {
        // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
        // watching to constant-fold the property.
        // https://bugs.webkit.org/show_bug.cgi?id=147271
        return JSValue();
    }

    return tryGetConstantProperty(base, structure.set(), offset);
}

JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}

AbstractValue Graph::inferredValueForProperty(
    const StructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
{
    AbstractValue result;
    base.forEach(
        [&] (Structure* structure) {
            AbstractValue value;
            value.set(*this, inferredTypeForProperty(structure, uid));
            result.merge(value);
        });
    if (clobberState == StructuresAreClobbered)
        result.clobberStructures();
    return result;
}

AbstractValue Graph::inferredValueForProperty(
    const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
    StructureClobberState clobberState)
{
    if (JSValue value = tryGetConstantProperty(base, offset)) {
        AbstractValue result;
        result.set(*this, *freeze(value), clobberState);
        return result;
    }

    if (base.m_structure.isFinite())
        return inferredValueForProperty(base.m_structure.set(), uid, clobberState);

    return AbstractValue::heapTop();
}

JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.

    if (!base)
        return JSValue();

    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(base);
    if (!activation)
        return JSValue();

    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        ConcurrentJITLocker locker(symbolTable->m_lock);

        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();

        set = entry->watchpointSet();
        if (!set)
            return JSValue();

        if (set->state() != IsWatched)
            return JSValue();

        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }

    watchpoints().addLazily(set);

    return value;
}

JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
{
    return tryGetConstantClosureVar(value.m_value, offset);
}

JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
{
    if (!node->hasConstant())
        return JSValue();
    return tryGetConstantClosureVar(node->asJSValue(), offset);
}

JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
{
    if (!value)
        return nullptr;
    JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(value);
    if (!view)
        return nullptr;
    if (!view->length())
        return nullptr;
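    // Defensive load-load ordering: the length read above should not be reordered
    // with subsequent reads from the view, since compilation can run concurrently
    // with the mutator.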
    WTF::loadLoadFence();
    watchpoints().addLazily(view);
    return view;
}

JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
{
    if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
        return nullptr;
    return tryGetFoldableView(value);
}

void Graph::registerFrozenValues()
{
    m_codeBlock->constants().resize(0);
    m_codeBlock->constantsSourceCodeRepresentation().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        if (!value->pointsToHeap())
            continue;

        ASSERT(value->structure());
        ASSERT(m_plan.weakReferences.contains(value->structure()));

        switch (value->strength()) {
        case WeakValue: {
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            // We already have a barrier on the code block.
            m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
    m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
}

void Graph::visitChildren(SlotVisitor& visitor)
{
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarrieredReadOnlyValue(value->value());
        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
    }

    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;

        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);

            switch (node->op()) {
            case CheckStructure:
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
                break;

            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
                break;

            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->previous);
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->next);
                break;

            case MultiGetByOffset:
                for (const MultiGetByOffsetCase& getCase : node->multiGetByOffsetData().cases) {
                    for (Structure* structure : getCase.set())
                        visitor.appendUnbarrieredReadOnlyPointer(structure);
                }
                break;

            case MultiPutByOffset:
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
                }
                break;

            default:
                break;
            }
        }
    }
}

FrozenValue* Graph::freeze(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();

    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());

    FrozenValue frozenValue = FrozenValue::freeze(value);
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);

    return result.iterator->value = m_frozenValues.add(frozenValue);
}

FrozenValue* Graph::freezeStrong(JSValue value)
{
    FrozenValue* result = freeze(value);
    result->strengthenTo(StrongValue);
    return result;
}

void Graph::convertToConstant(Node* node, FrozenValue* value)
{
    if (value->structure())
        assertIsRegistered(value->structure());
    node->convertToConstant(value);
}

void Graph::convertToConstant(Node* node, JSValue value)
{
    convertToConstant(node, freeze(value));
}

void Graph::convertToStrongConstant(Node* node, JSValue value)
{
    convertToConstant(node, freezeStrong(value));
}

StructureRegistrationResult Graph::registerStructure(Structure* structure)
{
    m_plan.weakReferences.addLazily(structure);
    if (m_plan.watchpoints.consider(structure))
        return StructureRegisteredAndWatched;
    return StructureRegisteredNormally;
}

void Graph::assertIsRegistered(Structure* structure)
{
    // It's convenient to be able to call this with a maybe-null structure.
    if (!structure)
        return;

    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));

    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;

    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}

NO_RETURN_DUE_TO_CRASH static void crash(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    CRASH_WITH_SECURITY_IMPLICATION();
}

void Graph::handleAssertionFailure(
    std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, "", file, line, function, assertion);
}

void Graph::handleAssertionFailure(
    Node* node, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
}

void Graph::handleAssertionFailure(
    BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
{
    crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
}

void Graph::ensureDominators()
{
    if (!m_dominators)
        m_dominators = std::make_unique<Dominators>(*this);
}

void Graph::ensurePrePostNumbering()
{
    if (!m_prePostNumbering)
        m_prePostNumbering = std::make_unique<PrePostNumbering>(*this);
}

void Graph::ensureNaturalLoops()
{
    ensureDominators();
    if (!m_naturalLoops)
        m_naturalLoops = std::make_unique<NaturalLoops>(*this);
}

ValueProfile* Graph::valueProfileFor(Node* node)
{
    if (!node)
        return nullptr;

    CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);

    if (node->hasLocal(*this)) {
        if (!node->local().isArgument())
            return nullptr;
        int argument = node->local().toArgument();
        Node* argumentNode = m_arguments[argument];
        if (!argumentNode)
            return nullptr;
        if (node->variableAccessData() != argumentNode->variableAccessData())
            return nullptr;
        return profiledBlock->valueProfileForArgument(argument);
    }

    if (node->hasHeapPrediction())
        return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);

    return nullptr;
}

MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* node)
{
    if (!node)
        return MethodOfGettingAValueProfile();

    if (ValueProfile* valueProfile = valueProfileFor(node))
        return MethodOfGettingAValueProfile(valueProfile);

    if (node->op() == GetLocal) {
        CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);

        return MethodOfGettingAValueProfile::fromLazyOperand(
            profiledBlock,
            LazyOperandValueProfileKey(
                node->origin.semantic.bytecodeIndex, node->local()));
    }

    return MethodOfGettingAValueProfile();
}

bool Graph::isStringPrototypeMethodSane(JSObject* stringPrototype, Structure* stringPrototypeStructure, UniquedStringImpl* uid)
{
    unsigned attributesUnused;
    PropertyOffset offset = stringPrototypeStructure->getConcurrently(uid, attributesUnused);
    if (!isValidOffset(offset))
        return false;

    JSValue value = tryGetConstantProperty(stringPrototype, stringPrototypeStructure, offset);
    if (!value)
        return false;

    JSFunction* function = jsDynamicCast<JSFunction*>(value);
    if (!function)
        return false;

    if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
        return false;

    return true;
}

bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
{
    if (hasExitSite(codeOrigin, NotStringObject))
        return false;

    Structure* stringObjectStructure = globalObjectFor(codeOrigin)->stringObjectStructure();
    registerStructure(stringObjectStructure);
    ASSERT(stringObjectStructure->storedPrototype().isObject());
    ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo() == StringPrototype::info());

    FrozenValue* stringPrototypeObjectValue = freeze(stringObjectStructure->storedPrototype());
    StringPrototype* stringPrototypeObject = stringPrototypeObjectValue->dynamicCast<StringPrototype*>();
    Structure* stringPrototypeStructure = stringPrototypeObjectValue->structure();
    if (registerStructure(stringPrototypeStructure) != StructureRegisteredAndWatched)
        return false;

    if (stringPrototypeStructure->isDictionary())
        return false;

    // We're being conservative here. We want DFG's ToString on StringObject to be
    // used in both numeric contexts (that would call valueOf()) and string contexts
    // (that would call toString()). We don't want the DFG to have to distinguish
    // between the two, just because that seems like it would get confusing. So we
    // just require both methods to be sane.
    if (!isStringPrototypeMethodSane(stringPrototypeObject, stringPrototypeStructure, m_vm.propertyNames->valueOf.impl()))
        return false;
    if (!isStringPrototypeMethodSane(stringPrototypeObject, stringPrototypeStructure, m_vm.propertyNames->toString.impl()))
        return false;

    return true;
}

bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
{
    if (!m_hasExceptionHandlers)
        return false;

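    // Walk the inline stack from the innermost frame outward, checking each
    // frame's baseline CodeBlock for a handler that covers the bytecode index
    // at which that frame is executing.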
    unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
    while (1) {
        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
        CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
        if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
            opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
            catchHandlerOut = handler;
            return true;
        }

        if (!inlineCallFrame)
            return false;

        bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
        codeOrigin = codeOrigin.inlineCallFrame->directCaller;
    }

    RELEASE_ASSERT_NOT_REACHED();
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)