Fix endless OSR exits when creating a rope that contains an object that ToPrimitive...
[WebKit-https.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
1 /*
2  * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
#include "config.h"
#include "DFGGraph.h"

#if ENABLE(DFG_JIT)

#include "BytecodeKills.h"
#include "BytecodeLivenessAnalysisInlines.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGBlockWorklist.h"
#include "DFGClobberSet.h"
#include "DFGClobbersExitState.h"
#include "DFGCFG.h"
#include "DFGDominators.h"
#include "DFGJITCode.h"
#include "DFGMayExit.h"
#include "DFGNaturalLoops.h"
#include "DFGPrePostNumbering.h"
#include "DFGVariableAccessDataDump.h"
#include "FullBytecodeLiveness.h"
#include "FunctionExecutableDump.h"
#include "JIT.h"
#include "JSLexicalEnvironment.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "OperandsInlines.h"
#include "JSCInlines.h"
#include "StackAlignment.h"
#include <cmath>
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
55
56 namespace JSC { namespace DFG {
57
// Creates an array of stringized names.
// Expands FOR_EACH_DFG_OP so that dfgOpNames[opcode] is the printable name of
// that opcode; consumed by Graph::opName() below.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};
64
// Constructs an empty DFG graph for the given compilation plan. Node memory is
// drawn from the shared long-lived allocator so that nodes can be bulk-freed
// when the graph dies. The graph starts in LoadStore form, before the
// fixpoint, with every node presumed live.
Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock)
    , m_profiledBlock(m_codeBlock->alternative()) // The baseline code block that gathered profiling.
    , m_allocator(longLivedState.m_allocator)
    , m_cfg(std::make_unique<CFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);
    
    // Record up front whether debugger support is (or is forced to be) on;
    // this influences later decisions in the pipeline.
    m_hasDebuggerEnabled = m_profiledBlock->globalObject()->hasDebugger()
        || Options::forceDebuggerBytecodeGeneration();
}
84
85 Graph::~Graph()
86 {
87     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
88         BasicBlock* block = this->block(blockIndex);
89         if (!block)
90             continue;
91
92         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
93             m_allocator.free(block->phis[phiIndex]);
94         for (unsigned nodeIndex = block->size(); nodeIndex--;)
95             m_allocator.free(block->at(nodeIndex));
96     }
97     m_allocator.freeAll();
98 }
99
100 const char *Graph::opName(NodeType op)
101 {
102     return dfgOpNames[op];
103 }
104
105 static void printWhiteSpace(PrintStream& out, unsigned amount)
106 {
107     while (amount-- > 0)
108         out.print(" ");
109 }
110
// Prints the change in the inline-call-frame stack between the previously
// dumped node and currentNode: frames left are shown as "<--" pops and frames
// entered as "-->" pushes. Updates previousNodeRef to currentNode. Returns
// true if anything was printed.
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;
    
    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    // The first node dumped has no predecessor to diff against.
    if (!previousNode)
        return false;
    
    // Same innermost inline call frame: the inline stack did not change.
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    // Find the first index at which the two inline stacks diverge; everything
    // below that index is common to both nodes and needs no printing.
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops (deepest frame first).
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes (shallowest new frame first).
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}
156
157 int Graph::amountOfNodeWhiteSpace(Node* node)
158 {
159     return (node->origin.semantic.inlineDepth() - 1) * 2;
160 }
161
162 void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
163 {
164     printWhiteSpace(out, amountOfNodeWhiteSpace(node));
165 }
166
// Dumps one node in the textual graph format (the layout is documented in the
// big comment below). The optional DumpContext defers dumps of heap values so
// they can be printed once at the end of the full graph dump.
void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    // Print the ref count without the implicit 'mustGenerate' ref; that ref is
    // instead surfaced as a '!' prefix.
    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count it prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. The may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    // Dump the children: either the var-args child list or child1..child3.
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    // Op-specific metadata: each has*() predicate guards data that is only
    // meaningful (and only valid to query) for certain opcodes.
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                // If the cell is callable, also print which executable it is.
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i) {
            out.print(comma, inContext(data.cases[i], context));
            if (data.cases[i].method().kind() == GetByOffsetMethod::Load)
                out.print(" (inferred value = ", inContext(inferredValueForProperty(data.cases[i].set(), identifiers()[data.identifierNumber]), context), ")");
        }
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal()) 
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    // Control-flow targets for terminals.
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    // Summarize the node's abstract-heap effects and its exit behavior.
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    // Code origin; also show the exit origin if it differs from the semantic one.
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    if (!node->origin.exitOK)
        out.print(comma, "ExitInvalid");
    out.print(")");

    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}
386
387 bool Graph::terminalsAreValid()
388 {
389     for (BasicBlock* block : blocksInNaturalOrder()) {
390         if (!block->terminal())
391             return false;
392     }
393     return true;
394 }
395
396 void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
397 {
398     out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
399     if (block->executionCount == block->executionCount)
400         out.print(prefix, "  Execution count: ", block->executionCount, "\n");
401     out.print(prefix, "  Predecessors:");
402     for (size_t i = 0; i < block->predecessors.size(); ++i)
403         out.print(" ", *block->predecessors[i]);
404     out.print("\n");
405     out.print(prefix, "  Successors:");
406     if (block->terminal()) {
407         for (BasicBlock* successor : block->successors()) {
408             out.print(" ", *successor);
409             if (m_prePostNumbering)
410                 out.print(" (", m_prePostNumbering->edgeKind(block, successor), ")");
411         }
412     } else
413         out.print(" <invalid>");
414     out.print("\n");
415     if (m_dominators && terminalsAreValid()) {
416         out.print(prefix, "  Dominated by: ", m_dominators->dominatorsOf(block), "\n");
417         out.print(prefix, "  Dominates: ", m_dominators->blocksDominatedBy(block), "\n");
418         out.print(prefix, "  Dominance Frontier: ", m_dominators->dominanceFrontierOf(block), "\n");
419         out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators->iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
420     }
421     if (m_prePostNumbering)
422         out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering->preNumber(block), "/", m_prePostNumbering->postNumber(block), "\n");
423     if (m_naturalLoops) {
424         if (const NaturalLoop* loop = m_naturalLoops->headerOf(block)) {
425             out.print(prefix, "  Loop header, contains:");
426             Vector<BlockIndex> sortedBlockList;
427             for (unsigned i = 0; i < loop->size(); ++i)
428                 sortedBlockList.append(loop->at(i)->index);
429             std::sort(sortedBlockList.begin(), sortedBlockList.end());
430             for (unsigned i = 0; i < sortedBlockList.size(); ++i)
431                 out.print(" #", sortedBlockList[i]);
432             out.print("\n");
433         }
434         
435         Vector<const NaturalLoop*> containingLoops =
436             m_naturalLoops->loopsOf(block);
437         if (!containingLoops.isEmpty()) {
438             out.print(prefix, "  Containing loop headers:");
439             for (unsigned i = 0; i < containingLoops.size(); ++i)
440                 out.print(" ", *containingLoops[i]->header());
441             out.print("\n");
442         }
443     }
444     if (!block->phis.isEmpty()) {
445         out.print(prefix, "  Phi Nodes:");
446         for (size_t i = 0; i < block->phis.size(); ++i) {
447             Node* phiNode = block->phis[i];
448             if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
449                 continue;
450             out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
451             if (phiNode->child1()) {
452                 out.print("@", phiNode->child1()->index());
453                 if (phiNode->child2()) {
454                     out.print(", @", phiNode->child2()->index());
455                     if (phiNode->child3())
456                         out.print(", @", phiNode->child3()->index());
457                 }
458             }
459             out.print(")", i + 1 < block->phis.size() ? "," : "");
460         }
461         out.print("\n");
462     }
463 }
464
// Dumps the whole graph: a prologue describing the graph's current state, then
// each block (header, CFA state at head, every node, CFA state at tail), then
// the frozen GC values and watchpoints. If the caller supplies no DumpContext,
// a local one is used and its deferred dumps are flushed at the end.
void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    // SSA form tracks argument formats; CPS forms track argument nodes.
    if (m_form == SSA)
        out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
    else
        out.print("  Arguments: ", listDump(m_arguments), "\n");
    out.print("\n");
    
    // lastNode threads through dumpCodeOrigin so inline-stack transitions are
    // printed between consecutive nodes, even across block boundaries.
    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        // CFA state at the head of the block.
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        // The nodes themselves.
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        // CFA state at the tail of the block.
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    // Frozen values that point into the GC heap, plus the watchpoints we rely on.
    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));
    
    // Flush any dumps the local context deferred (only relevant when the
    // caller did not pass its own context).
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}
563
564 void Graph::dethread()
565 {
566     if (m_form == LoadStore || m_form == SSA)
567         return;
568     
569     if (logCompilationChanges())
570         dataLog("Dethreading DFG graph.\n");
571     
572     SamplingRegion samplingRegion("DFG Dethreading");
573     
574     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
575         BasicBlock* block = m_blocks[blockIndex].get();
576         if (!block)
577             continue;
578         for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
579             Node* phi = block->phis[phiIndex];
580             phi->children.reset();
581         }
582     }
583     
584     m_form = LoadStore;
585 }
586
587 void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
588 {
589     if (!successor->isReachable) {
590         successor->isReachable = true;
591         worklist.append(successor);
592     }
593     
594     successor->predecessors.append(block);
595 }
596
597 void Graph::determineReachability()
598 {
599     Vector<BasicBlock*, 16> worklist;
600     worklist.append(block(0));
601     block(0)->isReachable = true;
602     while (!worklist.isEmpty()) {
603         BasicBlock* block = worklist.takeLast();
604         for (unsigned i = block->numSuccessors(); i--;)
605             handleSuccessor(worklist, block, block->successor(i));
606     }
607 }
608
609 void Graph::resetReachability()
610 {
611     for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
612         BasicBlock* block = m_blocks[blockIndex].get();
613         if (!block)
614             continue;
615         block->isReachable = false;
616         block->predecessors.clear();
617     }
618     
619     determineReachability();
620 }
621
622 namespace {
623
// Computes node reference counts by marking from the roots. Roots are nodes
// flagged must-generate and nodes reachable through edges that still need a
// type check. In SSA form, a live Phi also keeps its Upsilons alive; that
// relationship is meta-data on the Upsilon rather than a real edge, so the
// outer fixpoint re-scans for newly-live Upsilons after each marking pass.
class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                // postfixRef() returns the old count; 0 means this is the
                // first ref, so the node still needs to be visited.
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
        while (!m_worklist.isEmpty()) {
            // Transitively mark everything reachable from the worklist.
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    // Roots any edge that still carries a type check, even if its node is
    // otherwise dead: the check must still execute.
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    // Bumps a node's ref count; enqueues it on the first ref only.
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};
720
721 } // anonymous namespace
722
723 void Graph::computeRefCounts()
724 {
725     RefCountCalculator calculator(*this);
726     calculator.calculate();
727 }
728
729 void Graph::killBlockAndItsContents(BasicBlock* block)
730 {
731     for (unsigned phiIndex = block->phis.size(); phiIndex--;)
732         m_allocator.free(block->phis[phiIndex]);
733     for (unsigned nodeIndex = block->size(); nodeIndex--;)
734         m_allocator.free(block->at(nodeIndex));
735     
736     killBlock(block);
737 }
738
739 void Graph::killUnreachableBlocks()
740 {
741     for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
742         BasicBlock* block = this->block(blockIndex);
743         if (!block)
744             continue;
745         if (block->isReachable)
746             continue;
747         
748         killBlockAndItsContents(block);
749     }
750 }
751
752 void Graph::invalidateCFG()
753 {
754     m_dominators = nullptr;
755     m_naturalLoops = nullptr;
756     m_prePostNumbering = nullptr;
757 }
758
// Replaces uses of GetLocal nodes for |variableAccessData| with
// |newGetLocal|, scanning |block| from |startIndexInBlock|. The scan stops
// at the first SetLocal of the same local, or after substituting one
// GetLocal for the matching VariableAccessData.
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            // A store to the same local ends the region we may rewrite.
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            // Keep the tail map coherent: if this node was the last def/use
            // recorded for the local, the replacement takes its place.
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}
789
790 BlockList Graph::blocksInPreOrder()
791 {
792     BlockList result;
793     BlockWorklist worklist;
794     worklist.push(block(0));
795     while (BasicBlock* block = worklist.pop()) {
796         result.append(block);
797         for (unsigned i = block->numSuccessors(); i--;)
798             worklist.push(block->successor(i));
799     }
800     return result;
801 }
802
803 BlockList Graph::blocksInPostOrder()
804 {
805     BlockList result;
806     PostOrderBlockWorklist worklist;
807     worklist.push(block(0));
808     while (BlockWithOrder item = worklist.pop()) {
809         switch (item.order) {
810         case VisitOrder::Pre:
811             worklist.pushPost(item.node);
812             for (unsigned i = item.node->numSuccessors(); i--;)
813                 worklist.push(item.node->successor(i));
814             break;
815         case VisitOrder::Post:
816             result.append(item.node);
817             break;
818         }
819     }
820     return result;
821 }
822
823 void Graph::clearReplacements()
824 {
825     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
826         BasicBlock* block = m_blocks[blockIndex].get();
827         if (!block)
828             continue;
829         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
830             block->phis[phiIndex]->setReplacement(nullptr);
831         for (unsigned nodeIndex = block->size(); nodeIndex--;)
832             block->at(nodeIndex)->setReplacement(nullptr);
833     }
834 }
835
836 void Graph::clearEpochs()
837 {
838     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
839         BasicBlock* block = m_blocks[blockIndex].get();
840         if (!block)
841             continue;
842         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
843             block->phis[phiIndex]->setEpoch(Epoch());
844         for (unsigned nodeIndex = block->size(); nodeIndex--;)
845             block->at(nodeIndex)->setEpoch(Epoch());
846     }
847 }
848
849 void Graph::initializeNodeOwners()
850 {
851     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
852         BasicBlock* block = m_blocks[blockIndex].get();
853         if (!block)
854             continue;
855         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
856             block->phis[phiIndex]->owner = block;
857         for (unsigned nodeIndex = block->size(); nodeIndex--;)
858             block->at(nodeIndex)->owner = block;
859     }
860 }
861
862 void Graph::clearFlagsOnAllNodes(NodeFlags flags)
863 {
864     for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
865         BasicBlock* block = m_blocks[blockIndex].get();
866         if (!block)
867             continue;
868         for (unsigned phiIndex = block->phis.size(); phiIndex--;)
869             block->phis[phiIndex]->clearFlags(flags);
870         for (unsigned nodeIndex = block->size(); nodeIndex--;)
871             block->at(nodeIndex)->clearFlags(flags);
872     }
873 }
874
// Attempts to watch the given object-property condition for this
// compilation. Returns false (registering nothing) if the condition is not
// watchable. On success, the cells named by the condition are weakly
// referenced by the plan and the condition is added to the plan's desired
// watchpoints; a watched Presence condition additionally marks the
// (object, offset) pair safe for isSafeToLoad().
bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    if (!key.isWatchable())
        return false;
    
    // Keep every cell the condition mentions alive as long as the code
    // we're compiling might run.
    m_plan.weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        m_plan.weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        m_plan.weakReferences.addLazily(key.requiredValue());
    
    m_plan.watchpoints.addLazily(key);

    // A watched Presence condition proves the property exists at |offset|
    // on |object|, so constant-folded loads from there are safe.
    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
    
    return true;
}
893
894 bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
895 {
896     return m_safeToLoad.contains(std::make_pair(base, offset));
897 }
898
// Returns (and memoizes) the inferred type descriptor for the given
// (structure, uid) property key. When a non-Top inference exists, we keep
// the InferredType object alive weakly and register a desired watchpoint so
// the compiled code is jettisoned if the inference is later invalidated.
InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    assertIsRegistered(key.structure());
    
    // Fast path: we already answered this query during this compilation.
    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        // Nothing is being tracked for this property; cache Top so we don't
        // requery the structure.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        // A Top inference tells us nothing, so there is no point watching it.
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }
    
    m_inferredTypes.add(key, typeDescriptor);

    m_plan.weakReferences.addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}
930
931 FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
932 {
933     HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
934     if (iter != m_bytecodeLiveness.end())
935         return *iter->value;
936     
937     std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
938     codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
939     FullBytecodeLiveness& result = *liveness;
940     m_bytecodeLiveness.add(codeBlock, WTF::move(liveness));
941     return result;
942 }
943
944 FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
945 {
946     return livenessFor(baselineCodeBlockFor(inlineCallFrame));
947 }
948
949 BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
950 {
951     HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
952     if (iter != m_bytecodeKills.end())
953         return *iter->value;
954     
955     std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
956     codeBlock->livenessAnalysis().computeKills(*kills);
957     BytecodeKills& result = *kills;
958     m_bytecodeKills.add(codeBlock, WTF::move(kills));
959     return result;
960 }
961
962 BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
963 {
964     return killsFor(baselineCodeBlockFor(inlineCallFrame));
965 }
966
// Determines whether |operand| (expressed in machine-frame coordinates) is
// live in bytecode at |codeOrigin|, walking outward through the inline
// stack until the frame that owns the operand is found.
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    CodeOrigin* codeOriginPtr = &codeOrigin;
    for (;;) {
        // Re-express the operand relative to the current frame.
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());
        
        if (operand.offset() < codeOriginPtr->stackOffset() + JSStack::CallFrameHeaderSize) {
            // The operand belongs to this frame: either a header slot or a
            // local.
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);
                
                // The callee slot stays live for closure calls.
                if (codeOriginPtr->inlineCallFrame->isClosureCall
                    && reg.offset() == JSStack::Callee)
                    return true;
                
                // The argument-count slot stays live for varargs calls.
                if (codeOriginPtr->inlineCallFrame->isVarargs()
                    && reg.offset() == JSStack::ArgumentCount)
                    return true;
                
                // Any other header slot of an inlined frame is dead.
                return false;
            }
            
            // A local: defer to the baseline liveness analysis of the frame
            // that owns it.
            return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
                reg.offset(), codeOriginPtr->bytecodeIndex);
        }
        
        InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
        if (!inlineCallFrame)
            break;

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
            return true;
        
        codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();

        // The first inline call frame could be an inline tail call
        if (!codeOriginPtr)
            break;
    }
    
    // Couldn't attribute the operand to any inline frame; conservatively
    // treat it as live.
    return true;
}
1012
1013 BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
1014 {
1015     BitVector result;
1016     result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
1017     forAllLocalsLiveInBytecode(
1018         codeOrigin,
1019         [&] (VirtualRegister reg) {
1020             ASSERT(reg.isLocal());
1021             result.quickSet(reg.toLocal());
1022         });
1023     return result;
1024 }
1025
1026 unsigned Graph::frameRegisterCount()
1027 {
1028     unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
1029     return roundLocalRegisterCountForFramePointerOffset(result);
1030 }
1031
1032 unsigned Graph::stackPointerOffset()
1033 {
1034     return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
1035 }
1036
1037 unsigned Graph::requiredRegisterCountForExit()
1038 {
1039     unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
1040     for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
1041         InlineCallFrame* inlineCallFrame = *iter;
1042         CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
1043         unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
1044         count = std::max(count, requiredCount);
1045     }
1046     return count;
1047 }
1048
1049 unsigned Graph::requiredRegisterCountForExecutionAndExit()
1050 {
1051     return std::max(frameRegisterCount(), requiredRegisterCountForExit());
1052 }
1053
// Attempts to constant-fold a property load: succeeds when |base| is a
// known object, every structure in |structureSet| has a still-valid
// property-replacement watchpoint at |offset|, and the object's current
// structure is in the proven set. On success the replacement watchpoints
// are lazily registered so any later store to the property jettisons the
// code. Returns the empty JSValue when folding is not provably safe.
JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        assertIsRegistered(structure);
        
        // We can only fold if stores to this offset are being watched for
        // every structure the object might have.
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had alraedy seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}
1101
1102 JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
1103 {
1104     return tryGetConstantProperty(base, StructureSet(structure), offset);
1105 }
1106
1107 JSValue Graph::tryGetConstantProperty(
1108     JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
1109 {
1110     if (structure.isInfinite()) {
1111         // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
1112         // watching to constant-fold the property.
1113         // https://bugs.webkit.org/show_bug.cgi?id=147271
1114         return JSValue();
1115     }
1116     
1117     return tryGetConstantProperty(base, structure.set(), offset);
1118 }
1119
1120 JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
1121 {
1122     return tryGetConstantProperty(base.m_value, base.m_structure, offset);
1123 }
1124
1125 AbstractValue Graph::inferredValueForProperty(
1126     const StructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
1127 {
1128     AbstractValue result;
1129     base.forEach(
1130         [&] (Structure* structure) {
1131             AbstractValue value;
1132             value.set(*this, inferredTypeForProperty(structure, uid));
1133             result.merge(value);
1134         });
1135     if (clobberState == StructuresAreClobbered)
1136         result.clobberStructures();
1137     return result;
1138 }
1139
1140 AbstractValue Graph::inferredValueForProperty(
1141     const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
1142     StructureClobberState clobberState)
1143 {
1144     if (JSValue value = tryGetConstantProperty(base, offset)) {
1145         AbstractValue result;
1146         result.set(*this, *freeze(value), clobberState);
1147         return result;
1148     }
1149
1150     if (base.m_structure.isFinite())
1151         return inferredValueForProperty(base.m_structure.set(), uid, clobberState);
1152
1153     return AbstractValue::heapTop();
1154 }
1155
// Attempts to constant-fold a closure-variable load from a known lexical
// environment. Succeeds only when the variable's watchpoint set is in the
// IsWatched state and the slot holds a value; a lazy watchpoint is then
// registered so any later store to the variable jettisons the code.
JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        // The entry and watchpoint state must be read under the symbol
        // table's lock; the compiler thread races with the mutator here.
        ConcurrentJITLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        // Only the IsWatched state proves the value hasn't changed since
        // watching began.
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    // Register the watchpoint outside the lock.
    watchpoints().addLazily(set);
    
    return value;
}
1194
1195 JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
1196 {
1197     return tryGetConstantClosureVar(value.m_value, offset);
1198 }
1199
1200 JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
1201 {
1202     if (!node->hasConstant())
1203         return JSValue();
1204     return tryGetConstantClosureVar(node->asJSValue(), offset);
1205 }
1206
1207 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
1208 {
1209     if (!value)
1210         return nullptr;
1211     JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(value);
1212     if (!value)
1213         return nullptr;
1214     if (!view->length())
1215         return nullptr;
1216     WTF::loadLoadFence();
1217     watchpoints().addLazily(view);
1218     return view;
1219 }
1220
1221 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1222 {
1223     if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
1224         return nullptr;
1225     return tryGetFoldableView(value);
1226 }
1227
// Rebuilds the code block's constant pool from the frozen values collected
// during compilation: weak values become weak references on the plan,
// strong values become real constant-pool entries.
void Graph::registerFrozenValues()
{
    m_codeBlock->constants().resize(0);
    m_codeBlock->constantsSourceCodeRepresentation().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        // Values that don't point into the heap (numbers etc.) need no
        // registration to survive GC.
        if (!value->pointsToHeap())
            continue;
        
        ASSERT(value->structure());
        ASSERT(m_plan.weakReferences.contains(value->structure()));
        
        switch (value->strength()) {
        case WeakValue: {
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            // We already have a barrier on the code block.
            m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
    m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
}
1254
// GC visitation for the in-flight graph: reports every frozen value and
// every structure/cell embedded in node meta-data to the visitor so they
// are not collected while compilation is in progress.
void Graph::visitChildren(SlotVisitor& visitor)
{
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarrieredReadOnlyValue(value->value());
        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
    }
    
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        
        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);
            
            // Only the ops below embed heap pointers in their meta-data.
            switch (node->op()) {
            case CheckStructure:
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
                break;
                
            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
                break;
                
            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                // A transition references both endpoint structures.
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->previous);
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->next);
                break;
                
            case MultiGetByOffset:
                for (const MultiGetByOffsetCase& getCase : node->multiGetByOffsetData().cases) {
                    for (Structure* structure : getCase.set())
                        visitor.appendUnbarrieredReadOnlyPointer(structure);
                }
                break;
                    
            case MultiPutByOffset:
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
                    // Transition variants also reference the destination
                    // structure.
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
                }
                break;
                
            default:
                break;
            }
        }
    }
}
1315
// Returns the unique FrozenValue for |value|, creating and memoizing it on
// first use. Freezing registers any structure the value carries and records
// uint32-representable values in m_uint32ValuesInUse.
FrozenValue* Graph::freeze(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();
    
    // One FrozenValue per encoded JSValue for the whole compilation.
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;

    if (value.isUInt32())
        m_uint32ValuesInUse.append(value.asUInt32());
    
    FrozenValue frozenValue = FrozenValue::freeze(value);
    if (Structure* structure = frozenValue.structure())
        registerStructure(structure);
    
    // Fill in the map slot reserved by add() above.
    return result.iterator->value = m_frozenValues.add(frozenValue);
}
1334
1335 FrozenValue* Graph::freezeStrong(JSValue value)
1336 {
1337     FrozenValue* result = freeze(value);
1338     result->strengthenTo(StrongValue);
1339     return result;
1340 }
1341
1342 void Graph::convertToConstant(Node* node, FrozenValue* value)
1343 {
1344     if (value->structure())
1345         assertIsRegistered(value->structure());
1346     node->convertToConstant(value);
1347 }
1348
1349 void Graph::convertToConstant(Node* node, JSValue value)
1350 {
1351     convertToConstant(node, freeze(value));
1352 }
1353
1354 void Graph::convertToStrongConstant(Node* node, JSValue value)
1355 {
1356     convertToConstant(node, freezeStrong(value));
1357 }
1358
1359 StructureRegistrationResult Graph::registerStructure(Structure* structure)
1360 {
1361     m_plan.weakReferences.addLazily(structure);
1362     if (m_plan.watchpoints.consider(structure))
1363         return StructureRegisteredAndWatched;
1364     return StructureRegisteredNormally;
1365 }
1366
// Debug check: the structure must be weakly referenced by the plan, and if
// it is the kind of structure the DFG should watch, its transition
// watchpoint must actually be watched. Crashes the compiler with a graph
// dump otherwise.
void Graph::assertIsRegistered(Structure* structure)
{
    // It's convenient to be able to call this with a maybe-null structure.
    if (!structure)
        return;
    
    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
    
    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;
    
    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}
1382
// Shared implementation behind the Graph::handleAssertionFailure()
// overloads: prints the assertion and location, dumps the whole graph, then
// repeats the assertion (the dump can be very long) before crashing.
NO_RETURN_DUE_TO_CRASH static void crash(
    Graph& graph, const CString& whileText, const char* file, int line, const char* function,
    const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    dataLog(whileText);
    dataLog("Graph at time of failure:\n");
    graph.dump();
    dataLog("\n");
    // Repeat the header so it's visible after the (potentially huge) dump.
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    CRASH_WITH_SECURITY_IMPLICATION();
}
1399
1400 void Graph::handleAssertionFailure(
1401     std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
1402 {
1403     crash(*this, "", file, line, function, assertion);
1404 }
1405
1406 void Graph::handleAssertionFailure(
1407     Node* node, const char* file, int line, const char* function, const char* assertion)
1408 {
1409     crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
1410 }
1411
1412 void Graph::handleAssertionFailure(
1413     BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
1414 {
1415     crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
1416 }
1417
1418 void Graph::ensureDominators()
1419 {
1420     if (!m_dominators)
1421         m_dominators = std::make_unique<Dominators>(*this);
1422 }
1423
1424 void Graph::ensurePrePostNumbering()
1425 {
1426     if (!m_prePostNumbering)
1427         m_prePostNumbering = std::make_unique<PrePostNumbering>(*this);
1428 }
1429
1430 void Graph::ensureNaturalLoops()
1431 {
1432     ensureDominators();
1433     if (!m_naturalLoops)
1434         m_naturalLoops = std::make_unique<NaturalLoops>(*this);
1435 }
1436
// Returns the baseline value profile that directly corresponds to |node|,
// or nullptr when none applies: only argument locals and nodes with a heap
// prediction have eagerly addressable profiles.
ValueProfile* Graph::valueProfileFor(Node* node)
{
    if (!node)
        return nullptr;
        
    CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
        
    if (node->hasLocal(*this)) {
        // Only arguments have dedicated argument value profiles.
        if (!node->local().isArgument())
            return nullptr;
        int argument = node->local().toArgument();
        Node* argumentNode = m_arguments[argument];
        if (!argumentNode)
            return nullptr;
        // Make sure this node really observes the original argument, not a
        // later rebinding of the same local.
        if (node->variableAccessData() != argumentNode->variableAccessData())
            return nullptr;
        return profiledBlock->valueProfileForArgument(argument);
    }
        
    if (node->hasHeapPrediction())
        return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
        
    return nullptr;
}
1461
// Like valueProfileFor(), but for GetLocal nodes without an eager profile
// it falls back to a lazy-operand profile keyed on (bytecode index, local).
// Returns an empty MethodOfGettingAValueProfile if no profile is available.
MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* node)
{
    if (!node)
        return MethodOfGettingAValueProfile();
    
    // Prefer a directly addressable profile when one exists.
    if (ValueProfile* valueProfile = valueProfileFor(node))
        return MethodOfGettingAValueProfile(valueProfile);
    
    if (node->op() == GetLocal) {
        CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
        
        return MethodOfGettingAValueProfile::fromLazyOperand(
            profiledBlock,
            LazyOperandValueProfileKey(
                node->origin.semantic.bytecodeIndex, node->local()));
    }
    
    return MethodOfGettingAValueProfile();
}
1481
// Checks that the named method on String.prototype is still the original
// valueOf intrinsic, by constant-folding the property load (which installs
// replacement watchpoints keeping the answer valid for this compilation).
bool Graph::isStringPrototypeMethodSane(JSObject* stringPrototype, Structure* stringPrototypeStructure, UniquedStringImpl* uid)
{
    unsigned attributesUnused;
    // getConcurrently: we may be on the compiler thread.
    PropertyOffset offset = stringPrototypeStructure->getConcurrently(uid, attributesUnused);
    if (!isValidOffset(offset))
        return false;

    JSValue value = tryGetConstantProperty(stringPrototype, stringPrototypeStructure, offset);
    if (!value)
        return false;

    JSFunction* function = jsDynamicCast<JSFunction*>(value);
    if (!function)
        return false;

    // The method must be the "return the wrapped string" intrinsic.
    if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
        return false;
    
    return true;
}
1502
// Decides whether it is sound to optimize StringObject accesses at this
// code origin: we must not have exited on NotStringObject there before, the
// String.prototype structure must be watchable, and both toString and
// valueOf must still be the original intrinsic.
bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
{
    // If we already OSR-exited here because something wasn't a
    // StringObject, don't keep trying.
    if (hasExitSite(codeOrigin, NotStringObject))
        return false;

    Structure* stringObjectStructure = globalObjectFor(codeOrigin)->stringObjectStructure();
    registerStructure(stringObjectStructure);
    ASSERT(stringObjectStructure->storedPrototype().isObject());
    ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo() == StringPrototype::info());

    FrozenValue* stringPrototypeObjectValue = freeze(stringObjectStructure->storedPrototype());
    StringPrototype* stringPrototypeObject = stringPrototypeObjectValue->dynamicCast<StringPrototype*>();
    Structure* stringPrototypeStructure = stringPrototypeObjectValue->structure();
    // We need the prototype's structure to be watched so any change to
    // String.prototype jettisons this code.
    if (registerStructure(stringPrototypeStructure) != StructureRegisteredAndWatched)
        return false;

    if (stringPrototypeStructure->isDictionary())
        return false;

    // We're being conservative here. We want DFG's ToString on StringObject to be
    // used in both numeric contexts (that would call valueOf()) and string contexts
    // (that would call toString()). We don't want the DFG to have to distinguish
    // between the two, just because that seems like it would get confusing. So we
    // just require both methods to be sane.
    if (!isStringPrototypeMethodSane(stringPrototypeObject, stringPrototypeStructure, m_vm.propertyNames->valueOf.impl()))
        return false;
    if (!isStringPrototypeMethodSane(stringPrototypeObject, stringPrototypeStructure, m_vm.propertyNames->toString.impl()))
        return false;

    return true;
}
1534
1535 } } // namespace JSC::DFG
1536
1537 #endif // ENABLE(DFG_JIT)