Harden how the compiler references GC objects
[WebKit.git] / Source / JavaScriptCore / dfg / DFGGraph.cpp
/*
 * Copyright (C) 2011, 2013-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGGraph.h"

#if ENABLE(DFG_JIT)

#include "BytecodeKills.h"
#include "BytecodeLivenessAnalysisInlines.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGBackwardsCFG.h"
#include "DFGBackwardsDominators.h"
#include "DFGBlockWorklist.h"
#include "DFGCFG.h"
#include "DFGClobberSet.h"
#include "DFGClobbersExitState.h"
#include "DFGControlEquivalenceAnalysis.h"
#include "DFGDominators.h"
#include "DFGFlowIndexing.h"
#include "DFGFlowMap.h"
#include "DFGJITCode.h"
#include "DFGMayExit.h"
#include "DFGNaturalLoops.h"
#include "DFGPrePostNumbering.h"
#include "DFGVariableAccessDataDump.h"
#include "FullBytecodeLiveness.h"
#include "FunctionExecutableDump.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JSLexicalEnvironment.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "OperandsInlines.h"
#include "JSCInlines.h"
#include "StackAlignment.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>

namespace JSC { namespace DFG {

// Creates an array of stringized names.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};

Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock)
    , m_profiledBlock(m_codeBlock->alternative())
    , m_allocator(longLivedState.m_allocator)
    , m_cfg(std::make_unique<CFG>(*this))
    , m_nextMachineLocal(0)
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);
    
    m_hasDebuggerEnabled = m_profiledBlock->wasCompiledWithDebuggingOpcodes() || Options::forceDebuggerBytecodeGeneration();
    
    m_indexingCache = std::make_unique<FlowIndexing>(*this);
    m_abstractValuesCache = std::make_unique<FlowMap<AbstractValue>>(*this);

    registerStructure(vm.structureStructure.get());
    this->stringStructure = registerStructure(vm.stringStructure.get());
    this->symbolStructure = registerStructure(vm.symbolStructure.get());
}

Graph::~Graph()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;

        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            m_allocator.free(block->phis[phiIndex]);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            m_allocator.free(block->at(nodeIndex));
    }
    m_allocator.freeAll();
}

const char* Graph::opName(NodeType op)
{
    return dfgOpNames[op];
}

static void printWhiteSpace(PrintStream& out, unsigned amount)
{
    while (amount-- > 0)
        out.print(" ");
}

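// Prints the inline call stack transitions between the previously dumped node
// and the current one: "<--" lines for frames popped, "-->" lines for frames
// pushed. Returns true if anything was printed.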
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
{
    if (!currentNode->origin.semantic)
        return false;
    
    Node* previousNode = previousNodeRef;
    previousNodeRef = currentNode;

    if (!previousNode)
        return false;
    
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}

int Graph::amountOfNodeWhiteSpace(Node* node)
{
    return (node->origin.semantic.inlineDepth() - 1) * 2;
}

void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
{
    printWhiteSpace(out, amountOfNodeWhiteSpace(node));
}

void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count is prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. They may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasArithRoundingMode())
        out.print(comma, "Rounding:", node->arithRoundingMode());
    if (node->hasScopeOffset())
        out.print(comma, node->scopeOffset());
    if (node->hasDirectArgumentsOffset())
        out.print(comma, node->capturedArgumentsOffset());
    if (node->hasArgumentIndex())
        out.print(comma, node->argumentIndex());
    if (node->hasRegisterPointer())
        out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasPromotedLocationDescriptor())
        out.print(comma, node->promotedLocationDescriptor());
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet().toStructureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure().get(), context));
    if (node->hasTransition()) {
        out.print(comma, pointerDumpInContext(node->transition(), context));
#if USE(JSVALUE64)
        out.print(", ID:", node->transition()->next->id());
#else
        out.print(", ID:", RawPointer(node->transition()->next.get()));
#endif
    }
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasSpeculatedTypeForQuery())
        out.print(comma, SpeculationDump(node->speculatedTypeForQuery()));
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = node->storageAccessData();
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
        out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.cases.size(); ++i)
            out.print(comma, inContext(data.cases[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->accessesStack(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            operand = variableAccessData->machineLocal();
            if (operand.isValid())
                out.print(comma, "machine:", operand);
        }
    }
    if (node->hasStackAccessData()) {
        StackAccessData* data = node->stackAccessData();
        out.print(comma, data->local);
        if (data->machineLocal.isValid())
            out.print(comma, "machine:", data->machineLocal);
        out.print(comma, data->format);
    }
    if (node->hasUnlinkedLocal())
        out.print(comma, node->unlinkedLocal());
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid())
            out.print(comma, "machine:", operand);
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasLazyJSValue())
        out.print(comma, node->lazyJSValue());
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasWatchpointSet())
        out.print(comma, RawPointer(node->watchpointSet()));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->hasObjectMaterializationData())
        out.print(comma, node->objectMaterializationData());
    if (node->hasCallVarargsData())
        out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
    if (node->hasLoadVarargsData()) {
        LoadVarargsData* data = node->loadVarargsData();
        out.print(comma, "start = ", data->start, ", count = ", data->count);
        if (data->machineStart.isValid())
            out.print(", machineStart = ", data->machineStart);
        if (data->machineCount.isValid())
            out.print(", machineCount = ", data->machineCount);
        out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
        out.print(", limit = ", data->limit);
    }
    if (node->hasCallDOMGetterData()) {
        CallDOMGetterData* data = node->callDOMGetterData();
        out.print(comma, "id", data->identifierNumber, "{", identifiers()[data->identifierNumber], "}");
        out.print(", domJIT = ", RawPointer(data->domJIT));
    }
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    ExitMode exitMode = mayExit(*this, node);
    if (exitMode != DoesNotExit)
        out.print(comma, exitMode);
    if (clobbersExitState(*this, node))
        out.print(comma, "ClobbersExit");
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
            out.print(comma, "exit: ", node->origin.forExit);
    }
    if (!node->origin.exitOK)
        out.print(comma, "ExitInvalid");
    if (node->origin.wasHoisted)
        out.print(comma, "WasHoisted");
    out.print(")");

    if (node->accessesStack(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}

bool Graph::terminalsAreValid()
{
    for (BasicBlock* block : blocksInNaturalOrder()) {
        if (!block->terminal())
            return false;
    }
    return true;
}

void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
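    // executionCount is a double, and "x == x" is false only when x is NaN, so
    // this prints the count only if profiling actually recorded one.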
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    out.print(prefix, "  Successors:");
    if (block->terminal()) {
        for (BasicBlock* successor : block->successors()) {
            out.print(" ", *successor);
            if (m_prePostNumbering)
                out.print(" (", m_prePostNumbering->edgeKind(block, successor), ")");
        }
    } else
        out.print(" <invalid>");
    out.print("\n");
    if (m_dominators && terminalsAreValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators->blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators->dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators->iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_backwardsDominators && terminalsAreValid()) {
        out.print(prefix, "  Backwards dominates by: ", m_backwardsDominators->dominatorsOf(block), "\n");
        out.print(prefix, "  Backwards dominates: ", m_backwardsDominators->blocksDominatedBy(block), "\n");
    }
    if (m_controlEquivalenceAnalysis && terminalsAreValid()) {
        out.print(prefix, "  Control equivalent to:");
        for (BasicBlock* otherBlock : blocksInNaturalOrder()) {
            if (m_controlEquivalenceAnalysis->areEquivalent(block, otherBlock))
                out.print(" ", *otherBlock);
        }
        out.print("\n");
    }
    if (m_prePostNumbering)
        out.print(prefix, "  Pre/Post Numbering: ", m_prePostNumbering->preNumber(block), "/", m_prePostNumbering->postNumber(block), "\n");
    if (m_naturalLoops) {
        if (const NaturalLoop* loop = m_naturalLoops->headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops->loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}

void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    if (m_form == SSA)
        out.print("  Argument formats: ", listDump(m_argumentFormats), "\n");
    else
        out.print("  Arguments: ", listDump(m_arguments), "\n");
    out.print("\n");
    
    Node* lastNode = nullptr;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeValuePairListDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    out.print("GC Values:\n");
    for (FrozenValue* value : m_frozenValues) {
        if (value->pointsToHeap())
            out.print("    ", inContext(*value, &myContext), "\n");
    }

    out.print(inContext(watchpoints(), &myContext));
    
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}

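// Node indices are recycled: deleteNode() pushes a dead node's index onto
// m_nodeIndexFreeList, and new nodes reuse those slots before growing
// m_nodesByIndex.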
void Graph::addNodeToMapByIndex(Node* node)
{
    if (m_nodeIndexFreeList.isEmpty()) {
        node->m_index = m_nodesByIndex.size();
        m_nodesByIndex.append(node);
        return;
    }
    unsigned index = m_nodeIndexFreeList.takeLast();
    node->m_index = index;
    ASSERT(!m_nodesByIndex[index]);
    m_nodesByIndex[index] = node;
}

void Graph::deleteNode(Node* node)
{
    if (validationEnabled() && m_form == SSA) {
        for (BasicBlock* block : blocksInNaturalOrder()) {
            DFG_ASSERT(*this, node, !block->ssa->liveAtHead.contains(node));
            DFG_ASSERT(*this, node, !block->ssa->liveAtTail.contains(node));
        }
    }

    RELEASE_ASSERT(m_nodesByIndex[node->m_index] == node);
    unsigned nodeIndex = node->m_index;
    m_nodesByIndex[nodeIndex] = nullptr;
    m_nodeIndexFreeList.append(nodeIndex);

    m_allocator.free(node);
}

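// Compacts m_nodesByIndex after deletions: a hole cursor walks forward while an
// end cursor walks backward, moving each trailing node into the lowest free
// slot (updating its m_index) until the cursors meet.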
void Graph::packNodeIndices()
{
    if (m_nodeIndexFreeList.isEmpty())
        return;

    unsigned holeIndex = 0;
    unsigned endIndex = m_nodesByIndex.size();

    while (true) {
        while (holeIndex < endIndex && m_nodesByIndex[holeIndex])
            ++holeIndex;

        if (holeIndex == endIndex)
            break;
        ASSERT(holeIndex < m_nodesByIndex.size());
        ASSERT(!m_nodesByIndex[holeIndex]);

        do {
            --endIndex;
        } while (!m_nodesByIndex[endIndex] && endIndex > holeIndex);

        if (holeIndex == endIndex)
            break;
        ASSERT(endIndex > holeIndex);
        ASSERT(m_nodesByIndex[endIndex]);

        auto& value = m_nodesByIndex[endIndex];
        value->m_index = holeIndex;
        m_nodesByIndex[holeIndex] = WTFMove(value);
        ++holeIndex;
    }

    m_nodeIndexFreeList.resize(0);
    m_nodesByIndex.resize(endIndex);
}

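// Reverts ThreadedCPS form back to LoadStore by resetting the Phi nodes'
// children, the edges that threading introduced.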
void Graph::dethread()
{
    if (m_form == LoadStore || m_form == SSA)
        return;
    
    if (logCompilationChanges())
        dataLog("Dethreading DFG graph.\n");
    
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
            Node* phi = block->phis[phiIndex];
            phi->children.reset();
        }
    }
    
    m_form = LoadStore;
}

void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
{
    if (!successor->isReachable) {
        successor->isReachable = true;
        worklist.append(successor);
    }
    
    successor->predecessors.append(block);
}

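// Forward depth-first search from the root block: marks every reachable block
// and, via handleSuccessor(), rebuilds each block's predecessor list.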
void Graph::determineReachability()
{
    Vector<BasicBlock*, 16> worklist;
    worklist.append(block(0));
    block(0)->isReachable = true;
    while (!worklist.isEmpty()) {
        BasicBlock* block = worklist.takeLast();
        for (unsigned i = block->numSuccessors(); i--;)
            handleSuccessor(worklist, block, block->successor(i));
    }
}

void Graph::resetReachability()
{
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        block->isReachable = false;
        block->predecessors.clear();
    }
    
    determineReachability();
}

namespace {

class RefCountCalculator {
public:
    RefCountCalculator(Graph& graph)
        : m_graph(graph)
    {
    }
    
    void calculate()
    {
        // First reset the counts to 0 for all nodes.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;)
                block->at(indexInBlock)->setRefCount(0);
            for (unsigned phiIndex = block->phis.size(); phiIndex--;)
                block->phis[phiIndex]->setRefCount(0);
        }
    
        // Now find the roots:
        // - Nodes that are must-generate.
        // - Nodes that are reachable from type checks.
        // Set their ref counts to 1 and put them on the worklist.
        for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
            BasicBlock* block = m_graph.block(blockIndex);
            if (!block)
                continue;
            for (unsigned indexInBlock = block->size(); indexInBlock--;) {
                Node* node = block->at(indexInBlock);
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
                if (!(node->flags() & NodeMustGenerate))
                    continue;
                if (!node->postfixRef())
                    m_worklist.append(node);
            }
        }
        
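        // Propagate liveness to children until the worklist drains. In SSA,
        // Phi->Upsilon edges are metadata on the Upsilon, so a newly live Phi
        // can make more Upsilons live (see below); iterate to a fixpoint.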
        while (!m_worklist.isEmpty()) {
            while (!m_worklist.isEmpty()) {
                Node* node = m_worklist.last();
                m_worklist.removeLast();
                ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
                DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
            }
            
            if (m_graph.m_form == SSA) {
                // Find Phi->Upsilon edges, which are represented as meta-data in the
                // Upsilon.
                for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
                    BasicBlock* block = m_graph.block(blockIndex);
                    if (!block)
                        continue;
                    for (unsigned nodeIndex = block->size(); nodeIndex--;) {
                        Node* node = block->at(nodeIndex);
                        if (node->op() != Upsilon)
                            continue;
                        if (node->shouldGenerate())
                            continue;
                        if (node->phi()->shouldGenerate())
                            countNode(node);
                    }
                }
            }
        }
    }
    
private:
    void findTypeCheckRoot(Node*, Edge edge)
    {
        // We may have an "unproved" untyped use for code that is unreachable. The CFA
        // will just not have gotten around to it.
        if (edge.isProved() || edge.willNotHaveCheck())
            return;
        if (!edge->postfixRef())
            m_worklist.append(edge.node());
    }
    
    void countNode(Node* node)
    {
        if (node->postfixRef())
            return;
        m_worklist.append(node);
    }
    
    void countEdge(Node*, Edge edge)
    {
        // Don't count edges that are already counted for their type checks.
        if (!(edge.isProved() || edge.willNotHaveCheck()))
            return;
        countNode(edge.node());
    }
    
    Graph& m_graph;
    Vector<Node*, 128> m_worklist;
};

} // anonymous namespace

void Graph::computeRefCounts()
{
    RefCountCalculator calculator(*this);
    calculator.calculate();
}

void Graph::killBlockAndItsContents(BasicBlock* block)
{
    if (auto& ssaData = block->ssa)
        ssaData->invalidate();
    for (unsigned phiIndex = block->phis.size(); phiIndex--;)
        deleteNode(block->phis[phiIndex]);
    for (Node* node : *block)
        deleteNode(node);
    
    killBlock(block);
}

void Graph::killUnreachableBlocks()
{
    invalidateNodeLiveness();

    for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        if (block->isReachable)
            continue;
        
        killBlockAndItsContents(block);
    }
}

void Graph::invalidateCFG()
{
    m_dominators = nullptr;
    m_naturalLoops = nullptr;
    m_prePostNumbering = nullptr;
    m_controlEquivalenceAnalysis = nullptr;
    m_backwardsDominators = nullptr;
    m_backwardsCFG = nullptr;
}

void Graph::invalidateNodeLiveness()
{
    if (m_form != SSA)
        return;

    for (BasicBlock* block : blocksInNaturalOrder())
        block->ssa->invalidate();
}

void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}

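// Depth-first pre-order over the reachable blocks. The worklist ignores blocks
// it has already seen, so each block is appended exactly once.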
BlockList Graph::blocksInPreOrder()
{
    BlockList result;
    BlockWorklist worklist;
    worklist.push(block(0));
    while (BasicBlock* block = worklist.pop()) {
        result.append(block);
        for (unsigned i = block->numSuccessors(); i--;)
            worklist.push(block->successor(i));
    }
    return result;
}

BlockList Graph::blocksInPostOrder()
{
    BlockList result;
    PostOrderBlockWorklist worklist;
    worklist.push(block(0));
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case VisitOrder::Pre:
            worklist.pushPost(item.node);
            for (unsigned i = item.node->numSuccessors(); i--;)
                worklist.push(item.node->successor(i));
            break;
        case VisitOrder::Post:
            result.append(item.node);
            break;
        }
    }
    return result;
}

void Graph::clearReplacements()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setReplacement(nullptr);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setReplacement(nullptr);
    }
}

void Graph::clearEpochs()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->setEpoch(Epoch());
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->setEpoch(Epoch());
    }
}

void Graph::initializeNodeOwners()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->owner = block;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->owner = block;
    }
}

void Graph::clearFlagsOnAllNodes(NodeFlags flags)
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->clearFlags(flags);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->clearFlags(flags);
    }
}

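// Attempts to watch an ObjectPropertyCondition: the cells it involves become
// weak references (the code is jettisoned if they die), and the condition
// itself becomes a desired watchpoint that invalidates the compilation when
// fired. Presence conditions also mark the (object, offset) pair as safe to
// load from.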
bool Graph::watchCondition(const ObjectPropertyCondition& key)
{
    if (!key.isWatchable())
        return false;
    
    m_plan.weakReferences.addLazily(key.object());
    if (key.hasPrototype())
        m_plan.weakReferences.addLazily(key.prototype());
    if (key.hasRequiredValue())
        m_plan.weakReferences.addLazily(key.requiredValue());
    
    m_plan.watchpoints.addLazily(key);

    if (key.kind() == PropertyCondition::Presence)
        m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
    
    return true;
}

bool Graph::watchConditions(const ObjectPropertyConditionSet& keys)
{
    if (!keys.isValid())
        return false;

    for (const ObjectPropertyCondition& key : keys) {
        if (!watchCondition(key))
            return false;
    }
    return true;
}

bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
{
    return m_safeToLoad.contains(std::make_pair(base, offset));
}

InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
{
    assertIsRegistered(key.structure());
    
    auto iter = m_inferredTypes.find(key);
    if (iter != m_inferredTypes.end())
        return iter->value;

    InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
    if (!typeObject) {
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }

    InferredType::Descriptor typeDescriptor = typeObject->descriptor();
    if (typeDescriptor.kind() == InferredType::Top) {
        m_inferredTypes.add(key, InferredType::Top);
        return InferredType::Top;
    }
    
    m_inferredTypes.add(key, typeDescriptor);

    m_plan.weakReferences.addLazily(typeObject);
    registerInferredType(typeDescriptor);

    // Note that we may already be watching this desired inferred type, because multiple structures may
    // point to the same InferredType instance.
    m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));

    return typeDescriptor;
}

FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
    if (iter != m_bytecodeLiveness.end())
        return *iter->value;
    
    std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
    codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
    FullBytecodeLiveness& result = *liveness;
    m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
    return result;
}

FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}

BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
    if (iter != m_bytecodeKills.end())
        return *iter->value;
    
    std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
    codeBlock->livenessAnalysis().computeKills(*kills);
    BytecodeKills& result = *kills;
    m_bytecodeKills.add(codeBlock, WTFMove(kills));
    return result;
}

BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
{
    return killsFor(baselineCodeBlockFor(inlineCallFrame));
}

bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    static const bool verbose = false;
    
    if (verbose)
        dataLog("Checking if operand is live: ", operand, "\n");
    CodeOrigin* codeOriginPtr = &codeOrigin;
    for (;;) {
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOriginPtr->stackOffset());
        
        if (verbose)
            dataLog("reg = ", reg, "\n");
        
        if (operand.offset() < codeOriginPtr->stackOffset() + CallFrame::headerSizeInRegisters) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < CallFrame::headerSizeInRegisters);
                
                if (codeOriginPtr->inlineCallFrame->isClosureCall
                    && reg.offset() == CallFrameSlot::callee) {
                    if (verbose)
                        dataLog("Looks like a callee.\n");
                    return true;
                }
                
                if (codeOriginPtr->inlineCallFrame->isVarargs()
                    && reg.offset() == CallFrameSlot::argumentCount) {
                    if (verbose)
                        dataLog("Looks like the argument count.\n");
                    return true;
                }
                
                return false;
            }

            if (verbose)
                dataLog("Asking the bytecode liveness.\n");
            return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
                reg.offset(), codeOriginPtr->bytecodeIndex);
        }
        
        InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
        if (!inlineCallFrame) {
            if (verbose)
                dataLog("Ran out of stack, returning true.\n");
            return true;
        }

        // Arguments are always live. This would be redundant if it wasn't for our
        // op_call_varargs inlining.
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size()) {
            if (verbose)
                dataLog("Argument is live.\n");
            return true;
        }
        
        codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();

        // The first inline call frame could be an inline tail call.
        if (!codeOriginPtr) {
            if (verbose)
                dataLog("Dead because of tail inlining.\n");
            return false;
        }
    }
    
    RELEASE_ASSERT_NOT_REACHED();
}

BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
{
    BitVector result;
    result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
    forAllLocalsLiveInBytecode(
        codeOrigin,
        [&] (VirtualRegister reg) {
            ASSERT(reg.isLocal());
            result.quickSet(reg.toLocal());
        });
    return result;
}

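// Stack space a caller reserves for an outgoing call: the callee frame header
// plus the arguments, rounded up to stack alignment, minus the CallerFrameAndPC
// slots, which the call sequence itself accounts for.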
unsigned Graph::parameterSlotsForArgCount(unsigned argCount)
{
    size_t frameSize = CallFrame::headerSizeInRegisters + argCount;
    size_t alignedFrameSize = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), frameSize);
    return alignedFrameSize - CallerFrameAndPC::sizeInRegisters;
}

unsigned Graph::frameRegisterCount()
{
    unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
    return roundLocalRegisterCountForFramePointerOffset(result);
}

unsigned Graph::stackPointerOffset()
{
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}

unsigned Graph::requiredRegisterCountForExit()
{
    unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
    for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
        InlineCallFrame* inlineCallFrame = *iter;
        CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
        unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
        count = std::max(count, requiredCount);
    }
    return count;
}

unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const RegisteredStructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        RegisteredStructure structure = structureSet[i];
        
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.toStructureSet().contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}

JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
{
    return tryGetConstantProperty(base, RegisteredStructureSet(registerStructure(structure)), offset);
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
{
    if (structure.isInfinite()) {
        // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
        // watching to constant-fold the property.
        // https://bugs.webkit.org/show_bug.cgi?id=147271
        return JSValue();
    }
    
    return tryGetConstantProperty(base, structure.set(), offset);
}

JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}

AbstractValue Graph::inferredValueForProperty(
    const RegisteredStructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
{
    AbstractValue result;
    base.forEach(
        [&] (RegisteredStructure structure) {
            AbstractValue value;
            value.set(*this, inferredTypeForProperty(structure.get(), uid));
            result.merge(value);
        });
    if (clobberState == StructuresAreClobbered)
        result.clobberStructures();
    return result;
}

AbstractValue Graph::inferredValueForProperty(
    const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
    StructureClobberState clobberState)
{
    if (JSValue value = tryGetConstantProperty(base, offset)) {
        AbstractValue result;
        result.set(*this, *freeze(value), clobberState);
        return result;
    }

    if (base.m_structure.isFinite())
        return inferredValueForProperty(base.m_structure.set(), uid, clobberState);

    return AbstractValue::heapTop();
}

JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
{
    // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
    
    if (!base)
        return JSValue();
    
    JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(base);
    if (!activation)
        return JSValue();
    
    SymbolTable* symbolTable = activation->symbolTable();
    JSValue value;
    WatchpointSet* set;
    {
        ConcurrentJSLocker locker(symbolTable->m_lock);
        
        SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
        if (!entry)
            return JSValue();
        
        set = entry->watchpointSet();
        if (!set)
            return JSValue();
        
        if (set->state() != IsWatched)
            return JSValue();
        
        ASSERT(entry->scopeOffset() == offset);
        value = activation->variableAt(offset).get();
        if (!value)
            return JSValue();
    }
    
    watchpoints().addLazily(set);
    
    return value;
}

JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
{
    return tryGetConstantClosureVar(value.m_value, offset);
}

JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
{
    if (!node->hasConstant())
        return JSValue();
    return tryGetConstantClosureVar(node->asJSValue(), offset);
}

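// A typed array view's length can only be constant-folded if we also watch the
// view; the load-load fence keeps later reads of the view from being reordered
// before the length check on weakly ordered hardware.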
JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
{
    if (!value)
        return nullptr;
    JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(value);
    if (!view)
1371         return nullptr;
1372     if (!view->length())
1373         return nullptr;
1374     WTF::loadLoadFence();
1375     watchpoints().addLazily(view);
1376     return view;
1377 }
1378
1379 JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
1380 {
1381     if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
1382         return nullptr;
1383     return tryGetFoldableView(value);
1384 }
1385
1386 void Graph::registerFrozenValues()
1387 {
1388     m_codeBlock->constants().resize(0);
1389     m_codeBlock->constantsSourceCodeRepresentation().resize(0);
1390     for (FrozenValue* value : m_frozenValues) {
1391         if (!value->pointsToHeap())
1392             continue;
1393         
1394         ASSERT(value->structure());
1395         ASSERT(m_plan.weakReferences.contains(value->structure()));
1396         
1397         switch (value->strength()) {
1398         case WeakValue: {
1399             m_plan.weakReferences.addLazily(value->value().asCell());
1400             break;
1401         }
1402         case StrongValue: {
1403             unsigned constantIndex = m_codeBlock->addConstantLazily();
1404             // We already have a barrier on the code block.
1405             m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
1406             break;
1407         } }
1408     }
1409     m_codeBlock->constants().shrinkToFit();
1410     m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
1411 }
1412
1413 void Graph::visitChildren(SlotVisitor& visitor)
1414 {
1415     for (FrozenValue* value : m_frozenValues) {
1416         visitor.appendUnbarriered(value->value());
1417         visitor.appendUnbarriered(value->structure());
1418     }
1419 }
1420
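// freeze() interns a value: repeated calls with the same encoded JSValue
// return the same FrozenValue*, and any structure the value carries is
// registered as a side effect. A hedged example of the usual pattern:
//
//     FrozenValue* frozen = graph.freeze(jsNumber(42));
//     graph.convertToConstant(node, frozen);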
1421 FrozenValue* Graph::freeze(JSValue value)
1422 {
1423     if (UNLIKELY(!value))
1424         return FrozenValue::emptySingleton();
1425
1426     // There are weird relationships in how optimized CodeBlocks
1427     // point to other CodeBlocks. We don't want to have them be
1428     // part of the weak pointer set. For example, an optimized CodeBlock
1429     // having a weak pointer to itself will cause it to get collected.
1430     RELEASE_ASSERT(!jsDynamicCast<CodeBlock*>(value));
1431     
1432     auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
1433     if (LIKELY(!result.isNewEntry))
1434         return result.iterator->value;
1435
1436     if (value.isUInt32())
1437         m_uint32ValuesInUse.append(value.asUInt32());
1438     
1439     FrozenValue frozenValue = FrozenValue::freeze(value);
1440     if (Structure* structure = frozenValue.structure())
1441         registerStructure(structure);
1442     
1443     return result.iterator->value = m_frozenValues.add(frozenValue);
1444 }
1445
1446 FrozenValue* Graph::freezeStrong(JSValue value)
1447 {
1448     FrozenValue* result = freeze(value);
1449     result->strengthenTo(StrongValue);
1450     return result;
1451 }
1452
1453 void Graph::convertToConstant(Node* node, FrozenValue* value)
1454 {
1455     if (value->structure())
1456         assertIsRegistered(value->structure());
1457     node->convertToConstant(value);
1458 }
1459
1460 void Graph::convertToConstant(Node* node, JSValue value)
1461 {
1462     convertToConstant(node, freeze(value));
1463 }
1464
1465 void Graph::convertToStrongConstant(Node* node, JSValue value)
1466 {
1467     convertToConstant(node, freezeStrong(value));
1468 }
1469
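// Registering a structure makes it safe for compiled code to reference: it
// is kept weakly alive for the plan's lifetime and, if its transition
// watchpoint set is still intact, it is watched. An illustrative caller:
//
//     StructureRegistrationResult result;
//     RegisteredStructure registered = graph.registerStructure(structure, result);
//     if (result == StructureRegisteredAndWatched) {
//         // The structure cannot transition without jettisoning this code.
//     }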
1470 RegisteredStructure Graph::registerStructure(Structure* structure, StructureRegistrationResult& result)
1471 {
1472     m_plan.weakReferences.addLazily(structure);
1473     if (m_plan.watchpoints.consider(structure))
1474         result = StructureRegisteredAndWatched;
1475     else
1476         result = StructureRegisteredNormally;
1477     return RegisteredStructure::createPrivate(structure);
1478 }
1479
1480 void Graph::assertIsRegistered(Structure* structure)
1481 {
1482     // It's convenient to be able to call this with a maybe-null structure.
1483     if (!structure)
1484         return;
1485     
1486     DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
1487     
1488     if (!structure->dfgShouldWatch())
1489         return;
1490     if (watchpoints().isWatched(structure->transitionWatchpointSet()))
1491         return;
1492     
1493     DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
1494 }
1495
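// The assertion text is printed both before and after the graph dump,
// presumably so it is easy to find on either side of a very long dump.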
1496 NO_RETURN_DUE_TO_CRASH static void crash(
1497     Graph& graph, const CString& whileText, const char* file, int line, const char* function,
1498     const char* assertion)
1499 {
1500     startCrashing();
1501     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1502     dataLog(file, "(", line, ") : ", function, "\n");
1503     dataLog("\n");
1504     dataLog(whileText);
1505     dataLog("Graph at time of failure:\n");
1506     graph.dump();
1507     dataLog("\n");
1508     dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
1509     dataLog(file, "(", line, ") : ", function, "\n");
1510     CRASH_WITH_SECURITY_IMPLICATION();
1511 }
1512
1513 void Graph::handleAssertionFailure(
1514     std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
1515 {
1516     crash(*this, "", file, line, function, assertion);
1517 }
1518
1519 void Graph::handleAssertionFailure(
1520     Node* node, const char* file, int line, const char* function, const char* assertion)
1521 {
1522     crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
1523 }
1524
1525 void Graph::handleAssertionFailure(
1526     BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
1527 {
1528     crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
1529 }
1530
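// The analyses below are computed lazily and cached on the graph; the graph
// drops the cached copies when the CFG is invalidated. Illustrative use:
//
//     Dominators& dominators = graph.ensureDominators();
//     if (dominators.dominates(loopHeader, block)) { /* ... */ }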
1531 Dominators& Graph::ensureDominators()
1532 {
1533     if (!m_dominators)
1534         m_dominators = std::make_unique<Dominators>(*this);
1535     return *m_dominators;
1536 }
1537
1538 PrePostNumbering& Graph::ensurePrePostNumbering()
1539 {
1540     if (!m_prePostNumbering)
1541         m_prePostNumbering = std::make_unique<PrePostNumbering>(*this);
1542     return *m_prePostNumbering;
1543 }
1544
1545 NaturalLoops& Graph::ensureNaturalLoops()
1546 {
1547     ensureDominators();
1548     if (!m_naturalLoops)
1549         m_naturalLoops = std::make_unique<NaturalLoops>(*this);
1550     return *m_naturalLoops;
1551 }
1552
1553 BackwardsCFG& Graph::ensureBackwardsCFG()
1554 {
1555     if (!m_backwardsCFG)
1556         m_backwardsCFG = std::make_unique<BackwardsCFG>(*this);
1557     return *m_backwardsCFG;
1558 }
1559
1560 BackwardsDominators& Graph::ensureBackwardsDominators()
1561 {
1562     if (!m_backwardsDominators)
1563         m_backwardsDominators = std::make_unique<BackwardsDominators>(*this);
1564     return *m_backwardsDominators;
1565 }
1566
1567 ControlEquivalenceAnalysis& Graph::ensureControlEquivalenceAnalysis()
1568 {
1569     if (!m_controlEquivalenceAnalysis)
1570         m_controlEquivalenceAnalysis = std::make_unique<ControlEquivalenceAnalysis>(*this);
1571     return *m_controlEquivalenceAnalysis;
1572 }
1573
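// Walks from operandNode through pure representation changes (Identity,
// ValueRep, DoubleRep, ...) toward the node that originally produced the
// value; at the first node whose bytecode origin differs from currentNode's,
// the baseline CodeBlock's profiling for that origin still describes the
// value being asked about.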
1574 MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* currentNode, Node* operandNode)
1575 {
1576     for (Node* node = operandNode; node;) {
1577         // currentNode is null when we're doing speculation checks for checkArgumentTypes().
1578         if (!currentNode || node->origin != currentNode->origin) {
1579             CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
1580
1581             if (node->accessesStack(*this)) {
1582                 ValueProfile* result = [&] () -> ValueProfile* {
1583                     if (!node->local().isArgument())
1584                         return nullptr;
1585                     int argument = node->local().toArgument();
1586                     Node* argumentNode = m_arguments[argument];
1587                     if (!argumentNode)
1588                         return nullptr;
1589                     if (node->variableAccessData() != argumentNode->variableAccessData())
1590                         return nullptr;
1591                     return profiledBlock->valueProfileForArgument(argument);
1592                 }();
1593                 if (result)
1594                     return result;
1595
1596                 if (node->op() == GetLocal) {
1597                     return MethodOfGettingAValueProfile::fromLazyOperand(
1598                         profiledBlock,
1599                         LazyOperandValueProfileKey(
1600                             node->origin.semantic.bytecodeIndex, node->local()));
1601                 }
1602             }
1603
1604             if (node->hasHeapPrediction())
1605                 return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
1606
1607             if (profiledBlock->hasBaselineJITProfiling()) {
1608                 if (ArithProfile* result = profiledBlock->arithProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex))
1609                     return result;
1610             }
1611         }
1612
1613         switch (node->op()) {
1614         case BooleanToNumber:
1615         case Identity:
1616         case ValueRep:
1617         case DoubleRep:
1618         case Int52Rep:
1619             node = node->child1().node();
1620             break;
1621         default:
1622             node = nullptr;
1623         }
1624     }
1625     
1626     return MethodOfGettingAValueProfile();
1627 }
1628
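// Compiler-thread-safe lookup of a RegExp.prototype property: the offset is
// resolved with getConcurrently() and read via tryGetConstantProperty(), and
// only plain functions and GetterSetters are reported back to the caller.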
1629 bool Graph::getRegExpPrototypeProperty(JSObject* regExpPrototype, Structure* regExpPrototypeStructure, UniquedStringImpl* uid, JSValue& returnJSValue)
1630 {
1631     unsigned attributesUnused;
1632     PropertyOffset offset = regExpPrototypeStructure->getConcurrently(uid, attributesUnused);
1633     if (!isValidOffset(offset))
1634         return false;
1635
1636     JSValue value = tryGetConstantProperty(regExpPrototype, regExpPrototypeStructure, offset);
1637     if (!value)
1638         return false;
1639
1640     // We only care about functions and getters at this point. If you want to access other properties
1641     // you'll have to add code for those types.
1642     JSFunction* function = jsDynamicCast<JSFunction*>(value);
1643     if (!function) {
1644         GetterSetter* getterSetter = jsDynamicCast<GetterSetter*>(value);
1645
1646         if (!getterSetter)
1647             return false;
1648
1649         returnJSValue = JSValue(getterSetter);
1650         return true;
1651     }
1652
1653     returnJSValue = value;
1654     return true;
1655 }
1656
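// "Sane" here means the prototype method is still the original intrinsic
// (in JSC, String.prototype.valueOf and toString share
// StringPrototypeValueOfIntrinsic); the equivalence is then watched, so a
// later redefinition jettisons any code that relied on it.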
1657 bool Graph::isStringPrototypeMethodSane(JSGlobalObject* globalObject, UniquedStringImpl* uid)
1658 {
1659     ObjectPropertyConditionSet conditions = generateConditionsForPrototypeEquivalenceConcurrently(m_vm, globalObject, globalObject->stringObjectStructure(), globalObject->stringPrototype(), uid);
1660
1661     if (!conditions.isValid())
1662         return false;
1663
1664     ObjectPropertyCondition equivalenceCondition = conditions.slotBaseCondition();
1665     RELEASE_ASSERT(equivalenceCondition.hasRequiredValue());
1666     JSFunction* function = jsDynamicCast<JSFunction*>(equivalenceCondition.condition().requiredValue());
1667     if (!function)
1668         return false;
1669
1670     if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
1671         return false;
1672     
1673     return watchConditions(conditions);
1674 }
1675
1677 bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
1678 {
1679     if (hasExitSite(codeOrigin, NotStringObject))
1680         return false;
1681
1682     JSGlobalObject* globalObject = globalObjectFor(codeOrigin);
1683     Structure* stringObjectStructure = globalObject->stringObjectStructure();
1684     registerStructure(stringObjectStructure);
1685     ASSERT(stringObjectStructure->storedPrototype().isObject());
1686     ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo() == StringPrototype::info());
1687
1688     if (!watchConditions(generateConditionsForPropertyMissConcurrently(m_vm, globalObject, stringObjectStructure, m_vm.propertyNames->toPrimitiveSymbol.impl())))
1689         return false;
1690
1691     // We're being conservative here. We want DFG's ToString on StringObject to be
1692     // used in both numeric contexts (that would call valueOf()) and string contexts
1693     // (that would call toString()). We don't want the DFG to have to distinguish
1694     // between the two, just because that seems like it would get confusing. So we
1695     // just require both methods to be sane.
1696     if (!isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->valueOf.impl()))
1697         return false;
1698     return isStringPrototypeMethodSane(globalObject, m_vm.propertyNames->toString.impl());
1699 }
1700
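// Walks outward through the inline call frame chain, asking each baseline
// CodeBlock for a handler covering the bytecode index at that level; stops
// with success at the first handler found, or with failure once the machine
// frame's own (non-inlined) code block has been checked.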
1701 bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
1702 {
1703     if (!m_hasExceptionHandlers)
1704         return false;
1705
1706     unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
1707     while (1) {
1708         InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
1709         CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
1710         if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
1711             opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
1712             catchHandlerOut = handler;
1713             return true;
1714         }
1715
1716         if (!inlineCallFrame)
1717             return false;
1718
1719         bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
1720         codeOrigin = inlineCallFrame->directCaller;
1721     }
1722
1723     RELEASE_ASSERT_NOT_REACHED();
1724 }
1725
1726 } } // namespace JSC::DFG
1727
1728 #endif // ENABLE(DFG_JIT)