/*
 * Copyright (C) 2011, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGGraph.h"

#if ENABLE(DFG_JIT)

#include "BytecodeLivenessAnalysisInlines.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGBlockWorklist.h"
#include "DFGClobberSet.h"
#include "DFGJITCode.h"
#include "DFGVariableAccessDataDump.h"
#include "FullBytecodeLiveness.h"
#include "FunctionExecutableDump.h"
#include "JIT.h"
#include "JSLexicalEnvironment.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "OperandsInlines.h"
#include "JSCInlines.h"
#include "StackAlignment.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>

namespace JSC { namespace DFG {

// Creates an array of stringized names.
static const char* dfgOpNames[] = {
#define STRINGIZE_DFG_OP_ENUM(opcode, flags) #opcode ,
    FOR_EACH_DFG_OP(STRINGIZE_DFG_OP_ENUM)
#undef STRINGIZE_DFG_OP_ENUM
};

Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
    : m_vm(vm)
    , m_plan(plan)
    , m_codeBlock(m_plan.codeBlock.get())
    , m_profiledBlock(m_codeBlock->alternative())
    , m_allocator(longLivedState.m_allocator)
    , m_mustHandleValues(OperandsLike, plan.mustHandleValues)
    , m_hasArguments(false)
    , m_nextMachineLocal(0)
    , m_machineCaptureStart(std::numeric_limits<int>::max())
    , m_fixpointState(BeforeFixpoint)
    , m_structureRegistrationState(HaveNotStartedRegistering)
    , m_form(LoadStore)
    , m_unificationState(LocallyUnified)
    , m_refCountState(EverythingIsLive)
{
    ASSERT(m_profiledBlock);
    
    for (unsigned i = m_mustHandleValues.size(); i--;)
        m_mustHandleValues[i] = freezeFragile(plan.mustHandleValues[i]);
}

Graph::~Graph()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;

        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            m_allocator.free(block->phis[phiIndex]);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            m_allocator.free(block->at(nodeIndex));
    }
    m_allocator.freeAll();
}

const char* Graph::opName(NodeType op)
{
    return dfgOpNames[op];
}

static void printWhiteSpace(PrintStream& out, unsigned amount)
{
    while (amount-- > 0)
        out.print(" ");
}

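// Prints the change in the inline call frame stack between the previously dumped node and the
// current one: frames we have returned from ("<--") followed by frames we have entered ("-->").
// Returns true if anything was printed.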
bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node* previousNode, Node* currentNode, DumpContext* context)
{
    if (!previousNode)
        return false;
    
    if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
        return false;
    
    Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
    Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
    unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
    unsigned indexOfDivergence = commonSize;
    for (unsigned i = 0; i < commonSize; ++i) {
        if (previousInlineStack[i].inlineCallFrame != currentInlineStack[i].inlineCallFrame) {
            indexOfDivergence = i;
            break;
        }
    }
    
    bool hasPrinted = false;
    
    // Print the pops.
    for (unsigned i = previousInlineStack.size(); i-- > indexOfDivergence;) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("<-- ", inContext(*previousInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    // Print the pushes.
    for (unsigned i = indexOfDivergence; i < currentInlineStack.size(); ++i) {
        out.print(prefix);
        printWhiteSpace(out, i * 2);
        out.print("--> ", inContext(*currentInlineStack[i].inlineCallFrame, context), "\n");
        hasPrinted = true;
    }
    
    return hasPrinted;
}

int Graph::amountOfNodeWhiteSpace(Node* node)
{
    return (node->origin.semantic.inlineDepth() - 1) * 2;
}

void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
{
    printWhiteSpace(out, amountOfNodeWhiteSpace(node));
}

void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext* context)
{
    NodeType op = node->op();

    unsigned refCount = node->refCount();
    bool mustGenerate = node->mustGenerate();
    if (mustGenerate)
        --refCount;

    out.print(prefix);
    printNodeWhiteSpace(out, node);

    // Example/explanation of dataflow dump output
    //
    //   14:   <!2:7>  GetByVal(@3, @13)
    //   ^1     ^2 ^3     ^4       ^5
    //
    // (1) The nodeIndex of this operation.
    // (2) The reference count. The number printed is the 'real' count,
    //     not including the 'mustGenerate' ref. If the node is
    //     'mustGenerate' then the count is prefixed with '!'.
    // (3) The virtual register slot assigned to this node.
    // (4) The name of the operation.
    // (5) The arguments to the operation. They may be of the form:
    //         @#   - a NodeIndex referencing a prior node in the graph.
    //         arg# - an argument number.
    //         id#  - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
    //         var# - the index of a var on the global object, used by GetGlobalVar/PutGlobalVar operations.
    out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
    if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
        out.print(node->virtualRegister());
    else
        out.print("-");
    out.print(">\t", opName(op), "(");
    CommaPrinter comma;
    if (node->flags() & NodeHasVarArgs) {
        for (unsigned childIdx = node->firstChild(); childIdx < node->firstChild() + node->numChildren(); childIdx++) {
            if (!m_varArgChildren[childIdx])
                continue;
            out.print(comma, m_varArgChildren[childIdx]);
        }
    } else {
        if (!!node->child1() || !!node->child2() || !!node->child3())
            out.print(comma, node->child1());
        if (!!node->child2() || !!node->child3())
            out.print(comma, node->child2());
        if (!!node->child3())
            out.print(comma, node->child3());
    }

    if (toCString(NodeFlagsDump(node->flags())) != "<empty>")
        out.print(comma, NodeFlagsDump(node->flags()));
    if (node->prediction())
        out.print(comma, SpeculationDump(node->prediction()));
    if (node->hasArrayMode())
        out.print(comma, node->arrayMode());
    if (node->hasArithMode())
        out.print(comma, node->arithMode());
    if (node->hasVarNumber())
        out.print(comma, node->varNumber());
    if (node->hasRegisterPointer())
        out.print(comma, "global", globalObjectFor(node->origin.semantic)->findRegisterIndex(node->registerPointer()), "(", RawPointer(node->registerPointer()), ")");
    if (node->hasIdentifier())
        out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
    if (node->hasStructureSet())
        out.print(comma, inContext(node->structureSet(), context));
    if (node->hasStructure())
        out.print(comma, inContext(*node->structure(), context));
    if (node->hasTransition())
        out.print(comma, pointerDumpInContext(node->transition(), context));
    if (node->hasCellOperand()) {
        if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
            out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
        else {
            out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
            if (node->cellOperand()->value().isCell()) {
                CallVariant variant(node->cellOperand()->value().asCell());
                if (ExecutableBase* executable = variant.executable()) {
                    if (executable->isHostFunction())
                        out.print(comma, "<host function>");
                    else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
                        out.print(comma, FunctionExecutableDump(functionExecutable));
                    else
                        out.print(comma, "<non-function executable>");
                }
            }
        }
    }
    if (node->hasFunctionDeclIndex()) {
        FunctionExecutable* executable = m_codeBlock->functionDecl(node->functionDeclIndex());
        out.print(comma, FunctionExecutableDump(executable));
    }
    if (node->hasFunctionExprIndex()) {
        FunctionExecutable* executable = m_codeBlock->functionExpr(node->functionExprIndex());
        out.print(comma, FunctionExecutableDump(executable));
    }
    if (node->hasStorageAccessData()) {
        StorageAccessData& storageAccessData = m_storageAccessData[node->storageAccessDataIndex()];
        out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
        out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
    }
    if (node->hasMultiGetByOffsetData()) {
        MultiGetByOffsetData& data = node->multiGetByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    if (node->hasMultiPutByOffsetData()) {
        MultiPutByOffsetData& data = node->multiPutByOffsetData();
        out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
        for (unsigned i = 0; i < data.variants.size(); ++i)
            out.print(comma, inContext(data.variants[i], context));
    }
    ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
    if (node->hasVariableAccessData(*this)) {
        VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
        if (variableAccessData) {
            VirtualRegister operand = variableAccessData->local();
            if (operand.isArgument())
                out.print(comma, "arg", operand.toArgument(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            else
                out.print(comma, "loc", operand.toLocal(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
            
            operand = variableAccessData->machineLocal();
            if (operand.isValid()) {
                if (operand.isArgument())
                    out.print(comma, "machine:arg", operand.toArgument());
                else
                    out.print(comma, "machine:loc", operand.toLocal());
            }
        }
    }
    if (node->hasUnlinkedLocal()) {
        VirtualRegister operand = node->unlinkedLocal();
        if (operand.isArgument())
            out.print(comma, "arg", operand.toArgument());
        else
            out.print(comma, "loc", operand.toLocal());
    }
    if (node->hasUnlinkedMachineLocal()) {
        VirtualRegister operand = node->unlinkedMachineLocal();
        if (operand.isValid()) {
            if (operand.isArgument())
                out.print(comma, "machine:arg", operand.toArgument());
            else
                out.print(comma, "machine:loc", operand.toLocal());
        }
    }
    if (node->hasConstantBuffer()) {
        out.print(comma);
        out.print(node->startConstant(), ":[");
        CommaPrinter anotherComma;
        for (unsigned i = 0; i < node->numConstants(); ++i)
            out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
        out.print("]");
    }
    if (node->hasIndexingType())
        out.print(comma, IndexingTypeDump(node->indexingType()));
    if (node->hasTypedArrayType())
        out.print(comma, node->typedArrayType());
    if (node->hasPhi())
        out.print(comma, "^", node->phi()->index());
    if (node->hasExecutionCounter())
        out.print(comma, RawPointer(node->executionCounter()));
    if (node->hasVariableWatchpointSet())
        out.print(comma, RawPointer(node->variableWatchpointSet()));
    if (node->hasTypedArray())
        out.print(comma, inContext(JSValue(node->typedArray()), context));
    if (node->hasStoragePointer())
        out.print(comma, RawPointer(node->storagePointer()));
    if (node->isConstant())
        out.print(comma, pointerDumpInContext(node->constant(), context));
    if (node->isJump())
        out.print(comma, "T:", *node->targetBlock());
    if (node->isBranch())
        out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
    if (node->isSwitch()) {
        SwitchData* data = node->switchData();
        out.print(comma, data->kind);
        for (unsigned i = 0; i < data->cases.size(); ++i)
            out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
        out.print(comma, "default:", data->fallThrough);
    }
    ClobberSet reads;
    ClobberSet writes;
    addReadsAndWrites(*this, node, reads, writes);
    if (!reads.isEmpty())
        out.print(comma, "R:", sortedListDump(reads.direct(), ","));
    if (!writes.isEmpty())
        out.print(comma, "W:", sortedListDump(writes.direct(), ","));
    if (node->origin.isSet()) {
        out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
        if (node->origin.semantic != node->origin.forExit)
            out.print(comma, "exit: ", node->origin.forExit);
    }
    
    out.print(")");

    if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
        out.print("  predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
    else if (node->hasHeapPrediction())
        out.print("  predicting ", SpeculationDump(node->getHeapPrediction()));
    
    out.print("\n");
}

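// Prints a block's header: its code origin, reachability, execution count (when known),
// predecessors, dominator and natural-loop information when available, and optionally its Phi nodes.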
void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
    out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
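    // Note: executionCount is a double, and "x == x" is false only when x is NaN. This
    // self-comparison is an intentional NaN check: the count is printed only when it is not NaN.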
    if (block->executionCount == block->executionCount)
        out.print(prefix, "  Execution count: ", block->executionCount, "\n");
    out.print(prefix, "  Predecessors:");
    for (size_t i = 0; i < block->predecessors.size(); ++i)
        out.print(" ", *block->predecessors[i]);
    out.print("\n");
    if (m_dominators.isValid()) {
        out.print(prefix, "  Dominated by: ", m_dominators.dominatorsOf(block), "\n");
        out.print(prefix, "  Dominates: ", m_dominators.blocksDominatedBy(block), "\n");
        out.print(prefix, "  Dominance Frontier: ", m_dominators.dominanceFrontierOf(block), "\n");
        out.print(prefix, "  Iterated Dominance Frontier: ", m_dominators.iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
    }
    if (m_naturalLoops.isValid()) {
        if (const NaturalLoop* loop = m_naturalLoops.headerOf(block)) {
            out.print(prefix, "  Loop header, contains:");
            Vector<BlockIndex> sortedBlockList;
            for (unsigned i = 0; i < loop->size(); ++i)
                sortedBlockList.append(loop->at(i)->index);
            std::sort(sortedBlockList.begin(), sortedBlockList.end());
            for (unsigned i = 0; i < sortedBlockList.size(); ++i)
                out.print(" #", sortedBlockList[i]);
            out.print("\n");
        }
        
        Vector<const NaturalLoop*> containingLoops =
            m_naturalLoops.loopsOf(block);
        if (!containingLoops.isEmpty()) {
            out.print(prefix, "  Containing loop headers:");
            for (unsigned i = 0; i < containingLoops.size(); ++i)
                out.print(" ", *containingLoops[i]->header());
            out.print("\n");
        }
    }
    if (!block->phis.isEmpty()) {
        out.print(prefix, "  Phi Nodes:");
        for (size_t i = 0; i < block->phis.size(); ++i) {
            Node* phiNode = block->phis[i];
            if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
                continue;
            out.print(" @", phiNode->index(), "<", phiNode->refCount(), ">->(");
            if (phiNode->child1()) {
                out.print("@", phiNode->child1()->index());
                if (phiNode->child2()) {
                    out.print(", @", phiNode->child2()->index());
                    if (phiNode->child3())
                        out.print(", @", phiNode->child3()->index());
                }
            }
            out.print(")", i + 1 < block->phis.size() ? "," : "");
        }
        out.print("\n");
    }
}

void Graph::dump(PrintStream& out, DumpContext* context)
{
    DumpContext myContext;
    myContext.graph = this;
    if (!context)
        context = &myContext;
    
    out.print("\n");
    out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
    out.print("  Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
    out.print("\n");
    
    Node* lastNode = 0;
    for (size_t b = 0; b < m_blocks.size(); ++b) {
        BasicBlock* block = m_blocks[b].get();
        if (!block)
            continue;
        dumpBlockHeader(out, "", block, DumpAllPhis, context);
        out.print("  States: ", block->cfaStructureClobberStateAtHead);
        if (!block->cfaHasVisited)
            out.print(", CurrentlyCFAUnreachable");
        if (!block->intersectionOfCFAHasVisited)
            out.print(", CFAUnreachable");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars Before: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Intersected Vars Before: ");
            if (block->intersectionOfCFAHasVisited)
                out.print(inContext(block->intersectionOfPastValuesAtHead, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtHead, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtHead, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtHead, context), "\n");
            break;
        } }
        for (size_t i = 0; i < block->size(); ++i) {
            dumpCodeOrigin(out, "", lastNode, block->at(i), context);
            dump(out, "", block->at(i), context);
            lastNode = block->at(i);
        }
        out.print("  States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
        if (!block->cfaDidFinish)
            out.print(", CFAInvalidated");
        out.print("\n");
        switch (m_form) {
        case LoadStore:
        case ThreadedCPS: {
            out.print("  Vars After: ");
            if (block->cfaHasVisited)
                out.print(inContext(block->valuesAtTail, context));
            else
                out.print("<empty>");
            out.print("\n");
            out.print("  Var Links: ", block->variablesAtTail, "\n");
            break;
        }
            
        case SSA: {
            RELEASE_ASSERT(block->ssa);
            out.print("  Availability: ", block->ssa->availabilityAtTail, "\n");
            out.print("  Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
            out.print("  Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
            break;
        } }
        out.print("\n");
    }
    
    if (!myContext.isEmpty()) {
        myContext.dump(out);
        out.print("\n");
    }
}

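// Drops a ThreadedCPS graph back to LoadStore form by resetting the children of every Phi node.
// After this, the Phi graph and variable links are no longer meaningful until the graph is
// re-threaded.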
void Graph::dethread()
{
    if (m_form == LoadStore || m_form == SSA)
        return;
    
    if (logCompilationChanges())
        dataLog("Dethreading DFG graph.\n");
    
    SamplingRegion samplingRegion("DFG Dethreading");
    
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;) {
            Node* phi = block->phis[phiIndex];
            phi->children.reset();
        }
    }
    
    m_form = LoadStore;
}

void Graph::handleSuccessor(Vector<BasicBlock*, 16>& worklist, BasicBlock* block, BasicBlock* successor)
{
    if (!successor->isReachable) {
        successor->isReachable = true;
        worklist.append(successor);
    }
    
    successor->predecessors.append(block);
}

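// Computes reachability with a simple worklist walk from the root block: every block reachable
// from block 0 is marked isReachable, and predecessor edges are recorded as successors are visited.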
void Graph::determineReachability()
{
    Vector<BasicBlock*, 16> worklist;
    worklist.append(block(0));
    block(0)->isReachable = true;
    while (!worklist.isEmpty()) {
        BasicBlock* block = worklist.takeLast();
        for (unsigned i = block->numSuccessors(); i--;)
            handleSuccessor(worklist, block, block->successor(i));
    }
}

void Graph::resetReachability()
{
    for (BlockIndex blockIndex = m_blocks.size(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        block->isReachable = false;
        block->predecessors.clear();
    }
    
    determineReachability();
}

void Graph::killBlockAndItsContents(BasicBlock* block)
{
    for (unsigned phiIndex = block->phis.size(); phiIndex--;)
        m_allocator.free(block->phis[phiIndex]);
    for (unsigned nodeIndex = block->size(); nodeIndex--;)
        m_allocator.free(block->at(nodeIndex));
    
    killBlock(block);
}

void Graph::killUnreachableBlocks()
{
    for (BlockIndex blockIndex = 0; blockIndex < numBlocks(); ++blockIndex) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        if (block->isReachable)
            continue;
        
        killBlockAndItsContents(block);
    }
}

void Graph::invalidateCFG()
{
    m_dominators.invalidate();
    m_naturalLoops.invalidate();
}

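// Scans the block from startIndexInBlock looking for a later GetLocal of the same variable; if one
// is found, its uses are redirected to newGetLocal and the variablesAtTail link is updated. The
// scan stops at a SetLocal of the variable. Captured variables are left for CSE to handle.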
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
    if (variableAccessData->isCaptured()) {
        // Let CSE worry about this one.
        return;
    }
    for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
        Node* node = block[indexInBlock];
        bool shouldContinue = true;
        switch (node->op()) {
        case SetLocal: {
            if (node->local() == variableAccessData->local())
                shouldContinue = false;
            break;
        }
                
        case GetLocal: {
            if (node->variableAccessData() != variableAccessData)
                continue;
            substitute(block, indexInBlock, node, newGetLocal);
            Node* oldTailNode = block.variablesAtTail.operand(variableAccessData->local());
            if (oldTailNode == node)
                block.variablesAtTail.operand(variableAccessData->local()) = newGetLocal;
            shouldContinue = false;
            break;
        }
                
        default:
            break;
        }
        if (!shouldContinue)
            break;
    }
}

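// Returns the reachable blocks in depth-first pre-order: each block is appended when it is first
// popped from the worklist, before its successors are visited.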
BlockList Graph::blocksInPreOrder()
{
    BlockList result;
    BlockWorklist worklist;
    worklist.push(block(0));
    while (BasicBlock* block = worklist.pop()) {
        result.append(block);
        for (unsigned i = block->numSuccessors(); i--;)
            worklist.push(block->successor(i));
    }
    return result;
}

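// Returns the reachable blocks in depth-first post-order: a block is appended only after its
// successors have been processed, using the two-phase (PreOrder/PostOrder) worklist.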
BlockList Graph::blocksInPostOrder()
{
    BlockList result;
    PostOrderBlockWorklist worklist;
    worklist.push(block(0));
    while (BlockWithOrder item = worklist.pop()) {
        switch (item.order) {
        case PreOrder:
            worklist.pushPost(item.block);
            for (unsigned i = item.block->numSuccessors(); i--;)
                worklist.push(item.block->successor(i));
            break;
        case PostOrder:
            result.append(item.block);
            break;
        }
    }
    return result;
}

void Graph::clearReplacements()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->replacement = 0;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->replacement = 0;
    }
}

void Graph::initializeNodeOwners()
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->owner = block;
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->owner = block;
    }
}

void Graph::clearFlagsOnAllNodes(NodeFlags flags)
{
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = m_blocks[blockIndex].get();
        if (!block)
            continue;
        for (unsigned phiIndex = block->phis.size(); phiIndex--;)
            block->phis[phiIndex]->clearFlags(flags);
        for (unsigned nodeIndex = block->size(); nodeIndex--;)
            block->at(nodeIndex)->clearFlags(flags);
    }
}

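// Returns the full bytecode liveness for the given baseline CodeBlock, computing and caching it on
// first request.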
FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
{
    HashMap<CodeBlock*, std::unique_ptr<FullBytecodeLiveness>>::iterator iter = m_bytecodeLiveness.find(codeBlock);
    if (iter != m_bytecodeLiveness.end())
        return *iter->value;
    
    std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
    codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
    FullBytecodeLiveness& result = *liveness;
    m_bytecodeLiveness.add(codeBlock, WTF::move(liveness));
    return result;
}

FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
{
    return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}

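// Determines whether the given operand is live in bytecode at the given code origin by walking up
// the inline call frame chain, translating the operand into each frame's local coordinate space
// and consulting that frame's bytecode liveness. Arguments of inlined frames are conservatively
// treated as live.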
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
    for (;;) {
        VirtualRegister reg = VirtualRegister(
            operand.offset() - codeOrigin.stackOffset());
        
        if (operand.offset() < codeOrigin.stackOffset() + JSStack::CallFrameHeaderSize) {
            if (reg.isArgument()) {
                RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);
                
                if (!codeOrigin.inlineCallFrame->isClosureCall)
                    return false;
                
                if (reg.offset() == JSStack::Callee)
                    return true;
                if (reg.offset() == JSStack::ScopeChain)
                    return true;
                
                return false;
            }
            
            return livenessFor(codeOrigin.inlineCallFrame).operandIsLive(
                reg.offset(), codeOrigin.bytecodeIndex);
        }
        
        InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
        if (!inlineCallFrame)
            break;

        // Arguments are always live. This would be redundant if it weren't for our
        // op_call_varargs inlining.
        // FIXME: 'this' might not be live, but we don't have a way of knowing.
        // https://bugs.webkit.org/show_bug.cgi?id=128519
        if (reg.isArgument()
            && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
            return true;
        
        codeOrigin = inlineCallFrame->caller;
    }
    
    return true;
}

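// Computes the machine frame size in registers: the DFG's own locals plus whichever is larger of
// the outgoing parameter area and the slow-path call scratch area, rounded for stack alignment.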
unsigned Graph::frameRegisterCount()
{
    unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
    return roundLocalRegisterCountForFramePointerOffset(result);
}

unsigned Graph::stackPointerOffset()
{
    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
}

unsigned Graph::requiredRegisterCountForExit()
{
    unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
    for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
        InlineCallFrame* inlineCallFrame = *iter;
        CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
        unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
        count = std::max(count, requiredCount);
    }
    return count;
}

unsigned Graph::requiredRegisterCountForExecutionAndExit()
{
    return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}

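// Attempts to constant-fold a property load from a constant object. This only succeeds if the base
// is a known object, every structure in the proven set has a valid replacement watchpoint for the
// offset (which we then watch lazily), and the object's current structure is in that set.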
JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureSet& structureSet, PropertyOffset offset)
{
    if (!base || !base.isObject())
        return JSValue();
    
    JSObject* object = asObject(base);
    
    for (unsigned i = structureSet.size(); i--;) {
        Structure* structure = structureSet[i];
        WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
        if (!set || !set->isStillValid())
            return JSValue();
        
        ASSERT(structure->isValidOffset(offset));
        ASSERT(!structure->isUncacheableDictionary());
        
        watchpoints().addLazily(set);
    }
    
    // What follows may require some extra thought. We need this load to load a valid JSValue. If
    // our profiling makes sense and we're still on track to generate code that won't be
    // invalidated, then we have nothing to worry about. We do, however, have to worry about
    // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
    // is doomed.
    //
    // One argument in favor of this code is that it should definitely work because the butterfly
    // is always set before the structure. However, we don't currently have a fence between those
    // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
    // So, for this to fail, you'd need an access on a constant object pointer such that the inline
    // caches told us that the object had a structure that it did not *yet* have, and then later,
    // the object transitioned to that structure that the inline caches had already seen. And then
    // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
    // this is worth revisiting but it isn't worth losing sleep over. Filed:
    // https://bugs.webkit.org/show_bug.cgi?id=134641
    //
    // For now, we just do the minimal thing: defend against the structure right now being
    // incompatible with the getDirect we're trying to do. The easiest way to do that is to
    // determine if the structure belongs to the proven set.
    
    if (!structureSet.contains(object->structure()))
        return JSValue();
    
    return object->getDirect(offset);
}

JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
{
    return tryGetConstantProperty(base, StructureSet(structure), offset);
}

JSValue Graph::tryGetConstantProperty(
    JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
{
    if (structure.isTop() || structure.isClobbered())
        return JSValue();
    
    return tryGetConstantProperty(base, structure.set(), offset);
}

JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
{
    return tryGetConstantProperty(base.m_value, base.m_structure, offset);
}

JSLexicalEnvironment* Graph::tryGetActivation(Node* node)
{
    return node->dynamicCastConstant<JSLexicalEnvironment*>();
}

WriteBarrierBase<Unknown>* Graph::tryGetRegisters(Node* node)
{
    JSLexicalEnvironment* lexicalEnvironment = tryGetActivation(node);
    if (!lexicalEnvironment)
        return nullptr;
    if (!lexicalEnvironment->isTornOff())
        return nullptr;
    return lexicalEnvironment->registers();
}

JSArrayBufferView* Graph::tryGetFoldableView(Node* node)
{
    JSArrayBufferView* view = node->dynamicCastConstant<JSArrayBufferView*>();
    if (!view)
        return nullptr;
    if (!view->length())
        return nullptr;
    WTF::loadLoadFence();
    return view;
}

JSArrayBufferView* Graph::tryGetFoldableView(Node* node, ArrayMode arrayMode)
{
    if (arrayMode.typedArrayType() == NotTypedArray)
        return nullptr;
    return tryGetFoldableView(node);
}

JSArrayBufferView* Graph::tryGetFoldableViewForChild1(Node* node)
{
    return tryGetFoldableView(child(node, 0).node(), node->arrayMode());
}

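// Rebuilds the CodeBlock's constant pool from the graph's frozen values: fragile values are
// skipped, weak values become lazily-added weak references, and strong values get real constant
// table entries with lazily-initialized write barriers.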
void Graph::registerFrozenValues()
{
    m_codeBlock->constants().resize(0);
    for (FrozenValue* value : m_frozenValues) {
        if (value->structure())
            ASSERT(m_plan.weakReferences.contains(value->structure()));
        
        switch (value->strength()) {
        case FragileValue: {
            break;
        }
        case WeakValue: {
            m_plan.weakReferences.addLazily(value->value().asCell());
            break;
        }
        case StrongValue: {
            unsigned constantIndex = m_codeBlock->addConstantLazily();
            initializeLazyWriteBarrierForConstant(
                m_plan.writeBarriers,
                m_codeBlock->constants()[constantIndex],
                m_codeBlock,
                constantIndex,
                m_codeBlock->ownerExecutable(),
                value->value());
            break;
        } }
    }
    m_codeBlock->constants().shrinkToFit();
}

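// GC marking hook for an in-flight compilation: reports the frozen values and every Structure
// referenced directly by nodes (structure checks, transitions, and the structures mentioned by
// MultiGetByOffset/MultiPutByOffset variants) to the visitor.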
void Graph::visitChildren(SlotVisitor& visitor)
{
    for (FrozenValue* value : m_frozenValues) {
        visitor.appendUnbarrieredReadOnlyValue(value->value());
        visitor.appendUnbarrieredReadOnlyPointer(value->structure());
    }
    
    for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
        BasicBlock* block = this->block(blockIndex);
        if (!block)
            continue;
        
        for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
            Node* node = block->at(nodeIndex);
            
            switch (node->op()) {
            case CheckStructure:
                for (unsigned i = node->structureSet().size(); i--;)
                    visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
                break;
                
            case NewObject:
            case ArrayifyToStructure:
            case NewStringObject:
                visitor.appendUnbarrieredReadOnlyPointer(node->structure());
                break;
                
            case PutStructure:
            case AllocatePropertyStorage:
            case ReallocatePropertyStorage:
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->previous);
                visitor.appendUnbarrieredReadOnlyPointer(
                    node->transition()->next);
                break;
                
            case MultiGetByOffset:
                for (unsigned i = node->multiGetByOffsetData().variants.size(); i--;) {
                    GetByIdVariant& variant = node->multiGetByOffsetData().variants[i];
                    const StructureSet& set = variant.structureSet();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);

                    // Don't need to mark anything in the structure chain because that would
                    // have been decomposed into CheckStructure's. Don't need to mark the
                    // callLinkStatus because we wouldn't use MultiGetByOffset if any of the
                    // variants did that.
                    ASSERT(!variant.callLinkStatus());
                }
                break;
                    
            case MultiPutByOffset:
                for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
                    PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
                    const StructureSet& set = variant.oldStructure();
                    for (unsigned j = set.size(); j--;)
                        visitor.appendUnbarrieredReadOnlyPointer(set[j]);
                    if (variant.kind() == PutByIdVariant::Transition)
                        visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
                }
                break;
                
            default:
                break;
            }
        }
    }
}

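// Freezing interns a JSValue into a FrozenValue with one of three strengths: fragile (no liveness
// guarantee), weak (kept alive for the compilation via a weak reference), or strong (added to the
// CodeBlock's constant table). freeze() and freezeStrong() upgrade the strength as needed.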
FrozenValue* Graph::freezeFragile(JSValue value)
{
    if (UNLIKELY(!value))
        return FrozenValue::emptySingleton();
    
    auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
    if (LIKELY(!result.isNewEntry))
        return result.iterator->value;
    
    return result.iterator->value = m_frozenValues.add(FrozenValue::freeze(value));
}

FrozenValue* Graph::freeze(JSValue value)
{
    FrozenValue* result = freezeFragile(value);
    result->strengthenTo(WeakValue);
    return result;
}

FrozenValue* Graph::freezeStrong(JSValue value)
{
    FrozenValue* result = freezeFragile(value);
    result->strengthenTo(StrongValue);
    return result;
}

void Graph::convertToConstant(Node* node, FrozenValue* value)
{
    if (value->structure())
        assertIsRegistered(value->structure());
    if (m_form == ThreadedCPS) {
        if (node->op() == GetLocal)
            dethread();
        else
            ASSERT(!node->hasVariableAccessData(*this));
    }
    node->convertToConstant(value);
}

void Graph::convertToConstant(Node* node, JSValue value)
{
    convertToConstant(node, freeze(value));
}

void Graph::convertToStrongConstant(Node* node, JSValue value)
{
    convertToConstant(node, freezeStrong(value));
}

StructureRegistrationResult Graph::registerStructure(Structure* structure)
{
    m_plan.weakReferences.addLazily(structure);
    if (m_plan.watchpoints.consider(structure))
        return StructureRegisteredAndWatched;
    return StructureRegisteredNormally;
}

void Graph::assertIsRegistered(Structure* structure)
{
    if (m_structureRegistrationState == HaveNotStartedRegistering)
        return;
    
    DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
    
    if (!structure->dfgShouldWatch())
        return;
    if (watchpoints().isWatched(structure->transitionWatchpointSet()))
        return;
    
    DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
}

void Graph::handleAssertionFailure(
    Node* node, const char* file, int line, const char* function, const char* assertion)
{
    startCrashing();
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    dataLog("\n");
    if (node) {
        dataLog("While handling node ", node, "\n");
        dataLog("\n");
    }
    dataLog("Graph at time of failure:\n");
    dump();
    dataLog("\n");
    dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
    dataLog(file, "(", line, ") : ", function, "\n");
    CRASH();
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)