/*
 * Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGByteCodeParser.h"

#if ENABLE(DFG_JIT)

#include "ArrayConstructor.h"
#include "CallLinkStatus.h"
#include "CodeBlock.h"
#include "DFGArrayMode.h"
#include "DFGByteCodeCache.h"
#include "DFGCapabilities.h"
#include "GetByIdStatus.h"
#include "PutByIdStatus.h"
#include "ResolveGlobalStatus.h"
#include <wtf/HashMap.h>
#include <wtf/MathExtras.h>

namespace JSC { namespace DFG {

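// Key identifying a constant buffer within a particular CodeBlock. Used by
// m_constantBufferCache (below) so that, when inlining, a constant buffer from
// a callee CodeBlock is remapped into the machine code block only once.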
class ConstantBufferKey {
public:
    ConstantBufferKey()
        : m_codeBlock(0)
        , m_index(0)
    {
    }
    
    ConstantBufferKey(WTF::HashTableDeletedValueType)
        : m_codeBlock(0)
        , m_index(1)
    {
    }
    
    ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
        : m_codeBlock(codeBlock)
        , m_index(index)
    {
    }
    
    bool operator==(const ConstantBufferKey& other) const
    {
        return m_codeBlock == other.m_codeBlock
            && m_index == other.m_index;
    }
    
    unsigned hash() const
    {
        return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
    }
    
    bool isHashTableDeletedValue() const
    {
        return !m_codeBlock && m_index;
    }
    
    CodeBlock* codeBlock() const { return m_codeBlock; }
    unsigned index() const { return m_index; }
    
private:
    CodeBlock* m_codeBlock;
    unsigned m_index;
};

struct ConstantBufferKeyHash {
    static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
    static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
    {
        return a == b;
    }
    
    static const bool safeToCompareToEmptyOrDeleted = true;
};

} } // namespace JSC::DFG

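// These specializations are what allow ConstantBufferKey to be used as a
// HashMap key: WTF::HashMap consults DefaultHash<T>::Hash and HashTraits<T>
// for its key type.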
namespace WTF {

template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
    typedef JSC::DFG::ConstantBufferKeyHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };

} // namespace WTF

namespace JSC { namespace DFG {

// === ByteCodeParser ===
//
// This class is used to compile the dataflow graph from a CodeBlock.
class ByteCodeParser {
public:
    ByteCodeParser(ExecState* exec, Graph& graph)
        : m_exec(exec)
        , m_globalData(&graph.m_globalData)
        , m_codeBlock(graph.m_codeBlock)
        , m_profiledBlock(graph.m_profiledBlock)
        , m_graph(graph)
        , m_currentBlock(0)
        , m_currentIndex(0)
        , m_currentProfilingIndex(0)
        , m_constantUndefined(UINT_MAX)
        , m_constantNull(UINT_MAX)
        , m_constantNaN(UINT_MAX)
        , m_constant1(UINT_MAX)
        , m_constants(m_codeBlock->numberOfConstantRegisters())
        , m_numArguments(m_codeBlock->numParameters())
        , m_numLocals(m_codeBlock->m_numCalleeRegisters)
        , m_preservedVars(m_codeBlock->m_numVars)
        , m_parameterSlots(0)
        , m_numPassedVarArgs(0)
        , m_globalResolveNumber(0)
        , m_inlineStackTop(0)
        , m_haveBuiltOperandMaps(false)
        , m_emptyJSValueIndex(UINT_MAX)
        , m_currentInstruction(0)
    {
        ASSERT(m_profiledBlock);
        
        for (int i = 0; i < m_codeBlock->m_numVars; ++i)
            m_preservedVars.set(i);
    }
    
    // Parse a full CodeBlock of bytecode.
    bool parse();
    
private:
    // Just parse from m_currentIndex to the end of the current CodeBlock.
    void parseCodeBlock();

    // Helper for min and max.
    bool handleMinMax(bool usesResult, int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);
    
    // Handle calls. This resolves issues surrounding inlining and intrinsics.
    void handleCall(Interpreter*, Instruction* currentInstruction, NodeType op, CodeSpecializationKind);
    void emitFunctionCheck(JSFunction* expectedFunction, NodeIndex callTarget, int registerOffset, CodeSpecializationKind);
    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
    bool handleInlining(bool usesResult, int callTarget, NodeIndex callTargetNodeIndex, int resultOperand, bool certainAboutExpectedFunction, JSFunction*, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
    // Handle setting the result of an intrinsic.
    void setIntrinsicResult(bool usesResult, int resultOperand, NodeIndex);
    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
    bool handleIntrinsic(bool usesResult, int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
    bool handleConstantInternalFunction(bool usesResult, int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
    NodeIndex handleGetByOffset(SpeculatedType, NodeIndex base, unsigned identifierNumber, PropertyOffset);
    void handleGetByOffset(
        int destinationOperand, SpeculatedType, NodeIndex base, unsigned identifierNumber,
        PropertyOffset);
    void handleGetById(
        int destinationOperand, SpeculatedType, NodeIndex base, unsigned identifierNumber,
        const GetByIdStatus&);

    NodeIndex getScope(bool skipTop, unsigned skipCount);
    
    // Convert a set of ResolveOperations into graph nodes
    bool parseResolveOperations(SpeculatedType, unsigned identifierNumber, unsigned operations, unsigned putToBaseOperation, NodeIndex* base, NodeIndex* value);

    // Prepare to parse a block.
    void prepareToParseBlock();
    // Parse a single basic block of bytecode instructions.
    bool parseBlock(unsigned limit);
    // Link block successors.
    void linkBlock(BasicBlock*, Vector<BlockIndex>& possibleTargets);
    void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BlockIndex>& possibleTargets);
    // Link GetLocal & SetLocal nodes, to ensure live values are generated.
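    // When a block reads a variable it did not itself define, getLocal/
    // getArgument create a Phi node and push a PhiStackEntry; processPhiStack
    // later walks these entries, wiring each Phi to the values flowing out of
    // the block's predecessors and creating further Phis there as needed.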
    enum PhiStackType {
        LocalPhiStack,
        ArgumentPhiStack
    };
    template<PhiStackType stackType>
    void processPhiStack();
    
    void fixVariableAccessPredictions();
    // Add spill locations to nodes.
    void allocateVirtualRegisters();
    
    VariableAccessData* newVariableAccessData(int operand, bool isCaptured)
    {
        ASSERT(operand < FirstConstantRegisterIndex);
        
        m_graph.m_variableAccessData.append(VariableAccessData(static_cast<VirtualRegister>(operand), isCaptured));
        return &m_graph.m_variableAccessData.last();
    }
    
    // Get/Set the operands/result of a bytecode instruction.
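    // Note how the operand space is partitioned: constant registers live at or
    // above FirstConstantRegisterIndex, the callee has its own sentinel index
    // (JSStack::Callee), arguments are encoded as negative operands (see
    // operandIsArgument()), and everything else is a local. getDirect()
    // dispatches on exactly this partitioning.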
    NodeIndex getDirect(int operand)
    {
        // Is this a constant?
        if (operand >= FirstConstantRegisterIndex) {
            unsigned constant = operand - FirstConstantRegisterIndex;
            ASSERT(constant < m_constants.size());
            return getJSConstant(constant);
        }

        if (operand == JSStack::Callee)
            return getCallee();
        
        // Is this an argument?
        if (operandIsArgument(operand))
            return getArgument(operand);

        // Must be a local.
        return getLocal((unsigned)operand);
    }
    NodeIndex get(int operand)
    {
        return getDirect(m_inlineStackTop->remapOperand(operand));
    }
    enum SetMode { NormalSet, SetOnEntry };
    void setDirect(int operand, NodeIndex value, SetMode setMode = NormalSet)
    {
        // Is this an argument?
        if (operandIsArgument(operand)) {
            setArgument(operand, value, setMode);
            return;
        }

        // Must be a local.
        setLocal((unsigned)operand, value, setMode);
    }
    void set(int operand, NodeIndex value, SetMode setMode = NormalSet)
    {
        setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
    }
    
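    // Used for opcodes that produce two results at once. The doubled-up stores
    // below are deliberate: the first pair becomes dead, but it means that if
    // we OSR exit between the two "real" SetLocals, both values have already
    // been stored for the exit to recover.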
    void setPair(int operand1, NodeIndex value1, int operand2, NodeIndex value2)
    {
        // First emit dead SetLocals for the benefit of OSR.
        set(operand1, value1);
        set(operand2, value2);
        
        // Now emit the real SetLocals.
        set(operand1, value1);
        set(operand2, value2);
    }
    
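    // Fold the profiled type for this (bytecode index, operand) pair into the
    // GetLocal's VariableAccessData, so that prediction propagation can later
    // take advantage of it.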
    NodeIndex injectLazyOperandSpeculation(NodeIndex nodeIndex)
    {
        Node& node = m_graph[nodeIndex];
        ASSERT(node.op() == GetLocal);
        ASSERT(node.codeOrigin.bytecodeIndex == m_currentIndex);
        SpeculatedType prediction = 
            m_inlineStackTop->m_lazyOperands.prediction(
                LazyOperandValueProfileKey(m_currentIndex, node.local()));
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLog("Lazy operand [@", nodeIndex, ", bc#", m_currentIndex, ", r", node.local(), "] prediction: ", SpeculationDump(prediction), "\n");
#endif
        node.variableAccessData()->predict(prediction);
        return nodeIndex;
    }

    // Used in implementing get/set, above, where the operand is a local variable.
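    // Each basic block records, per local and per argument, the node currently
    // representing that variable at the head and at the tail of the block
    // (variablesAtHead/variablesAtTail). getLocal() consults the tail entry so
    // that repeated reads of a local within one block reuse a single node.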
    NodeIndex getLocal(unsigned operand)
    {
        NodeIndex nodeIndex = m_currentBlock->variablesAtTail.local(operand);
        bool isCaptured = m_codeBlock->isCaptured(operand, m_inlineStackTop->m_inlineCallFrame);
        
        if (nodeIndex != NoNode) {
            Node* nodePtr = &m_graph[nodeIndex];
            if (nodePtr->op() == Flush) {
                // Two possibilities: either the block wants the local to be live
                // but has not loaded its value, or it has loaded its value, in
                // which case we're done.
                nodeIndex = nodePtr->child1().index();
                Node& flushChild = m_graph[nodeIndex];
                if (flushChild.op() == Phi) {
                    VariableAccessData* variableAccessData = flushChild.variableAccessData();
                    variableAccessData->mergeIsCaptured(isCaptured);
                    nodeIndex = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variableAccessData), nodeIndex));
                    m_currentBlock->variablesAtTail.local(operand) = nodeIndex;
                    return nodeIndex;
                }
                nodePtr = &flushChild;
            }
            
            ASSERT(&m_graph[nodeIndex] == nodePtr);
            ASSERT(nodePtr->op() != Flush);

            nodePtr->variableAccessData()->mergeIsCaptured(isCaptured);
                
            if (isCaptured) {
                // We wish to use the same variable access data as the previous access,
                // but for all other purposes we want to issue a load since for all we
                // know, at this stage of compilation, the local has been clobbered.
                
                // Make sure we link to the Phi node, not to the GetLocal.
                if (nodePtr->op() == GetLocal)
                    nodeIndex = nodePtr->child1().index();
                
                NodeIndex newGetLocal = injectLazyOperandSpeculation(
                    addToGraph(GetLocal, OpInfo(nodePtr->variableAccessData()), nodeIndex));
                m_currentBlock->variablesAtTail.local(operand) = newGetLocal;
                return newGetLocal;
            }
            
            if (nodePtr->op() == GetLocal)
                return nodeIndex;
            ASSERT(nodePtr->op() == SetLocal);
            return nodePtr->child1().index();
        }

        // Check for reads of temporaries from prior blocks,
        // expand m_preservedVars to cover these.
        m_preservedVars.set(operand);
        
        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        
        NodeIndex phi = addToGraph(Phi, OpInfo(variableAccessData));
        m_localPhiStack.append(PhiStackEntry(m_currentBlock, phi, operand));
        nodeIndex = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variableAccessData), phi));
        m_currentBlock->variablesAtTail.local(operand) = nodeIndex;
        
        m_currentBlock->variablesAtHead.setLocalFirstTime(operand, nodeIndex);
        
        return nodeIndex;
    }
    void setLocal(unsigned operand, NodeIndex value, SetMode setMode = NormalSet)
    {
        bool isCaptured = m_codeBlock->isCaptured(operand, m_inlineStackTop->m_inlineCallFrame);
        
        if (setMode == NormalSet) {
            ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
            if (isCaptured || argumentPosition)
                flushDirect(operand, argumentPosition);
        }

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        NodeIndex nodeIndex = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.local(operand) = nodeIndex;
    }

    // Used in implementing get/set, above, where the operand is an argument.
    NodeIndex getArgument(unsigned operand)
    {
        unsigned argument = operandToArgument(operand);
        ASSERT(argument < m_numArguments);
        
        NodeIndex nodeIndex = m_currentBlock->variablesAtTail.argument(argument);
        bool isCaptured = m_codeBlock->isCaptured(operand);

        if (nodeIndex != NoNode) {
            Node* nodePtr = &m_graph[nodeIndex];
            if (nodePtr->op() == Flush) {
                // Two possibilities: either the block wants the local to be live
                // but has not loaded its value, or it has loaded its value, in
                // which case we're done.
                nodeIndex = nodePtr->child1().index();
                Node& flushChild = m_graph[nodeIndex];
                if (flushChild.op() == Phi) {
                    VariableAccessData* variableAccessData = flushChild.variableAccessData();
                    variableAccessData->mergeIsCaptured(isCaptured);
                    nodeIndex = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variableAccessData), nodeIndex));
                    m_currentBlock->variablesAtTail.argument(argument) = nodeIndex;
                    return nodeIndex;
                }
                nodePtr = &flushChild;
            }
            
            ASSERT(&m_graph[nodeIndex] == nodePtr);
            ASSERT(nodePtr->op() != Flush);
            
            nodePtr->variableAccessData()->mergeIsCaptured(isCaptured);
            
            if (nodePtr->op() == SetArgument) {
                // We're getting an argument in the first basic block; link
                // the GetLocal to the SetArgument.
                ASSERT(nodePtr->local() == static_cast<VirtualRegister>(operand));
                VariableAccessData* variable = nodePtr->variableAccessData();
                nodeIndex = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable), nodeIndex));
                m_currentBlock->variablesAtTail.argument(argument) = nodeIndex;
                return nodeIndex;
            }
            
            if (isCaptured) {
                if (nodePtr->op() == GetLocal)
                    nodeIndex = nodePtr->child1().index();
                return injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(nodePtr->variableAccessData()), nodeIndex));
            }
            
            if (nodePtr->op() == GetLocal)
                return nodeIndex;
            
            ASSERT(nodePtr->op() == SetLocal);
            return nodePtr->child1().index();
        }
        
        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);

        NodeIndex phi = addToGraph(Phi, OpInfo(variableAccessData));
        m_argumentPhiStack.append(PhiStackEntry(m_currentBlock, phi, argument));
        nodeIndex = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variableAccessData), phi));
        m_currentBlock->variablesAtTail.argument(argument) = nodeIndex;
        
        m_currentBlock->variablesAtHead.setArgumentFirstTime(argument, nodeIndex);
        
        return nodeIndex;
    }
    void setArgument(int operand, NodeIndex value, SetMode setMode = NormalSet)
    {
        unsigned argument = operandToArgument(operand);
        ASSERT(argument < m_numArguments);
        
        bool isCaptured = m_codeBlock->isCaptured(operand);

        // Always flush arguments, except for 'this'.
        if (argument && setMode == NormalSet)
            flushDirect(operand);
        
        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        NodeIndex nodeIndex = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.argument(argument) = nodeIndex;
    }
    
    ArgumentPosition* findArgumentPositionForArgument(int argument)
    {
        InlineStackEntry* stack = m_inlineStackTop;
        while (stack->m_inlineCallFrame)
            stack = stack->m_caller;
        return stack->m_argumentPositions[argument];
    }
    
    ArgumentPosition* findArgumentPositionForLocal(int operand)
    {
        for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
            InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
            if (!inlineCallFrame)
                break;
            if (operand >= static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize))
                continue;
            if (operand == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
                continue;
            if (operand < static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize - inlineCallFrame->arguments.size()))
                continue;
            int argument = operandToArgument(operand - inlineCallFrame->stackOffset);
            return stack->m_argumentPositions[argument];
        }
        return 0;
    }
    
    ArgumentPosition* findArgumentPosition(int operand)
    {
        if (operandIsArgument(operand))
            return findArgumentPositionForArgument(operandToArgument(operand));
        return findArgumentPositionForLocal(operand);
    }
    
    void flush(int operand)
    {
        flushDirect(m_inlineStackTop->remapOperand(operand));
    }
    
    void flushDirect(int operand)
    {
        flushDirect(operand, findArgumentPosition(operand));
    }
    
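    // Emit a Flush of the operand. A Flush forces the variable's value to stay
    // alive in its stack slot, which is what captured variables and arguments
    // need, since they can be observed from outside the compiled code (for
    // example by activations, 'arguments' objects, or OSR exit).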
    void flushDirect(int operand, ArgumentPosition* argumentPosition)
    {
        // FIXME: This should check if the same operand had already been flushed to
        // some other local variable.
        
        bool isCaptured = m_codeBlock->isCaptured(operand, m_inlineStackTop->m_inlineCallFrame);
        
        ASSERT(operand < FirstConstantRegisterIndex);
        
        NodeIndex nodeIndex;
        int index;
        if (operandIsArgument(operand)) {
            index = operandToArgument(operand);
            nodeIndex = m_currentBlock->variablesAtTail.argument(index);
        } else {
            index = operand;
            nodeIndex = m_currentBlock->variablesAtTail.local(index);
            m_preservedVars.set(operand);
        }
        
        if (nodeIndex != NoNode) {
            Node& node = m_graph[nodeIndex];
            switch (node.op()) {
            case Flush:
                nodeIndex = node.child1().index();
                break;
            case GetLocal:
                nodeIndex = node.child1().index();
                break;
            default:
                break;
            }
            
            ASSERT(m_graph[nodeIndex].op() != Flush
                   && m_graph[nodeIndex].op() != GetLocal);
            
            // Emit a Flush regardless of whether we already flushed it.
            // This gives us guidance to see that the variable also needs to be flushed
            // for arguments, even if it already had to be flushed for other reasons.
            VariableAccessData* variableAccessData = node.variableAccessData();
            variableAccessData->mergeIsCaptured(isCaptured);
            addToGraph(Flush, OpInfo(variableAccessData), nodeIndex);
            if (argumentPosition)
                argumentPosition->addVariable(variableAccessData);
            return;
        }
        
        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        NodeIndex phi = addToGraph(Phi, OpInfo(variableAccessData));
        nodeIndex = addToGraph(Flush, OpInfo(variableAccessData), phi);
        if (operandIsArgument(operand)) {
            m_argumentPhiStack.append(PhiStackEntry(m_currentBlock, phi, index));
            m_currentBlock->variablesAtTail.argument(index) = nodeIndex;
            m_currentBlock->variablesAtHead.setArgumentFirstTime(index, nodeIndex);
        } else {
            m_localPhiStack.append(PhiStackEntry(m_currentBlock, phi, index));
            m_currentBlock->variablesAtTail.local(index) = nodeIndex;
            m_currentBlock->variablesAtHead.setLocalFirstTime(index, nodeIndex);
        }
        if (argumentPosition)
            argumentPosition->addVariable(variableAccessData);
    }
    
    void flushArgumentsAndCapturedVariables()
    {
        int numArguments;
        if (m_inlineStackTop->m_inlineCallFrame)
            numArguments = m_inlineStackTop->m_inlineCallFrame->arguments.size();
        else
            numArguments = m_inlineStackTop->m_codeBlock->numParameters();
        for (unsigned argument = numArguments; argument-- > 1;)
            flush(argumentToOperand(argument));
        for (int local = 0; local < m_inlineStackTop->m_codeBlock->m_numVars; ++local) {
            if (!m_inlineStackTop->m_codeBlock->isCaptured(local))
                continue;
            flush(local);
        }
    }

    // Get an operand, and perform a ToInt32 conversion on it.
    NodeIndex getToInt32(int operand)
    {
        return toInt32(get(operand));
    }

    // Perform an ES5 ToInt32 operation - returns a node of type NodeResultInt32.
    NodeIndex toInt32(NodeIndex index)
    {
        Node& node = m_graph[index];

        if (node.hasInt32Result())
            return index;

        if (node.op() == UInt32ToNumber)
            return node.child1().index();

        // Check for numeric constants boxed as JSValues.
        if (node.op() == JSConstant) {
            JSValue v = valueOfJSConstant(index);
            if (v.isInt32())
                return getJSConstant(node.constantNumber());
            if (v.isNumber())
                return getJSConstantForValue(JSValue(JSC::toInt32(v.asNumber())));
        }

        return addToGraph(ValueToInt32, index);
    }

    NodeIndex getJSConstantForValue(JSValue constantValue)
    {
        unsigned constantIndex = m_codeBlock->addOrFindConstant(constantValue);
        if (constantIndex >= m_constants.size())
            m_constants.append(ConstantRecord());
        
        ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        
        return getJSConstant(constantIndex);
    }

    NodeIndex getJSConstant(unsigned constant)
    {
        NodeIndex index = m_constants[constant].asJSValue;
        if (index != NoNode)
            return index;

        NodeIndex resultIndex = addToGraph(JSConstant, OpInfo(constant));
        m_constants[constant].asJSValue = resultIndex;
        return resultIndex;
    }

    NodeIndex getCallee()
    {
        return addToGraph(GetCallee);
    }

    // Helper functions to get/set the this value.
    NodeIndex getThis()
    {
        return get(m_inlineStackTop->m_codeBlock->thisRegister());
    }
    void setThis(NodeIndex value)
    {
        set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
    }

    // Convenience methods for checking nodes for constants.
    bool isJSConstant(NodeIndex index)
    {
        return m_graph[index].op() == JSConstant;
    }
    bool isInt32Constant(NodeIndex nodeIndex)
    {
        return isJSConstant(nodeIndex) && valueOfJSConstant(nodeIndex).isInt32();
    }
    // Convenience methods for getting constant values.
    JSValue valueOfJSConstant(NodeIndex index)
    {
        ASSERT(isJSConstant(index));
        return m_codeBlock->getConstant(FirstConstantRegisterIndex + m_graph[index].constantNumber());
    }
    int32_t valueOfInt32Constant(NodeIndex nodeIndex)
    {
        ASSERT(isInt32Constant(nodeIndex));
        return valueOfJSConstant(nodeIndex).asInt32();
    }
    
    // This method returns a JSConstant with the value 'undefined'.
    NodeIndex constantUndefined()
    {
        // Has m_constantUndefined been set up yet?
        if (m_constantUndefined == UINT_MAX) {
            // Search the constant pool for undefined, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
                if (testMe.isUndefined())
                    return getJSConstant(m_constantUndefined);
            }

            // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(jsUndefined());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
        return getJSConstant(m_constantUndefined);
    }

    // This method returns a JSConstant with the value 'null'.
    NodeIndex constantNull()
    {
        // Has m_constantNull been set up yet?
        if (m_constantNull == UINT_MAX) {
            // Search the constant pool for null, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
                if (testMe.isNull())
                    return getJSConstant(m_constantNull);
            }

            // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(jsNull());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
        return getJSConstant(m_constantNull);
    }

    // This method returns a JSConstant with the integer value 1.
    NodeIndex one()
    {
        // Has m_constant1 been set up yet?
        if (m_constant1 == UINT_MAX) {
            // Search the constant pool for the value 1, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
                if (testMe.isInt32() && testMe.asInt32() == 1)
                    return getJSConstant(m_constant1);
            }

            // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(jsNumber(1));
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
        return getJSConstant(m_constant1);
    }
    
    // This method returns a JSConstant with the value NaN.
    NodeIndex constantNaN()
    {
        JSValue nan = jsNaN();
        
        // Has m_constantNaN been set up yet?
        if (m_constantNaN == UINT_MAX) {
            // Search the constant pool for the value NaN, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
                if (JSValue::encode(testMe) == JSValue::encode(nan))
                    return getJSConstant(m_constantNaN);
            }

            // Add the value NaN to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(nan);
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value NaN.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
        ASSERT(isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
        return getJSConstant(m_constantNaN);
    }
    
    NodeIndex cellConstant(JSCell* cell)
    {
        HashMap<JSCell*, NodeIndex>::AddResult result = m_cellConstantNodes.add(cell, NoNode);
        if (result.isNewEntry)
            result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));
        
        return result.iterator->value;
    }
    
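    // The third argument captures how far the value-profiling site sits past
    // the current instruction: addCall() bumps m_currentProfilingIndex to the
    // op_call_put_result that follows an op_call, so the delta is usually zero
    // but can be positive.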
    CodeOrigin currentCodeOrigin()
    {
        return CodeOrigin(m_currentIndex, m_inlineStackTop->m_inlineCallFrame, m_currentProfilingIndex - m_currentIndex);
    }

    // These methods create a node and add it to the graph. If nodes of this type are
    // 'mustGenerate' then the node will implicitly be ref'ed to ensure generation.
    NodeIndex addToGraph(NodeType op, NodeIndex child1 = NoNode, NodeIndex child2 = NoNode, NodeIndex child3 = NoNode)
    {
        NodeIndex resultIndex = (NodeIndex)m_graph.size();
        m_graph.append(Node(op, currentCodeOrigin(), child1, child2, child3));
        ASSERT(op != Phi);
        m_currentBlock->append(resultIndex);

        if (defaultFlags(op) & NodeMustGenerate)
            m_graph.ref(resultIndex);
        return resultIndex;
    }
    NodeIndex addToGraph(NodeType op, OpInfo info, NodeIndex child1 = NoNode, NodeIndex child2 = NoNode, NodeIndex child3 = NoNode)
    {
        NodeIndex resultIndex = (NodeIndex)m_graph.size();
        m_graph.append(Node(op, currentCodeOrigin(), info, child1, child2, child3));
        if (op == Phi)
            m_currentBlock->phis.append(resultIndex);
        else
            m_currentBlock->append(resultIndex);

        if (defaultFlags(op) & NodeMustGenerate)
            m_graph.ref(resultIndex);
        return resultIndex;
    }
    NodeIndex addToGraph(NodeType op, OpInfo info1, OpInfo info2, NodeIndex child1 = NoNode, NodeIndex child2 = NoNode, NodeIndex child3 = NoNode)
    {
        NodeIndex resultIndex = (NodeIndex)m_graph.size();
        m_graph.append(Node(op, currentCodeOrigin(), info1, info2, child1, child2, child3));
        ASSERT(op != Phi);
        m_currentBlock->append(resultIndex);

        if (defaultFlags(op) & NodeMustGenerate)
            m_graph.ref(resultIndex);
        return resultIndex;
    }
    
    NodeIndex addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
    {
        NodeIndex resultIndex = (NodeIndex)m_graph.size();
        m_graph.append(Node(Node::VarArg, op, currentCodeOrigin(), info1, info2, m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs));
        ASSERT(op != Phi);
        m_currentBlock->append(resultIndex);
        
        m_numPassedVarArgs = 0;
        
        if (defaultFlags(op) & NodeMustGenerate)
            m_graph.ref(resultIndex);
        return resultIndex;
    }

    NodeIndex insertPhiNode(OpInfo info, BasicBlock* block)
    {
        NodeIndex resultIndex = (NodeIndex)m_graph.size();
        m_graph.append(Node(Phi, currentCodeOrigin(), info));
        block->phis.append(resultIndex);

        return resultIndex;
    }

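    // Var-arg nodes take a contiguous slice of m_graph.m_varArgChildren rather
    // than fixed child slots. The pattern is to push each child with
    // addVarArgChild() and then build the node, which consumes the pending
    // children and resets m_numPassedVarArgs. Roughly, for a call:
    //
    //     addVarArgChild(callee);
    //     addVarArgChild(thisArgument);
    //     addVarArgChild(argument1);
    //     NodeIndex call = addToGraph(Node::VarArg, Call, OpInfo(0), OpInfo(prediction));
    //
    // See addCall() below for the real thing.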
    void addVarArgChild(NodeIndex child)
    {
        m_graph.m_varArgChildren.append(Edge(child));
        m_numPassedVarArgs++;
    }
    
    NodeIndex addCall(Interpreter* interpreter, Instruction* currentInstruction, NodeType op)
    {
        Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call);

        SpeculatedType prediction = SpecNone;
        if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
            m_currentProfilingIndex = m_currentIndex + OPCODE_LENGTH(op_call);
            prediction = getPrediction();
        }
        
        addVarArgChild(get(currentInstruction[1].u.operand));
        int argCount = currentInstruction[2].u.operand;
        if (JSStack::CallFrameHeaderSize + (unsigned)argCount > m_parameterSlots)
            m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;

        int registerOffset = currentInstruction[3].u.operand;
        int dummyThisArgument = op == Call ? 0 : 1;
        for (int i = 0 + dummyThisArgument; i < argCount; ++i)
            addVarArgChild(get(registerOffset + argumentToOperand(i)));

        NodeIndex call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
        if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result)
            set(putInstruction[1].u.operand, call);
        return call;
    }
    
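    // Guard that 'object' still has the given structure. If the structure's
    // transition watchpoint is still intact we plant a StructureTransitionWatchpoint,
    // which costs nothing at run time (should the structure ever transition,
    // the watchpoint fires and the code is jettisoned); otherwise we fall back
    // to an explicit CheckStructure.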
    NodeIndex addStructureTransitionCheck(JSCell* object, Structure* structure)
    {
        // Add a weak JS constant for the object regardless, since the code should
        // be jettisoned if the object ever dies.
        NodeIndex objectIndex = cellConstant(object);
        
        if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
            addToGraph(StructureTransitionWatchpoint, OpInfo(structure), objectIndex);
            return objectIndex;
        }
        
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectIndex);
        
        return objectIndex;
    }
    
    NodeIndex addStructureTransitionCheck(JSCell* object)
    {
        return addStructureTransitionCheck(object, object->structure());
    }
    
    SpeculatedType getPredictionWithoutOSRExit(NodeIndex nodeIndex, unsigned bytecodeIndex)
    {
        UNUSED_PARAM(nodeIndex);
        
        SpeculatedType prediction = m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(bytecodeIndex);
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLog("Dynamic [@", nodeIndex, ", bc#", bytecodeIndex, "] prediction: ", SpeculationDump(prediction), "\n");
#endif
        
        return prediction;
    }

    SpeculatedType getPrediction(NodeIndex nodeIndex, unsigned bytecodeIndex)
    {
        SpeculatedType prediction = getPredictionWithoutOSRExit(nodeIndex, bytecodeIndex);
        
        if (prediction == SpecNone) {
            // We have no information about what values this node generates. Give up
            // on executing this code, since we're likely to do more damage than good.
            addToGraph(ForceOSRExit);
        }
        
        return prediction;
    }
    
    SpeculatedType getPredictionWithoutOSRExit()
    {
        return getPredictionWithoutOSRExit(m_graph.size(), m_currentProfilingIndex);
    }
    
    SpeculatedType getPrediction()
    {
        return getPrediction(m_graph.size(), m_currentProfilingIndex);
    }
    
    ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
    {
        profile->computeUpdatedPrediction(m_inlineStackTop->m_codeBlock);
        return ArrayMode::fromObserved(profile, action, false);
    }
    
    ArrayMode getArrayMode(ArrayProfile* profile)
    {
        return getArrayMode(profile, Array::Read);
    }
    
    ArrayMode getArrayModeAndEmitChecks(ArrayProfile* profile, Array::Action action, NodeIndex base)
    {
        profile->computeUpdatedPrediction(m_inlineStackTop->m_codeBlock);
        
#if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
        if (m_inlineStackTop->m_profiledBlock->numberOfRareCaseProfiles())
            dataLogF("Slow case profile for bc#%u: %u\n", m_currentIndex, m_inlineStackTop->m_profiledBlock->rareCaseProfileForBytecodeOffset(m_currentIndex)->m_counter);
        dataLogF("Array profile for bc#%u: %p%s%s, %u\n", m_currentIndex, profile->expectedStructure(), profile->structureIsPolymorphic() ? " (polymorphic)" : "", profile->mayInterceptIndexedAccesses() ? " (may intercept)" : "", profile->observedArrayModes());
#endif
        
        bool makeSafe =
            m_inlineStackTop->m_profiledBlock->couldTakeSlowCase(m_currentIndex)
            || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, OutOfBounds);
        
        ArrayMode result = ArrayMode::fromObserved(profile, action, makeSafe);
        
        if (profile->hasDefiniteStructure() && result.benefitsFromStructureCheck())
            addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(profile->expectedStructure())), base);
        
        return result;
    }
    
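    // "Safe" here means pessimistic: if value profiling or prior OSR exits
    // suggest that this arithmetic node has overflowed or produced negative
    // zero before, tag it with NodeMayOverflow/NodeMayNegZero so the backend
    // plants the corresponding checks instead of speculating and exiting again.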
    NodeIndex makeSafe(NodeIndex nodeIndex)
    {
        Node& node = m_graph[nodeIndex];
        
        bool likelyToTakeSlowCase;
        if (!isX86() && node.op() == ArithMod)
            likelyToTakeSlowCase = false;
        else
            likelyToTakeSlowCase = m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex);
        
        if (!likelyToTakeSlowCase
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            return nodeIndex;
        
        switch (m_graph[nodeIndex].op()) {
        case UInt32ToNumber:
        case ArithAdd:
        case ArithSub:
        case ArithNegate:
        case ValueAdd:
        case ArithMod: // for ArithMod "MayOverflow" means we tried to divide by zero, or we saw double.
            m_graph[nodeIndex].mergeFlags(NodeMayOverflow);
            break;
            
        case ArithMul:
            if (m_inlineStackTop->m_profiledBlock->likelyToTakeDeepestSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)) {
#if DFG_ENABLE(DEBUG_VERBOSE)
                dataLogF("Making ArithMul @%u take deepest slow case.\n", nodeIndex);
#endif
                m_graph[nodeIndex].mergeFlags(NodeMayOverflow | NodeMayNegZero);
            } else if (m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
                       || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero)) {
#if DFG_ENABLE(DEBUG_VERBOSE)
                dataLogF("Making ArithMul @%u take faster slow case.\n", nodeIndex);
#endif
                m_graph[nodeIndex].mergeFlags(NodeMayNegZero);
            }
            break;
            
        default:
            ASSERT_NOT_REACHED();
            break;
        }
        
        return nodeIndex;
    }
    
    NodeIndex makeDivSafe(NodeIndex nodeIndex)
    {
        ASSERT(m_graph[nodeIndex].op() == ArithDiv);
        
        // The main slow case counter for op_div in the old JIT counts only when
        // the operands are not numbers. We don't care about that since we already
        // have speculations in place that take care of that separately. We only
        // care about when the outcome of the division is not an integer, which
        // is what the special fast case counter tells us.
        
        if (!m_inlineStackTop->m_profiledBlock->couldTakeSpecialFastCase(m_currentIndex)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            return nodeIndex;
        
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLogF("Making %s @%u safe at bc#%u because special fast-case counter is at %u and exit profiles say %d, %d\n", Graph::opName(m_graph[nodeIndex].op()), nodeIndex, m_currentIndex, m_inlineStackTop->m_profiledBlock->specialFastCaseProfileForBytecodeOffset(m_currentIndex)->m_counter, m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow), m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero));
#endif
        
        // FIXME: It might be possible to make this more granular. The DFG certainly can
        // distinguish between negative zero and overflow in its exit profiles.
        m_graph[nodeIndex].mergeFlags(NodeMayOverflow | NodeMayNegZero);
        
        return nodeIndex;
    }
    
    bool willNeedFlush(StructureStubInfo& stubInfo)
    {
        PolymorphicAccessStructureList* list;
        int listSize;
        switch (stubInfo.accessType) {
        case access_get_by_id_self_list:
            list = stubInfo.u.getByIdSelfList.structureList;
            listSize = stubInfo.u.getByIdSelfList.listSize;
            break;
        case access_get_by_id_proto_list:
            list = stubInfo.u.getByIdProtoList.structureList;
            listSize = stubInfo.u.getByIdProtoList.listSize;
            break;
        default:
            return false;
        }
        for (int i = 0; i < listSize; ++i) {
            if (!list->list[i].isDirect)
                return true;
        }
        return false;
    }
    
    bool structureChainIsStillValid(bool direct, Structure* previousStructure, StructureChain* chain)
    {
        if (direct)
            return true;
        
        if (!previousStructure->storedPrototype().isNull() && previousStructure->storedPrototype().asCell()->structure() != chain->head()->get())
            return false;
        
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
            if (!(*it)->storedPrototype().isNull() && (*it)->storedPrototype().asCell()->structure() != it[1].get())
                return false;
        }
        
        return true;
    }
    
    void buildOperandMapsIfNecessary();
    
    ExecState* m_exec;
    JSGlobalData* m_globalData;
    CodeBlock* m_codeBlock;
    CodeBlock* m_profiledBlock;
    Graph& m_graph;

    // The current block being generated.
    BasicBlock* m_currentBlock;
    // The bytecode index of the current instruction being generated.
    unsigned m_currentIndex;
    // The bytecode index of the value profile of the current instruction being generated.
    unsigned m_currentProfilingIndex;

    // We use these values during code generation, and to avoid the need for
    // special handling we make sure they are available as constants in the
    // CodeBlock's constant pool. These variables are initialized to
    // UINT_MAX, and lazily updated to hold an index into the CodeBlock's
    // constant pool, as necessary.
    unsigned m_constantUndefined;
    unsigned m_constantNull;
    unsigned m_constantNaN;
    unsigned m_constant1;
    HashMap<JSCell*, unsigned> m_cellConstants;
    HashMap<JSCell*, NodeIndex> m_cellConstantNodes;

    // A constant in the constant pool may be represented by more than one
    // node in the graph, depending on the context in which it is being used.
    struct ConstantRecord {
        ConstantRecord()
            : asInt32(NoNode)
            , asNumeric(NoNode)
            , asJSValue(NoNode)
        {
        }

        NodeIndex asInt32;
        NodeIndex asNumeric;
        NodeIndex asJSValue;
    };

    // For each constant register in the CodeBlock, track the node(s) that
    // currently represent that constant in the graph (see ConstantRecord above).
    Vector<ConstantRecord, 16> m_constants;

    // The number of arguments passed to the function.
    unsigned m_numArguments;
    // The number of locals (vars + temporaries) used in the function.
    unsigned m_numLocals;
    // The set of registers we need to preserve across BasicBlock boundaries;
    // typically equal to the set of vars, but we expand this to cover all
    // temporaries that persist across blocks (due to ?:, &&, ||, etc).
    BitVector m_preservedVars;
    // The number of slots (in units of sizeof(Register)) that we need to
    // preallocate for calls emanating from this frame. This includes the
    // size of the CallFrame, only if this is not a leaf function.  (I.e.
    // this is 0 if and only if this function is a leaf.)
    unsigned m_parameterSlots;
    // The number of var args passed to the next var arg node.
    unsigned m_numPassedVarArgs;
    // The index in the global resolve info.
    unsigned m_globalResolveNumber;

    struct PhiStackEntry {
        PhiStackEntry(BasicBlock* block, NodeIndex phi, unsigned varNo)
            : m_block(block)
            , m_phi(phi)
            , m_varNo(varNo)
        {
        }

        BasicBlock* m_block;
        NodeIndex m_phi;
        unsigned m_varNo;
    };
    Vector<PhiStackEntry, 16> m_argumentPhiStack;
    Vector<PhiStackEntry, 16> m_localPhiStack;
    
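    // Maps a (CodeBlock, constant buffer index) pair to the corresponding
    // buffer index in the machine code block, so that each inlined constant
    // buffer is copied over at most once (see m_constantBufferRemap in
    // InlineStackEntry below).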
    HashMap<ConstantBufferKey, unsigned> m_constantBufferCache;
    
    struct InlineStackEntry {
        ByteCodeParser* m_byteCodeParser;
        
        CodeBlock* m_codeBlock;
        CodeBlock* m_profiledBlock;
        InlineCallFrame* m_inlineCallFrame;
        VirtualRegister m_calleeVR; // absolute virtual register, not relative to call frame
        
        ScriptExecutable* executable() { return m_codeBlock->ownerExecutable(); }
        
        QueryableExitProfile m_exitProfile;
        
        // Remapping of identifier and constant numbers from the code block being
        // inlined (inline callee) to the code block that we're inlining into
        // (the machine code block, which is the transitive, though not necessarily
        // direct, caller).
        Vector<unsigned> m_identifierRemap;
        Vector<unsigned> m_constantRemap;
        Vector<unsigned> m_constantBufferRemap;
        Vector<unsigned> m_resolveOperationRemap;
        Vector<unsigned> m_putToBaseOperationRemap;
        
        // Blocks introduced by this code block, which need successor linking.
        // May include up to one basic block that includes the continuation after
        // the callsite in the caller. These must be appended in the order that they
        // are created, but their bytecodeBegin values need not be in order as they
        // are ignored.
        Vector<UnlinkedBlock> m_unlinkedBlocks;
        
        // Potential block linking targets. Must be sorted by bytecodeBegin, and
        // cannot have two blocks that have the same bytecodeBegin. For this very
        // reason, this is not equivalent to m_unlinkedBlocks.
        Vector<BlockIndex> m_blockLinkingTargets;
        
        // If the callsite's basic block was split into two, then this will be
        // the head of the callsite block. It needs its successors linked to the
        // m_unlinkedBlocks, but not the other way around: there's no way for
        // any blocks in m_unlinkedBlocks to jump back into this block.
        BlockIndex m_callsiteBlockHead;
        
        // Does the callsite block head need linking? This is typically true
        // but will be false for the machine code block's inline stack entry
        // (since that one is not inlined) and for cases where an inline callee
        // did the linking for us.
        bool m_callsiteBlockHeadNeedsLinking;
        
        VirtualRegister m_returnValue;
        
        // Speculations about variable types collected from the profiled code block,
        // which are based on OSR exit profiles that past DFG compilations of this
        // code block had gathered.
        LazyOperandValueProfileParser m_lazyOperands;
        
        // Did we see any returns? We need to handle the (uncommon but necessary)
        // case where a procedure that does not return was inlined.
        bool m_didReturn;
        
        // Did we have any early returns?
        bool m_didEarlyReturn;
        
        // Pointers to the argument position trackers for this slice of code.
        Vector<ArgumentPosition*> m_argumentPositions;
        
        InlineStackEntry* m_caller;
        
        InlineStackEntry(
            ByteCodeParser*,
            CodeBlock*,
            CodeBlock* profiledBlock,
            BlockIndex callsiteBlockHead,
            VirtualRegister calleeVR,
            JSFunction* callee,
            VirtualRegister returnValueVR,
            VirtualRegister inlineCallFrameStart,
            int argumentCountIncludingThis,
            CodeSpecializationKind);
        
        ~InlineStackEntry()
        {
            m_byteCodeParser->m_inlineStackTop = m_caller;
        }
        
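        // Translate an operand from this callee's numbering into the machine
        // code block's numbering: constants go through m_constantRemap, the
        // callee through m_calleeVR, and other registers are shifted by the
        // inline call frame's stackOffset (e.g. with a stackOffset of 20,
        // callee local r2 becomes machine register r22).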
1230         int remapOperand(int operand) const
1231         {
1232             if (!m_inlineCallFrame)
1233                 return operand;
1234             
1235             if (operand >= FirstConstantRegisterIndex) {
1236                 int result = m_constantRemap[operand - FirstConstantRegisterIndex];
1237                 ASSERT(result >= FirstConstantRegisterIndex);
1238                 return result;
1239             }
1240
1241             if (operand == JSStack::Callee)
1242                 return m_calleeVR;
1243
1244             return operand + m_inlineCallFrame->stackOffset;
1245         }
1246     };
1247     
1248     InlineStackEntry* m_inlineStackTop;
1249
1250     // Have we built operand maps? We initialize them lazily, and only when doing
1251     // inlining.
1252     bool m_haveBuiltOperandMaps;
1253     // Mapping between identifier names and numbers.
1254     IdentifierMap m_identifierMap;
1255     // Mapping between values and constant numbers.
1256     JSValueMap m_jsValueMap;
1257     // Index of the empty value, or UINT_MAX if there is no mapping. This is a horrible
1258     // work-around for the fact that JSValueMap can't handle "empty" values.
1259     unsigned m_emptyJSValueIndex;
1260     
1261     // Cache of code blocks that we've generated bytecode for.
1262     ByteCodeCache<canInlineFunctionFor> m_codeBlockCache;
1263     
1264     Instruction* m_currentInstruction;
1265 };
1266
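// Each opcode case in parseBlock() ends with one of these macros: NEXT_OPCODE
// advances m_currentIndex past the instruction and continues the dispatch
// loop, while LAST_OPCODE advances and then returns, ending the basic block.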
1267 #define NEXT_OPCODE(name) \
1268     m_currentIndex += OPCODE_LENGTH(name); \
1269     continue
1270
1271 #define LAST_OPCODE(name) \
1272     m_currentIndex += OPCODE_LENGTH(name); \
1273     return shouldContinueParsing
1274
1275
1276 void ByteCodeParser::handleCall(Interpreter* interpreter, Instruction* currentInstruction, NodeType op, CodeSpecializationKind kind)
1277 {
1278     ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
1279     
1280     NodeIndex callTarget = get(currentInstruction[1].u.operand);
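    // Classify the call target: a known JSFunction constant, a known
    // InternalFunction constant (e.g. a built-in constructor), a target the
    // call link cache observed to be monomorphic, or unknown.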
1281     enum {
1282         ConstantFunction,
1283         ConstantInternalFunction,
1284         LinkedFunction,
1285         UnknownFunction
1286     } callType;
1287             
1288     CallLinkStatus callLinkStatus = CallLinkStatus::computeFor(
1289         m_inlineStackTop->m_profiledBlock, m_currentIndex);
1290     
1291 #if DFG_ENABLE(DEBUG_VERBOSE)
1292     dataLogF("For call at @%lu bc#%u: ", m_graph.size(), m_currentIndex);
1293     if (callLinkStatus.isSet()) {
1294         if (callLinkStatus.couldTakeSlowPath())
1295             dataLogF("could take slow path, ");
1296         dataLogF("target = %p\n", callLinkStatus.callTarget());
1297     } else
1298         dataLogF("not set.\n");
1299 #endif
1300     
1301     if (m_graph.isFunctionConstant(callTarget)) {
1302         callType = ConstantFunction;
1303 #if DFG_ENABLE(DEBUG_VERBOSE)
1304         dataLogF("Call at [@%lu, bc#%u] has a function constant: %p, exec %p.\n",
1305                 m_graph.size(), m_currentIndex,
1306                 m_graph.valueOfFunctionConstant(callTarget),
1307                 m_graph.valueOfFunctionConstant(callTarget)->executable());
1308 #endif
1309     } else if (m_graph.isInternalFunctionConstant(callTarget)) {
1310         callType = ConstantInternalFunction;
1311 #if DFG_ENABLE(DEBUG_VERBOSE)
1312         dataLogF("Call at [@%lu, bc#%u] has an internal function constant: %p.\n",
1313                 m_graph.size(), m_currentIndex,
1314                 m_graph.valueOfInternalFunctionConstant(callTarget));
1315 #endif
1316     } else if (callLinkStatus.isSet() && !callLinkStatus.couldTakeSlowPath()
1317                && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)) {
1318         callType = LinkedFunction;
1319 #if DFG_ENABLE(DEBUG_VERBOSE)
1320         dataLogF("Call at [@%lu, bc#%u] is linked to: %p, exec %p.\n",
1321                 m_graph.size(), m_currentIndex, callLinkStatus.callTarget(),
1322                 callLinkStatus.callTarget()->executable());
1323 #endif
1324     } else {
1325         callType = UnknownFunction;
1326 #if DFG_ENABLE(DEBUG_VERBOSE)
1327         dataLogF("Call at [@%lu, bc#%u] has an unknown or ambiguous target.\n",
1328                 m_graph.size(), m_currentIndex);
1329 #endif
1330     }
1331     if (callType != UnknownFunction) {
1332         int argumentCountIncludingThis = currentInstruction[2].u.operand;
1333         int registerOffset = currentInstruction[3].u.operand;
1334
1335         // Do we have a result?
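        // A call's result, if used, is communicated by an op_call_put_result
        // immediately following the call instruction; it also carries the
        // value profile we use for the result prediction.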
1336         bool usesResult = false;
1337         int resultOperand = 0; // make compiler happy
1338         unsigned nextOffset = m_currentIndex + OPCODE_LENGTH(op_call);
1339         Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call);
1340         SpeculatedType prediction = SpecNone;
1341         if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
1342             resultOperand = putInstruction[1].u.operand;
1343             usesResult = true;
1344             m_currentProfilingIndex = nextOffset;
1345             prediction = getPrediction();
1346             nextOffset += OPCODE_LENGTH(op_call_put_result);
1347         }
1348
1349         if (callType == ConstantInternalFunction) {
1350             if (handleConstantInternalFunction(usesResult, resultOperand, m_graph.valueOfInternalFunctionConstant(callTarget), registerOffset, argumentCountIncludingThis, prediction, kind))
1351                 return;
1352             
1353             // Can only handle this using the generic call handler.
1354             addCall(interpreter, currentInstruction, op);
1355             return;
1356         }
1357         
1358         JSFunction* expectedFunction;
1359         Intrinsic intrinsic;
1360         bool certainAboutExpectedFunction;
1361         if (callType == ConstantFunction) {
1362             expectedFunction = m_graph.valueOfFunctionConstant(callTarget);
1363             intrinsic = expectedFunction->executable()->intrinsicFor(kind);
1364             certainAboutExpectedFunction = true;
1365         } else {
1366             ASSERT(callType == LinkedFunction);
1367             expectedFunction = callLinkStatus.callTarget();
1368             intrinsic = expectedFunction->executable()->intrinsicFor(kind);
1369             certainAboutExpectedFunction = false;
1370         }
1371                 
1372         if (intrinsic != NoIntrinsic) {
1373             if (!certainAboutExpectedFunction)
1374                 emitFunctionCheck(expectedFunction, callTarget, registerOffset, kind);
1375             
1376             if (handleIntrinsic(usesResult, resultOperand, intrinsic, registerOffset, argumentCountIncludingThis, prediction)) {
1377                 if (!certainAboutExpectedFunction) {
1378                     // Need to keep the call target alive for OSR. We could easily optimize this out if we wanted
1379                     // to, since at this point we know that the call target is a constant. It's just that OSR isn't
1380                     // smart enough to figure that out, since it doesn't understand CheckFunction.
1381                     addToGraph(Phantom, callTarget);
1382                 }
1383                 
1384                 return;
1385             }
1386         } else if (handleInlining(usesResult, currentInstruction[1].u.operand, callTarget, resultOperand, certainAboutExpectedFunction, expectedFunction, registerOffset, argumentCountIncludingThis, nextOffset, kind))
1387             return;
1388     }
1389     
1390     addCall(interpreter, currentInstruction, op);
1391 }
1392
1393 void ByteCodeParser::emitFunctionCheck(JSFunction* expectedFunction, NodeIndex callTarget, int registerOffset, CodeSpecializationKind kind)
1394 {
1395     NodeIndex thisArgument;
1396     if (kind == CodeForCall)
1397         thisArgument = get(registerOffset + argumentToOperand(0));
1398     else
1399         thisArgument = NoNode;
1400     addToGraph(CheckFunction, OpInfo(expectedFunction), callTarget, thisArgument);
1401 }
1402
1403 bool ByteCodeParser::handleInlining(bool usesResult, int callTarget, NodeIndex callTargetNodeIndex, int resultOperand, bool certainAboutExpectedFunction, JSFunction* expectedFunction, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind kind)
1404 {
1405     // First, the really simple checks: do we have an actual JS function?
1406     if (!expectedFunction)
1407         return false;
1408     if (expectedFunction->isHostFunction())
1409         return false;
1410     
1411     FunctionExecutable* executable = expectedFunction->jsExecutable();
1412     
1413     // Does the number of arguments we're passing match the arity of the target? We currently
1414     // inline only if the number of arguments passed is greater than or equal to the number
1415     // of arguments expected.
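    // For example, a callee declared with two parameters (parameterCount()
    // == 2) can be inlined from a call site that passes 'this' plus at least
    // two arguments (argumentCountIncludingThis >= 3).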
1416     if (static_cast<int>(executable->parameterCount()) + 1 > argumentCountIncludingThis)
1417         return false;
1418     
1419     // Have we exceeded inline stack depth, or are we trying to inline a recursive call?
1420     // If either of these are detected, then don't inline.
1421     unsigned depth = 0;
1422     for (InlineStackEntry* entry = m_inlineStackTop; entry; entry = entry->m_caller) {
1423         ++depth;
1424         if (depth >= Options::maximumInliningDepth())
1425             return false; // Depth exceeded.
1426         
1427         if (entry->executable() == executable)
1428             return false; // Recursion detected.
1429     }
1430     
1431     // Does the code block's size match the heuristics/requirements for being
1432     // an inline candidate?
1433     CodeBlock* profiledBlock = executable->profiledCodeBlockFor(kind);
1434     if (!profiledBlock)
1435         return false;
1436     
1437     if (!mightInlineFunctionFor(profiledBlock, kind))
1438         return false;
1439     
1440     // If we get here then it looks like we should definitely inline this code. Proceed
1441     // with generating bytecode for the callee, so that we can then parse that bytecode.
1442     // Note that if LLInt is enabled, the bytecode will always be available. Also note
1443     // that if LLInt is enabled, we may inline a code block that has never been JITted
1444     // before!
1445     CodeBlock* codeBlock = m_codeBlockCache.get(CodeBlockKey(executable, kind), expectedFunction->scope());
1446     if (!codeBlock)
1447         return false;
1448     
1449     ASSERT(canInlineFunctionFor(codeBlock, kind));
1450
1451 #if DFG_ENABLE(DEBUG_VERBOSE)
1452     dataLogF("Inlining executable %p.\n", executable);
1453 #endif
1454     
1455     // Now we know without a doubt that we are committed to inlining. So begin the process
1456     // by checking the callee (if necessary) and making sure that arguments and the callee
1457     // are flushed.
1458     if (!certainAboutExpectedFunction)
1459         emitFunctionCheck(expectedFunction, callTargetNodeIndex, registerOffset, kind);
1460     
1461     // FIXME: Don't flush constants!
1462     
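    // The remapped register offset points just past the call frame header, so
    // back up by CallFrameHeaderSize to find where the inline frame begins.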
1463     int inlineCallFrameStart = m_inlineStackTop->remapOperand(registerOffset) - JSStack::CallFrameHeaderSize;
1464     
1465     // Make sure that the area used by the call frame is reserved.
1466     for (int arg = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numVars; arg-- > inlineCallFrameStart;)
1467         m_preservedVars.set(arg);
1468     
1469     // Make sure that we have enough locals.
1470     unsigned newNumLocals = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters;
1471     if (newNumLocals > m_numLocals) {
1472         m_numLocals = newNumLocals;
1473         for (size_t i = 0; i < m_graph.m_blocks.size(); ++i)
1474             m_graph.m_blocks[i]->ensureLocals(newNumLocals);
1475     }
1476     
1477     size_t argumentPositionStart = m_graph.m_argumentPositions.size();
1478
1479     InlineStackEntry inlineStackEntry(
1480         this, codeBlock, profiledBlock, m_graph.m_blocks.size() - 1,
1481         (VirtualRegister)m_inlineStackTop->remapOperand(callTarget), expectedFunction,
1482         (VirtualRegister)m_inlineStackTop->remapOperand(
1483             usesResult ? resultOperand : InvalidVirtualRegister),
1484         (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind);
1485     
1486     // This is where the actual inlining really happens.
1487     unsigned oldIndex = m_currentIndex;
1488     unsigned oldProfilingIndex = m_currentProfilingIndex;
1489     m_currentIndex = 0;
1490     m_currentProfilingIndex = 0;
1491
1492     addToGraph(InlineStart, OpInfo(argumentPositionStart));
1493     
1494     parseCodeBlock();
1495     
1496     m_currentIndex = oldIndex;
1497     m_currentProfilingIndex = oldProfilingIndex;
1498     
1499     // If the inlined code created some new basic blocks, then we have linking to do.
1500     if (inlineStackEntry.m_callsiteBlockHead != m_graph.m_blocks.size() - 1) {
1501         
1502         ASSERT(!inlineStackEntry.m_unlinkedBlocks.isEmpty());
1503         if (inlineStackEntry.m_callsiteBlockHeadNeedsLinking)
1504             linkBlock(m_graph.m_blocks[inlineStackEntry.m_callsiteBlockHead].get(), inlineStackEntry.m_blockLinkingTargets);
1505         else
1506             ASSERT(m_graph.m_blocks[inlineStackEntry.m_callsiteBlockHead]->isLinked);
1507         
1508         // It's possible that the callsite block head is not owned by the caller.
1509         if (!inlineStackEntry.m_caller->m_unlinkedBlocks.isEmpty()) {
1510             // It's definitely owned by the caller, because the caller created new blocks.
1511             // Assert that this all adds up.
1512             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_blockIndex == inlineStackEntry.m_callsiteBlockHead);
1513             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking);
1514             inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking = false;
1515         } else {
1516             // It's definitely not owned by the caller. Tell the caller that it does not
1517             // need to link its callsite block head, because we did it already.
1518             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking);
1519             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHead == inlineStackEntry.m_callsiteBlockHead);
1520             inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking = false;
1521         }
1522         
1523         linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
1524     } else
1525         ASSERT(inlineStackEntry.m_unlinkedBlocks.isEmpty());
1526     
1527     // If there was a return, but no early returns, then we're done. We allow parsing of
1528     // the caller to continue in whatever basic block we're in right now.
1529     if (!inlineStackEntry.m_didEarlyReturn && inlineStackEntry.m_didReturn) {
1530         BasicBlock* lastBlock = m_graph.m_blocks.last().get();
1531         ASSERT(lastBlock->isEmpty() || !m_graph.last().isTerminal());
1532         
1533         // If we created new blocks then the last block needs linking, but in the
1534         // caller. It doesn't need to be linked to, but it needs outgoing links.
1535         if (!inlineStackEntry.m_unlinkedBlocks.isEmpty()) {
1536 #if DFG_ENABLE(DEBUG_VERBOSE)
1537             dataLogF("Reascribing bytecode index of block %p from bc#%u to bc#%u (inline return case).\n", lastBlock, lastBlock->bytecodeBegin, m_currentIndex);
1538 #endif
1539             // For debugging purposes, set the bytecodeBegin. Note that this doesn't matter
1540             // for release builds because this block will never serve as a potential target
1541             // in the linker's binary search.
1542             lastBlock->bytecodeBegin = m_currentIndex;
1543             m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.m_blocks.size() - 1));
1544         }
1545         
1546         m_currentBlock = m_graph.m_blocks.last().get();
1547
1548 #if DFG_ENABLE(DEBUG_VERBOSE)
1549         dataLogF("Done inlining executable %p, continuing code generation at epilogue.\n", executable);
1550 #endif
1551         return true;
1552     }
1553     
1554     // If we get to this point then all blocks must end in some sort of terminal.
1555     ASSERT(m_graph.last().isTerminal());
1556     
1557     // Link the early returns to the basic block we're about to create.
1558     for (size_t i = 0; i < inlineStackEntry.m_unlinkedBlocks.size(); ++i) {
1559         if (!inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking)
1560             continue;
1561         BasicBlock* block = m_graph.m_blocks[inlineStackEntry.m_unlinkedBlocks[i].m_blockIndex].get();
1562         ASSERT(!block->isLinked);
1563         Node& node = m_graph[block->last()];
1564         ASSERT(node.op() == Jump);
1565         ASSERT(node.takenBlockIndex() == NoBlock);
1566         node.setTakenBlockIndex(m_graph.m_blocks.size());
1567         inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking = false;
1568 #if !ASSERT_DISABLED
1569         block->isLinked = true;
1570 #endif
1571     }
1572     
1573     // Need to create a new basic block for the continuation at the caller.
1574     OwnPtr<BasicBlock> block = adoptPtr(new BasicBlock(nextOffset, m_numArguments, m_numLocals));
1575 #if DFG_ENABLE(DEBUG_VERBOSE)
1576     dataLogF("Creating inline epilogue basic block %p, #%zu for %p bc#%u at inline depth %u.\n", block.get(), m_graph.m_blocks.size(), m_inlineStackTop->executable(), m_currentIndex, CodeOrigin::inlineDepthForCallFrame(m_inlineStackTop->m_inlineCallFrame));
1577 #endif
1578     m_currentBlock = block.get();
1579     ASSERT(m_inlineStackTop->m_caller->m_blockLinkingTargets.isEmpty() || m_graph.m_blocks[m_inlineStackTop->m_caller->m_blockLinkingTargets.last()]->bytecodeBegin < nextOffset);
1580     m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.m_blocks.size()));
1581     m_inlineStackTop->m_caller->m_blockLinkingTargets.append(m_graph.m_blocks.size());
1582     m_graph.m_blocks.append(block.release());
1583     prepareToParseBlock();
1584     
1585     // At this point we return and continue to generate code for the caller, but
1586     // in the new basic block.
1587 #if DFG_ENABLE(DEBUG_VERBOSE)
1588     dataLogF("Done inlining executable %p, continuing code generation in new block.\n", executable);
1589 #endif
1590     return true;
1591 }
1592
1593 void ByteCodeParser::setIntrinsicResult(bool usesResult, int resultOperand, NodeIndex nodeIndex)
1594 {
1595     if (!usesResult)
1596         return;
1597     set(resultOperand, nodeIndex);
1598 }
1599
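// Shared lowering for the Math.min and Math.max intrinsics; op is ArithMin or
// ArithMax. A call with no user arguments folds to a constant; with one
// argument it reduces to a number check on that argument; with two it becomes
// a single arithmetic node.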
1600 bool ByteCodeParser::handleMinMax(bool usesResult, int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis)
1601 {
1602     if (argumentCountIncludingThis == 1) { // Math.min()
1603         setIntrinsicResult(usesResult, resultOperand, constantNaN());
1604         return true;
1605     }
1606      
1607     if (argumentCountIncludingThis == 2) { // Math.min(x)
1608         // FIXME: what we'd really like is a ValueToNumber, except we don't support that right now. Oh well.
1609         NodeIndex result = get(registerOffset + argumentToOperand(1));
1610         addToGraph(CheckNumber, result);
1611         setIntrinsicResult(usesResult, resultOperand, result);
1612         return true;
1613     }
1614     
1615     if (argumentCountIncludingThis == 3) { // Math.min(x, y)
1616         setIntrinsicResult(usesResult, resultOperand, addToGraph(op, get(registerOffset + argumentToOperand(1)), get(registerOffset + argumentToOperand(2))));
1617         return true;
1618     }
1619     
1620     // Don't handle >=3 arguments for now.
1621     return false;
1622 }
1623
1624 // FIXME: We dead-code-eliminate unused Math intrinsics, but that's invalid because
1625 // they need to perform the ToNumber conversion, which can have side-effects.
1626 bool ByteCodeParser::handleIntrinsic(bool usesResult, int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction)
1627 {
1628     switch (intrinsic) {
1629     case AbsIntrinsic: {
1630         if (argumentCountIncludingThis == 1) { // Math.abs()
1631             setIntrinsicResult(usesResult, resultOperand, constantNaN());
1632             return true;
1633         }
1634
1635         if (!MacroAssembler::supportsFloatingPointAbs())
1636             return false;
1637
1638         NodeIndex nodeIndex = addToGraph(ArithAbs, get(registerOffset + argumentToOperand(1)));
1639         if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
1640             m_graph[nodeIndex].mergeFlags(NodeMayOverflow);
1641         setIntrinsicResult(usesResult, resultOperand, nodeIndex);
1642         return true;
1643     }
1644
1645     case MinIntrinsic:
1646         return handleMinMax(usesResult, resultOperand, ArithMin, registerOffset, argumentCountIncludingThis);
1647         
1648     case MaxIntrinsic:
1649         return handleMinMax(usesResult, resultOperand, ArithMax, registerOffset, argumentCountIncludingThis);
1650         
1651     case SqrtIntrinsic: {
1652         if (argumentCountIncludingThis == 1) { // Math.sqrt()
1653             setIntrinsicResult(usesResult, resultOperand, constantNaN());
1654             return true;
1655         }
1656         
1657         if (!MacroAssembler::supportsFloatingPointSqrt())
1658             return false;
1659         
1660         setIntrinsicResult(usesResult, resultOperand, addToGraph(ArithSqrt, get(registerOffset + argumentToOperand(1))));
1661         return true;
1662     }
1663         
1664     case ArrayPushIntrinsic: {
1665         if (argumentCountIncludingThis != 2)
1666             return false;
1667         
1668         ArrayMode arrayMode = getArrayMode(m_currentInstruction[5].u.arrayProfile);
1669         if (!arrayMode.isJSArray())
1670             return false;
1671         switch (arrayMode.type()) {
1672         case Array::Undecided:
1673         case Array::Int32:
1674         case Array::Double:
1675         case Array::Contiguous:
1676         case Array::ArrayStorage: {
1677             NodeIndex arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1678             if (usesResult)
1679                 set(resultOperand, arrayPush);
1680             
1681             return true;
1682         }
1683             
1684         default:
1685             return false;
1686         }
1687     }
1688         
1689     case ArrayPopIntrinsic: {
1690         if (argumentCountIncludingThis != 1)
1691             return false;
1692         
1693         ArrayMode arrayMode = getArrayMode(m_currentInstruction[5].u.arrayProfile);
1694         if (!arrayMode.isJSArray())
1695             return false;
1696         switch (arrayMode.type()) {
1697         case Array::Int32:
1698         case Array::Double:
1699         case Array::Contiguous:
1700         case Array::ArrayStorage: {
1701             NodeIndex arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)));
1702             if (usesResult)
1703                 set(resultOperand, arrayPop);
1704             return true;
1705         }
1706             
1707         default:
1708             return false;
1709         }
1710     }
1711
1712     case CharCodeAtIntrinsic: {
1713         if (argumentCountIncludingThis != 2)
1714             return false;
1715
1716         int thisOperand = registerOffset + argumentToOperand(0);
1717         int indexOperand = registerOffset + argumentToOperand(1);
1718         NodeIndex charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
1719
1720         if (usesResult)
1721             set(resultOperand, charCode);
1722         return true;
1723     }
1724
1725     case CharAtIntrinsic: {
1726         if (argumentCountIncludingThis != 2)
1727             return false;
1728
1729         int thisOperand = registerOffset + argumentToOperand(0);
1730         int indexOperand = registerOffset + argumentToOperand(1);
1731         NodeIndex charCode = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
1732
1733         if (usesResult)
1734             set(resultOperand, charCode);
1735         return true;
1736     }
1737
1738     case RegExpExecIntrinsic: {
1739         if (argumentCountIncludingThis != 2)
1740             return false;
1741         
1742         NodeIndex regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1743         if (usesResult)
1744             set(resultOperand, regExpExec);
1745         
1746         return true;
1747     }
1748         
1749     case RegExpTestIntrinsic: {
1750         if (argumentCountIncludingThis != 2)
1751             return false;
1752         
1753         NodeIndex regExpExec = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1754         if (usesResult)
1755             set(resultOperand, regExpExec);
1756         
1757         return true;
1758     }
1759         
1760     default:
1761         return false;
1762     }
1763 }
1764
1765 bool ByteCodeParser::handleConstantInternalFunction(
1766     bool usesResult, int resultOperand, InternalFunction* function, int registerOffset,
1767     int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind kind)
1768 {
1769     // If we ever find that we have a lot of internal functions that we specialize for,
1770     // then we should probably have some sort of hashtable dispatch, or maybe even
1771     // dispatch straight through the MethodTable of the InternalFunction. But for now,
1772     // it seems that this case is hit infrequently enough, and the number of functions
1773     // we know about is small enough, that having just a linear cascade of if statements
1774     // is good enough.
1775     
1776     UNUSED_PARAM(prediction); // Remove this once we do more things.
1777     UNUSED_PARAM(kind); // Remove this once we do more things.
1778     
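    // For now the only specialization is the Array constructor: Array(length)
    // becomes NewArrayWithSize, and Array(a, b, ...) becomes a vararg NewArray
    // with the arguments as children.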
1779     if (function->classInfo() == &ArrayConstructor::s_info) {
1780         if (argumentCountIncludingThis == 2) {
1781             setIntrinsicResult(
1782                 usesResult, resultOperand,
1783                 addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(registerOffset + argumentToOperand(1))));
1784             return true;
1785         }
1786         
1787         for (int i = 1; i < argumentCountIncludingThis; ++i)
1788             addVarArgChild(get(registerOffset + argumentToOperand(i)));
1789         setIntrinsicResult(
1790             usesResult, resultOperand,
1791             addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
1792         return true;
1793     }
1794     
1795     return false;
1796 }
1797
1798 NodeIndex ByteCodeParser::handleGetByOffset(SpeculatedType prediction, NodeIndex base, unsigned identifierNumber, PropertyOffset offset)
1799 {
1800     NodeIndex propertyStorage;
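    // Inline properties live in the object cell itself, so the base can serve
    // as the property storage; out-of-line properties require loading the
    // butterfly first.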
1801     if (isInlineOffset(offset))
1802         propertyStorage = base;
1803     else
1804         propertyStorage = addToGraph(GetButterfly, base);
1805     // FIXME: It would be far more efficient for load elimination (and safer from
1806     // an OSR standpoint) if GetByOffset also referenced the object we were loading
1807     // from, and if we could load eliminate a GetByOffset even if the butterfly
1808     // had changed. That would be a great success.
1809     NodeIndex getByOffset = addToGraph(GetByOffset, OpInfo(m_graph.m_storageAccessData.size()), OpInfo(prediction), propertyStorage);
1810
1811     StorageAccessData storageAccessData;
1812     storageAccessData.offset = indexRelativeToBase(offset);
1813     storageAccessData.identifierNumber = identifierNumber;
1814     m_graph.m_storageAccessData.append(storageAccessData);
1815
1816     return getByOffset;
1817 }
1818
1819 void ByteCodeParser::handleGetByOffset(
1820     int destinationOperand, SpeculatedType prediction, NodeIndex base, unsigned identifierNumber,
1821     PropertyOffset offset)
1822 {
1823     set(destinationOperand, handleGetByOffset(prediction, base, identifierNumber, offset));
1824 }
1825
1826 void ByteCodeParser::handleGetById(
1827     int destinationOperand, SpeculatedType prediction, NodeIndex base, unsigned identifierNumber,
1828     const GetByIdStatus& getByIdStatus)
1829 {
1830     if (!getByIdStatus.isSimple()
1831         || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)) {
1832         set(destinationOperand,
1833             addToGraph(
1834                 getByIdStatus.makesCalls() ? GetByIdFlush : GetById,
1835                 OpInfo(identifierNumber), OpInfo(prediction), base));
1836         return;
1837     }
1838     
1839     ASSERT(getByIdStatus.structureSet().size());
1840                 
1841     // The implementation of GetByOffset does not know to terminate speculative
1842     // execution if it doesn't have a prediction, so we do it manually.
1843     if (prediction == SpecNone)
1844         addToGraph(ForceOSRExit);
1845     
1846     NodeIndex originalBaseForBaselineJIT = base;
1847                 
1848     addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(getByIdStatus.structureSet())), base);
1849     
1850     if (!getByIdStatus.chain().isEmpty()) {
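        // The property lives somewhere on the prototype chain. Check each
        // prototype's structure along the way; 'base' ends up pointing at the
        // object that actually holds the property.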
1851         Structure* currentStructure = getByIdStatus.structureSet().singletonStructure();
1852         JSObject* currentObject = 0;
1853         for (unsigned i = 0; i < getByIdStatus.chain().size(); ++i) {
1854             currentObject = asObject(currentStructure->prototypeForLookup(m_inlineStackTop->m_codeBlock));
1855             currentStructure = getByIdStatus.chain()[i];
1856             base = addStructureTransitionCheck(currentObject, currentStructure);
1857         }
1858     }
1859     
1860     // Unless we want bugs like https://bugs.webkit.org/show_bug.cgi?id=88783, we need to
1861     // ensure that the base of the original get_by_id is kept alive until we're done with
1862     // all of the speculations. We only insert the Phantom if there had been a CheckStructure
1863     // on something other than the base following the CheckStructure on base, or if the
1864     // access was compiled to a WeakJSConstant specific value, in which case we might not
1865     // have any explicit use of the base at all.
1866     if (getByIdStatus.specificValue() || originalBaseForBaselineJIT != base)
1867         addToGraph(Phantom, originalBaseForBaselineJIT);
1868     
1869     if (getByIdStatus.specificValue()) {
1870         ASSERT(getByIdStatus.specificValue().isCell());
1871         
1872         set(destinationOperand, cellConstant(getByIdStatus.specificValue().asCell()));
1873         return;
1874     }
1875     
1876     handleGetByOffset(
1877         destinationOperand, prediction, base, identifierNumber, getByIdStatus.offset());
1878 }
1879
1880 void ByteCodeParser::prepareToParseBlock()
1881 {
1882     for (unsigned i = 0; i < m_constants.size(); ++i)
1883         m_constants[i] = ConstantRecord();
1884     m_cellConstantNodes.clear();
1885 }
1886
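// Emit nodes that walk the scope chain: start at the current scope, optionally
// skip the top scope object, then skip skipCount additional scopes.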
1887 NodeIndex ByteCodeParser::getScope(bool skipTop, unsigned skipCount)
1888 {
1889     NodeIndex localBase = addToGraph(GetMyScope);
1890     if (skipTop)
1891         localBase = addToGraph(SkipTopScope, localBase);
1892     for (unsigned n = skipCount; n--;)
1893         localBase = addToGraph(SkipScope, localBase);
1894     return localBase;
1895 }
1896
1897 bool ByteCodeParser::parseResolveOperations(SpeculatedType prediction, unsigned identifier, unsigned operations, unsigned putToBaseOperation, NodeIndex* base, NodeIndex* value)
1898 {
1899     ResolveOperations* resolveOperations = m_codeBlock->resolveOperations(operations);
1900     if (resolveOperations->isEmpty()) {
1901         addToGraph(ForceOSRExit);
1902         return false;
1903     }
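    // Resolve operations run in two phases: a loop that establishes the base
    // (the global object, undefined, or some scope), followed by one operation
    // that produces the value. Scope skips are accumulated and materialized
    // lazily via getScope().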
1904     JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
1905     int skipCount = 0;
1906     bool skipTop = false;
1907     bool skippedScopes = false;
1908     bool setBase = false;
1909     ResolveOperation* pc = resolveOperations->data();
1910     NodeIndex localBase = 0;
1911     bool resolvingBase = true;
1912     while (resolvingBase) {
1913         switch (pc->m_operation) {
1914         case ResolveOperation::ReturnGlobalObjectAsBase:
1915             *base = cellConstant(globalObject);
1916             ASSERT(!value);
1917             return true;
1918
1919         case ResolveOperation::SetBaseToGlobal:
1920             *base = cellConstant(globalObject);
1921             setBase = true;
1922             resolvingBase = false;
1923             ++pc;
1924             break;
1925
1926         case ResolveOperation::SetBaseToUndefined:
1927             *base = constantUndefined();
1928             setBase = true;
1929             resolvingBase = false;
1930             ++pc;
1931             break;
1932
1933         case ResolveOperation::SetBaseToScope:
1934             localBase = getScope(skipTop, skipCount);
1935             *base = localBase;
1936             setBase = true;
1937
1938             resolvingBase = false;
1939
1940             // Reset the scope skipping as we've already loaded it
1941             skippedScopes = false;
1942             ++pc;
1943             break;
1944         case ResolveOperation::ReturnScopeAsBase:
1945             *base = getScope(skipTop, skipCount);
1946             ASSERT(!value);
1947             return true;
1948
1949         case ResolveOperation::SkipTopScopeNode:
1950             ASSERT(!m_inlineStackTop->m_inlineCallFrame);
1951             skipTop = true;
1952             skippedScopes = true;
1953             ++pc;
1954             break;
1955
1956         case ResolveOperation::SkipScopes:
1957             ASSERT(!m_inlineStackTop->m_inlineCallFrame);
1958             skipCount += pc->m_scopesToSkip;
1959             skippedScopes = true;
1960             ++pc;
1961             break;
1962
1963         case ResolveOperation::CheckForDynamicEntriesBeforeGlobalScope:
1964             return false;
1965
1966         case ResolveOperation::Fail:
1967             return false;
1968
1969         default:
1970             resolvingBase = false;
1971         }
1972     }
1973     if (skippedScopes)
1974         localBase = getScope(skipTop, skipCount);
1975
1976     if (base && !setBase)
1977         *base = localBase;
1978
1979     ASSERT(value);
1980     ResolveOperation* resolveValueOperation = pc;
1981     switch (resolveValueOperation->m_operation) {
1982     case ResolveOperation::GetAndReturnGlobalProperty: {
1983         ResolveGlobalStatus status = ResolveGlobalStatus::computeFor(m_inlineStackTop->m_profiledBlock, m_currentIndex, resolveValueOperation, m_codeBlock->identifier(identifier));
1984         if (status.isSimple()) {
1985             ASSERT(status.structure());
1986
1987             NodeIndex globalObjectNode = addStructureTransitionCheck(globalObject, status.structure());
1988
1989             if (status.specificValue()) {
1990                 ASSERT(status.specificValue().isCell());
1991                 *value = cellConstant(status.specificValue().asCell());
1992             } else
1993                 *value = handleGetByOffset(prediction, globalObjectNode, identifier, status.offset());
1994             return true;
1995         }
1996
1997         NodeIndex resolve = addToGraph(ResolveGlobal, OpInfo(m_graph.m_resolveGlobalData.size()), OpInfo(prediction));
1998         m_graph.m_resolveGlobalData.append(ResolveGlobalData());
1999         ResolveGlobalData& data = m_graph.m_resolveGlobalData.last();
2000         data.identifierNumber = identifier;
2001         data.resolveOperationsIndex = operations;
2002         data.putToBaseOperationIndex = putToBaseOperation;
2003         data.resolvePropertyIndex = resolveValueOperation - resolveOperations->data();
2004         *value = resolve;
2005         return true;
2006     }
2007     case ResolveOperation::GetAndReturnGlobalVar: {
2008         *value = addToGraph(GetGlobalVar,
2009                             OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)),
2010                             OpInfo(prediction));
2011         return true;
2012     }
2013     case ResolveOperation::GetAndReturnGlobalVarWatchable: {
2014         SpeculatedType prediction = getPrediction();
2015
2016         JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
2017
2018         Identifier ident = m_codeBlock->identifier(identifier);
2019         SymbolTableEntry entry = globalObject->symbolTable()->get(ident.impl());
2020         if (!entry.couldBeWatched()) {
2021             *value = addToGraph(GetGlobalVar, OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)), OpInfo(prediction));
2022             return true;
2023         }
2024
2025         // The watchpoint is still intact! This means that we will get notified if the
2026         // current value in the global variable changes. So, we can inline that value.
2027         // Moreover, currently we can assume that this value is a JSFunction*, which
2028         // implies that it's a cell. This simplifies things, since in general we'd have
2029         // to use a JSConstant for non-cells and a WeakJSConstant for cells. So instead
2030         // of having both cases we just assert that the value is a cell.
2031
2032         // NB. If it wasn't for CSE, GlobalVarWatchpoint would have no need for the
2033         // register pointer. But CSE tracks effects on global variables by comparing
2034         // register pointers. Because CSE executes multiple times while the backend
2035         // executes once, we use the following performance trade-off:
2036         // - The node refers directly to the register pointer to make CSE super cheap.
2037         // - To perform backend code generation, the node only contains the identifier
2038         //   number, from which it is possible to get (via a few average-time O(1)
2039         //   lookups) to the WatchpointSet.
2040
2041         addToGraph(GlobalVarWatchpoint, OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)), OpInfo(identifier));
2042
2043         JSValue specificValue = globalObject->registerAt(entry.getIndex()).get();
2044         ASSERT(specificValue.isCell());
2045         *value = cellConstant(specificValue.asCell());
2046         return true;
2047     }
2048     case ResolveOperation::GetAndReturnScopedVar: {
2049         NodeIndex getScopeRegisters = addToGraph(GetScopeRegisters, localBase);
2050         *value = addToGraph(GetScopedVar, OpInfo(resolveValueOperation->m_offset), OpInfo(prediction), getScopeRegisters);
2051         return true;
2052     }
2053     default:
2054         CRASH();
2055         return false;
2056     }
2057
2058 }
2059
2060 bool ByteCodeParser::parseBlock(unsigned limit)
2061 {
2062     bool shouldContinueParsing = true;
2063
2064     Interpreter* interpreter = m_globalData->interpreter;
2065     Instruction* instructionsBegin = m_inlineStackTop->m_codeBlock->instructions().begin();
2066     unsigned blockBegin = m_currentIndex;
2067     
2068     // If we are the first basic block, introduce markers for arguments. This allows
2069     // us to track if a use of an argument may use the actual argument passed, as
2070     // opposed to using a value we set explicitly.
2071     if (m_currentBlock == m_graph.m_blocks[0].get() && !m_inlineStackTop->m_inlineCallFrame) {
2072         m_graph.m_arguments.resize(m_numArguments);
2073         for (unsigned argument = 0; argument < m_numArguments; ++argument) {
2074             VariableAccessData* variable = newVariableAccessData(
2075                 argumentToOperand(argument), m_codeBlock->isCaptured(argumentToOperand(argument)));
2076             variable->mergeStructureCheckHoistingFailed(
2077                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
2078             NodeIndex setArgument = addToGraph(SetArgument, OpInfo(variable));
2079             m_graph.m_arguments[argument] = setArgument;
2080             m_currentBlock->variablesAtHead.setArgumentFirstTime(argument, setArgument);
2081             m_currentBlock->variablesAtTail.setArgumentFirstTime(argument, setArgument);
2082         }
2083     }
2084
2085     while (true) {
2086         m_currentProfilingIndex = m_currentIndex;
2087
2088         // Don't extend over jump destinations.
2089         if (m_currentIndex == limit) {
2090             // Ordinarily we want to plant a jump. But refuse to do this if the block is
2091             // empty. This is a special case for inlining, which might otherwise create
2092             // some empty blocks in some cases. When parseBlock() returns with an empty
2093             // block, it will get repurposed instead of creating a new one. Note that this
2094             // logic relies on every bytecode resulting in one or more nodes, which would
2095             // be true anyway except for op_loop_hint, which emits a Phantom to force this
2096             // to be true.
2097             if (!m_currentBlock->isEmpty())
2098                 addToGraph(Jump, OpInfo(m_currentIndex));
2099             else {
2100 #if DFG_ENABLE(DEBUG_VERBOSE)
2101                 dataLogF("Refusing to plant jump at limit %u because block %p is empty.\n", limit, m_currentBlock);
2102 #endif
2103             }
2104             return shouldContinueParsing;
2105         }
2106         
2107         // Switch on the current bytecode opcode.
2108         Instruction* currentInstruction = instructionsBegin + m_currentIndex;
2109         m_currentInstruction = currentInstruction; // Some methods want to use this, and we'd rather not thread it through calls.
2110         OpcodeID opcodeID = interpreter->getOpcodeID(currentInstruction->u.opcode);
2111         switch (opcodeID) {
2112
2113         // === Function entry opcodes ===
2114
2115         case op_enter:
2116             // Initialize all locals to undefined.
2117             for (int i = 0; i < m_inlineStackTop->m_codeBlock->m_numVars; ++i)
2118                 set(i, constantUndefined(), SetOnEntry);
2119             NEXT_OPCODE(op_enter);
2120
2121         case op_convert_this: {
2122             NodeIndex op1 = getThis();
2123             if (m_graph[op1].op() != ConvertThis) {
2124                 ValueProfile* profile =
2125                     m_inlineStackTop->m_profiledBlock->valueProfileForBytecodeOffset(m_currentProfilingIndex);
2126                 profile->computeUpdatedPrediction();
2127 #if DFG_ENABLE(DEBUG_VERBOSE)
2128                 dataLogF("[@%lu bc#%u]: profile %p: ", m_graph.size(), m_currentProfilingIndex, profile);
2129                 profile->dump(WTF::dataFile());
2130                 dataLogF("\n");
2131 #endif
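                // If profiling saw exactly one 'this' structure, a cheap
                // CheckStructure suffices; otherwise fall back to a generic
                // ConvertThis.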
2132                 if (profile->m_singletonValueIsTop
2133                     || !profile->m_singletonValue
2134                     || !profile->m_singletonValue.isCell()
2135                     || profile->m_singletonValue.asCell()->classInfo() != &Structure::s_info)
2136                     setThis(addToGraph(ConvertThis, op1));
2137                 else {
2138                     addToGraph(
2139                         CheckStructure,
2140                         OpInfo(m_graph.addStructureSet(jsCast<Structure*>(profile->m_singletonValue.asCell()))),
2141                         op1);
2142                 }
2143             }
2144             NEXT_OPCODE(op_convert_this);
2145         }
2146
2147         case op_create_this: {
2148             int calleeOperand = currentInstruction[2].u.operand;
2149             NodeIndex callee = get(calleeOperand);
2150             bool alreadyEmitted = false;
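            // If the callee is a known function with a stable inheritorID (the
            // structure its constructed objects start with), allocate directly
            // via NewObject guarded by a watchpoint instead of a generic
            // CreateThis.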
2151             if (m_graph[callee].op() == WeakJSConstant) {
2152                 JSCell* cell = m_graph[callee].weakConstant();
2153                 ASSERT(cell->inherits(&JSFunction::s_info));
2154                 
2155                 JSFunction* function = jsCast<JSFunction*>(cell);
2156                 Structure* inheritorID = function->tryGetKnownInheritorID();
2157                 if (inheritorID) {
2158                     addToGraph(InheritorIDWatchpoint, OpInfo(function));
2159                     set(currentInstruction[1].u.operand, addToGraph(NewObject, OpInfo(inheritorID)));
2160                     alreadyEmitted = true;
2161                 }
2162             }
2163             if (!alreadyEmitted)
2164                 set(currentInstruction[1].u.operand, addToGraph(CreateThis, callee));
2165             NEXT_OPCODE(op_create_this);
2166         }
2167             
2168         case op_new_object: {
2169             set(currentInstruction[1].u.operand, addToGraph(NewObject, OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->emptyObjectStructure())));
2170             NEXT_OPCODE(op_new_object);
2171         }
2172             
2173         case op_new_array: {
2174             int startOperand = currentInstruction[2].u.operand;
2175             int numOperands = currentInstruction[3].u.operand;
2176             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
2177             for (int operandIdx = startOperand; operandIdx < startOperand + numOperands; ++operandIdx)
2178                 addVarArgChild(get(operandIdx));
2179             set(currentInstruction[1].u.operand, addToGraph(Node::VarArg, NewArray, OpInfo(profile->selectIndexingType()), OpInfo(0)));
2180             NEXT_OPCODE(op_new_array);
2181         }
2182             
2183         case op_new_array_with_size: {
2184             int lengthOperand = currentInstruction[2].u.operand;
2185             ArrayAllocationProfile* profile = currentInstruction[3].u.arrayAllocationProfile;
2186             set(currentInstruction[1].u.operand, addToGraph(NewArrayWithSize, OpInfo(profile->selectIndexingType()), get(lengthOperand)));
2187             NEXT_OPCODE(op_new_array_with_size);
2188         }
2189             
2190         case op_new_array_buffer: {
2191             int startConstant = currentInstruction[2].u.operand;
2192             int numConstants = currentInstruction[3].u.operand;
2193             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
2194             NewArrayBufferData data;
2195             data.startConstant = m_inlineStackTop->m_constantBufferRemap[startConstant];
2196             data.numConstants = numConstants;
2197             data.indexingType = profile->selectIndexingType();
2198
2199             // If this statement has never executed, we'll have the wrong indexing type in the profile.
2200             for (int i = 0; i < numConstants; ++i) {
2201                 data.indexingType =
2202                     leastUpperBoundOfIndexingTypeAndValue(
2203                         data.indexingType,
2204                         m_codeBlock->constantBuffer(data.startConstant)[i]);
2205             }
2206             
2207             m_graph.m_newArrayBufferData.append(data);
2208             set(currentInstruction[1].u.operand, addToGraph(NewArrayBuffer, OpInfo(&m_graph.m_newArrayBufferData.last())));
2209             NEXT_OPCODE(op_new_array_buffer);
2210         }
2211             
2212         case op_new_regexp: {
2213             set(currentInstruction[1].u.operand, addToGraph(NewRegexp, OpInfo(currentInstruction[2].u.operand)));
2214             NEXT_OPCODE(op_new_regexp);
2215         }
2216             
2217         case op_get_callee: {
2218             ValueProfile* profile = currentInstruction[2].u.profile;
2219             profile->computeUpdatedPrediction();
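            // If profiling saw a single callee cell, pin it with CheckFunction
            // and use a weak constant; otherwise just read the Callee slot.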
2220             if (profile->m_singletonValueIsTop
2221                 || !profile->m_singletonValue
2222                 || !profile->m_singletonValue.isCell())
2223                 set(currentInstruction[1].u.operand, get(JSStack::Callee));
2224             else {
2225                 ASSERT(profile->m_singletonValue.asCell()->inherits(&JSFunction::s_info));
2226                 NodeIndex actualCallee = get(JSStack::Callee);
2227                 addToGraph(CheckFunction, OpInfo(profile->m_singletonValue.asCell()), actualCallee);
2228                 set(currentInstruction[1].u.operand, addToGraph(WeakJSConstant, OpInfo(profile->m_singletonValue.asCell())));
2229             }
2230             NEXT_OPCODE(op_get_callee);
2231         }
2232
2233         // === Bitwise operations ===
2234
2235         case op_bitand: {
2236             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2237             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2238             set(currentInstruction[1].u.operand, addToGraph(BitAnd, op1, op2));
2239             NEXT_OPCODE(op_bitand);
2240         }
2241
2242         case op_bitor: {
2243             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2244             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2245             set(currentInstruction[1].u.operand, addToGraph(BitOr, op1, op2));
2246             NEXT_OPCODE(op_bitor);
2247         }
2248
2249         case op_bitxor: {
2250             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2251             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2252             set(currentInstruction[1].u.operand, addToGraph(BitXor, op1, op2));
2253             NEXT_OPCODE(op_bitxor);
2254         }
2255
2256         case op_rshift: {
2257             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2258             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2259             NodeIndex result;
2260             // Optimize out shifts by zero.
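            // Only the low five bits of the shift count matter, so a count of
            // 0 (or any multiple of 32) leaves an int32 operand unchanged.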
2261             if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2262                 result = op1;
2263             else
2264                 result = addToGraph(BitRShift, op1, op2);
2265             set(currentInstruction[1].u.operand, result);
2266             NEXT_OPCODE(op_rshift);
2267         }
2268
2269         case op_lshift: {
2270             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2271             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2272             NodeIndex result;
2273             // Optimize out shifts by zero.
2274             if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2275                 result = op1;
2276             else
2277                 result = addToGraph(BitLShift, op1, op2);
2278             set(currentInstruction[1].u.operand, result);
2279             NEXT_OPCODE(op_lshift);
2280         }
2281
2282         case op_urshift: {
2283             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2284             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2285             NodeIndex result;
2286             // The result of a zero-extending right shift is treated as an unsigned value.
2287             // This means that if the top bit is set, the result is not in the int32 range,
2288             // and as such must be stored as a double. If the shift amount is a constant,
2289             // we may be able to optimize.
2290             if (isInt32Constant(op2)) {
2291                 // If we know we are shifting by a non-zero amount, then since the operation
2292                 // zero fills we know the top bit of the result must be zero, and as such the
2293                 // result must be within the int32 range. Conversely, if this is a shift by
2294                 // zero, then the result may be changed by the conversion to unsigned, but it
2295                 // is not necessary to perform the shift!
2296                 if (valueOfInt32Constant(op2) & 0x1f)
2297                     result = addToGraph(BitURShift, op1, op2);
2298                 else
2299                     result = makeSafe(addToGraph(UInt32ToNumber, op1));
2300             } else {
2301                 // Cannot optimize at this stage; shift & potentially rebox as a double.
2302                 result = addToGraph(BitURShift, op1, op2);
2303                 result = makeSafe(addToGraph(UInt32ToNumber, result));
2304             }
2305             set(currentInstruction[1].u.operand, result);
2306             NEXT_OPCODE(op_urshift);
2307         }
2308
2309         // === Increment/Decrement opcodes ===
2310
2311         case op_pre_inc: {
2312             unsigned srcDst = currentInstruction[1].u.operand;
2313             NodeIndex op = get(srcDst);
2314             set(srcDst, makeSafe(addToGraph(ArithAdd, op, one())));
2315             NEXT_OPCODE(op_pre_inc);
2316         }
2317
2318         case op_post_inc: {
2319             unsigned result = currentInstruction[1].u.operand;
2320             unsigned srcDst = currentInstruction[2].u.operand;
2321             ASSERT(result != srcDst); // Required for assumptions we make during OSR.
2322             NodeIndex op = get(srcDst);
2323             setPair(result, op, srcDst, makeSafe(addToGraph(ArithAdd, op, one())));
2324             NEXT_OPCODE(op_post_inc);
2325         }
2326
2327         case op_pre_dec: {
2328             unsigned srcDst = currentInstruction[1].u.operand;
2329             NodeIndex op = get(srcDst);
2330             set(srcDst, makeSafe(addToGraph(ArithSub, op, one())));
2331             NEXT_OPCODE(op_pre_dec);
2332         }
2333
2334         case op_post_dec: {
2335             unsigned result = currentInstruction[1].u.operand;
2336             unsigned srcDst = currentInstruction[2].u.operand;
2337             NodeIndex op = get(srcDst);
2338             setPair(result, op, srcDst, makeSafe(addToGraph(ArithSub, op, one())));
2339             NEXT_OPCODE(op_post_dec);
2340         }
2341
2342         // === Arithmetic operations ===
2343
2344         case op_add: {
2345             NodeIndex op1 = get(currentInstruction[2].u.operand);
2346             NodeIndex op2 = get(currentInstruction[3].u.operand);
2347             if (m_graph[op1].hasNumberResult() && m_graph[op2].hasNumberResult())
2348                 set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithAdd, op1, op2)));
2349             else
2350                 set(currentInstruction[1].u.operand, makeSafe(addToGraph(ValueAdd, op1, op2)));
2351             NEXT_OPCODE(op_add);
2352         }
2353
2354         case op_sub: {
2355             NodeIndex op1 = get(currentInstruction[2].u.operand);
2356             NodeIndex op2 = get(currentInstruction[3].u.operand);
2357             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithSub, op1, op2)));
2358             NEXT_OPCODE(op_sub);
2359         }
2360
2361         case op_negate: {
2362             NodeIndex op1 = get(currentInstruction[2].u.operand);
2363             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithNegate, op1)));
2364             NEXT_OPCODE(op_negate);
2365         }
2366
2367         case op_mul: {
2368             // Multiply requires that the inputs are not truncated, unfortunately.
2369             NodeIndex op1 = get(currentInstruction[2].u.operand);
2370             NodeIndex op2 = get(currentInstruction[3].u.operand);
2371             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMul, op1, op2)));
2372             NEXT_OPCODE(op_mul);
2373         }
2374
2375         case op_mod: {
2376             NodeIndex op1 = get(currentInstruction[2].u.operand);
2377             NodeIndex op2 = get(currentInstruction[3].u.operand);
2378             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMod, op1, op2)));
2379             NEXT_OPCODE(op_mod);
2380         }
2381
2382         case op_div: {
2383             NodeIndex op1 = get(currentInstruction[2].u.operand);
2384             NodeIndex op2 = get(currentInstruction[3].u.operand);
2385             set(currentInstruction[1].u.operand, makeDivSafe(addToGraph(ArithDiv, op1, op2)));
2386             NEXT_OPCODE(op_div);
2387         }
2388
2389         // === Misc operations ===
2390
2391 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2392         case op_debug:
2393             addToGraph(Breakpoint);
2394             NEXT_OPCODE(op_debug);
2395 #endif
2396         case op_mov: {
2397             NodeIndex op = get(currentInstruction[2].u.operand);
2398             set(currentInstruction[1].u.operand, op);
2399             NEXT_OPCODE(op_mov);
2400         }
2401
2402         case op_check_has_instance:
2403             addToGraph(CheckHasInstance, get(currentInstruction[3].u.operand));
2404             NEXT_OPCODE(op_check_has_instance);
2405
2406         case op_instanceof: {
2407             NodeIndex value = get(currentInstruction[2].u.operand);
2408             NodeIndex prototype = get(currentInstruction[3].u.operand);
2409             set(currentInstruction[1].u.operand, addToGraph(InstanceOf, value, prototype));
2410             NEXT_OPCODE(op_instanceof);
2411         }
2412             
2413         case op_is_undefined: {
2414             NodeIndex value = get(currentInstruction[2].u.operand);
2415             set(currentInstruction[1].u.operand, addToGraph(IsUndefined, value));
2416             NEXT_OPCODE(op_is_undefined);
2417         }
2418
2419         case op_is_boolean: {
2420             NodeIndex value = get(currentInstruction[2].u.operand);
2421             set(currentInstruction[1].u.operand, addToGraph(IsBoolean, value));
2422             NEXT_OPCODE(op_is_boolean);
2423         }
2424
2425         case op_is_number: {
2426             NodeIndex value = get(currentInstruction[2].u.operand);
2427             set(currentInstruction[1].u.operand, addToGraph(IsNumber, value));
2428             NEXT_OPCODE(op_is_number);
2429         }
2430
2431         case op_is_string: {
2432             NodeIndex value = get(currentInstruction[2].u.operand);
2433             set(currentInstruction[1].u.operand, addToGraph(IsString, value));
2434             NEXT_OPCODE(op_is_string);
2435         }
2436
2437         case op_is_object: {
2438             NodeIndex value = get(currentInstruction[2].u.operand);
2439             set(currentInstruction[1].u.operand, addToGraph(IsObject, value));
2440             NEXT_OPCODE(op_is_object);
2441         }
2442
2443         case op_is_function: {
2444             NodeIndex value = get(currentInstruction[2].u.operand);
2445             set(currentInstruction[1].u.operand, addToGraph(IsFunction, value));
2446             NEXT_OPCODE(op_is_function);
2447         }
2448
2449         case op_not: {
2450             NodeIndex value = get(currentInstruction[2].u.operand);
2451             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, value));
2452             NEXT_OPCODE(op_not);
2453         }
2454             
2455         case op_to_primitive: {
2456             NodeIndex value = get(currentInstruction[2].u.operand);
2457             set(currentInstruction[1].u.operand, addToGraph(ToPrimitive, value));
2458             NEXT_OPCODE(op_to_primitive);
2459         }
2460             
2461         case op_strcat: {
2462             int startOperand = currentInstruction[2].u.operand;
2463             int numOperands = currentInstruction[3].u.operand;
2464             for (int operandIdx = startOperand; operandIdx < startOperand + numOperands; ++operandIdx)
2465                 addVarArgChild(get(operandIdx));
2466             set(currentInstruction[1].u.operand, addToGraph(Node::VarArg, StrCat, OpInfo(0), OpInfo(0)));
2467             NEXT_OPCODE(op_strcat);
2468         }
2469
2470         case op_less: {
2471             NodeIndex op1 = get(currentInstruction[2].u.operand);
2472             NodeIndex op2 = get(currentInstruction[3].u.operand);
2473             set(currentInstruction[1].u.operand, addToGraph(CompareLess, op1, op2));
2474             NEXT_OPCODE(op_less);
2475         }
2476
2477         case op_lesseq: {
2478             NodeIndex op1 = get(currentInstruction[2].u.operand);
2479             NodeIndex op2 = get(currentInstruction[3].u.operand);
2480             set(currentInstruction[1].u.operand, addToGraph(CompareLessEq, op1, op2));
2481             NEXT_OPCODE(op_lesseq);
2482         }
2483
2484         case op_greater: {
2485             NodeIndex op1 = get(currentInstruction[2].u.operand);
2486             NodeIndex op2 = get(currentInstruction[3].u.operand);
2487             set(currentInstruction[1].u.operand, addToGraph(CompareGreater, op1, op2));
2488             NEXT_OPCODE(op_greater);
2489         }
2490
2491         case op_greatereq: {
2492             NodeIndex op1 = get(currentInstruction[2].u.operand);
2493             NodeIndex op2 = get(currentInstruction[3].u.operand);
2494             set(currentInstruction[1].u.operand, addToGraph(CompareGreaterEq, op1, op2));
2495             NEXT_OPCODE(op_greatereq);
2496         }
2497
2498         case op_eq: {
2499             NodeIndex op1 = get(currentInstruction[2].u.operand);
2500             NodeIndex op2 = get(currentInstruction[3].u.operand);
2501             set(currentInstruction[1].u.operand, addToGraph(CompareEq, op1, op2));
2502             NEXT_OPCODE(op_eq);
2503         }
2504
2505         case op_eq_null: {
2506             NodeIndex value = get(currentInstruction[2].u.operand);
2507             set(currentInstruction[1].u.operand, addToGraph(CompareEq, value, constantNull()));
2508             NEXT_OPCODE(op_eq_null);
2509         }
2510
2511         case op_stricteq: {
2512             NodeIndex op1 = get(currentInstruction[2].u.operand);
2513             NodeIndex op2 = get(currentInstruction[3].u.operand);
2514             set(currentInstruction[1].u.operand, addToGraph(CompareStrictEq, op1, op2));
2515             NEXT_OPCODE(op_stricteq);
2516         }
2517
2518         case op_neq: {
2519             NodeIndex op1 = get(currentInstruction[2].u.operand);
2520             NodeIndex op2 = get(currentInstruction[3].u.operand);
2521             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEq, op1, op2)));
2522             NEXT_OPCODE(op_neq);
2523         }
2524
2525         case op_neq_null: {
2526             NodeIndex value = get(currentInstruction[2].u.operand);
2527             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEq, value, constantNull())));
2528             NEXT_OPCODE(op_neq_null);
2529         }
2530
2531         case op_nstricteq: {
2532             NodeIndex op1 = get(currentInstruction[2].u.operand);
2533             NodeIndex op2 = get(currentInstruction[3].u.operand);
2534             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareStrictEq, op1, op2)));
2535             NEXT_OPCODE(op_nstricteq);
2536         }
2537
2538         // === Property access operations ===
2539
2540         case op_get_by_val: {
2541             SpeculatedType prediction = getPrediction();
2542             
2543             NodeIndex base = get(currentInstruction[2].u.operand);
2544             ArrayMode arrayMode = getArrayModeAndEmitChecks(currentInstruction[4].u.arrayProfile, Array::Read, base);
2545             NodeIndex property = get(currentInstruction[3].u.operand);
2546             NodeIndex getByVal = addToGraph(GetByVal, OpInfo(arrayMode.asWord()), OpInfo(prediction), base, property);
2547             set(currentInstruction[1].u.operand, getByVal);
2548
2549             NEXT_OPCODE(op_get_by_val);
2550         }
2551
2552         case op_put_by_val: {
2553             NodeIndex base = get(currentInstruction[1].u.operand);
2554
2555             ArrayMode arrayMode = getArrayModeAndEmitChecks(currentInstruction[4].u.arrayProfile, Array::Write, base);
2556             
2557             NodeIndex property = get(currentInstruction[2].u.operand);
2558             NodeIndex value = get(currentInstruction[3].u.operand);
2559             
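            // PutByVal is a var-arg node; its children are, in order: base, property,
            // value, and a reserved slot that later phases can fill with the property
            // storage.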
2560             addVarArgChild(base);
2561             addVarArgChild(property);
2562             addVarArgChild(value);
2563             addVarArgChild(NoNode); // Leave room for property storage.
2564             addToGraph(Node::VarArg, PutByVal, OpInfo(arrayMode.asWord()), OpInfo(0));
2565
2566             NEXT_OPCODE(op_put_by_val);
2567         }
2568             
2569         case op_get_by_id:
2570         case op_get_by_id_out_of_line:
2571         case op_get_array_length: {
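            // These opcodes share one lowering; NEXT_OPCODE(op_get_by_id) below assumes
            // they all have the same bytecode length.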
2572             SpeculatedType prediction = getPrediction();
2573             
2574             NodeIndex base = get(currentInstruction[2].u.operand);
2575             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
2576             
2577             Identifier identifier = m_codeBlock->identifier(identifierNumber);
2578             GetByIdStatus getByIdStatus = GetByIdStatus::computeFor(
2579                 m_inlineStackTop->m_profiledBlock, m_currentIndex, identifier);
2580             
2581             handleGetById(
2582                 currentInstruction[1].u.operand, prediction, base, identifierNumber, getByIdStatus);
2583
2584             NEXT_OPCODE(op_get_by_id);
2585         }
2586         case op_put_by_id:
2587         case op_put_by_id_out_of_line:
2588         case op_put_by_id_transition_direct:
2589         case op_put_by_id_transition_normal:
2590         case op_put_by_id_transition_direct_out_of_line:
2591         case op_put_by_id_transition_normal_out_of_line: {
2592             NodeIndex value = get(currentInstruction[3].u.operand);
2593             NodeIndex base = get(currentInstruction[1].u.operand);
2594             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
2595             bool direct = currentInstruction[8].u.operand;
2596
2597             PutByIdStatus putByIdStatus = PutByIdStatus::computeFor(
2598                 m_inlineStackTop->m_profiledBlock,
2599                 m_currentIndex,
2600                 m_codeBlock->identifier(identifierNumber));
2601             if (!putByIdStatus.isSet())
2602                 addToGraph(ForceOSRExit);
2603             
2604             bool hasExitSite = m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache);
2605             
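            // Lowering tiers, fastest first: a simple replace becomes CheckStructure +
            // PutByOffset; a simple transition additionally checks the prototype chain,
            // (re)allocates out-of-line storage if the capacity changes, and emits
            // PutStructure; anything else falls back to a generic PutById(Direct).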
2606             if (!hasExitSite && putByIdStatus.isSimpleReplace()) {
2607                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2608                 NodeIndex propertyStorage;
2609                 if (isInlineOffset(putByIdStatus.offset()))
2610                     propertyStorage = base;
2611                 else
2612                     propertyStorage = addToGraph(GetButterfly, base);
2613                 addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, base, value);
2614                 
2615                 StorageAccessData storageAccessData;
2616                 storageAccessData.offset = indexRelativeToBase(putByIdStatus.offset());
2617                 storageAccessData.identifierNumber = identifierNumber;
2618                 m_graph.m_storageAccessData.append(storageAccessData);
2619             } else if (!hasExitSite
2620                        && putByIdStatus.isSimpleTransition()
2621                        && structureChainIsStillValid(
2622                            direct,
2623                            putByIdStatus.oldStructure(),
2624                            putByIdStatus.structureChain())) {
2625
2626                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2627                 if (!direct) {
2628                     if (!putByIdStatus.oldStructure()->storedPrototype().isNull()) {
2629                         addStructureTransitionCheck(
2630                             putByIdStatus.oldStructure()->storedPrototype().asCell());
2631                     }
2632                     
2633                     for (WriteBarrier<Structure>* it = putByIdStatus.structureChain()->head(); *it; ++it) {
2634                         JSValue prototype = (*it)->storedPrototype();
2635                         if (prototype.isNull())
2636                             continue;
2637                         ASSERT(prototype.isCell());
2638                         addStructureTransitionCheck(prototype.asCell());
2639                     }
2640                 }
2641                 ASSERT(putByIdStatus.oldStructure()->transitionWatchpointSetHasBeenInvalidated());
2642                 
2643                 NodeIndex propertyStorage;
2644                 StructureTransitionData* transitionData =
2645                     m_graph.addStructureTransitionData(
2646                         StructureTransitionData(
2647                             putByIdStatus.oldStructure(),
2648                             putByIdStatus.newStructure()));
2649
2650                 if (putByIdStatus.oldStructure()->outOfLineCapacity()
2651                     != putByIdStatus.newStructure()->outOfLineCapacity()) {
2652                     
2653                     // If we're growing the property storage then it must be because we're
2654                     // storing into the out-of-line storage.
2655                     ASSERT(!isInlineOffset(putByIdStatus.offset()));
2656                     
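                    // Growing from zero out-of-line capacity means there is no existing
                    // butterfly to copy, so allocate fresh storage; otherwise reallocate,
                    // which needs the current butterfly as an input.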
2657                     if (!putByIdStatus.oldStructure()->outOfLineCapacity()) {
2658                         propertyStorage = addToGraph(
2659                             AllocatePropertyStorage, OpInfo(transitionData), base);
2660                     } else {
2661                         propertyStorage = addToGraph(
2662                             ReallocatePropertyStorage, OpInfo(transitionData),
2663                             base, addToGraph(GetButterfly, base));
2664                     }
2665                 } else {
2666                     if (isInlineOffset(putByIdStatus.offset()))
2667                         propertyStorage = base;
2668                     else
2669                         propertyStorage = addToGraph(GetButterfly, base);
2670                 }
2671                 
2672                 addToGraph(PutStructure, OpInfo(transitionData), base);
2673                 
2674                 addToGraph(
2675                     PutByOffset,
2676                     OpInfo(m_graph.m_storageAccessData.size()),
2677                     propertyStorage,
2678                     base,
2679                     value);
2680                 
2681                 StorageAccessData storageAccessData;
2682                 storageAccessData.offset = indexRelativeToBase(putByIdStatus.offset());
2683                 storageAccessData.identifierNumber = identifierNumber;
2684                 m_graph.m_storageAccessData.append(storageAccessData);
2685             } else {
2686                 if (direct)
2687                     addToGraph(PutByIdDirect, OpInfo(identifierNumber), base, value);
2688                 else
2689                     addToGraph(PutById, OpInfo(identifierNumber), base, value);
2690             }
2691
2692             NEXT_OPCODE(op_put_by_id);
2693         }
2694
2695         case op_init_global_const_nop: {
2696             NEXT_OPCODE(op_init_global_const_nop);
2697         }
2698
2699         case op_init_global_const: {
2700             NodeIndex value = get(currentInstruction[2].u.operand);
2701             addToGraph(
2702                 PutGlobalVar,
2703                 OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2704                 value);
2705             NEXT_OPCODE(op_init_global_const);
2706         }
2707
2708         case op_init_global_const_check: {
2709             NodeIndex value = get(currentInstruction[2].u.operand);
2710             CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
2711             JSGlobalObject* globalObject = codeBlock->globalObject();
2712             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[4].u.operand];
2713             Identifier identifier = m_codeBlock->identifier(identifierNumber);
2714             SymbolTableEntry entry = globalObject->symbolTable()->get(identifier.impl());
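            // If the entry cannot be watched, a plain PutGlobalVar suffices; otherwise
            // emit PutGlobalVarCheck so the store cooperates with the watchpoint
            // machinery (an assumption based on the node names).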
2715             if (!entry.couldBeWatched()) {
2716                 addToGraph(
2717                     PutGlobalVar,
2718                     OpInfo(globalObject->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2719                     value);
2720                 NEXT_OPCODE(op_init_global_const_check);
2721             }
2722             addToGraph(
2723                 PutGlobalVarCheck,
2724                 OpInfo(codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2725                 OpInfo(identifierNumber),
2726                 value);
2727             NEXT_OPCODE(op_init_global_const_check);
2728         }
2729
2731         // === Block terminators ===
2732
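        // Terminators record raw bytecode offsets (m_currentIndex + relativeOffset) in
        // their OpInfo; linkBlock() resolves these to block indices once all blocks exist.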
2733         case op_jmp: {
2734             unsigned relativeOffset = currentInstruction[1].u.operand;
2735             addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2736             LAST_OPCODE(op_jmp);
2737         }
2738
2739         case op_loop: {
2740             unsigned relativeOffset = currentInstruction[1].u.operand;
2741             addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2742             LAST_OPCODE(op_loop);
2743         }
2744
2745         case op_jtrue: {
2746             unsigned relativeOffset = currentInstruction[2].u.operand;
2747             NodeIndex condition = get(currentInstruction[1].u.operand);
2748             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jtrue)), condition);
2749             LAST_OPCODE(op_jtrue);
2750         }
2751
2752         case op_jfalse: {
2753             unsigned relativeOffset = currentInstruction[2].u.operand;
2754             NodeIndex condition = get(currentInstruction[1].u.operand);
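            // Same shape as op_jtrue with the Branch successors swapped: the branch
            // target is taken when the condition is false.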
2755             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jfalse)), OpInfo(m_currentIndex + relativeOffset), condition);
2756             LAST_OPCODE(op_jfalse);
2757         }
2758
2759         case op_loop_if_true: {
2760             unsigned relativeOffset = currentInstruction[2].u.operand;
2761             NodeIndex condition = get(currentInstruction[1].u.operand);
2762             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_true)), condition);
2763             LAST_OPCODE(op_loop_if_true);
2764         }
2765
2766         case op_loop_if_false: {
2767             unsigned relativeOffset = currentInstruction[2].u.operand;
2768             NodeIndex condition = get(currentInstruction[1].u.operand);
2769             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_false)), OpInfo(m_currentIndex + relativeOffset), condition);
2770             LAST_OPCODE(op_loop_if_false);
2771         }
2772
2773         case op_jeq_null: {
2774             unsigned relativeOffset = currentInstruction[2].u.operand;
2775             NodeIndex value = get(currentInstruction[1].u.operand);
2776             NodeIndex condition = addToGraph(CompareEq, value, constantNull());
2777             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jeq_null)), condition);
2778             LAST_OPCODE(op_jeq_null);
2779         }
2780
2781         case op_jneq_null: {
2782             unsigned relativeOffset = currentInstruction[2].u.operand;
2783             NodeIndex value = get(currentInstruction[1].u.operand);
2784             NodeIndex condition = addToGraph(CompareEq, value, constantNull());
2785             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_null)), OpInfo(m_currentIndex + relativeOffset), condition);
2786             LAST_OPCODE(op_jneq_null);
2787         }
2788
2789         case op_jless: {
2790             unsigned relativeOffset = currentInstruction[3].u.operand;
2791             NodeIndex op1 = get(currentInstruction[1].u.operand);
2792             NodeIndex op2 = get(currentInstruction[2].u.operand);
2793             NodeIndex condition = addToGraph(CompareLess, op1, op2);
2794             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jless)), condition);
2795             LAST_OPCODE(op_jless);
2796         }
2797
2798         case op_jlesseq: {
2799             unsigned relativeOffset = currentInstruction[3].u.operand;
2800             NodeIndex op1 = get(currentInstruction[1].u.operand);
2801             NodeIndex op2 = get(currentInstruction[2].u.operand);
2802             NodeIndex condition = addToGraph(CompareLessEq, op1, op2);
2803             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jlesseq)), condition);
2804             LAST_OPCODE(op_jlesseq);
2805         }
2806
2807         case op_jgreater: {
2808             unsigned relativeOffset = currentInstruction[3].u.operand;
2809             NodeIndex op1 = get(currentInstruction[1].u.operand);
2810             NodeIndex op2 = get(currentInstruction[2].u.operand);
2811             NodeIndex condition = addToGraph(CompareGreater, op1, op2);
2812             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreater)), condition);
2813             LAST_OPCODE(op_jgreater);
2814         }
2815
2816         case op_jgreatereq: {
2817             unsigned relativeOffset = currentInstruction[3].u.operand;
2818             NodeIndex op1 = get(currentInstruction[1].u.operand);
2819             NodeIndex op2 = get(currentInstruction[2].u.operand);
2820             NodeIndex condition = addToGraph(CompareGreaterEq, op1, op2);
2821             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreatereq)), condition);
2822             LAST_OPCODE(op_jgreatereq);
2823         }
2824
2825         case op_jnless: {
2826             unsigned relativeOffset = currentInstruction[3].u.operand;
2827             NodeIndex op1 = get(currentInstruction[1].u.operand);
2828             NodeIndex op2 = get(currentInstruction[2].u.operand);
2829             NodeIndex condition = addToGraph(CompareLess, op1, op2);
2830             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnless)), OpInfo(m_currentIndex + relativeOffset), condition);
2831             LAST_OPCODE(op_jnless);
2832         }
2833
2834         case op_jnlesseq: {
2835             unsigned relativeOffset = currentInstruction[3].u.operand;
2836             NodeIndex op1 = get(currentInstruction[1].u.operand);
2837             NodeIndex op2 = get(currentInstruction[2].u.operand);
2838             NodeIndex condition = addToGraph(CompareLessEq, op1, op2);
2839             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnlesseq)), OpInfo(m_currentIndex + relativeOffset), condition);
2840             LAST_OPCODE(op_jnlesseq);
2841         }
2842
2843         case op_jngreater: {
2844             unsigned relativeOffset = currentInstruction[3].u.operand;
2845             NodeIndex op1 = get(currentInstruction[1].u.operand);
2846             NodeIndex op2 = get(currentInstruction[2].u.operand);
2847             NodeIndex condition = addToGraph(CompareGreater, op1, op2);
2848             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreater)), OpInfo(m_currentIndex + relativeOffset), condition);
2849             LAST_OPCODE(op_jngreater);
2850         }
2851
2852         case op_jngreatereq: {
2853             unsigned relativeOffset = currentInstruction[3].u.operand;
2854             NodeIndex op1 = get(currentInstruction[1].u.operand);
2855             NodeIndex op2 = get(currentInstruction[2].u.operand);
2856             NodeIndex condition = addToGraph(CompareGreaterEq, op1, op2);
2857             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreatereq)), OpInfo(m_currentIndex + relativeOffset), condition);
2858             LAST_OPCODE(op_jngreatereq);
2859         }
2860
2861         case op_loop_if_less: {
2862             unsigned relativeOffset = currentInstruction[3].u.operand;
2863             NodeIndex op1 = get(currentInstruction[1].u.operand);
2864             NodeIndex op2 = get(currentInstruction[2].u.operand);
2865             NodeIndex condition = addToGraph(CompareLess, op1, op2);
2866             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_less)), condition);
2867             LAST_OPCODE(op_loop_if_less);
2868         }
2869
2870         case op_loop_if_lesseq: {
2871             unsigned relativeOffset = currentInstruction[3].u.operand;
2872             NodeIndex op1 = get(currentInstruction[1].u.operand);
2873             NodeIndex op2 = get(currentInstruction[2].u.operand);
2874             NodeIndex condition = addToGraph(CompareLessEq, op1, op2);
2875             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_lesseq)), condition);
2876             LAST_OPCODE(op_loop_if_lesseq);
2877         }
2878
2879         case op_loop_if_greater: {
2880             unsigned relativeOffset = currentInstruction[3].u.operand;
2881             NodeIndex op1 = get(currentInstruction[1].u.operand);
2882             NodeIndex op2 = get(currentInstruction[2].u.operand);
2883             NodeIndex condition = addToGraph(CompareGreater, op1, op2);
2884             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_greater)), condition);
2885             LAST_OPCODE(op_loop_if_greater);
2886         }
2887
2888         case op_loop_if_greatereq: {
2889             unsigned relativeOffset = currentInstruction[3].u.operand;
2890             NodeIndex op1 = get(currentInstruction[1].u.operand);
2891             NodeIndex op2 = get(currentInstruction[2].u.operand);
2892             NodeIndex condition = addToGraph(CompareGreaterEq, op1, op2);
2893             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_greatereq)), condition);
2894             LAST_OPCODE(op_loop_if_greatereq);
2895         }
2896
2897         case op_ret:
2898             flushArgumentsAndCapturedVariables();
2899             if (m_inlineStackTop->m_inlineCallFrame) {
2900                 if (m_inlineStackTop->m_returnValue != InvalidVirtualRegister)
2901                     setDirect(m_inlineStackTop->m_returnValue, get(currentInstruction[1].u.operand));
2902                 m_inlineStackTop->m_didReturn = true;
2903                 if (m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
2904                     // If we're returning from the first block, then we're done parsing.
2905                     ASSERT(m_inlineStackTop->m_callsiteBlockHead == m_graph.m_blocks.size() - 1);
2906                     shouldContinueParsing = false;
2907                     LAST_OPCODE(op_ret);
2908                 } else {
2909                     // If inlining created blocks, and we're doing a return, then we need some
2910                     // special linking.
2911                     ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_blockIndex == m_graph.m_blocks.size() - 1);
2912                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsNormalLinking = false;
2913                 }
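                // A return that is not the last instruction of the inlinee (or that
                // follows an earlier early return) jumps out: emit a Jump with no target
                // yet (NoBlock) and mark the block for early-return linking, which later
                // points it at the continuation after the call site.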
2914                 if (m_currentIndex + OPCODE_LENGTH(op_ret) != m_inlineStackTop->m_codeBlock->instructions().size() || m_inlineStackTop->m_didEarlyReturn) {
2915                     ASSERT(m_currentIndex + OPCODE_LENGTH(op_ret) <= m_inlineStackTop->m_codeBlock->instructions().size());
2916                     addToGraph(Jump, OpInfo(NoBlock));
2917                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsEarlyReturnLinking = true;
2918                     m_inlineStackTop->m_didEarlyReturn = true;
2919                 }
2920                 LAST_OPCODE(op_ret);
2921             }
2922             addToGraph(Return, get(currentInstruction[1].u.operand));
2923             LAST_OPCODE(op_ret);
2924             
2925         case op_end:
2926             flushArgumentsAndCapturedVariables();
2927             ASSERT(!m_inlineStackTop->m_inlineCallFrame);
2928             addToGraph(Return, get(currentInstruction[1].u.operand));
2929             LAST_OPCODE(op_end);
2930
2931         case op_throw:
2932             flushArgumentsAndCapturedVariables();
2933             addToGraph(Throw, get(currentInstruction[1].u.operand));
2934             LAST_OPCODE(op_throw);
2935             
2936         case op_throw_static_error:
2937             flushArgumentsAndCapturedVariables();
2938             addToGraph(ThrowReferenceError);
2939             LAST_OPCODE(op_throw_static_error);
2940             
2941         case op_call:
2942             handleCall(interpreter, currentInstruction, Call, CodeForCall);
2943             NEXT_OPCODE(op_call);
2944             
2945         case op_construct:
2946             handleCall(interpreter, currentInstruction, Construct, CodeForConstruct);
2947             NEXT_OPCODE(op_construct);
2948             
2949         case op_call_varargs: {
2950             ASSERT(m_inlineStackTop->m_inlineCallFrame);
2951             ASSERT(currentInstruction[3].u.operand == m_inlineStackTop->m_codeBlock->argumentsRegister());
2952             ASSERT(!m_inlineStackTop->m_codeBlock->symbolTable()->slowArguments());
2953             // It would be cool to funnel this into handleCall() so that it can handle
2954             // inlining. But currently that won't be profitable anyway, since none of the
2955             // uses of call_varargs will be inlineable. So we set this up manually and
2956             // without inline/intrinsic detection.
2957             
2958             Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call_varargs);
2959             
2960             SpeculatedType prediction = SpecNone;
2961             if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
2962                 m_currentProfilingIndex = m_currentIndex + OPCODE_LENGTH(op_call_varargs);
2963                 prediction = getPrediction();
2964             }
2965             
2966             addToGraph(CheckArgumentsNotCreated);
2967             
2968             unsigned argCount = m_inlineStackTop->m_inlineCallFrame->arguments.size();
2969             if (JSStack::CallFrameHeaderSize + argCount > m_parameterSlots)
2970                 m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
2971             
2972             addVarArgChild(get(currentInstruction[1].u.operand)); // callee
2973             addVarArgChild(get(currentInstruction[2].u.operand)); // this
2974             for (unsigned argument = 1; argument < argCount; ++argument)
2975                 addVarArgChild(get(argumentToOperand(argument)));
2976             
2977             NodeIndex call = addToGraph(Node::VarArg, Call, OpInfo(0), OpInfo(prediction));
2978             if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result)
2979                 set(putInstruction[1].u.operand, call);
2980             
2981             NEXT_OPCODE(op_call_varargs);
2982         }
2983             
2984         case op_call_put_result:
2985             NEXT_OPCODE(op_call_put_result);
2986             
2987         case op_jneq_ptr:
2988             // Statically speculate for now. It makes sense to let speculate-only jneq_ptr
2989             // support simmer for a while before making it more general, since it's
2990             // already gnarly enough as it is.
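            // Accordingly, no real branch is emitted: CheckFunction OSR-exits if the
            // pointer ever differs, and execution unconditionally falls through.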
2991             ASSERT(pointerIsFunction(currentInstruction[2].u.specialPointer));
2992             addToGraph(
2993                 CheckFunction,
2994                 OpInfo(actualPointerFor(m_inlineStackTop->m_codeBlock, currentInstruction[2].u.specialPointer)),
2995                 get(currentInstruction[1].u.operand));
2996             addToGraph(Jump, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_ptr)));
2997             LAST_OPCODE(op_jneq_ptr);
2998
2999         case op_resolve:
3000         case op_resolve_global_property:
3001         case op_resolve_global_var:
3002         case op_resolve_scoped_var:
3003         case op_resolve_scoped_var_on_top_scope:
3004         case op_resolve_scoped_var_with_top_scope_check: {
3005             SpeculatedType prediction = getPrediction();
3006             
3007             unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3008             unsigned operations = m_inlineStackTop->m_resolveOperationRemap[currentInstruction[3].u.operand];
3009             NodeIndex value = 0;
3010             if (parseResolveOperations(prediction, identifier, operations, 0, 0, &value)) {
3011                 set(currentInstruction[1].u.operand, value);
3012                 NEXT_OPCODE(op_resolve);
3013             }
3014
3015             NodeIndex resolve = addToGraph(Resolve, OpInfo(m_graph.m_resolveOperationsData.size()), OpInfo(prediction));
3016             m_graph.m_resolveOperationsData.append(ResolveOperationData());
3017             ResolveOperationData& data = m_graph.m_resolveOperationsData.last();
3018             data.identifierNumber = identifier;
3019             data.resolveOperationsIndex = operations;
3020
3021             set(currentInstruction[1].u.operand, resolve);
3022
3023             NEXT_OPCODE(op_resolve);
3024         }
3025
3026         case op_put_to_base_variable:
3027         case op_put_to_base: {
3028             unsigned base = currentInstruction[1].u.operand;
3029             unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3030             unsigned value = currentInstruction[3].u.operand;
3031             unsigned operation = m_inlineStackTop->m_putToBaseOperationRemap[currentInstruction[4].u.operand];
3032             PutToBaseOperation* putToBase = m_codeBlock->putToBaseOperation(operation);
3033
3034             if (putToBase->m_isDynamic) {
3035                 addToGraph(Phantom, get(base));
3036                 addToGraph(PutById, OpInfo(identifier), get(base), get(value));
3037                 NEXT_OPCODE(op_put_to_base);
3038             }
3039
3040             switch (putToBase->m_kind) {
3041             case PutToBaseOperation::Uninitialised:
3042                 addToGraph(Phantom, get(base));
3043                 addToGraph(ForceOSRExit);
3044                 break;
3045
3046             case PutToBaseOperation::GlobalVariablePutChecked: {
3047                 CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
3048                 JSGlobalObject* globalObject = codeBlock->globalObject();
3049                 SymbolTableEntry entry = globalObject->symbolTable()->get(m_codeBlock->identifier(identifier).impl());
3050                 if (entry.couldBeWatched()) {
3051                     addToGraph(PutGlobalVarCheck,
3052                                OpInfo(codeBlock->globalObject()->assertRegisterIsInThisObject(putToBase->m_registerAddress)),
3053                                OpInfo(identifier),
3054                                get(value));
3055                     break;
3056                 }
3057             }
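            // Intentional fall-through: if the entry could not be watched, the plain
            // GlobalVariablePut path below handles the store.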
3058             case PutToBaseOperation::GlobalVariablePut:
3059                 addToGraph(PutGlobalVar,
3060                            OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(putToBase->m_registerAddress)),
3061                            get(value));
3062                 break;
3063             case PutToBaseOperation::VariablePut: {
3064                 NodeIndex scope = get(base);
3065                 NodeIndex scopeRegisters = addToGraph(GetScopeRegisters, scope);
3066                 addToGraph(PutScopedVar, OpInfo(putToBase->m_offset), scope, scopeRegisters, get(value));
3067                 break;
3068             }
3069             case PutToBaseOperation::GlobalPropertyPut: {
3070                 if (!putToBase->m_structure) {
3071                     addToGraph(Phantom, get(base));
3072                     addToGraph(ForceOSRExit);
3073                     NEXT_OPCODE(op_put_to_base);
3074                 }
3075                 NodeIndex baseNode = get(base);
3076                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putToBase->m_structure.get())), baseNode);
3077                 NodeIndex propertyStorage;
3078                 if (isInlineOffset(putToBase->m_offset))
3079                     propertyStorage = baseNode;
3080                 else
3081                     propertyStorage = addToGraph(GetButterfly, baseNode);
3082                 addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, baseNode, get(value));
3083
3084                 StorageAccessData storageAccessData;
3085                 storageAccessData.offset = indexRelativeToBase(putToBase->m_offset);
3086                 storageAccessData.identifierNumber = identifier;
3087                 m_graph.m_storageAccessData.append(storageAccessData);
3088                 break;
3089             }
3090             case PutToBaseOperation::Readonly:
3091             case PutToBaseOperation::Generic:
3092                 addToGraph(Phantom, get(base));
3093                 addToGraph(PutById, OpInfo(identifier), get(base), get(value));
3094             }
3095             NEXT_OPCODE(op_put_to_base);
3096         }
3097
3098         case op_resolve_base_to_global:
3099         case op_resolve_base_to_global_dynamic:
3100         case op_resolve_base_to_scope:
3101         case op_resolve_base_to_scope_with_top_scope_check:
3102         case op_resolve_base: {
3103             SpeculatedType prediction = getPrediction();
3104             
3105             unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3106             unsigned operations = m_inlineStackTop->m_resolveOperationRemap[currentInstruction[4].u.operand];
3107             unsigned putToBaseOperation = m_inlineStackTop->m_putToBaseOperationRemap[currentInstruction[5].u.operand];
3108
3109             NodeIndex base = 0;
3110             if (parseResolveOperations(prediction, identifier, operations, 0, &base, 0)) {
3111                 set(currentInstruction[1].u.operand, base);
3112                 NEXT_OPCODE(op_resolve_base);
3113             }
3114
3115             NodeIndex resolve = addToGraph(currentInstruction[3].u.operand ? ResolveBaseStrictPut : ResolveBase, OpInfo(m_graph.m_resolveOperationsData.size()), OpInfo(prediction));
3116             m_graph.m_resolveOperationsData.append(ResolveOperationData());
3117             ResolveOperationData& data = m_graph.m_resolveOperationsData.last();
3118             data.identifierNumber = identifier;
3119             data.resolveOperationsIndex = operations;
3120             data.putToBaseOperationIndex = putToBaseOperation;
3121         
3122             set(currentInstruction[1].u.operand, resolve);
3123
3124             NEXT_OPCODE(op_resolve_base);
3125         }
3126         case op_resolve_with_base: {
3127             SpeculatedType prediction = getPrediction();
3128             unsigned baseDst = currentInstruction[1].u.operand;
3129             unsigned valueDst = currentInstruction[2].u.operand;
3130             unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
3131             unsigned operations = m_inlineStackTop->m_resolveOperationRemap[currentInstruction[4].u.operand];
3132             unsigned putToBaseOperation = m_inlineStackTop->m_putToBaseOperationRemap[currentInstruction[5].u.operand];
3133
3134             NodeIndex base = 0;
3135             NodeIndex value = 0;
3136             if (parseResolveOperations(prediction, identifier, operations, putToBaseOperation, &base, &value))
3137                 setPair(baseDst, base, valueDst, value);
3138             else {
3139                 addToGraph(ForceOSRExit);
3140                 setPair(baseDst, addToGraph(GarbageValue), valueDst, addToGraph(GarbageValue));
3141             }
3142
3143             NEXT_OPCODE(op_resolve_with_base);
3144         }
3145         case op_resolve_with_this: {
3146             SpeculatedType prediction = getPrediction();
3147             unsigned baseDst = currentInstruction[1].u.operand;
3148             unsigned valueDst = currentInstruction[2].u.operand;
3149             unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
3150             unsigned operations = m_inlineStackTop->m_resolveOperationRemap[currentInstruction[4].u.operand];
3151
3152             NodeIndex base = 0;
3153             NodeIndex value = 0;
3154             if (parseResolveOperations(prediction, identifier, operations, 0, &base, &value))
3155                 setPair(baseDst, base, valueDst, value);
3156             else {
3157                 addToGraph(ForceOSRExit);
3158                 setPair(baseDst, addToGraph(GarbageValue), valueDst, addToGraph(GarbageValue));
3159             }
3160
3161             NEXT_OPCODE(op_resolve_with_this);
3162         }
3163         case op_loop_hint: {
3164             // Baseline->DFG OSR jumps between loop hints. The DFG assumes that Baseline->DFG
3165             // OSR can only happen at basic block boundaries. Assert that these two statements
3166             // are compatible.
3167             ASSERT_UNUSED(blockBegin, m_currentIndex == blockBegin);
3168             
3169             // We never do OSR into an inlined code block. That could not happen, since OSR
3170             // looks up the code block that is the replacement for the baseline JIT code
3171             // block. Hence, machine code block = true code block = not inline code block.
3172             if (!m_inlineStackTop->m_caller)
3173                 m_currentBlock->isOSRTarget = true;
3174
3175             // Emit a phantom node to ensure that there is a placeholder node for this bytecode
3176             // op.
3177             addToGraph(Phantom);
3178             
3179             NEXT_OPCODE(op_loop_hint);
3180         }
3181             
3182         case op_init_lazy_reg: {
3183             set(currentInstruction[1].u.operand, getJSConstantForValue(JSValue()));
3184             NEXT_OPCODE(op_init_lazy_reg);
3185         }
3186             
3187         case op_create_activation: {
3188             set(currentInstruction[1].u.operand, addToGraph(CreateActivation, get(currentInstruction[1].u.operand)));
3189             NEXT_OPCODE(op_create_activation);
3190         }
3191             
3192         case op_create_arguments: {
3193             m_graph.m_hasArguments = true;
3194             NodeIndex createArguments = addToGraph(CreateArguments, get(currentInstruction[1].u.operand));
3195             set(currentInstruction[1].u.operand, createArguments);
3196             set(unmodifiedArgumentsRegister(currentInstruction[1].u.operand), createArguments);
3197             NEXT_OPCODE(op_create_arguments);
3198         }
3199             
3200         case op_tear_off_activation: {
3201             addToGraph(TearOffActivation, get(currentInstruction[1].u.operand));
3202             NEXT_OPCODE(op_tear_off_activation);
3203         }
3204
3205         case op_tear_off_arguments: {
3206             m_graph.m_hasArguments = true;
3207             addToGraph(TearOffArguments, get(unmodifiedArgumentsRegister(currentInstruction[1].u.operand)), get(currentInstruction[2].u.operand));
3208             NEXT_OPCODE(op_tear_off_arguments);
3209         }
3210             
3211         case op_get_arguments_length: {
3212             m_graph.m_hasArguments = true;
3213             set(currentInstruction[1].u.operand, addToGraph(GetMyArgumentsLengthSafe));
3214             NEXT_OPCODE(op_get_arguments_length);
3215         }
3216             
3217         case op_get_argument_by_val: {
3218             m_graph.m_hasArguments = true;
3219             set(currentInstruction[1].u.operand,
3220                 addToGraph(
3221                     GetMyArgumentByValSafe, OpInfo(0), OpInfo(getPrediction()),
3222                     get(currentInstruction[3].u.operand)));
3223             NEXT_OPCODE(op_get_argument_by_val);
3224         }
3225             
3226         case op_new_func: {
3227             if (!currentInstruction[3].u.operand) {
3228                 set(currentInstruction[1].u.operand,
3229                     addToGraph(NewFunctionNoCheck, OpInfo(currentInstruction[2].u.operand)));
3230             } else {
3231                 set(currentInstruction[1].u.operand,
3232                     addToGraph(
3233                         NewFunction,
3234                         OpInfo(currentInstruction[2].u.operand),
3235                         get(currentInstruction[1].u.operand)));
3236             }
3237             NEXT_OPCODE(op_new_func);
3238         }
3239             
3240         case op_new_func_exp: {
3241             set(currentInstruction[1].u.operand,
3242                 addToGraph(NewFunctionExpression, OpInfo(currentInstruction[2].u.operand)));
3243             NEXT_OPCODE(op_new_func_exp);
3244         }
3245
3246         default:
3247             // Parse failed! This should not happen because the capabilities checker
3248             // should have caught it.
3249             ASSERT_NOT_REACHED();
3250             return false;
3251         }
3252     }
3253 }
3254
3255 template<ByteCodeParser::PhiStackType stackType>
3256 void ByteCodeParser::processPhiStack()
3257 {
3258     Vector<PhiStackEntry, 16>& phiStack = (stackType == ArgumentPhiStack) ? m_argumentPhiStack : m_localPhiStack;
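    // Worklist algorithm: for each pending (block, phi, variable) entry, find the
    // variable's value at the tail of every predecessor, materializing (and queueing)
    // a new Phi there if none exists. Each predecessor value is unified with the
    // phi's VariableAccessData and appended as a child; since a node has only three
    // child slots, the phi is split into a chain when it needs more.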
3259     
3260     while (!phiStack.isEmpty()) {
3261         PhiStackEntry entry = phiStack.last();
3262         phiStack.removeLast();
3263         
3264         if (!entry.m_block->isReachable)
3265             continue;
3266         
3270         PredecessorList& predecessors = entry.m_block->m_predecessors;
3271         unsigned varNo = entry.m_varNo;
3272         VariableAccessData* dataForPhi = m_graph[entry.m_phi].variableAccessData();
3273
3274 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3275         dataLogF("   Handling phi entry for var %u, phi @%u.\n", entry.m_varNo, entry.m_phi);
3276 #endif
3277         
3278         for (size_t i = 0; i < predecessors.size(); ++i) {
3279 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3280             dataLogF("     Dealing with predecessor block %u.\n", predecessors[i]);
3281 #endif
3282             
3283             BasicBlock* predecessorBlock = m_graph.m_blocks[predecessors[i]].get();
3284
3285             NodeIndex& var = (stackType == ArgumentPhiStack) ? predecessorBlock->variablesAtTail.argument(varNo) : predecessorBlock->variablesAtTail.local(varNo);
3286             
3287             NodeIndex valueInPredecessor = var;
3288             if (valueInPredecessor == NoNode) {
3289 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3290                 dataLogF("      Did not find node, adding phi.\n");
3291 #endif
3292
3293                 valueInPredecessor = insertPhiNode(OpInfo(newVariableAccessData(stackType == ArgumentPhiStack ? argumentToOperand(varNo) : static_cast<int>(varNo), false)), predecessorBlock);
3294                 var = valueInPredecessor;
3295                 if (stackType == ArgumentPhiStack)
3296                     predecessorBlock->variablesAtHead.setArgumentFirstTime(varNo, valueInPredecessor);
3297                 else
3298                     predecessorBlock->variablesAtHead.setLocalFirstTime(varNo, valueInPredecessor);
3299                 phiStack.append(PhiStackEntry(predecessorBlock, valueInPredecessor, varNo));
3300             } else if (m_graph[valueInPredecessor].op() == GetLocal) {
3301 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3302                 dataLogF("      Found GetLocal @%u.\n", valueInPredecessor);
3303 #endif
3304
3305                 // We want to ensure that the VariableAccessDatas are identical between the
3306                 // GetLocal and its block-local Phi. Strictly speaking we only need the two
3307                 // to be unified. But for efficiency, we want the code that creates GetLocals
3308                 // and Phis to try to reuse VariableAccessDatas as much as possible.
3309                 ASSERT(m_graph[valueInPredecessor].variableAccessData() == m_graph[m_graph[valueInPredecessor].child1().index()].variableAccessData());
3310                 
3311                 valueInPredecessor = m_graph[valueInPredecessor].child1().index();
3312             } else {
3313 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3314                 dataLogF("      Found @%u.\n", valueInPredecessor);
3315 #endif
3316             }
3317             ASSERT(m_graph[valueInPredecessor].op() == SetLocal
3318                    || m_graph[valueInPredecessor].op() == Phi
3319                    || m_graph[valueInPredecessor].op() == Flush
3320                    || (m_graph[valueInPredecessor].op() == SetArgument
3321                        && stackType == ArgumentPhiStack));
3322             
3323             VariableAccessData* dataForPredecessor = m_graph[valueInPredecessor].variableAccessData();
3324             
3325             dataForPredecessor->unify(dataForPhi);
3326
3327             Node* phiNode = &m_graph[entry.m_phi];
3328 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3329             dataLogF("      Ref count of @%u = %u.\n", entry.m_phi, phiNode->refCount());
3330 #endif
3331             if (phiNode->refCount()) {
3332 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3333                 dataLogF("      Reffing @%u.\n", valueInPredecessor);
3334 #endif
3335                 m_graph.ref(valueInPredecessor);
3336             }
3337
3338             if (!phiNode->child1()) {
3339 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3340                 dataLogF("      Setting @%u->child1 = @%u.\n", entry.m_phi, valueInPredecessor);
3341 #endif
3342                 phiNode->children.setChild1(Edge(valueInPredecessor));
3343 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3344                 dataLogF("      Children of @%u: ", entry.m_phi);
3345                 phiNode->dumpChildren(WTF::dataFile());
3346                 dataLogF(".\n");
3347 #endif
3348                 continue;
3349             }
3350             if (!phiNode->child2()) {
3351 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3352                 dataLogF("      Setting @%u->child2 = @%u.\n", entry.m_phi, valueInPredecessor);
3353 #endif
3354                 phiNode->children.setChild2(Edge(valueInPredecessor));
3355 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3356                 dataLogF("      Children of @%u: ", entry.m_phi);
3357                 phiNode->dumpChildren(WTF::dataFile());
3358                 dataLogF(".\n");
3359 #endif
3360                 continue;
3361             }
3362             if (!phiNode->child3()) {
3363 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3364                 dataLogF("      Setting @%u->child3 = @%u.\n", entry.m_phi, valueInPredecessor);
3365 #endif
3366                 phiNode->children.setChild3(Edge(valueInPredecessor));
3367 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3368                 dataLogF("      Children of @%u: ", entry.m_phi);
3369                 phiNode->dumpChildren(WTF::dataFile());
3370                 dataLogF(".\n");
3371 #endif
3372                 continue;
3373             }
3374             
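            // All three child slots are full, so split the phi: a fresh Phi inherits the
            // current children, and this phi's children become (newPhi, valueInPredecessor).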
3375             NodeIndex newPhi = insertPhiNode(OpInfo(dataForPhi), entry.m_block);
3376             
3377 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3378             dataLogF("      Splitting @%u, created @%u.\n", entry.m_phi, newPhi);
3379 #endif
3380
3381             phiNode = &m_graph[entry.m_phi]; // Reload the pointer: insertPhiNode() appends to the graph's node vector, which may have resized and moved it.
3382             Node& newPhiNode = m_graph[newPhi];
3383             if (phiNode->refCount())
3384                 m_graph.ref(newPhi);
3385
3386             newPhiNode.children = phiNode->children;
3387
3388 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3389             dataLogF("      Children of @%u: ", newPhi);
3390             newPhiNode.dumpChildren(WTF::dataFile());
3391             dataLogF(".\n");
3392 #endif
3393
3394             phiNode->children.initialize(newPhi, valueInPredecessor, NoNode);
3395
3396 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
3397             dataLogF("      Children of @%u: ", entry.m_phi);
3398             phiNode->dumpChildren(WTF::dataFile());
3399             dataLogF(".\n");
3400 #endif
3401         }
3402     }
3403 }
3404
3405 void ByteCodeParser::fixVariableAccessPredictions()
3406 {
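    // VariableAccessData is a union-find structure; fold each record's prediction and
    // flags into its representative so later phases only need to consult find().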
3407     for (unsigned i = 0; i < m_graph.m_variableAccessData.size(); ++i) {
3408         VariableAccessData* data = &m_graph.m_variableAccessData[i];
3409         data->find()->predict(data->nonUnifiedPrediction());
3410         data->find()->mergeIsCaptured(data->isCaptured());
3411         data->find()->mergeStructureCheckHoistingFailed(data->structureCheckHoistingFailed());
3412     }
3413 }
3414
3415 void ByteCodeParser::linkBlock(BasicBlock* block, Vector<BlockIndex>& possibleTargets)
3416 {
3417     ASSERT(!block->isLinked);
3418     ASSERT(!block->isEmpty());
3419     Node& node = m_graph[block->last()];
3420     ASSERT(node.isTerminal());
3421     
3422     switch (node.op()) {
3423     case Jump:
3424         node.setTakenBlockIndex(m_graph.blockIndexForBytecodeOffset(possibleTargets, node.takenBytecodeOffsetDuringParsing()));
3425 #if DFG_ENABLE(DEBUG_VERBOSE)
3426         dataLogF("Linked basic block %p to %p, #%u.\n", block, m_graph.m_blocks[node.takenBlockIndex()].get(), node.takenBlockIndex());
3427 #endif
3428         break;
3429         
3430     case Branch:
3431         node.setTakenBlockIndex(m_graph.blockIndexForBytecodeOffset(possibleTargets, node.takenBytecodeOffsetDuringParsing()));
3432         node.setNotTakenBlockIndex(m_graph.blockIndexForBytecodeOffset(possibleTargets, node.notTakenBytecodeOffsetDuringParsing()));
3433 #if DFG_ENABLE(DEBUG_VERBOSE)
3434         dataLogF("Linked basic block %p to %p, #%u and %p, #%u.\n", block, m_graph.m_blocks[node.takenBlockIndex()].get(), node.takenBlockIndex(), m_graph.m_blocks[node.notTakenBlockIndex()].get(), node.notTakenBlockIndex());
3435 #endif
3436         break;
3437         
3438     default:
3439 #if DFG_ENABLE(DEBUG_VERBOSE)
3440         dataLogF("Marking basic block %p as linked.\n", block);
3441 #endif
3442         break;
3443     }
3444     
3445 #if !ASSERT_DISABLED
3446     block->isLinked = true;
3447 #endif
3448 }
3449
3450 void ByteCodeParser::linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BlockIndex>& possibleTargets)
3451 {
3452     for (size_t i = 0; i < unlinkedBlocks.size(); ++i) {
3453         if (unlinkedBlocks[i].m_needsNormalLinking) {
3454             linkBlock(m_graph.m_blocks[unlinkedBlocks[i].m_blockIndex].get(), possibleTargets);
3455             unlinkedBlocks[i].m_needsNormalLinking = false;
3456         }
3457     }
3458 }
3459
3460 void ByteCodeParser::buildOperandMapsIfNecessary()
3461 {
3462     if (m_haveBuiltOperandMaps)
3463         return;
3464     
3465     for (size_t i = 0; i < m_codeBlock->numberOfIdentifiers(); ++i)
3466         m_identifierMap.add(m_codeBlock->identifier(i).impl(), i);
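    // The empty JSValue cannot serve as a hash map key (presumably it is the map's
    // empty value), so its constant register index is remembered separately.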
3467     for (size_t i = 0; i < m_codeBlock->numberOfConstantRegisters(); ++i) {
3468         JSValue value = m_codeBlock->getConstant(i + FirstConstantRegisterIndex);
3469         if (!value)
3470             m_emptyJSValueIndex = i + FirstConstantRegisterIndex;
3471         else
3472             m_jsValueMap.add(JSValue::encode(value), i + FirstConstantRegisterIndex);
3473     }
3474     
3475     m_haveBuiltOperandMaps = true;
3476 }
3477
3478 ByteCodeParser::InlineStackEntry::InlineStackEntry(
3479     ByteCodeParser* byteCodeParser,
3480     CodeBlock* codeBlock,
3481     CodeBlock* profiledBlock,
3482     BlockIndex callsiteBlockHead,
3483     VirtualRegister calleeVR,
3484     JSFunction* callee,
3485     VirtualRegister returnValueVR,
3486     VirtualRegister inlineCallFrameStart,
3487     int argumentCountIncludingThis,
3488     CodeSpecializationKind kind)
3489     : m_byteCodeParser(byteCodeParser)
3490     , m_codeBlock(codeBlock)
3491     , m_profiledBlock(profiledBlock)
3492     , m_calleeVR(calleeVR)
3493     , m_exitProfile(profiledBlock->exitProfile())
3494     , m_callsiteBlockHead(callsiteBlockHead)
3495     , m_returnValue(returnValueVR)
3496     , m_lazyOperands(profiledBlock->lazyOperandValueProfiles())
3497     , m_didReturn(false)
3498     , m_didEarlyReturn(false)
3499     , m_caller(byteCodeParser->m_inlineStackTop)
3500 {
3501     m_argumentPositions.resize(argumentCountIncludingThis);
3502     for (int i = 0; i < argumentCountIncludingThis; ++i) {
3503         byteCodeParser->m_graph.m_argumentPositions.append(ArgumentPosition());
3504         ArgumentPosition* argumentPosition = &byteCodeParser->m_graph.m_argumentPositions.last();
3505         m_argumentPositions[i] = argumentPosition;
3506     }
3507     
3508     // Track the code-block-global exit sites.
3509     if (m_exitProfile.hasExitSite(ArgumentsEscaped)) {
3510         byteCodeParser->m_graph.m_executablesWhoseArgumentsEscaped.add(
3511             codeBlock->ownerExecutable());
3512     }
3513         
3514     if (m_caller) {
3515         // Inline case.
3516         ASSERT(codeBlock != byteCodeParser->m_codeBlock);
3517         ASSERT(callee);
3518         ASSERT(calleeVR != InvalidVirtualRegister);
3519         ASSERT(inlineCallFrameStart != InvalidVirtualRegister);
3520         ASSERT(callsiteBlockHead != NoBlock);
3521         
3522         InlineCallFrame inlineCallFrame;
3523         inlineCallFrame.executable.set(*byteCodeParser->m_globalData, byteCodeParser->m_codeBlock->ownerExecutable(), codeBlock->ownerExecutable());
3524         inlineCallFrame.stackOffset = inlineCallFrameStart + JSStack::CallFrameHeaderSize;
3525         inlineCallFrame.callee.set(*byteCodeParser->m_globalData, byteCodeParser->m_codeBlock->ownerExecutable(), callee);
3526         inlineCallFrame.caller = byteCodeParser->currentCodeOrigin();
3527         inlineCallFrame.arguments.resize(argumentCountIncludingThis); // Set the number of arguments including this, but don't configure the value recoveries, yet.
3528         inlineCallFrame.isCall = isCall(kind);
3529         
3530         if (inlineCallFrame.caller.inlineCallFrame)
3531             inlineCallFrame.capturedVars = inlineCallFrame.caller.inlineCallFrame->capturedVars;
3532         else {
3533             for (int i = byteCodeParser->m_codeBlock->m_numVars; i--;) {
3534                 if (byteCodeParser->m_codeBlock->isCaptured(i))
3535                     inlineCallFrame.capturedVars.set(i);
3536             }
3537         }
3538
3539         for (int i = argumentCountIncludingThis; i--;) {
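        // Record the inlinee's captured arguments and locals in the machine (outer)
        // frame's coordinates by offsetting with the inline call frame's stackOffset.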
3540             if (codeBlock->isCaptured(argumentToOperand(i)))
3541                 inlineCallFrame.capturedVars.set(argumentToOperand(i) + inlineCallFrame.stackOffset);
3542         }
3543         for (size_t i = codeBlock->m_numVars; i--;) {
3544             if (codeBlock->isCaptured(i))
3545                 inlineCallFrame.capturedVars.set(i + inlineCallFrame.stackOffset);
3546         }
3547
3548 #if DFG_ENABLE(DEBUG_VERBOSE)
3549         dataLogF("Current captured variables: ");
3550         inlineCallFrame.capturedVars.dump(WTF::dataFile());
3551         dataLogF("\n");
3552 #endif
3553         
3554         byteCodeParser->m_codeBlock->inlineCallFrames().append(inlineCallFrame);
3555         m_inlineCallFrame = &byteCodeParser->m_codeBlock->inlineCallFrames().last();
3556         
3557         byteCodeParser->buildOperandMapsIfNecessary();
3558         
3559         m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
3560         m_constantRemap.resize(codeBlock->numberOfConstantRegisters());
3561         m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
3562         m_resolveOperationRemap.resize(codeBlock->numberOfResolveOperations());
3563         m_putToBaseOperationRemap.resize(codeBlock->numberOfPutToBaseOperations());
3564
3565         for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i) {