Rename dataLog() and dataLogV() to dataLogF() and dataLogFV()
[WebKit-https.git] / Source / JavaScriptCore / dfg / DFGByteCodeParser.cpp
1 /*
2  * Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGByteCodeParser.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "ArrayConstructor.h"
32 #include "CallLinkStatus.h"
33 #include "CodeBlock.h"
34 #include "DFGArrayMode.h"
35 #include "DFGByteCodeCache.h"
36 #include "DFGCapabilities.h"
37 #include "GetByIdStatus.h"
38 #include "PutByIdStatus.h"
39 #include "ResolveGlobalStatus.h"
40 #include <wtf/HashMap.h>
41 #include <wtf/MathExtras.h>
42
43 namespace JSC { namespace DFG {
44
45 class ConstantBufferKey {
46 public:
47     ConstantBufferKey()
48         : m_codeBlock(0)
49         , m_index(0)
50     {
51     }
52     
53     ConstantBufferKey(WTF::HashTableDeletedValueType)
54         : m_codeBlock(0)
55         , m_index(1)
56     {
57     }
58     
59     ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
60         : m_codeBlock(codeBlock)
61         , m_index(index)
62     {
63     }
64     
65     bool operator==(const ConstantBufferKey& other) const
66     {
67         return m_codeBlock == other.m_codeBlock
68             && m_index == other.m_index;
69     }
70     
71     unsigned hash() const
72     {
73         return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
74     }
75     
76     bool isHashTableDeletedValue() const
77     {
78         return !m_codeBlock && m_index;
79     }
80     
81     CodeBlock* codeBlock() const { return m_codeBlock; }
82     unsigned index() const { return m_index; }
83     
84 private:
85     CodeBlock* m_codeBlock;
86     unsigned m_index;
87 };
88
89 struct ConstantBufferKeyHash {
90     static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
91     static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
92     {
93         return a == b;
94     }
95     
96     static const bool safeToCompareToEmptyOrDeleted = true;
97 };
98
99 } } // namespace JSC::DFG
100
101 namespace WTF {
102
103 template<typename T> struct DefaultHash;
104 template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
105     typedef JSC::DFG::ConstantBufferKeyHash Hash;
106 };
107
108 template<typename T> struct HashTraits;
109 template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };
110
111 } // namespace WTF
112
113 namespace JSC { namespace DFG {
114
115 // === ByteCodeParser ===
116 //
117 // This class is used to compile the dataflow graph from a CodeBlock.
118 class ByteCodeParser {
119 public:
120     ByteCodeParser(ExecState* exec, Graph& graph)
121         : m_exec(exec)
122         , m_globalData(&graph.m_globalData)
123         , m_codeBlock(graph.m_codeBlock)
124         , m_profiledBlock(graph.m_profiledBlock)
125         , m_graph(graph)
126         , m_currentBlock(0)
127         , m_currentIndex(0)
128         , m_currentProfilingIndex(0)
129         , m_constantUndefined(UINT_MAX)
130         , m_constantNull(UINT_MAX)
131         , m_constantNaN(UINT_MAX)
132         , m_constant1(UINT_MAX)
133         , m_constants(m_codeBlock->numberOfConstantRegisters())
134         , m_numArguments(m_codeBlock->numParameters())
135         , m_numLocals(m_codeBlock->m_numCalleeRegisters)
136         , m_preservedVars(m_codeBlock->m_numVars)
137         , m_parameterSlots(0)
138         , m_numPassedVarArgs(0)
139         , m_globalResolveNumber(0)
140         , m_inlineStackTop(0)
141         , m_haveBuiltOperandMaps(false)
142         , m_emptyJSValueIndex(UINT_MAX)
143         , m_currentInstruction(0)
144     {
145         ASSERT(m_profiledBlock);
146         
147         for (int i = 0; i < m_codeBlock->m_numVars; ++i)
148             m_preservedVars.set(i);
149     }
150     
151     // Parse a full CodeBlock of bytecode.
152     bool parse();
153     
154 private:
155     // Just parse from m_currentIndex to the end of the current CodeBlock.
156     void parseCodeBlock();
157
158     // Helper for min and max.
159     bool handleMinMax(bool usesResult, int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);
160     
161     // Handle calls. This resolves issues surrounding inlining and intrinsics.
162     void handleCall(Interpreter*, Instruction* currentInstruction, NodeType op, CodeSpecializationKind);
163     void emitFunctionCheck(JSFunction* expectedFunction, NodeIndex callTarget, int registerOffset, CodeSpecializationKind);
164     // Handle inlining. Return true if it succeeded, false if we need to plant a call.
165     bool handleInlining(bool usesResult, int callTarget, NodeIndex callTargetNodeIndex, int resultOperand, bool certainAboutExpectedFunction, JSFunction*, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
166     // Handle setting the result of an intrinsic.
167     void setIntrinsicResult(bool usesResult, int resultOperand, NodeIndex);
168     // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
169     bool handleIntrinsic(bool usesResult, int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
170     bool handleConstantInternalFunction(bool usesResult, int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
171     NodeIndex handleGetByOffset(SpeculatedType, NodeIndex base, unsigned identifierNumber, PropertyOffset);
172     void handleGetByOffset(
173         int destinationOperand, SpeculatedType, NodeIndex base, unsigned identifierNumber,
174         PropertyOffset);
175     void handleGetById(
176         int destinationOperand, SpeculatedType, NodeIndex base, unsigned identifierNumber,
177         const GetByIdStatus&);
178
179     // Convert a set of ResolveOperations into graph nodes
180     bool parseResolveOperations(SpeculatedType, unsigned identifierNumber, unsigned operations, unsigned putToBaseOperation, NodeIndex* base, NodeIndex* value);
181
182     // Prepare to parse a block.
183     void prepareToParseBlock();
184     // Parse a single basic block of bytecode instructions.
185     bool parseBlock(unsigned limit);
186     // Link block successors.
187     void linkBlock(BasicBlock*, Vector<BlockIndex>& possibleTargets);
188     void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BlockIndex>& possibleTargets);
189     // Link GetLocal & SetLocal nodes, to ensure live values are generated.
190     enum PhiStackType {
191         LocalPhiStack,
192         ArgumentPhiStack
193     };
194     template<PhiStackType stackType>
195     void processPhiStack();
196     
197     void fixVariableAccessPredictions();
198     // Add spill locations to nodes.
199     void allocateVirtualRegisters();
200     
201     VariableAccessData* newVariableAccessData(int operand, bool isCaptured)
202     {
203         ASSERT(operand < FirstConstantRegisterIndex);
204         
205         m_graph.m_variableAccessData.append(VariableAccessData(static_cast<VirtualRegister>(operand), isCaptured));
206         return &m_graph.m_variableAccessData.last();
207     }
208     
209     // Get/Set the operands/result of a bytecode instruction.
210     NodeIndex getDirect(int operand)
211     {
212         // Is this a constant?
213         if (operand >= FirstConstantRegisterIndex) {
214             unsigned constant = operand - FirstConstantRegisterIndex;
215             ASSERT(constant < m_constants.size());
216             return getJSConstant(constant);
217         }
218
219         if (operand == JSStack::Callee)
220             return getCallee();
221         
222         // Is this an argument?
223         if (operandIsArgument(operand))
224             return getArgument(operand);
225
226         // Must be a local.
227         return getLocal((unsigned)operand);
228     }
229     NodeIndex get(int operand)
230     {
231         return getDirect(m_inlineStackTop->remapOperand(operand));
232     }
233     enum SetMode { NormalSet, SetOnEntry };
234     void setDirect(int operand, NodeIndex value, SetMode setMode = NormalSet)
235     {
236         // Is this an argument?
237         if (operandIsArgument(operand)) {
238             setArgument(operand, value, setMode);
239             return;
240         }
241
242         // Must be a local.
243         setLocal((unsigned)operand, value, setMode);
244     }
245     void set(int operand, NodeIndex value, SetMode setMode = NormalSet)
246     {
247         setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
248     }
249     
250     NodeIndex injectLazyOperandSpeculation(NodeIndex nodeIndex)
251     {
252         Node& node = m_graph[nodeIndex];
253         ASSERT(node.op() == GetLocal);
254         ASSERT(node.codeOrigin.bytecodeIndex == m_currentIndex);
255         SpeculatedType prediction = 
256             m_inlineStackTop->m_lazyOperands.prediction(
257                 LazyOperandValueProfileKey(m_currentIndex, node.local()));
258 #if DFG_ENABLE(DEBUG_VERBOSE)
259         dataLogF("Lazy operand [@%u, bc#%u, r%d] prediction: %s\n",
260                 nodeIndex, m_currentIndex, node.local(), speculationToString(prediction));
261 #endif
262         node.variableAccessData()->predict(prediction);
263         return nodeIndex;
264     }
265
266     // Used in implementing get/set, above, where the operand is a local variable.
267     NodeIndex getLocal(unsigned operand)
268     {
269         NodeIndex nodeIndex = m_currentBlock->variablesAtTail.local(operand);
270         bool isCaptured = m_codeBlock->isCaptured(operand, m_inlineStackTop->m_inlineCallFrame);
271         
272         if (nodeIndex != NoNode) {
273             Node* nodePtr = &m_graph[nodeIndex];
274             if (nodePtr->op() == Flush) {
275                 // Two possibilities: either the block wants the local to be live
276                 // but has not loaded its value, or it has loaded its value, in
277                 // which case we're done.
278                 nodeIndex = nodePtr->child1().index();
279                 Node& flushChild = m_graph[nodeIndex];
280                 if (flushChild.op() == Phi) {
281                     VariableAccessData* variableAccessData = flushChild.variableAccessData();
282                     variableAccessData->mergeIsCaptured(isCaptured);
283                     nodeIndex = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variableAccessData), nodeIndex));
284                     m_currentBlock->variablesAtTail.local(operand) = nodeIndex;
285                     return nodeIndex;
286                 }
287                 nodePtr = &flushChild;
288             }
289             
290             ASSERT(&m_graph[nodeIndex] == nodePtr);
291             ASSERT(nodePtr->op() != Flush);
292
293             nodePtr->variableAccessData()->mergeIsCaptured(isCaptured);
294                 
295             if (isCaptured) {
296                 // We wish to use the same variable access data as the previous access,
297                 // but for all other purposes we want to issue a load since for all we
298                 // know, at this stage of compilation, the local has been clobbered.
299                 
300                 // Make sure we link to the Phi node, not to the GetLocal.
301                 if (nodePtr->op() == GetLocal)
302                     nodeIndex = nodePtr->child1().index();
303                 
304                 NodeIndex newGetLocal = injectLazyOperandSpeculation(
305                     addToGraph(GetLocal, OpInfo(nodePtr->variableAccessData()), nodeIndex));
306                 m_currentBlock->variablesAtTail.local(operand) = newGetLocal;
307                 return newGetLocal;
308             }
309             
310             if (nodePtr->op() == GetLocal)
311                 return nodeIndex;
312             ASSERT(nodePtr->op() == SetLocal);
313             return nodePtr->child1().index();
314         }
315
316         // Check for reads of temporaries from prior blocks,
317         // expand m_preservedVars to cover these.
318         m_preservedVars.set(operand);
319         
320         VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
321         
322         NodeIndex phi = addToGraph(Phi, OpInfo(variableAccessData));
323         m_localPhiStack.append(PhiStackEntry(m_currentBlock, phi, operand));
324         nodeIndex = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variableAccessData), phi));
325         m_currentBlock->variablesAtTail.local(operand) = nodeIndex;
326         
327         m_currentBlock->variablesAtHead.setLocalFirstTime(operand, nodeIndex);
328         
329         return nodeIndex;
330     }
331     void setLocal(unsigned operand, NodeIndex value, SetMode setMode = NormalSet)
332     {
333         bool isCaptured = m_codeBlock->isCaptured(operand, m_inlineStackTop->m_inlineCallFrame);
334         
335         if (setMode == NormalSet) {
336             ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
337             if (isCaptured || argumentPosition)
338                 flushDirect(operand, argumentPosition);
339         }
340
341         VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
342         variableAccessData->mergeStructureCheckHoistingFailed(
343             m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
344         NodeIndex nodeIndex = addToGraph(SetLocal, OpInfo(variableAccessData), value);
345         m_currentBlock->variablesAtTail.local(operand) = nodeIndex;
346     }
347
348     // Used in implementing get/set, above, where the operand is an argument.
349     NodeIndex getArgument(unsigned operand)
350     {
351         unsigned argument = operandToArgument(operand);
352         ASSERT(argument < m_numArguments);
353         
354         NodeIndex nodeIndex = m_currentBlock->variablesAtTail.argument(argument);
355         bool isCaptured = m_codeBlock->isCaptured(operand);
356
357         if (nodeIndex != NoNode) {
358             Node* nodePtr = &m_graph[nodeIndex];
359             if (nodePtr->op() == Flush) {
360                 // Two possibilities: either the block wants the local to be live
361                 // but has not loaded its value, or it has loaded its value, in
362                 // which case we're done.
363                 nodeIndex = nodePtr->child1().index();
364                 Node& flushChild = m_graph[nodeIndex];
365                 if (flushChild.op() == Phi) {
366                     VariableAccessData* variableAccessData = flushChild.variableAccessData();
367                     variableAccessData->mergeIsCaptured(isCaptured);
368                     nodeIndex = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variableAccessData), nodeIndex));
369                     m_currentBlock->variablesAtTail.argument(argument) = nodeIndex;
370                     return nodeIndex;
371                 }
372                 nodePtr = &flushChild;
373             }
374             
375             ASSERT(&m_graph[nodeIndex] == nodePtr);
376             ASSERT(nodePtr->op() != Flush);
377             
378             nodePtr->variableAccessData()->mergeIsCaptured(isCaptured);
379             
380             if (nodePtr->op() == SetArgument) {
381                 // We're getting an argument in the first basic block; link
382                 // the GetLocal to the SetArgument.
383                 ASSERT(nodePtr->local() == static_cast<VirtualRegister>(operand));
384                 VariableAccessData* variable = nodePtr->variableAccessData();
385                 nodeIndex = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable), nodeIndex));
386                 m_currentBlock->variablesAtTail.argument(argument) = nodeIndex;
387                 return nodeIndex;
388             }
389             
390             if (isCaptured) {
391                 if (nodePtr->op() == GetLocal)
392                     nodeIndex = nodePtr->child1().index();
393                 return injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(nodePtr->variableAccessData()), nodeIndex));
394             }
395             
396             if (nodePtr->op() == GetLocal)
397                 return nodeIndex;
398             
399             ASSERT(nodePtr->op() == SetLocal);
400             return nodePtr->child1().index();
401         }
402         
403         VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
404
405         NodeIndex phi = addToGraph(Phi, OpInfo(variableAccessData));
406         m_argumentPhiStack.append(PhiStackEntry(m_currentBlock, phi, argument));
407         nodeIndex = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variableAccessData), phi));
408         m_currentBlock->variablesAtTail.argument(argument) = nodeIndex;
409         
410         m_currentBlock->variablesAtHead.setArgumentFirstTime(argument, nodeIndex);
411         
412         return nodeIndex;
413     }
414     void setArgument(int operand, NodeIndex value, SetMode setMode = NormalSet)
415     {
416         unsigned argument = operandToArgument(operand);
417         ASSERT(argument < m_numArguments);
418         
419         bool isCaptured = m_codeBlock->isCaptured(operand);
420
421         // Always flush arguments, except for 'this'.
422         if (argument && setMode == NormalSet)
423             flushDirect(operand);
424         
425         VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
426         variableAccessData->mergeStructureCheckHoistingFailed(
427             m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
428         NodeIndex nodeIndex = addToGraph(SetLocal, OpInfo(variableAccessData), value);
429         m_currentBlock->variablesAtTail.argument(argument) = nodeIndex;
430     }
431     
432     ArgumentPosition* findArgumentPositionForArgument(int argument)
433     {
434         InlineStackEntry* stack = m_inlineStackTop;
435         while (stack->m_inlineCallFrame)
436             stack = stack->m_caller;
437         return stack->m_argumentPositions[argument];
438     }
439     
440     ArgumentPosition* findArgumentPositionForLocal(int operand)
441     {
442         for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
443             InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
444             if (!inlineCallFrame)
445                 break;
446             if (operand >= static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize))
447                 continue;
448             if (operand == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
449                 continue;
450             if (operand < static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize - inlineCallFrame->arguments.size()))
451                 continue;
452             int argument = operandToArgument(operand - inlineCallFrame->stackOffset);
453             return stack->m_argumentPositions[argument];
454         }
455         return 0;
456     }
457     
458     ArgumentPosition* findArgumentPosition(int operand)
459     {
460         if (operandIsArgument(operand))
461             return findArgumentPositionForArgument(operandToArgument(operand));
462         return findArgumentPositionForLocal(operand);
463     }
464     
465     void flush(int operand)
466     {
467         flushDirect(m_inlineStackTop->remapOperand(operand));
468     }
469     
470     void flushDirect(int operand)
471     {
472         flushDirect(operand, findArgumentPosition(operand));
473     }
474     
475     void flushDirect(int operand, ArgumentPosition* argumentPosition)
476     {
477         // FIXME: This should check if the same operand had already been flushed to
478         // some other local variable.
479         
480         bool isCaptured = m_codeBlock->isCaptured(operand, m_inlineStackTop->m_inlineCallFrame);
481         
482         ASSERT(operand < FirstConstantRegisterIndex);
483         
484         NodeIndex nodeIndex;
485         int index;
486         if (operandIsArgument(operand)) {
487             index = operandToArgument(operand);
488             nodeIndex = m_currentBlock->variablesAtTail.argument(index);
489         } else {
490             index = operand;
491             nodeIndex = m_currentBlock->variablesAtTail.local(index);
492             m_preservedVars.set(operand);
493         }
494         
495         if (nodeIndex != NoNode) {
496             Node& node = m_graph[nodeIndex];
497             switch (node.op()) {
498             case Flush:
499                 nodeIndex = node.child1().index();
500                 break;
501             case GetLocal:
502                 nodeIndex = node.child1().index();
503                 break;
504             default:
505                 break;
506             }
507             
508             ASSERT(m_graph[nodeIndex].op() != Flush
509                    && m_graph[nodeIndex].op() != GetLocal);
510             
511             // Emit a Flush regardless of whether we already flushed it.
512             // This gives us guidance to see that the variable also needs to be flushed
513             // for arguments, even if it already had to be flushed for other reasons.
514             VariableAccessData* variableAccessData = node.variableAccessData();
515             variableAccessData->mergeIsCaptured(isCaptured);
516             addToGraph(Flush, OpInfo(variableAccessData), nodeIndex);
517             if (argumentPosition)
518                 argumentPosition->addVariable(variableAccessData);
519             return;
520         }
521         
522         VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
523         NodeIndex phi = addToGraph(Phi, OpInfo(variableAccessData));
524         nodeIndex = addToGraph(Flush, OpInfo(variableAccessData), phi);
525         if (operandIsArgument(operand)) {
526             m_argumentPhiStack.append(PhiStackEntry(m_currentBlock, phi, index));
527             m_currentBlock->variablesAtTail.argument(index) = nodeIndex;
528             m_currentBlock->variablesAtHead.setArgumentFirstTime(index, nodeIndex);
529         } else {
530             m_localPhiStack.append(PhiStackEntry(m_currentBlock, phi, index));
531             m_currentBlock->variablesAtTail.local(index) = nodeIndex;
532             m_currentBlock->variablesAtHead.setLocalFirstTime(index, nodeIndex);
533         }
534         if (argumentPosition)
535             argumentPosition->addVariable(variableAccessData);
536     }
537     
538     void flushArgumentsAndCapturedVariables()
539     {
540         int numArguments;
541         if (m_inlineStackTop->m_inlineCallFrame)
542             numArguments = m_inlineStackTop->m_inlineCallFrame->arguments.size();
543         else
544             numArguments = m_inlineStackTop->m_codeBlock->numParameters();
545         for (unsigned argument = numArguments; argument-- > 1;)
546             flush(argumentToOperand(argument));
547         for (int local = 0; local < m_inlineStackTop->m_codeBlock->m_numVars; ++local) {
548             if (!m_inlineStackTop->m_codeBlock->isCaptured(local))
549                 continue;
550             flush(local);
551         }
552     }
553
554     // Get an operand, and perform a ToInt32/ToNumber conversion on it.
555     NodeIndex getToInt32(int operand)
556     {
557         return toInt32(get(operand));
558     }
559
560     // Perform an ES5 ToInt32 operation - returns a node of type NodeResultInt32.
561     NodeIndex toInt32(NodeIndex index)
562     {
563         Node& node = m_graph[index];
564
565         if (node.hasInt32Result())
566             return index;
567
568         if (node.op() == UInt32ToNumber)
569             return node.child1().index();
570
571         // Check for numeric constants boxed as JSValues.
572         if (node.op() == JSConstant) {
573             JSValue v = valueOfJSConstant(index);
574             if (v.isInt32())
575                 return getJSConstant(node.constantNumber());
576             if (v.isNumber())
577                 return getJSConstantForValue(JSValue(JSC::toInt32(v.asNumber())));
578         }
579
580         return addToGraph(ValueToInt32, index);
581     }
582
583     NodeIndex getJSConstantForValue(JSValue constantValue)
584     {
585         unsigned constantIndex = m_codeBlock->addOrFindConstant(constantValue);
586         if (constantIndex >= m_constants.size())
587             m_constants.append(ConstantRecord());
588         
589         ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
590         
591         return getJSConstant(constantIndex);
592     }
593
594     NodeIndex getJSConstant(unsigned constant)
595     {
596         NodeIndex index = m_constants[constant].asJSValue;
597         if (index != NoNode)
598             return index;
599
600         NodeIndex resultIndex = addToGraph(JSConstant, OpInfo(constant));
601         m_constants[constant].asJSValue = resultIndex;
602         return resultIndex;
603     }
604
605     NodeIndex getCallee()
606     {
607         return addToGraph(GetCallee);
608     }
609
610     // Helper functions to get/set the this value.
611     NodeIndex getThis()
612     {
613         return get(m_inlineStackTop->m_codeBlock->thisRegister());
614     }
615     void setThis(NodeIndex value)
616     {
617         set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
618     }
619
620     // Convenience methods for checking nodes for constants.
621     bool isJSConstant(NodeIndex index)
622     {
623         return m_graph[index].op() == JSConstant;
624     }
625     bool isInt32Constant(NodeIndex nodeIndex)
626     {
627         return isJSConstant(nodeIndex) && valueOfJSConstant(nodeIndex).isInt32();
628     }
629     // Convenience methods for getting constant values.
630     JSValue valueOfJSConstant(NodeIndex index)
631     {
632         ASSERT(isJSConstant(index));
633         return m_codeBlock->getConstant(FirstConstantRegisterIndex + m_graph[index].constantNumber());
634     }
635     int32_t valueOfInt32Constant(NodeIndex nodeIndex)
636     {
637         ASSERT(isInt32Constant(nodeIndex));
638         return valueOfJSConstant(nodeIndex).asInt32();
639     }
640     
641     // This method returns a JSConstant with the value 'undefined'.
642     NodeIndex constantUndefined()
643     {
644         // Has m_constantUndefined been set up yet?
645         if (m_constantUndefined == UINT_MAX) {
646             // Search the constant pool for undefined, if we find it, we can just reuse this!
647             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
648             for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
649                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
650                 if (testMe.isUndefined())
651                     return getJSConstant(m_constantUndefined);
652             }
653
654             // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
655             ASSERT(m_constants.size() == numberOfConstants);
656             m_codeBlock->addConstant(jsUndefined());
657             m_constants.append(ConstantRecord());
658             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
659         }
660
661         // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
662         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
663         return getJSConstant(m_constantUndefined);
664     }
665
666     // This method returns a JSConstant with the value 'null'.
667     NodeIndex constantNull()
668     {
669         // Has m_constantNull been set up yet?
670         if (m_constantNull == UINT_MAX) {
671             // Search the constant pool for null, if we find it, we can just reuse this!
672             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
673             for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
674                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
675                 if (testMe.isNull())
676                     return getJSConstant(m_constantNull);
677             }
678
679             // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
680             ASSERT(m_constants.size() == numberOfConstants);
681             m_codeBlock->addConstant(jsNull());
682             m_constants.append(ConstantRecord());
683             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
684         }
685
686         // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
687         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
688         return getJSConstant(m_constantNull);
689     }
690
691     // This method returns a DoubleConstant with the value 1.
692     NodeIndex one()
693     {
694         // Has m_constant1 been set up yet?
695         if (m_constant1 == UINT_MAX) {
696             // Search the constant pool for the value 1, if we find it, we can just reuse this!
697             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
698             for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
699                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
700                 if (testMe.isInt32() && testMe.asInt32() == 1)
701                     return getJSConstant(m_constant1);
702             }
703
704             // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
705             ASSERT(m_constants.size() == numberOfConstants);
706             m_codeBlock->addConstant(jsNumber(1));
707             m_constants.append(ConstantRecord());
708             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
709         }
710
711         // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
712         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
713         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
714         return getJSConstant(m_constant1);
715     }
716     
717     // This method returns a DoubleConstant with the value NaN.
718     NodeIndex constantNaN()
719     {
720         JSValue nan = jsNaN();
721         
722         // Has m_constantNaN been set up yet?
723         if (m_constantNaN == UINT_MAX) {
724             // Search the constant pool for the value NaN, if we find it, we can just reuse this!
725             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
726             for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
727                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
728                 if (JSValue::encode(testMe) == JSValue::encode(nan))
729                     return getJSConstant(m_constantNaN);
730             }
731
732             // Add the value nan to the CodeBlock's constants, and add a corresponding slot in m_constants.
733             ASSERT(m_constants.size() == numberOfConstants);
734             m_codeBlock->addConstant(nan);
735             m_constants.append(ConstantRecord());
736             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
737         }
738
739         // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value nan.
740         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
741         ASSERT(isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
742         return getJSConstant(m_constantNaN);
743     }
744     
745     NodeIndex cellConstant(JSCell* cell)
746     {
747         HashMap<JSCell*, NodeIndex>::AddResult result = m_cellConstantNodes.add(cell, NoNode);
748         if (result.isNewEntry)
749             result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));
750         
751         return result.iterator->value;
752     }
753     
754     CodeOrigin currentCodeOrigin()
755     {
756         return CodeOrigin(m_currentIndex, m_inlineStackTop->m_inlineCallFrame, m_currentProfilingIndex - m_currentIndex);
757     }
758
759     // These methods create a node and add it to the graph. If nodes of this type are
760     // 'mustGenerate' then the node  will implicitly be ref'ed to ensure generation.
761     NodeIndex addToGraph(NodeType op, NodeIndex child1 = NoNode, NodeIndex child2 = NoNode, NodeIndex child3 = NoNode)
762     {
763         NodeIndex resultIndex = (NodeIndex)m_graph.size();
764         m_graph.append(Node(op, currentCodeOrigin(), child1, child2, child3));
765         ASSERT(op != Phi);
766         m_currentBlock->append(resultIndex);
767
768         if (defaultFlags(op) & NodeMustGenerate)
769             m_graph.ref(resultIndex);
770         return resultIndex;
771     }
772     NodeIndex addToGraph(NodeType op, OpInfo info, NodeIndex child1 = NoNode, NodeIndex child2 = NoNode, NodeIndex child3 = NoNode)
773     {
774         NodeIndex resultIndex = (NodeIndex)m_graph.size();
775         m_graph.append(Node(op, currentCodeOrigin(), info, child1, child2, child3));
776         if (op == Phi)
777             m_currentBlock->phis.append(resultIndex);
778         else
779             m_currentBlock->append(resultIndex);
780
781         if (defaultFlags(op) & NodeMustGenerate)
782             m_graph.ref(resultIndex);
783         return resultIndex;
784     }
785     NodeIndex addToGraph(NodeType op, OpInfo info1, OpInfo info2, NodeIndex child1 = NoNode, NodeIndex child2 = NoNode, NodeIndex child3 = NoNode)
786     {
787         NodeIndex resultIndex = (NodeIndex)m_graph.size();
788         m_graph.append(Node(op, currentCodeOrigin(), info1, info2, child1, child2, child3));
789         ASSERT(op != Phi);
790         m_currentBlock->append(resultIndex);
791
792         if (defaultFlags(op) & NodeMustGenerate)
793             m_graph.ref(resultIndex);
794         return resultIndex;
795     }
796     
797     NodeIndex addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
798     {
799         NodeIndex resultIndex = (NodeIndex)m_graph.size();
800         m_graph.append(Node(Node::VarArg, op, currentCodeOrigin(), info1, info2, m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs));
801         ASSERT(op != Phi);
802         m_currentBlock->append(resultIndex);
803         
804         m_numPassedVarArgs = 0;
805         
806         if (defaultFlags(op) & NodeMustGenerate)
807             m_graph.ref(resultIndex);
808         return resultIndex;
809     }
810
811     NodeIndex insertPhiNode(OpInfo info, BasicBlock* block)
812     {
813         NodeIndex resultIndex = (NodeIndex)m_graph.size();
814         m_graph.append(Node(Phi, currentCodeOrigin(), info));
815         block->phis.append(resultIndex);
816
817         return resultIndex;
818     }
819
820     void addVarArgChild(NodeIndex child)
821     {
822         m_graph.m_varArgChildren.append(Edge(child));
823         m_numPassedVarArgs++;
824     }
825     
826     NodeIndex addCall(Interpreter* interpreter, Instruction* currentInstruction, NodeType op)
827     {
828         Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call);
829
830         SpeculatedType prediction = SpecNone;
831         if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
832             m_currentProfilingIndex = m_currentIndex + OPCODE_LENGTH(op_call);
833             prediction = getPrediction();
834         }
835         
836         addVarArgChild(get(currentInstruction[1].u.operand));
837         int argCount = currentInstruction[2].u.operand;
838         if (JSStack::CallFrameHeaderSize + (unsigned)argCount > m_parameterSlots)
839             m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
840
841         int registerOffset = currentInstruction[3].u.operand;
842         int dummyThisArgument = op == Call ? 0 : 1;
843         for (int i = 0 + dummyThisArgument; i < argCount; ++i)
844             addVarArgChild(get(registerOffset + argumentToOperand(i)));
845
846         NodeIndex call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
847         if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result)
848             set(putInstruction[1].u.operand, call);
849         return call;
850     }
851     
852     NodeIndex addStructureTransitionCheck(JSCell* object, Structure* structure)
853     {
854         // Add a weak JS constant for the object regardless, since the code should
855         // be jettisoned if the object ever dies.
856         NodeIndex objectIndex = cellConstant(object);
857         
858         if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
859             addToGraph(StructureTransitionWatchpoint, OpInfo(structure), objectIndex);
860             return objectIndex;
861         }
862         
863         addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectIndex);
864         
865         return objectIndex;
866     }
867     
868     NodeIndex addStructureTransitionCheck(JSCell* object)
869     {
870         return addStructureTransitionCheck(object, object->structure());
871     }
872     
873     SpeculatedType getPredictionWithoutOSRExit(NodeIndex nodeIndex, unsigned bytecodeIndex)
874     {
875         UNUSED_PARAM(nodeIndex);
876         
877         SpeculatedType prediction = m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(bytecodeIndex);
878 #if DFG_ENABLE(DEBUG_VERBOSE)
879         dataLogF("Dynamic [@%u, bc#%u] prediction: %s\n", nodeIndex, bytecodeIndex, speculationToString(prediction));
880 #endif
881         
882         return prediction;
883     }
884
885     SpeculatedType getPrediction(NodeIndex nodeIndex, unsigned bytecodeIndex)
886     {
887         SpeculatedType prediction = getPredictionWithoutOSRExit(nodeIndex, bytecodeIndex);
888         
889         if (prediction == SpecNone) {
890             // We have no information about what values this node generates. Give up
891             // on executing this code, since we're likely to do more damage than good.
892             addToGraph(ForceOSRExit);
893         }
894         
895         return prediction;
896     }
897     
898     SpeculatedType getPredictionWithoutOSRExit()
899     {
900         return getPredictionWithoutOSRExit(m_graph.size(), m_currentProfilingIndex);
901     }
902     
903     SpeculatedType getPrediction()
904     {
905         return getPrediction(m_graph.size(), m_currentProfilingIndex);
906     }
907     
908     ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
909     {
910         profile->computeUpdatedPrediction(m_inlineStackTop->m_codeBlock);
911         return ArrayMode::fromObserved(profile, action, false);
912     }
913     
914     ArrayMode getArrayMode(ArrayProfile* profile)
915     {
916         return getArrayMode(profile, Array::Read);
917     }
918     
919     ArrayMode getArrayModeAndEmitChecks(ArrayProfile* profile, Array::Action action, NodeIndex base)
920     {
921         profile->computeUpdatedPrediction(m_inlineStackTop->m_codeBlock);
922         
923 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
924         if (m_inlineStackTop->m_profiledBlock->numberOfRareCaseProfiles())
925             dataLogF("Slow case profile for bc#%u: %u\n", m_currentIndex, m_inlineStackTop->m_profiledBlock->rareCaseProfileForBytecodeOffset(m_currentIndex)->m_counter);
926         dataLogF("Array profile for bc#%u: %p%s%s, %u\n", m_currentIndex, profile->expectedStructure(), profile->structureIsPolymorphic() ? " (polymorphic)" : "", profile->mayInterceptIndexedAccesses() ? " (may intercept)" : "", profile->observedArrayModes());
927 #endif
928         
929         bool makeSafe =
930             m_inlineStackTop->m_profiledBlock->couldTakeSlowCase(m_currentIndex)
931             || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, OutOfBounds);
932         
933         ArrayMode result = ArrayMode::fromObserved(profile, action, makeSafe);
934         
935         if (profile->hasDefiniteStructure() && result.benefitsFromStructureCheck())
936             addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(profile->expectedStructure())), base);
937         
938         return result;
939     }
940     
941     NodeIndex makeSafe(NodeIndex nodeIndex)
942     {
943         Node& node = m_graph[nodeIndex];
944         
945         bool likelyToTakeSlowCase;
946         if (!isX86() && node.op() == ArithMod)
947             likelyToTakeSlowCase = false;
948         else
949             likelyToTakeSlowCase = m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex);
950         
951         if (!likelyToTakeSlowCase
952             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
953             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
954             return nodeIndex;
955         
956         switch (m_graph[nodeIndex].op()) {
957         case UInt32ToNumber:
958         case ArithAdd:
959         case ArithSub:
960         case ArithNegate:
961         case ValueAdd:
962         case ArithMod: // for ArithMod "MayOverflow" means we tried to divide by zero, or we saw double.
963             m_graph[nodeIndex].mergeFlags(NodeMayOverflow);
964             break;
965             
966         case ArithMul:
967             if (m_inlineStackTop->m_profiledBlock->likelyToTakeDeepestSlowCase(m_currentIndex)
968                 || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)) {
969 #if DFG_ENABLE(DEBUG_VERBOSE)
970                 dataLogF("Making ArithMul @%u take deepest slow case.\n", nodeIndex);
971 #endif
972                 m_graph[nodeIndex].mergeFlags(NodeMayOverflow | NodeMayNegZero);
973             } else if (m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
974                        || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero)) {
975 #if DFG_ENABLE(DEBUG_VERBOSE)
976                 dataLogF("Making ArithMul @%u take faster slow case.\n", nodeIndex);
977 #endif
978                 m_graph[nodeIndex].mergeFlags(NodeMayNegZero);
979             }
980             break;
981             
982         default:
983             ASSERT_NOT_REACHED();
984             break;
985         }
986         
987         return nodeIndex;
988     }
989     
990     NodeIndex makeDivSafe(NodeIndex nodeIndex)
991     {
992         ASSERT(m_graph[nodeIndex].op() == ArithDiv);
993         
994         // The main slow case counter for op_div in the old JIT counts only when
995         // the operands are not numbers. We don't care about that since we already
996         // have speculations in place that take care of that separately. We only
997         // care about when the outcome of the division is not an integer, which
998         // is what the special fast case counter tells us.
999         
1000         if (!m_inlineStackTop->m_profiledBlock->couldTakeSpecialFastCase(m_currentIndex)
1001             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
1002             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
1003             return nodeIndex;
1004         
1005 #if DFG_ENABLE(DEBUG_VERBOSE)
1006         dataLogF("Making %s @%u safe at bc#%u because special fast-case counter is at %u and exit profiles say %d, %d\n", Graph::opName(m_graph[nodeIndex].op()), nodeIndex, m_currentIndex, m_inlineStackTop->m_profiledBlock->specialFastCaseProfileForBytecodeOffset(m_currentIndex)->m_counter, m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow), m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero));
1007 #endif
1008         
1009         // FIXME: It might be possible to make this more granular. The DFG certainly can
1010         // distinguish between negative zero and overflow in its exit profiles.
1011         m_graph[nodeIndex].mergeFlags(NodeMayOverflow | NodeMayNegZero);
1012         
1013         return nodeIndex;
1014     }
1015     
1016     bool willNeedFlush(StructureStubInfo& stubInfo)
1017     {
1018         PolymorphicAccessStructureList* list;
1019         int listSize;
1020         switch (stubInfo.accessType) {
1021         case access_get_by_id_self_list:
1022             list = stubInfo.u.getByIdSelfList.structureList;
1023             listSize = stubInfo.u.getByIdSelfList.listSize;
1024             break;
1025         case access_get_by_id_proto_list:
1026             list = stubInfo.u.getByIdProtoList.structureList;
1027             listSize = stubInfo.u.getByIdProtoList.listSize;
1028             break;
1029         default:
1030             return false;
1031         }
1032         for (int i = 0; i < listSize; ++i) {
1033             if (!list->list[i].isDirect)
1034                 return true;
1035         }
1036         return false;
1037     }
1038     
1039     bool structureChainIsStillValid(bool direct, Structure* previousStructure, StructureChain* chain)
1040     {
1041         if (direct)
1042             return true;
1043         
1044         if (!previousStructure->storedPrototype().isNull() && previousStructure->storedPrototype().asCell()->structure() != chain->head()->get())
1045             return false;
1046         
1047         for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
1048             if (!(*it)->storedPrototype().isNull() && (*it)->storedPrototype().asCell()->structure() != it[1].get())
1049                 return false;
1050         }
1051         
1052         return true;
1053     }
1054     
1055     void buildOperandMapsIfNecessary();
1056     
1057     ExecState* m_exec;
1058     JSGlobalData* m_globalData;
1059     CodeBlock* m_codeBlock;
1060     CodeBlock* m_profiledBlock;
1061     Graph& m_graph;
1062
1063     // The current block being generated.
1064     BasicBlock* m_currentBlock;
1065     // The bytecode index of the current instruction being generated.
1066     unsigned m_currentIndex;
1067     // The bytecode index of the value profile of the current instruction being generated.
1068     unsigned m_currentProfilingIndex;
1069
1070     // We use these values during code generation, and to avoid the need for
1071     // special handling we make sure they are available as constants in the
1072     // CodeBlock's constant pool. These variables are initialized to
1073     // UINT_MAX, and lazily updated to hold an index into the CodeBlock's
1074     // constant pool, as necessary.
1075     unsigned m_constantUndefined;
1076     unsigned m_constantNull;
1077     unsigned m_constantNaN;
1078     unsigned m_constant1;
1079     HashMap<JSCell*, unsigned> m_cellConstants;
1080     HashMap<JSCell*, NodeIndex> m_cellConstantNodes;
1081
1082     // A constant in the constant pool may be represented by more than one
1083     // node in the graph, depending on the context in which it is being used.
1084     struct ConstantRecord {
1085         ConstantRecord()
1086             : asInt32(NoNode)
1087             , asNumeric(NoNode)
1088             , asJSValue(NoNode)
1089         {
1090         }
1091
1092         NodeIndex asInt32;
1093         NodeIndex asNumeric;
1094         NodeIndex asJSValue;
1095     };
1096
1097     // Track the index of the node whose result is the current value for every
1098     // register value in the bytecode - argument, local, and temporary.
1099     Vector<ConstantRecord, 16> m_constants;
1100
1101     // The number of arguments passed to the function.
1102     unsigned m_numArguments;
1103     // The number of locals (vars + temporaries) used in the function.
1104     unsigned m_numLocals;
1105     // The set of registers we need to preserve across BasicBlock boundaries;
1106     // typically equal to the set of vars, but we expand this to cover all
1107     // temporaries that persist across blocks (dues to ?:, &&, ||, etc).
1108     BitVector m_preservedVars;
1109     // The number of slots (in units of sizeof(Register)) that we need to
1110     // preallocate for calls emanating from this frame. This includes the
1111     // size of the CallFrame, only if this is not a leaf function.  (I.e.
1112     // this is 0 if and only if this function is a leaf.)
1113     unsigned m_parameterSlots;
1114     // The number of var args passed to the next var arg node.
1115     unsigned m_numPassedVarArgs;
1116     // The index in the global resolve info.
1117     unsigned m_globalResolveNumber;
1118
1119     struct PhiStackEntry {
1120         PhiStackEntry(BasicBlock* block, NodeIndex phi, unsigned varNo)
1121             : m_block(block)
1122             , m_phi(phi)
1123             , m_varNo(varNo)
1124         {
1125         }
1126
1127         BasicBlock* m_block;
1128         NodeIndex m_phi;
1129         unsigned m_varNo;
1130     };
1131     Vector<PhiStackEntry, 16> m_argumentPhiStack;
1132     Vector<PhiStackEntry, 16> m_localPhiStack;
1133     
1134     HashMap<ConstantBufferKey, unsigned> m_constantBufferCache;
1135     
1136     struct InlineStackEntry {
1137         ByteCodeParser* m_byteCodeParser;
1138         
1139         CodeBlock* m_codeBlock;
1140         CodeBlock* m_profiledBlock;
1141         InlineCallFrame* m_inlineCallFrame;
1142         VirtualRegister m_calleeVR; // absolute virtual register, not relative to call frame
1143         
1144         ScriptExecutable* executable() { return m_codeBlock->ownerExecutable(); }
1145         
1146         QueryableExitProfile m_exitProfile;
1147         
1148         // Remapping of identifier and constant numbers from the code block being
1149         // inlined (inline callee) to the code block that we're inlining into
1150         // (the machine code block, which is the transitive, though not necessarily
1151         // direct, caller).
1152         Vector<unsigned> m_identifierRemap;
1153         Vector<unsigned> m_constantRemap;
1154         Vector<unsigned> m_constantBufferRemap;
1155         Vector<unsigned> m_resolveOperationRemap;
1156         Vector<unsigned> m_putToBaseOperationRemap;
1157         
1158         // Blocks introduced by this code block, which need successor linking.
1159         // May include up to one basic block that includes the continuation after
1160         // the callsite in the caller. These must be appended in the order that they
1161         // are created, but their bytecodeBegin values need not be in order as they
1162         // are ignored.
1163         Vector<UnlinkedBlock> m_unlinkedBlocks;
1164         
1165         // Potential block linking targets. Must be sorted by bytecodeBegin, and
1166         // cannot have two blocks that have the same bytecodeBegin. For this very
1167         // reason, this is not equivalent to 
1168         Vector<BlockIndex> m_blockLinkingTargets;
1169         
1170         // If the callsite's basic block was split into two, then this will be
1171         // the head of the callsite block. It needs its successors linked to the
1172         // m_unlinkedBlocks, but not the other way around: there's no way for
1173         // any blocks in m_unlinkedBlocks to jump back into this block.
1174         BlockIndex m_callsiteBlockHead;
1175         
1176         // Does the callsite block head need linking? This is typically true
1177         // but will be false for the machine code block's inline stack entry
1178         // (since that one is not inlined) and for cases where an inline callee
1179         // did the linking for us.
1180         bool m_callsiteBlockHeadNeedsLinking;
1181         
1182         VirtualRegister m_returnValue;
1183         
1184         // Speculations about variable types collected from the profiled code block,
1185         // which are based on OSR exit profiles that past DFG compilatins of this
1186         // code block had gathered.
1187         LazyOperandValueProfileParser m_lazyOperands;
1188         
1189         // Did we see any returns? We need to handle the (uncommon but necessary)
1190         // case where a procedure that does not return was inlined.
1191         bool m_didReturn;
1192         
1193         // Did we have any early returns?
1194         bool m_didEarlyReturn;
1195         
1196         // Pointers to the argument position trackers for this slice of code.
1197         Vector<ArgumentPosition*> m_argumentPositions;
1198         
1199         InlineStackEntry* m_caller;
1200         
1201         InlineStackEntry(
1202             ByteCodeParser*,
1203             CodeBlock*,
1204             CodeBlock* profiledBlock,
1205             BlockIndex callsiteBlockHead,
1206             VirtualRegister calleeVR,
1207             JSFunction* callee,
1208             VirtualRegister returnValueVR,
1209             VirtualRegister inlineCallFrameStart,
1210             int argumentCountIncludingThis,
1211             CodeSpecializationKind);
1212         
1213         ~InlineStackEntry()
1214         {
1215             m_byteCodeParser->m_inlineStackTop = m_caller;
1216         }
1217         
1218         int remapOperand(int operand) const
1219         {
1220             if (!m_inlineCallFrame)
1221                 return operand;
1222             
1223             if (operand >= FirstConstantRegisterIndex) {
1224                 int result = m_constantRemap[operand - FirstConstantRegisterIndex];
1225                 ASSERT(result >= FirstConstantRegisterIndex);
1226                 return result;
1227             }
1228
1229             if (operand == JSStack::Callee)
1230                 return m_calleeVR;
1231
1232             return operand + m_inlineCallFrame->stackOffset;
1233         }
1234     };
1235     
1236     InlineStackEntry* m_inlineStackTop;
1237
1238     // Have we built operand maps? We initialize them lazily, and only when doing
1239     // inlining.
1240     bool m_haveBuiltOperandMaps;
1241     // Mapping between identifier names and numbers.
1242     IdentifierMap m_identifierMap;
1243     // Mapping between values and constant numbers.
1244     JSValueMap m_jsValueMap;
1245     // Index of the empty value, or UINT_MAX if there is no mapping. This is a horrible
1246     // work-around for the fact that JSValueMap can't handle "empty" values.
1247     unsigned m_emptyJSValueIndex;
1248     
1249     // Cache of code blocks that we've generated bytecode for.
1250     ByteCodeCache<canInlineFunctionFor> m_codeBlockCache;
1251     
1252     Instruction* m_currentInstruction;
1253 };
1254
1255 #define NEXT_OPCODE(name) \
1256     m_currentIndex += OPCODE_LENGTH(name); \
1257     continue
1258
1259 #define LAST_OPCODE(name) \
1260     m_currentIndex += OPCODE_LENGTH(name); \
1261     return shouldContinueParsing
1262
1263
1264 void ByteCodeParser::handleCall(Interpreter* interpreter, Instruction* currentInstruction, NodeType op, CodeSpecializationKind kind)
1265 {
1266     ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
1267     
1268     NodeIndex callTarget = get(currentInstruction[1].u.operand);
1269     enum {
1270         ConstantFunction,
1271         ConstantInternalFunction,
1272         LinkedFunction,
1273         UnknownFunction
1274     } callType;
1275             
1276     CallLinkStatus callLinkStatus = CallLinkStatus::computeFor(
1277         m_inlineStackTop->m_profiledBlock, m_currentIndex);
1278     
1279 #if DFG_ENABLE(DEBUG_VERBOSE)
1280     dataLogF("For call at @%lu bc#%u: ", m_graph.size(), m_currentIndex);
1281     if (callLinkStatus.isSet()) {
1282         if (callLinkStatus.couldTakeSlowPath())
1283             dataLogF("could take slow path, ");
1284         dataLogF("target = %p\n", callLinkStatus.callTarget());
1285     } else
1286         dataLogF("not set.\n");
1287 #endif
1288     
1289     if (m_graph.isFunctionConstant(callTarget)) {
1290         callType = ConstantFunction;
1291 #if DFG_ENABLE(DEBUG_VERBOSE)
1292         dataLogF("Call at [@%lu, bc#%u] has a function constant: %p, exec %p.\n",
1293                 m_graph.size(), m_currentIndex,
1294                 m_graph.valueOfFunctionConstant(callTarget),
1295                 m_graph.valueOfFunctionConstant(callTarget)->executable());
1296 #endif
1297     } else if (m_graph.isInternalFunctionConstant(callTarget)) {
1298         callType = ConstantInternalFunction;
1299 #if DFG_ENABLE(DEBUG_VERBOSE)
1300         dataLogF("Call at [@%lu, bc#%u] has an internal function constant: %p.\n",
1301                 m_graph.size(), m_currentIndex,
1302                 m_graph.valueOfInternalFunctionConstant(callTarget));
1303 #endif
1304     } else if (callLinkStatus.isSet() && !callLinkStatus.couldTakeSlowPath()
1305                && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)) {
1306         callType = LinkedFunction;
1307 #if DFG_ENABLE(DEBUG_VERBOSE)
1308         dataLogF("Call at [@%lu, bc#%u] is linked to: %p, exec %p.\n",
1309                 m_graph.size(), m_currentIndex, callLinkStatus.callTarget(),
1310                 callLinkStatus.callTarget()->executable());
1311 #endif
1312     } else {
1313         callType = UnknownFunction;
1314 #if DFG_ENABLE(DEBUG_VERBOSE)
1315         dataLogF("Call at [@%lu, bc#%u] is has an unknown or ambiguous target.\n",
1316                 m_graph.size(), m_currentIndex);
1317 #endif
1318     }
1319     if (callType != UnknownFunction) {
1320         int argumentCountIncludingThis = currentInstruction[2].u.operand;
1321         int registerOffset = currentInstruction[3].u.operand;
1322
1323         // Do we have a result?
1324         bool usesResult = false;
1325         int resultOperand = 0; // make compiler happy
1326         unsigned nextOffset = m_currentIndex + OPCODE_LENGTH(op_call);
1327         Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call);
1328         SpeculatedType prediction = SpecNone;
1329         if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
1330             resultOperand = putInstruction[1].u.operand;
1331             usesResult = true;
1332             m_currentProfilingIndex = nextOffset;
1333             prediction = getPrediction();
1334             nextOffset += OPCODE_LENGTH(op_call_put_result);
1335         }
1336
1337         if (callType == ConstantInternalFunction) {
1338             if (handleConstantInternalFunction(usesResult, resultOperand, m_graph.valueOfInternalFunctionConstant(callTarget), registerOffset, argumentCountIncludingThis, prediction, kind))
1339                 return;
1340             
1341             // Can only handle this using the generic call handler.
1342             addCall(interpreter, currentInstruction, op);
1343             return;
1344         }
1345         
1346         JSFunction* expectedFunction;
1347         Intrinsic intrinsic;
1348         bool certainAboutExpectedFunction;
1349         if (callType == ConstantFunction) {
1350             expectedFunction = m_graph.valueOfFunctionConstant(callTarget);
1351             intrinsic = expectedFunction->executable()->intrinsicFor(kind);
1352             certainAboutExpectedFunction = true;
1353         } else {
1354             ASSERT(callType == LinkedFunction);
1355             expectedFunction = callLinkStatus.callTarget();
1356             intrinsic = expectedFunction->executable()->intrinsicFor(kind);
1357             certainAboutExpectedFunction = false;
1358         }
1359                 
1360         if (intrinsic != NoIntrinsic) {
1361             if (!certainAboutExpectedFunction)
1362                 emitFunctionCheck(expectedFunction, callTarget, registerOffset, kind);
1363             
1364             if (handleIntrinsic(usesResult, resultOperand, intrinsic, registerOffset, argumentCountIncludingThis, prediction)) {
1365                 if (!certainAboutExpectedFunction) {
1366                     // Need to keep the call target alive for OSR. We could easily optimize this out if we wanted
1367                     // to, since at this point we know that the call target is a constant. It's just that OSR isn't
1368                     // smart enough to figure that out, since it doesn't understand CheckFunction.
1369                     addToGraph(Phantom, callTarget);
1370                 }
1371                 
1372                 return;
1373             }
1374         } else if (handleInlining(usesResult, currentInstruction[1].u.operand, callTarget, resultOperand, certainAboutExpectedFunction, expectedFunction, registerOffset, argumentCountIncludingThis, nextOffset, kind))
1375             return;
1376     }
1377     
1378     addCall(interpreter, currentInstruction, op);
1379 }
1380
1381 void ByteCodeParser::emitFunctionCheck(JSFunction* expectedFunction, NodeIndex callTarget, int registerOffset, CodeSpecializationKind kind)
1382 {
1383     NodeIndex thisArgument;
1384     if (kind == CodeForCall)
1385         thisArgument = get(registerOffset + argumentToOperand(0));
1386     else
1387         thisArgument = NoNode;
1388     addToGraph(CheckFunction, OpInfo(expectedFunction), callTarget, thisArgument);
1389 }
1390
1391 bool ByteCodeParser::handleInlining(bool usesResult, int callTarget, NodeIndex callTargetNodeIndex, int resultOperand, bool certainAboutExpectedFunction, JSFunction* expectedFunction, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind kind)
1392 {
1393     // First, the really simple checks: do we have an actual JS function?
1394     if (!expectedFunction)
1395         return false;
1396     if (expectedFunction->isHostFunction())
1397         return false;
1398     
1399     FunctionExecutable* executable = expectedFunction->jsExecutable();
1400     
1401     // Does the number of arguments we're passing match the arity of the target? We currently
1402     // inline only if the number of arguments passed is greater than or equal to the number
1403     // arguments expected.
1404     if (static_cast<int>(executable->parameterCount()) + 1 > argumentCountIncludingThis)
1405         return false;
1406     
1407     // Have we exceeded inline stack depth, or are we trying to inline a recursive call?
1408     // If either of these are detected, then don't inline.
1409     unsigned depth = 0;
1410     for (InlineStackEntry* entry = m_inlineStackTop; entry; entry = entry->m_caller) {
1411         ++depth;
1412         if (depth >= Options::maximumInliningDepth())
1413             return false; // Depth exceeded.
1414         
1415         if (entry->executable() == executable)
1416             return false; // Recursion detected.
1417     }
1418     
1419     // Does the code block's size match the heuristics/requirements for being
1420     // an inline candidate?
1421     CodeBlock* profiledBlock = executable->profiledCodeBlockFor(kind);
1422     if (!profiledBlock)
1423         return false;
1424     
1425     if (!mightInlineFunctionFor(profiledBlock, kind))
1426         return false;
1427     
1428     // If we get here then it looks like we should definitely inline this code. Proceed
1429     // with parsing the code to get bytecode, so that we can then parse the bytecode.
1430     // Note that if LLInt is enabled, the bytecode will always be available. Also note
1431     // that if LLInt is enabled, we may inline a code block that has never been JITted
1432     // before!
1433     CodeBlock* codeBlock = m_codeBlockCache.get(CodeBlockKey(executable, kind), expectedFunction->scope());
1434     if (!codeBlock)
1435         return false;
1436     
1437     ASSERT(canInlineFunctionFor(codeBlock, kind));
1438
1439 #if DFG_ENABLE(DEBUG_VERBOSE)
1440     dataLogF("Inlining executable %p.\n", executable);
1441 #endif
1442     
1443     // Now we know without a doubt that we are committed to inlining. So begin the process
1444     // by checking the callee (if necessary) and making sure that arguments and the callee
1445     // are flushed.
1446     if (!certainAboutExpectedFunction)
1447         emitFunctionCheck(expectedFunction, callTargetNodeIndex, registerOffset, kind);
1448     
1449     // FIXME: Don't flush constants!
1450     
1451     int inlineCallFrameStart = m_inlineStackTop->remapOperand(registerOffset) - JSStack::CallFrameHeaderSize;
1452     
1453     // Make sure that the area used by the call frame is reserved.
1454     for (int arg = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numVars; arg-- > inlineCallFrameStart;)
1455         m_preservedVars.set(arg);
1456     
1457     // Make sure that we have enough locals.
1458     unsigned newNumLocals = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters;
1459     if (newNumLocals > m_numLocals) {
1460         m_numLocals = newNumLocals;
1461         for (size_t i = 0; i < m_graph.m_blocks.size(); ++i)
1462             m_graph.m_blocks[i]->ensureLocals(newNumLocals);
1463     }
1464     
1465     size_t argumentPositionStart = m_graph.m_argumentPositions.size();
1466
1467     InlineStackEntry inlineStackEntry(
1468         this, codeBlock, profiledBlock, m_graph.m_blocks.size() - 1,
1469         (VirtualRegister)m_inlineStackTop->remapOperand(callTarget), expectedFunction,
1470         (VirtualRegister)m_inlineStackTop->remapOperand(
1471             usesResult ? resultOperand : InvalidVirtualRegister),
1472         (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind);
1473     
1474     // This is where the actual inlining really happens.
1475     unsigned oldIndex = m_currentIndex;
1476     unsigned oldProfilingIndex = m_currentProfilingIndex;
1477     m_currentIndex = 0;
1478     m_currentProfilingIndex = 0;
1479
1480     addToGraph(InlineStart, OpInfo(argumentPositionStart));
1481     
1482     parseCodeBlock();
1483     
1484     m_currentIndex = oldIndex;
1485     m_currentProfilingIndex = oldProfilingIndex;
1486     
1487     // If the inlined code created some new basic blocks, then we have linking to do.
1488     if (inlineStackEntry.m_callsiteBlockHead != m_graph.m_blocks.size() - 1) {
1489         
1490         ASSERT(!inlineStackEntry.m_unlinkedBlocks.isEmpty());
1491         if (inlineStackEntry.m_callsiteBlockHeadNeedsLinking)
1492             linkBlock(m_graph.m_blocks[inlineStackEntry.m_callsiteBlockHead].get(), inlineStackEntry.m_blockLinkingTargets);
1493         else
1494             ASSERT(m_graph.m_blocks[inlineStackEntry.m_callsiteBlockHead]->isLinked);
1495         
1496         // It's possible that the callsite block head is not owned by the caller.
1497         if (!inlineStackEntry.m_caller->m_unlinkedBlocks.isEmpty()) {
1498             // It's definitely owned by the caller, because the caller created new blocks.
1499             // Assert that this all adds up.
1500             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_blockIndex == inlineStackEntry.m_callsiteBlockHead);
1501             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking);
1502             inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking = false;
1503         } else {
1504             // It's definitely not owned by the caller. Tell the caller that he does not
1505             // need to link his callsite block head, because we did it for him.
1506             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking);
1507             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHead == inlineStackEntry.m_callsiteBlockHead);
1508             inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking = false;
1509         }
1510         
1511         linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
1512     } else
1513         ASSERT(inlineStackEntry.m_unlinkedBlocks.isEmpty());
1514     
1515     // If there was a return, but no early returns, then we're done. We allow parsing of
1516     // the caller to continue in whatever basic block we're in right now.
1517     if (!inlineStackEntry.m_didEarlyReturn && inlineStackEntry.m_didReturn) {
1518         BasicBlock* lastBlock = m_graph.m_blocks.last().get();
1519         ASSERT(lastBlock->isEmpty() || !m_graph.last().isTerminal());
1520         
1521         // If we created new blocks then the last block needs linking, but in the
1522         // caller. It doesn't need to be linked to, but it needs outgoing links.
1523         if (!inlineStackEntry.m_unlinkedBlocks.isEmpty()) {
1524 #if DFG_ENABLE(DEBUG_VERBOSE)
1525             dataLogF("Reascribing bytecode index of block %p from bc#%u to bc#%u (inline return case).\n", lastBlock, lastBlock->bytecodeBegin, m_currentIndex);
1526 #endif
1527             // For debugging purposes, set the bytecodeBegin. Note that this doesn't matter
1528             // for release builds because this block will never serve as a potential target
1529             // in the linker's binary search.
1530             lastBlock->bytecodeBegin = m_currentIndex;
1531             m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.m_blocks.size() - 1));
1532         }
1533         
1534         m_currentBlock = m_graph.m_blocks.last().get();
1535
1536 #if DFG_ENABLE(DEBUG_VERBOSE)
1537         dataLogF("Done inlining executable %p, continuing code generation at epilogue.\n", executable);
1538 #endif
1539         return true;
1540     }
1541     
1542     // If we get to this point then all blocks must end in some sort of terminals.
1543     ASSERT(m_graph.last().isTerminal());
1544     
1545     // Link the early returns to the basic block we're about to create.
1546     for (size_t i = 0; i < inlineStackEntry.m_unlinkedBlocks.size(); ++i) {
1547         if (!inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking)
1548             continue;
1549         BasicBlock* block = m_graph.m_blocks[inlineStackEntry.m_unlinkedBlocks[i].m_blockIndex].get();
1550         ASSERT(!block->isLinked);
1551         Node& node = m_graph[block->last()];
1552         ASSERT(node.op() == Jump);
1553         ASSERT(node.takenBlockIndex() == NoBlock);
1554         node.setTakenBlockIndex(m_graph.m_blocks.size());
1555         inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking = false;
1556 #if !ASSERT_DISABLED
1557         block->isLinked = true;
1558 #endif
1559     }
1560     
1561     // Need to create a new basic block for the continuation at the caller.
1562     OwnPtr<BasicBlock> block = adoptPtr(new BasicBlock(nextOffset, m_numArguments, m_numLocals));
1563 #if DFG_ENABLE(DEBUG_VERBOSE)
1564     dataLogF("Creating inline epilogue basic block %p, #%zu for %p bc#%u at inline depth %u.\n", block.get(), m_graph.m_blocks.size(), m_inlineStackTop->executable(), m_currentIndex, CodeOrigin::inlineDepthForCallFrame(m_inlineStackTop->m_inlineCallFrame));
1565 #endif
1566     m_currentBlock = block.get();
1567     ASSERT(m_inlineStackTop->m_caller->m_blockLinkingTargets.isEmpty() || m_graph.m_blocks[m_inlineStackTop->m_caller->m_blockLinkingTargets.last()]->bytecodeBegin < nextOffset);
1568     m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.m_blocks.size()));
1569     m_inlineStackTop->m_caller->m_blockLinkingTargets.append(m_graph.m_blocks.size());
1570     m_graph.m_blocks.append(block.release());
1571     prepareToParseBlock();
1572     
1573     // At this point we return and continue to generate code for the caller, but
1574     // in the new basic block.
1575 #if DFG_ENABLE(DEBUG_VERBOSE)
1576     dataLogF("Done inlining executable %p, continuing code generation in new block.\n", executable);
1577 #endif
1578     return true;
1579 }
1580
1581 void ByteCodeParser::setIntrinsicResult(bool usesResult, int resultOperand, NodeIndex nodeIndex)
1582 {
1583     if (!usesResult)
1584         return;
1585     set(resultOperand, nodeIndex);
1586 }
1587
1588 bool ByteCodeParser::handleMinMax(bool usesResult, int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis)
1589 {
1590     if (argumentCountIncludingThis == 1) { // Math.min()
1591         setIntrinsicResult(usesResult, resultOperand, constantNaN());
1592         return true;
1593     }
1594      
1595     if (argumentCountIncludingThis == 2) { // Math.min(x)
1596         // FIXME: what we'd really like is a ValueToNumber, except we don't support that right now. Oh well.
1597         NodeIndex result = get(registerOffset + argumentToOperand(1));
1598         addToGraph(CheckNumber, result);
1599         setIntrinsicResult(usesResult, resultOperand, result);
1600         return true;
1601     }
1602     
1603     if (argumentCountIncludingThis == 3) { // Math.min(x, y)
1604         setIntrinsicResult(usesResult, resultOperand, addToGraph(op, get(registerOffset + argumentToOperand(1)), get(registerOffset + argumentToOperand(2))));
1605         return true;
1606     }
1607     
1608     // Don't handle >=3 arguments for now.
1609     return false;
1610 }
1611
1612 // FIXME: We dead-code-eliminate unused Math intrinsics, but that's invalid because
1613 // they need to perform the ToNumber conversion, which can have side-effects.
1614 bool ByteCodeParser::handleIntrinsic(bool usesResult, int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction)
1615 {
1616     switch (intrinsic) {
1617     case AbsIntrinsic: {
1618         if (argumentCountIncludingThis == 1) { // Math.abs()
1619             setIntrinsicResult(usesResult, resultOperand, constantNaN());
1620             return true;
1621         }
1622
1623         if (!MacroAssembler::supportsFloatingPointAbs())
1624             return false;
1625
1626         NodeIndex nodeIndex = addToGraph(ArithAbs, get(registerOffset + argumentToOperand(1)));
1627         if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
1628             m_graph[nodeIndex].mergeFlags(NodeMayOverflow);
1629         setIntrinsicResult(usesResult, resultOperand, nodeIndex);
1630         return true;
1631     }
1632
1633     case MinIntrinsic:
1634         return handleMinMax(usesResult, resultOperand, ArithMin, registerOffset, argumentCountIncludingThis);
1635         
1636     case MaxIntrinsic:
1637         return handleMinMax(usesResult, resultOperand, ArithMax, registerOffset, argumentCountIncludingThis);
1638         
1639     case SqrtIntrinsic: {
1640         if (argumentCountIncludingThis == 1) { // Math.sqrt()
1641             setIntrinsicResult(usesResult, resultOperand, constantNaN());
1642             return true;
1643         }
1644         
1645         if (!MacroAssembler::supportsFloatingPointSqrt())
1646             return false;
1647         
1648         setIntrinsicResult(usesResult, resultOperand, addToGraph(ArithSqrt, get(registerOffset + argumentToOperand(1))));
1649         return true;
1650     }
1651         
1652     case ArrayPushIntrinsic: {
1653         if (argumentCountIncludingThis != 2)
1654             return false;
1655         
1656         ArrayMode arrayMode = getArrayMode(m_currentInstruction[5].u.arrayProfile);
1657         if (!arrayMode.isJSArray())
1658             return false;
1659         switch (arrayMode.type()) {
1660         case Array::Undecided:
1661         case Array::Int32:
1662         case Array::Double:
1663         case Array::Contiguous:
1664         case Array::ArrayStorage: {
1665             NodeIndex arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1666             if (usesResult)
1667                 set(resultOperand, arrayPush);
1668             
1669             return true;
1670         }
1671             
1672         default:
1673             return false;
1674         }
1675     }
1676         
1677     case ArrayPopIntrinsic: {
1678         if (argumentCountIncludingThis != 1)
1679             return false;
1680         
1681         ArrayMode arrayMode = getArrayMode(m_currentInstruction[5].u.arrayProfile);
1682         if (!arrayMode.isJSArray())
1683             return false;
1684         switch (arrayMode.type()) {
1685         case Array::Int32:
1686         case Array::Double:
1687         case Array::Contiguous:
1688         case Array::ArrayStorage: {
1689             NodeIndex arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)));
1690             if (usesResult)
1691                 set(resultOperand, arrayPop);
1692             return true;
1693         }
1694             
1695         default:
1696             return false;
1697         }
1698     }
1699
1700     case CharCodeAtIntrinsic: {
1701         if (argumentCountIncludingThis != 2)
1702             return false;
1703
1704         int thisOperand = registerOffset + argumentToOperand(0);
1705         int indexOperand = registerOffset + argumentToOperand(1);
1706         NodeIndex charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
1707
1708         if (usesResult)
1709             set(resultOperand, charCode);
1710         return true;
1711     }
1712
1713     case CharAtIntrinsic: {
1714         if (argumentCountIncludingThis != 2)
1715             return false;
1716
1717         int thisOperand = registerOffset + argumentToOperand(0);
1718         int indexOperand = registerOffset + argumentToOperand(1);
1719         NodeIndex charCode = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
1720
1721         if (usesResult)
1722             set(resultOperand, charCode);
1723         return true;
1724     }
1725
1726     case RegExpExecIntrinsic: {
1727         if (argumentCountIncludingThis != 2)
1728             return false;
1729         
1730         NodeIndex regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1731         if (usesResult)
1732             set(resultOperand, regExpExec);
1733         
1734         return true;
1735     }
1736         
1737     case RegExpTestIntrinsic: {
1738         if (argumentCountIncludingThis != 2)
1739             return false;
1740         
1741         NodeIndex regExpExec = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1742         if (usesResult)
1743             set(resultOperand, regExpExec);
1744         
1745         return true;
1746     }
1747         
1748     default:
1749         return false;
1750     }
1751 }
1752
1753 bool ByteCodeParser::handleConstantInternalFunction(
1754     bool usesResult, int resultOperand, InternalFunction* function, int registerOffset,
1755     int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind kind)
1756 {
1757     // If we ever find that we have a lot of internal functions that we specialize for,
1758     // then we should probably have some sort of hashtable dispatch, or maybe even
1759     // dispatch straight through the MethodTable of the InternalFunction. But for now,
1760     // it seems that this case is hit infrequently enough, and the number of functions
1761     // we know about is small enough, that having just a linear cascade of if statements
1762     // is good enough.
1763     
1764     UNUSED_PARAM(prediction); // Remove this once we do more things.
1765     UNUSED_PARAM(kind); // Remove this once we do more things.
1766     
1767     if (function->classInfo() == &ArrayConstructor::s_info) {
1768         if (argumentCountIncludingThis == 2) {
1769             setIntrinsicResult(
1770                 usesResult, resultOperand,
1771                 addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(registerOffset + argumentToOperand(1))));
1772             return true;
1773         }
1774         
1775         for (int i = 1; i < argumentCountIncludingThis; ++i)
1776             addVarArgChild(get(registerOffset + argumentToOperand(i)));
1777         setIntrinsicResult(
1778             usesResult, resultOperand,
1779             addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
1780         return true;
1781     }
1782     
1783     return false;
1784 }
1785
1786 NodeIndex ByteCodeParser::handleGetByOffset(SpeculatedType prediction, NodeIndex base, unsigned identifierNumber, PropertyOffset offset)
1787 {
1788     NodeIndex propertyStorage;
1789     if (isInlineOffset(offset))
1790         propertyStorage = base;
1791     else
1792         propertyStorage = addToGraph(GetButterfly, base);
1793     // FIXME: It would be far more efficient for load elimination (and safer from
1794     // an OSR standpoint) if GetByOffset also referenced the object we were loading
1795     // from, and if we could load eliminate a GetByOffset even if the butterfly
1796     // had changed. That would be a great success.
1797     NodeIndex getByOffset = addToGraph(GetByOffset, OpInfo(m_graph.m_storageAccessData.size()), OpInfo(prediction), propertyStorage);
1798
1799     StorageAccessData storageAccessData;
1800     storageAccessData.offset = indexRelativeToBase(offset);
1801     storageAccessData.identifierNumber = identifierNumber;
1802     m_graph.m_storageAccessData.append(storageAccessData);
1803
1804     return getByOffset;
1805 }
1806
1807 void ByteCodeParser::handleGetByOffset(
1808     int destinationOperand, SpeculatedType prediction, NodeIndex base, unsigned identifierNumber,
1809     PropertyOffset offset)
1810 {
1811     set(destinationOperand, handleGetByOffset(prediction, base, identifierNumber, offset));
1812 }
1813
1814 void ByteCodeParser::handleGetById(
1815     int destinationOperand, SpeculatedType prediction, NodeIndex base, unsigned identifierNumber,
1816     const GetByIdStatus& getByIdStatus)
1817 {
1818     if (!getByIdStatus.isSimple()
1819         || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)) {
1820         set(destinationOperand,
1821             addToGraph(
1822                 getByIdStatus.makesCalls() ? GetByIdFlush : GetById,
1823                 OpInfo(identifierNumber), OpInfo(prediction), base));
1824         return;
1825     }
1826     
1827     ASSERT(getByIdStatus.structureSet().size());
1828                 
1829     // The implementation of GetByOffset does not know to terminate speculative
1830     // execution if it doesn't have a prediction, so we do it manually.
1831     if (prediction == SpecNone)
1832         addToGraph(ForceOSRExit);
1833     
1834     NodeIndex originalBaseForBaselineJIT = base;
1835                 
1836     addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(getByIdStatus.structureSet())), base);
1837     
1838     if (!getByIdStatus.chain().isEmpty()) {
1839         Structure* currentStructure = getByIdStatus.structureSet().singletonStructure();
1840         JSObject* currentObject = 0;
1841         for (unsigned i = 0; i < getByIdStatus.chain().size(); ++i) {
1842             currentObject = asObject(currentStructure->prototypeForLookup(m_inlineStackTop->m_codeBlock));
1843             currentStructure = getByIdStatus.chain()[i];
1844             base = addStructureTransitionCheck(currentObject, currentStructure);
1845         }
1846     }
1847     
1848     // Unless we want bugs like https://bugs.webkit.org/show_bug.cgi?id=88783, we need to
1849     // ensure that the base of the original get_by_id is kept alive until we're done with
1850     // all of the speculations. We only insert the Phantom if there had been a CheckStructure
1851     // on something other than the base following the CheckStructure on base, or if the
1852     // access was compiled to a WeakJSConstant specific value, in which case we might not
1853     // have any explicit use of the base at all.
1854     if (getByIdStatus.specificValue() || originalBaseForBaselineJIT != base)
1855         addToGraph(Phantom, originalBaseForBaselineJIT);
1856     
1857     if (getByIdStatus.specificValue()) {
1858         ASSERT(getByIdStatus.specificValue().isCell());
1859         
1860         set(destinationOperand, cellConstant(getByIdStatus.specificValue().asCell()));
1861         return;
1862     }
1863     
1864     handleGetByOffset(
1865         destinationOperand, prediction, base, identifierNumber, getByIdStatus.offset());
1866 }
1867
1868 void ByteCodeParser::prepareToParseBlock()
1869 {
1870     for (unsigned i = 0; i < m_constants.size(); ++i)
1871         m_constants[i] = ConstantRecord();
1872     m_cellConstantNodes.clear();
1873 }
1874
1875 bool ByteCodeParser::parseResolveOperations(SpeculatedType prediction, unsigned identifier, unsigned operations, unsigned putToBaseOperation, NodeIndex* base, NodeIndex* value)
1876 {
1877     ResolveOperations* resolveOperations = m_codeBlock->resolveOperations(operations);
1878     if (resolveOperations->isEmpty()) {
1879         addToGraph(ForceOSRExit);
1880         return false;
1881     }
1882     JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
1883     int skipCount = 0;
1884     bool skippedScopes = false;
1885     bool setBase = false;
1886     ResolveOperation* pc = resolveOperations->data();
1887     NodeIndex localBase = 0;
1888     bool resolvingBase = true;
1889     while (resolvingBase) {
1890         switch (pc->m_operation) {
1891         case ResolveOperation::ReturnGlobalObjectAsBase:
1892             *base = cellConstant(globalObject);
1893             ASSERT(!value);
1894             return true;
1895
1896         case ResolveOperation::SetBaseToGlobal:
1897             *base = cellConstant(globalObject);
1898             setBase = true;
1899             resolvingBase = false;
1900             ++pc;
1901             break;
1902
1903         case ResolveOperation::SetBaseToUndefined:
1904             *base = constantUndefined();
1905             setBase = true;
1906             resolvingBase = false;
1907             ++pc;
1908             break;
1909
1910         case ResolveOperation::SetBaseToScope:
1911             localBase = addToGraph(GetScope, OpInfo(skipCount));
1912             *base = localBase;
1913             setBase = true;
1914
1915             resolvingBase = false;
1916
1917             // Reset the scope skipping as we've already loaded it
1918             skippedScopes = false;
1919             ++pc;
1920             break;
1921         case ResolveOperation::ReturnScopeAsBase:
1922             *base = addToGraph(GetScope, OpInfo(skipCount));
1923             ASSERT(!value);
1924             return true;
1925
1926         case ResolveOperation::SkipTopScopeNode:
1927             if (m_inlineStackTop->m_inlineCallFrame)
1928                 return false;
1929             skipCount = 1;
1930             skippedScopes = true;
1931             ++pc;
1932             break;
1933
1934         case ResolveOperation::SkipScopes:
1935             if (m_inlineStackTop->m_inlineCallFrame)
1936                 return false;
1937             skipCount += pc->m_scopesToSkip;
1938             skippedScopes = true;
1939             ++pc;
1940             break;
1941
1942         case ResolveOperation::CheckForDynamicEntriesBeforeGlobalScope:
1943             return false;
1944
1945         case ResolveOperation::Fail:
1946             return false;
1947
1948         default:
1949             resolvingBase = false;
1950         }
1951     }
1952     if (skippedScopes)
1953         localBase = addToGraph(GetScope, OpInfo(skipCount));
1954
1955     if (base && !setBase)
1956         *base = localBase;
1957
1958     ASSERT(value);
1959     ResolveOperation* resolveValueOperation = pc;
1960     switch (resolveValueOperation->m_operation) {
1961     case ResolveOperation::GetAndReturnGlobalProperty: {
1962         ResolveGlobalStatus status = ResolveGlobalStatus::computeFor(m_inlineStackTop->m_profiledBlock, m_currentIndex, resolveValueOperation, m_codeBlock->identifier(identifier));
1963         if (status.isSimple()) {
1964             ASSERT(status.structure());
1965
1966             NodeIndex globalObjectNode = addStructureTransitionCheck(globalObject, status.structure());
1967
1968             if (status.specificValue()) {
1969                 ASSERT(status.specificValue().isCell());
1970                 *value = cellConstant(status.specificValue().asCell());
1971             } else
1972                 *value = handleGetByOffset(prediction, globalObjectNode, identifier, status.offset());
1973             return true;
1974         }
1975
1976         NodeIndex resolve = addToGraph(ResolveGlobal, OpInfo(m_graph.m_resolveGlobalData.size()), OpInfo(prediction));
1977         m_graph.m_resolveGlobalData.append(ResolveGlobalData());
1978         ResolveGlobalData& data = m_graph.m_resolveGlobalData.last();
1979         data.identifierNumber = identifier;
1980         data.resolveOperationsIndex = operations;
1981         data.putToBaseOperationIndex = putToBaseOperation;
1982         data.resolvePropertyIndex = resolveValueOperation - resolveOperations->data();
1983         *value = resolve;
1984         return true;
1985     }
1986     case ResolveOperation::GetAndReturnGlobalVar: {
1987         *value = addToGraph(GetGlobalVar,
1988                             OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)),
1989                             OpInfo(prediction));
1990         return true;
1991     }
1992     case ResolveOperation::GetAndReturnGlobalVarWatchable: {
1993         SpeculatedType prediction = getPrediction();
1994
1995         JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
1996
1997         Identifier ident = m_codeBlock->identifier(identifier);
1998         SymbolTableEntry entry = globalObject->symbolTable()->get(ident.impl());
1999         if (!entry.couldBeWatched()) {
2000             *value = addToGraph(GetGlobalVar, OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)), OpInfo(prediction));
2001             return true;
2002         }
2003
2004         // The watchpoint is still intact! This means that we will get notified if the
2005         // current value in the global variable changes. So, we can inline that value.
2006         // Moreover, currently we can assume that this value is a JSFunction*, which
2007         // implies that it's a cell. This simplifies things, since in general we'd have
2008         // to use a JSConstant for non-cells and a WeakJSConstant for cells. So instead
2009         // of having both cases we just assert that the value is a cell.
2010
2011         // NB. If it wasn't for CSE, GlobalVarWatchpoint would have no need for the
2012         // register pointer. But CSE tracks effects on global variables by comparing
2013         // register pointers. Because CSE executes multiple times while the backend
2014         // executes once, we use the following performance trade-off:
2015         // - The node refers directly to the register pointer to make CSE super cheap.
2016         // - To perform backend code generation, the node only contains the identifier
2017         //   number, from which it is possible to get (via a few average-time O(1)
2018         //   lookups) to the WatchpointSet.
2019
2020         addToGraph(GlobalVarWatchpoint, OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)), OpInfo(identifier));
2021
2022         JSValue specificValue = globalObject->registerAt(entry.getIndex()).get();
2023         ASSERT(specificValue.isCell());
2024         *value = cellConstant(specificValue.asCell());
2025         return true;
2026     }
2027     case ResolveOperation::GetAndReturnScopedVar: {
2028         NodeIndex getScopeRegisters = addToGraph(GetScopeRegisters, localBase);
2029         *value = addToGraph(GetScopedVar, OpInfo(resolveValueOperation->m_offset), OpInfo(prediction), getScopeRegisters);
2030         return true;
2031     }
2032     default:
2033         CRASH();
2034         return false;
2035     }
2036
2037 }
2038
2039 bool ByteCodeParser::parseBlock(unsigned limit)
2040 {
2041     bool shouldContinueParsing = true;
2042
2043     Interpreter* interpreter = m_globalData->interpreter;
2044     Instruction* instructionsBegin = m_inlineStackTop->m_codeBlock->instructions().begin();
2045     unsigned blockBegin = m_currentIndex;
2046     
2047     // If we are the first basic block, introduce markers for arguments. This allows
2048     // us to track if a use of an argument may use the actual argument passed, as
2049     // opposed to using a value we set explicitly.
2050     if (m_currentBlock == m_graph.m_blocks[0].get() && !m_inlineStackTop->m_inlineCallFrame) {
2051         m_graph.m_arguments.resize(m_numArguments);
2052         for (unsigned argument = 0; argument < m_numArguments; ++argument) {
2053             VariableAccessData* variable = newVariableAccessData(
2054                 argumentToOperand(argument), m_codeBlock->isCaptured(argumentToOperand(argument)));
2055             variable->mergeStructureCheckHoistingFailed(
2056                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
2057             NodeIndex setArgument = addToGraph(SetArgument, OpInfo(variable));
2058             m_graph.m_arguments[argument] = setArgument;
2059             m_currentBlock->variablesAtHead.setArgumentFirstTime(argument, setArgument);
2060             m_currentBlock->variablesAtTail.setArgumentFirstTime(argument, setArgument);
2061         }
2062     }
2063
2064     while (true) {
2065         m_currentProfilingIndex = m_currentIndex;
2066
2067         // Don't extend over jump destinations.
2068         if (m_currentIndex == limit) {
2069             // Ordinarily we want to plant a jump. But refuse to do this if the block is
2070             // empty. This is a special case for inlining, which might otherwise create
2071             // some empty blocks in some cases. When parseBlock() returns with an empty
2072             // block, it will get repurposed instead of creating a new one. Note that this
2073             // logic relies on every bytecode resulting in one or more nodes, which would
2074             // be true anyway except for op_loop_hint, which emits a Phantom to force this
2075             // to be true.
2076             if (!m_currentBlock->isEmpty())
2077                 addToGraph(Jump, OpInfo(m_currentIndex));
2078             else {
2079 #if DFG_ENABLE(DEBUG_VERBOSE)
2080                 dataLogF("Refusing to plant jump at limit %u because block %p is empty.\n", limit, m_currentBlock);
2081 #endif
2082             }
2083             return shouldContinueParsing;
2084         }
2085         
2086         // Switch on the current bytecode opcode.
2087         Instruction* currentInstruction = instructionsBegin + m_currentIndex;
2088         m_currentInstruction = currentInstruction; // Some methods want to use this, and we'd rather not thread it through calls.
2089         OpcodeID opcodeID = interpreter->getOpcodeID(currentInstruction->u.opcode);
2090         switch (opcodeID) {
2091
2092         // === Function entry opcodes ===
2093
2094         case op_enter:
2095             // Initialize all locals to undefined.
2096             for (int i = 0; i < m_inlineStackTop->m_codeBlock->m_numVars; ++i)
2097                 set(i, constantUndefined(), SetOnEntry);
2098             NEXT_OPCODE(op_enter);
2099
2100         case op_convert_this: {
2101             NodeIndex op1 = getThis();
2102             if (m_graph[op1].op() != ConvertThis) {
2103                 ValueProfile* profile =
2104                     m_inlineStackTop->m_profiledBlock->valueProfileForBytecodeOffset(m_currentProfilingIndex);
2105                 profile->computeUpdatedPrediction();
2106 #if DFG_ENABLE(DEBUG_VERBOSE)
2107                 dataLogF("[@%lu bc#%u]: profile %p: ", m_graph.size(), m_currentProfilingIndex, profile);
2108                 profile->dump(WTF::dataFile());
2109                 dataLogF("\n");
2110 #endif
2111                 if (profile->m_singletonValueIsTop
2112                     || !profile->m_singletonValue
2113                     || !profile->m_singletonValue.isCell()
2114                     || profile->m_singletonValue.asCell()->classInfo() != &Structure::s_info)
2115                     setThis(addToGraph(ConvertThis, op1));
2116                 else {
2117                     addToGraph(
2118                         CheckStructure,
2119                         OpInfo(m_graph.addStructureSet(jsCast<Structure*>(profile->m_singletonValue.asCell()))),
2120                         op1);
2121                 }
2122             }
2123             NEXT_OPCODE(op_convert_this);
2124         }
2125
2126         case op_create_this: {
2127             int calleeOperand = currentInstruction[2].u.operand;
2128             NodeIndex callee = get(calleeOperand);
2129             bool alreadyEmitted = false;
2130             if (m_graph[callee].op() == WeakJSConstant) {
2131                 JSCell* cell = m_graph[callee].weakConstant();
2132                 ASSERT(cell->inherits(&JSFunction::s_info));
2133                 
2134                 JSFunction* function = jsCast<JSFunction*>(cell);
2135                 Structure* inheritorID = function->tryGetKnownInheritorID();
2136                 if (inheritorID) {
2137                     addToGraph(InheritorIDWatchpoint, OpInfo(function));
2138                     set(currentInstruction[1].u.operand, addToGraph(NewObject, OpInfo(inheritorID)));
2139                     alreadyEmitted = true;
2140                 }
2141             }
2142             if (!alreadyEmitted)
2143                 set(currentInstruction[1].u.operand, addToGraph(CreateThis, callee));
2144             NEXT_OPCODE(op_create_this);
2145         }
2146             
2147         case op_new_object: {
2148             set(currentInstruction[1].u.operand, addToGraph(NewObject, OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->emptyObjectStructure())));
2149             NEXT_OPCODE(op_new_object);
2150         }
2151             
2152         case op_new_array: {
2153             int startOperand = currentInstruction[2].u.operand;
2154             int numOperands = currentInstruction[3].u.operand;
2155             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
2156             for (int operandIdx = startOperand; operandIdx < startOperand + numOperands; ++operandIdx)
2157                 addVarArgChild(get(operandIdx));
2158             set(currentInstruction[1].u.operand, addToGraph(Node::VarArg, NewArray, OpInfo(profile->selectIndexingType()), OpInfo(0)));
2159             NEXT_OPCODE(op_new_array);
2160         }
2161             
2162         case op_new_array_with_size: {
2163             int lengthOperand = currentInstruction[2].u.operand;
2164             ArrayAllocationProfile* profile = currentInstruction[3].u.arrayAllocationProfile;
2165             set(currentInstruction[1].u.operand, addToGraph(NewArrayWithSize, OpInfo(profile->selectIndexingType()), get(lengthOperand)));
2166             NEXT_OPCODE(op_new_array_with_size);
2167         }
2168             
2169         case op_new_array_buffer: {
2170             int startConstant = currentInstruction[2].u.operand;
2171             int numConstants = currentInstruction[3].u.operand;
2172             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
2173             NewArrayBufferData data;
2174             data.startConstant = m_inlineStackTop->m_constantBufferRemap[startConstant];
2175             data.numConstants = numConstants;
2176             data.indexingType = profile->selectIndexingType();
2177
2178             // If this statement has never executed, we'll have the wrong indexing type in the profile.
2179             for (int i = 0; i < numConstants; ++i) {
2180                 data.indexingType =
2181                     leastUpperBoundOfIndexingTypeAndValue(
2182                         data.indexingType,
2183                         m_codeBlock->constantBuffer(data.startConstant)[i]);
2184             }
2185             
2186             m_graph.m_newArrayBufferData.append(data);
2187             set(currentInstruction[1].u.operand, addToGraph(NewArrayBuffer, OpInfo(&m_graph.m_newArrayBufferData.last())));
2188             NEXT_OPCODE(op_new_array_buffer);
2189         }
2190             
2191         case op_new_regexp: {
2192             set(currentInstruction[1].u.operand, addToGraph(NewRegexp, OpInfo(currentInstruction[2].u.operand)));
2193             NEXT_OPCODE(op_new_regexp);
2194         }
2195             
2196         case op_get_callee: {
2197             ValueProfile* profile = currentInstruction[2].u.profile;
2198             profile->computeUpdatedPrediction();
2199             if (profile->m_singletonValueIsTop
2200                 || !profile->m_singletonValue
2201                 || !profile->m_singletonValue.isCell())
2202                 set(currentInstruction[1].u.operand, get(JSStack::Callee));
2203             else {
2204                 ASSERT(profile->m_singletonValue.asCell()->inherits(&JSFunction::s_info));
2205                 NodeIndex actualCallee = get(JSStack::Callee);
2206                 addToGraph(CheckFunction, OpInfo(profile->m_singletonValue.asCell()), actualCallee);
2207                 set(currentInstruction[1].u.operand, addToGraph(WeakJSConstant, OpInfo(profile->m_singletonValue.asCell())));
2208             }
2209             NEXT_OPCODE(op_get_callee);
2210         }
2211
2212         // === Bitwise operations ===
2213
2214         case op_bitand: {
2215             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2216             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2217             set(currentInstruction[1].u.operand, addToGraph(BitAnd, op1, op2));
2218             NEXT_OPCODE(op_bitand);
2219         }
2220
2221         case op_bitor: {
2222             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2223             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2224             set(currentInstruction[1].u.operand, addToGraph(BitOr, op1, op2));
2225             NEXT_OPCODE(op_bitor);
2226         }
2227
2228         case op_bitxor: {
2229             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2230             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2231             set(currentInstruction[1].u.operand, addToGraph(BitXor, op1, op2));
2232             NEXT_OPCODE(op_bitxor);
2233         }
2234
2235         case op_rshift: {
2236             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2237             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2238             NodeIndex result;
2239             // Optimize out shifts by zero.
2240             if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2241                 result = op1;
2242             else
2243                 result = addToGraph(BitRShift, op1, op2);
2244             set(currentInstruction[1].u.operand, result);
2245             NEXT_OPCODE(op_rshift);
2246         }
2247
2248         case op_lshift: {
2249             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2250             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2251             NodeIndex result;
2252             // Optimize out shifts by zero.
2253             if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2254                 result = op1;
2255             else
2256                 result = addToGraph(BitLShift, op1, op2);
2257             set(currentInstruction[1].u.operand, result);
2258             NEXT_OPCODE(op_lshift);
2259         }
2260
2261         case op_urshift: {
2262             NodeIndex op1 = getToInt32(currentInstruction[2].u.operand);
2263             NodeIndex op2 = getToInt32(currentInstruction[3].u.operand);
2264             NodeIndex result;
2265             // The result of a zero-extending right shift is treated as an unsigned value.
2266             // This means that if the top bit is set, the result is not in the int32 range,
2267             // and as such must be stored as a double. If the shift amount is a constant,
2268             // we may be able to optimize.
2269             if (isInt32Constant(op2)) {
2270                 // If we know we are shifting by a non-zero amount, then since the operation
2271                 // zero fills we know the top bit of the result must be zero, and as such the
2272                 // result must be within the int32 range. Conversely, if this is a shift by
2273                 // zero, then the result may be changed by the conversion to unsigned, but it
2274                 // is not necessary to perform the shift!
2275                 if (valueOfInt32Constant(op2) & 0x1f)
2276                     result = addToGraph(BitURShift, op1, op2);
2277                 else
2278                     result = makeSafe(addToGraph(UInt32ToNumber, op1));
2279             }  else {
2280                 // Cannot optimize at this stage; shift & potentially rebox as a double.
2281                 result = addToGraph(BitURShift, op1, op2);
2282                 result = makeSafe(addToGraph(UInt32ToNumber, result));
2283             }
2284             set(currentInstruction[1].u.operand, result);
2285             NEXT_OPCODE(op_urshift);
2286         }
2287
2288         // === Increment/Decrement opcodes ===
2289
2290         case op_pre_inc: {
2291             unsigned srcDst = currentInstruction[1].u.operand;
2292             NodeIndex op = get(srcDst);
2293             set(srcDst, makeSafe(addToGraph(ArithAdd, op, one())));
2294             NEXT_OPCODE(op_pre_inc);
2295         }
2296
2297         case op_post_inc: {
2298             unsigned result = currentInstruction[1].u.operand;
2299             unsigned srcDst = currentInstruction[2].u.operand;
2300             ASSERT(result != srcDst); // Required for assumptions we make during OSR.
2301             NodeIndex op = get(srcDst);
2302             set(result, op);
2303             set(srcDst, makeSafe(addToGraph(ArithAdd, op, one())));
2304             NEXT_OPCODE(op_post_inc);
2305         }
2306
2307         case op_pre_dec: {
2308             unsigned srcDst = currentInstruction[1].u.operand;
2309             NodeIndex op = get(srcDst);
2310             set(srcDst, makeSafe(addToGraph(ArithSub, op, one())));
2311             NEXT_OPCODE(op_pre_dec);
2312         }
2313
2314         case op_post_dec: {
2315             unsigned result = currentInstruction[1].u.operand;
2316             unsigned srcDst = currentInstruction[2].u.operand;
2317             NodeIndex op = get(srcDst);
2318             set(result, op);
2319             set(srcDst, makeSafe(addToGraph(ArithSub, op, one())));
2320             NEXT_OPCODE(op_post_dec);
2321         }
2322
2323         // === Arithmetic operations ===
2324
2325         case op_add: {
2326             NodeIndex op1 = get(currentInstruction[2].u.operand);
2327             NodeIndex op2 = get(currentInstruction[3].u.operand);
2328             if (m_graph[op1].hasNumberResult() && m_graph[op2].hasNumberResult())
2329                 set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithAdd, op1, op2)));
2330             else
2331                 set(currentInstruction[1].u.operand, makeSafe(addToGraph(ValueAdd, op1, op2)));
2332             NEXT_OPCODE(op_add);
2333         }
2334
2335         case op_sub: {
2336             NodeIndex op1 = get(currentInstruction[2].u.operand);
2337             NodeIndex op2 = get(currentInstruction[3].u.operand);
2338             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithSub, op1, op2)));
2339             NEXT_OPCODE(op_sub);
2340         }
2341
2342         case op_negate: {
2343             NodeIndex op1 = get(currentInstruction[2].u.operand);
2344             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithNegate, op1)));
2345             NEXT_OPCODE(op_negate);
2346         }
2347
2348         case op_mul: {
2349             // Multiply requires that the inputs are not truncated, unfortunately.
2350             NodeIndex op1 = get(currentInstruction[2].u.operand);
2351             NodeIndex op2 = get(currentInstruction[3].u.operand);
2352             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMul, op1, op2)));
2353             NEXT_OPCODE(op_mul);
2354         }
2355
2356         case op_mod: {
2357             NodeIndex op1 = get(currentInstruction[2].u.operand);
2358             NodeIndex op2 = get(currentInstruction[3].u.operand);
2359             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMod, op1, op2)));
2360             NEXT_OPCODE(op_mod);
2361         }
2362
2363         case op_div: {
2364             NodeIndex op1 = get(currentInstruction[2].u.operand);
2365             NodeIndex op2 = get(currentInstruction[3].u.operand);
2366             set(currentInstruction[1].u.operand, makeDivSafe(addToGraph(ArithDiv, op1, op2)));
2367             NEXT_OPCODE(op_div);
2368         }
2369
2370         // === Misc operations ===
2371
2372 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2373         case op_debug:
2374             addToGraph(Breakpoint);
2375             NEXT_OPCODE(op_debug);
2376 #endif
2377         case op_mov: {
2378             NodeIndex op = get(currentInstruction[2].u.operand);
2379             set(currentInstruction[1].u.operand, op);
2380             NEXT_OPCODE(op_mov);
2381         }
2382
2383         case op_check_has_instance:
2384             addToGraph(CheckHasInstance, get(currentInstruction[3].u.operand));
2385             NEXT_OPCODE(op_check_has_instance);
2386
2387         case op_instanceof: {
2388             NodeIndex value = get(currentInstruction[2].u.operand);
2389             NodeIndex prototype = get(currentInstruction[3].u.operand);
2390             set(currentInstruction[1].u.operand, addToGraph(InstanceOf, value, prototype));
2391             NEXT_OPCODE(op_instanceof);
2392         }
2393             
2394         case op_is_undefined: {
2395             NodeIndex value = get(currentInstruction[2].u.operand);
2396             set(currentInstruction[1].u.operand, addToGraph(IsUndefined, value));
2397             NEXT_OPCODE(op_is_undefined);
2398         }
2399
2400         case op_is_boolean: {
2401             NodeIndex value = get(currentInstruction[2].u.operand);
2402             set(currentInstruction[1].u.operand, addToGraph(IsBoolean, value));
2403             NEXT_OPCODE(op_is_boolean);
2404         }
2405
2406         case op_is_number: {
2407             NodeIndex value = get(currentInstruction[2].u.operand);
2408             set(currentInstruction[1].u.operand, addToGraph(IsNumber, value));
2409             NEXT_OPCODE(op_is_number);
2410         }
2411
2412         case op_is_string: {
2413             NodeIndex value = get(currentInstruction[2].u.operand);
2414             set(currentInstruction[1].u.operand, addToGraph(IsString, value));
2415             NEXT_OPCODE(op_is_string);
2416         }
2417
2418         case op_is_object: {
2419             NodeIndex value = get(currentInstruction[2].u.operand);
2420             set(currentInstruction[1].u.operand, addToGraph(IsObject, value));
2421             NEXT_OPCODE(op_is_object);
2422         }
2423
2424         case op_is_function: {
2425             NodeIndex value = get(currentInstruction[2].u.operand);
2426             set(currentInstruction[1].u.operand, addToGraph(IsFunction, value));
2427             NEXT_OPCODE(op_is_function);
2428         }
2429
2430         case op_not: {
2431             NodeIndex value = get(currentInstruction[2].u.operand);
2432             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, value));
2433             NEXT_OPCODE(op_not);
2434         }
2435             
2436         case op_to_primitive: {
2437             NodeIndex value = get(currentInstruction[2].u.operand);
2438             set(currentInstruction[1].u.operand, addToGraph(ToPrimitive, value));
2439             NEXT_OPCODE(op_to_primitive);
2440         }
2441             
2442         case op_strcat: {
2443             int startOperand = currentInstruction[2].u.operand;
2444             int numOperands = currentInstruction[3].u.operand;
2445             for (int operandIdx = startOperand; operandIdx < startOperand + numOperands; ++operandIdx)
2446                 addVarArgChild(get(operandIdx));
2447             set(currentInstruction[1].u.operand, addToGraph(Node::VarArg, StrCat, OpInfo(0), OpInfo(0)));
2448             NEXT_OPCODE(op_strcat);
2449         }
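        // op_strcat works over a contiguous run of registers, which is what the
        // loop above depends on: each of the numOperands registers starting at
        // startOperand becomes a child of one variadic StrCat node. E.g. a
        // chain like `a + b + c`, once known to be string concatenation, could
        // be emitted this way (the JS example is illustrative only).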
2450
2451         case op_less: {
2452             NodeIndex op1 = get(currentInstruction[2].u.operand);
2453             NodeIndex op2 = get(currentInstruction[3].u.operand);
2454             set(currentInstruction[1].u.operand, addToGraph(CompareLess, op1, op2));
2455             NEXT_OPCODE(op_less);
2456         }
2457
2458         case op_lesseq: {
2459             NodeIndex op1 = get(currentInstruction[2].u.operand);
2460             NodeIndex op2 = get(currentInstruction[3].u.operand);
2461             set(currentInstruction[1].u.operand, addToGraph(CompareLessEq, op1, op2));
2462             NEXT_OPCODE(op_lesseq);
2463         }
2464
2465         case op_greater: {
2466             NodeIndex op1 = get(currentInstruction[2].u.operand);
2467             NodeIndex op2 = get(currentInstruction[3].u.operand);
2468             set(currentInstruction[1].u.operand, addToGraph(CompareGreater, op1, op2));
2469             NEXT_OPCODE(op_greater);
2470         }
2471
2472         case op_greatereq: {
2473             NodeIndex op1 = get(currentInstruction[2].u.operand);
2474             NodeIndex op2 = get(currentInstruction[3].u.operand);
2475             set(currentInstruction[1].u.operand, addToGraph(CompareGreaterEq, op1, op2));
2476             NEXT_OPCODE(op_greatereq);
2477         }
2478
2479         case op_eq: {
2480             NodeIndex op1 = get(currentInstruction[2].u.operand);
2481             NodeIndex op2 = get(currentInstruction[3].u.operand);
2482             set(currentInstruction[1].u.operand, addToGraph(CompareEq, op1, op2));
2483             NEXT_OPCODE(op_eq);
2484         }
2485
2486         case op_eq_null: {
2487             NodeIndex value = get(currentInstruction[2].u.operand);
2488             set(currentInstruction[1].u.operand, addToGraph(CompareEq, value, constantNull()));
2489             NEXT_OPCODE(op_eq_null);
2490         }
2491
2492         case op_stricteq: {
2493             NodeIndex op1 = get(currentInstruction[2].u.operand);
2494             NodeIndex op2 = get(currentInstruction[3].u.operand);
2495             set(currentInstruction[1].u.operand, addToGraph(CompareStrictEq, op1, op2));
2496             NEXT_OPCODE(op_stricteq);
2497         }
2498
2499         case op_neq: {
2500             NodeIndex op1 = get(currentInstruction[2].u.operand);
2501             NodeIndex op2 = get(currentInstruction[3].u.operand);
2502             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEq, op1, op2)));
2503             NEXT_OPCODE(op_neq);
2504         }
2505
2506         case op_neq_null: {
2507             NodeIndex value = get(currentInstruction[2].u.operand);
2508             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEq, value, constantNull())));
2509             NEXT_OPCODE(op_neq_null);
2510         }
2511
2512         case op_nstricteq: {
2513             NodeIndex op1 = get(currentInstruction[2].u.operand);
2514             NodeIndex op2 = get(currentInstruction[3].u.operand);
2515             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareStrictEq, op1, op2)));
2516             NEXT_OPCODE(op_nstricteq);
2517         }
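        // The negated comparisons above (op_neq, op_neq_null, op_nstricteq) are
        // built as LogicalNot around the positive comparison rather than as
        // dedicated node types, presumably so later phases only ever see one
        // canonical node per comparison.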
2518
2519         // === Property access operations ===
2520
2521         case op_get_by_val: {
2522             SpeculatedType prediction = getPrediction();
2523             
2524             NodeIndex base = get(currentInstruction[2].u.operand);
2525             ArrayMode arrayMode = getArrayModeAndEmitChecks(currentInstruction[4].u.arrayProfile, Array::Read, base);
2526             NodeIndex property = get(currentInstruction[3].u.operand);
2527             NodeIndex getByVal = addToGraph(GetByVal, OpInfo(arrayMode.asWord()), OpInfo(prediction), base, property);
2528             set(currentInstruction[1].u.operand, getByVal);
2529
2530             NEXT_OPCODE(op_get_by_val);
2531         }
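        // Note the ordering above: getArrayModeAndEmitChecks() runs before the
        // property operand is loaded, because it may itself emit checks against
        // the base. The chosen ArrayMode then rides along in the GetByVal's
        // OpInfo next to the value prediction.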
2532
2533         case op_put_by_val: {
2534             NodeIndex base = get(currentInstruction[1].u.operand);
2535
2536             ArrayMode arrayMode = getArrayModeAndEmitChecks(currentInstruction[4].u.arrayProfile, Array::Write, base);
2537             
2538             NodeIndex property = get(currentInstruction[2].u.operand);
2539             NodeIndex value = get(currentInstruction[3].u.operand);
2540             
2541             addVarArgChild(base);
2542             addVarArgChild(property);
2543             addVarArgChild(value);
2544             addVarArgChild(NoNode); // Leave room for property storage.
2545             addToGraph(Node::VarArg, PutByVal, OpInfo(arrayMode.asWord()), OpInfo(0));
2546
2547             NEXT_OPCODE(op_put_by_val);
2548         }
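        // The NoNode child above reserves the fourth slot of the variadic
        // PutByVal, matching the "leave room for property storage" comment: a
        // later phase can then install a storage edge without reshaping the
        // node. Which phase does so is outside this function.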
2549             
2550         case op_get_by_id:
2551         case op_get_by_id_out_of_line:
2552         case op_get_array_length: {
2553             SpeculatedType prediction = getPrediction();
2554             
2555             NodeIndex base = get(currentInstruction[2].u.operand);
2556             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
2557             
2558             Identifier identifier = m_codeBlock->identifier(identifierNumber);
2559             GetByIdStatus getByIdStatus = GetByIdStatus::computeFor(
2560                 m_inlineStackTop->m_profiledBlock, m_currentIndex, identifier);
2561             
2562             handleGetById(
2563                 currentInstruction[1].u.operand, prediction, base, identifierNumber, getByIdStatus);
2564
2565             NEXT_OPCODE(op_get_by_id);
2566         }
2567         case op_put_by_id:
2568         case op_put_by_id_out_of_line:
2569         case op_put_by_id_transition_direct:
2570         case op_put_by_id_transition_normal:
2571         case op_put_by_id_transition_direct_out_of_line:
2572         case op_put_by_id_transition_normal_out_of_line: {
2573             NodeIndex value = get(currentInstruction[3].u.operand);
2574             NodeIndex base = get(currentInstruction[1].u.operand);
2575             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
2576             bool direct = currentInstruction[8].u.operand;
2577
2578             PutByIdStatus putByIdStatus = PutByIdStatus::computeFor(
2579                 m_inlineStackTop->m_profiledBlock,
2580                 m_currentIndex,
2581                 m_codeBlock->identifier(identifierNumber));
2582             if (!putByIdStatus.isSet())
2583                 addToGraph(ForceOSRExit);
2584             
2585             bool hasExitSite = m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache);
2586             
2587             if (!hasExitSite && putByIdStatus.isSimpleReplace()) {
2588                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2589                 NodeIndex propertyStorage;
2590                 if (isInlineOffset(putByIdStatus.offset()))
2591                     propertyStorage = base;
2592                 else
2593                     propertyStorage = addToGraph(GetButterfly, base);
2594                 addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, base, value);
2595                 
2596                 StorageAccessData storageAccessData;
2597                 storageAccessData.offset = indexRelativeToBase(putByIdStatus.offset());
2598                 storageAccessData.identifierNumber = identifierNumber;
2599                 m_graph.m_storageAccessData.append(storageAccessData);
2600             } else if (!hasExitSite
2601                        && putByIdStatus.isSimpleTransition()
2602                        && structureChainIsStillValid(
2603                            direct,
2604                            putByIdStatus.oldStructure(),
2605                            putByIdStatus.structureChain())) {
2606
2607                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2608                 if (!direct) {
2609                     if (!putByIdStatus.oldStructure()->storedPrototype().isNull()) {
2610                         addStructureTransitionCheck(
2611                             putByIdStatus.oldStructure()->storedPrototype().asCell());
2612                     }
2613                     
2614                     for (WriteBarrier<Structure>* it = putByIdStatus.structureChain()->head(); *it; ++it) {
2615                         JSValue prototype = (*it)->storedPrototype();
2616                         if (prototype.isNull())
2617                             continue;
2618                         ASSERT(prototype.isCell());
2619                         addStructureTransitionCheck(prototype.asCell());
2620                     }
2621                 }
2622                 ASSERT(putByIdStatus.oldStructure()->transitionWatchpointSetHasBeenInvalidated());
2623                 
2624                 NodeIndex propertyStorage;
2625                 StructureTransitionData* transitionData =
2626                     m_graph.addStructureTransitionData(
2627                         StructureTransitionData(
2628                             putByIdStatus.oldStructure(),
2629                             putByIdStatus.newStructure()));
2630
2631                 if (putByIdStatus.oldStructure()->outOfLineCapacity()
2632                     != putByIdStatus.newStructure()->outOfLineCapacity()) {
2633                     
2634                     // If we're growing the property storage then it must be because we're
2635                     // storing into the out-of-line storage.
2636                     ASSERT(!isInlineOffset(putByIdStatus.offset()));
2637                     
2638                     if (!putByIdStatus.oldStructure()->outOfLineCapacity()) {
2639                         propertyStorage = addToGraph(
2640                             AllocatePropertyStorage, OpInfo(transitionData), base);
2641                     } else {
2642                         propertyStorage = addToGraph(
2643                             ReallocatePropertyStorage, OpInfo(transitionData),
2644                             base, addToGraph(GetButterfly, base));
2645                     }
2646                 } else {
2647                     if (isInlineOffset(putByIdStatus.offset()))
2648                         propertyStorage = base;
2649                     else
2650                         propertyStorage = addToGraph(GetButterfly, base);
2651                 }
2652                 
2653                 addToGraph(PutStructure, OpInfo(transitionData), base);
2654                 
2655                 addToGraph(
2656                     PutByOffset,
2657                     OpInfo(m_graph.m_storageAccessData.size()),
2658                     propertyStorage,
2659                     base,
2660                     value);
2661                 
2662                 StorageAccessData storageAccessData;
2663                 storageAccessData.offset = indexRelativeToBase(putByIdStatus.offset());
2664                 storageAccessData.identifierNumber = identifierNumber;
2665                 m_graph.m_storageAccessData.append(storageAccessData);
2666             } else {
2667                 if (direct)
2668                     addToGraph(PutByIdDirect, OpInfo(identifierNumber), base, value);
2669                 else
2670                     addToGraph(PutById, OpInfo(identifierNumber), base, value);
2671             }
2672
2673             NEXT_OPCODE(op_put_by_id);
2674         }
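        // Rough sketch of the op_put_by_id lowering above (illustrative
        // pseudocode only; the real conditions are the PutByIdStatus queries):
        //
        //   if (no BadCache exit site && simple replace)
        //       CheckStructure(base); PutByOffset via base or GetButterfly
        //   else if (no BadCache exit site && simple transition && chain valid)
        //       CheckStructure(base); transition checks along prototype chain;
        //       maybe Allocate/ReallocatePropertyStorage; PutStructure;
        //       PutByOffset
        //   else
        //       generic PutById or PutByIdDirect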
2675
2676         case op_init_global_const_nop: {
2677             NEXT_OPCODE(op_init_global_const_nop);
2678         }
2679
2680         case op_init_global_const: {
2681             NodeIndex value = get(currentInstruction[2].u.operand);
2682             addToGraph(
2683                 PutGlobalVar,
2684                 OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2685                 value);
2686             NEXT_OPCODE(op_init_global_const);
2687         }
2688
2689         case op_init_global_const_check: {
2690             NodeIndex value = get(currentInstruction[2].u.operand);
2691             CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
2692             JSGlobalObject* globalObject = codeBlock->globalObject();
2693             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[4].u.operand];
2694             Identifier identifier = m_codeBlock->identifier(identifierNumber);
2695             SymbolTableEntry entry = globalObject->symbolTable()->get(identifier.impl());
2696             if (!entry.couldBeWatched()) {
2697                 addToGraph(
2698                     PutGlobalVar,
2699                     OpInfo(globalObject->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2700                     value);
2701                 NEXT_OPCODE(op_init_global_const_check);
2702             }
2703             addToGraph(
2704                 PutGlobalVarCheck,
2705                 OpInfo(codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2706                 OpInfo(identifierNumber),
2707                 value);
2708             NEXT_OPCODE(op_init_global_const_check);
2709         }
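        // The split above hinges on couldBeWatched(): when the symbol-table
        // entry can never be watched, a plain PutGlobalVar suffices; otherwise
        // PutGlobalVarCheck is used, which presumably also fires the variable's
        // watchpoints when it stores.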
2710
2712         // === Block terminators. ===
2713
2714         case op_jmp: {
2715             unsigned relativeOffset = currentInstruction[1].u.operand;
2716             addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2717             LAST_OPCODE(op_jmp);
2718         }
2719
2720         case op_loop: {
2721             unsigned relativeOffset = currentInstruction[1].u.operand;
2722             addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2723             LAST_OPCODE(op_loop);
2724         }
2725
2726         case op_jtrue: {
2727             unsigned relativeOffset = currentInstruction[2].u.operand;
2728             NodeIndex condition = get(currentInstruction[1].u.operand);
2729             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jtrue)), condition);
2730             LAST_OPCODE(op_jtrue);
2731         }
2732
2733         case op_jfalse: {
2734             unsigned relativeOffset = currentInstruction[2].u.operand;
2735             NodeIndex condition = get(currentInstruction[1].u.operand);
2736             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jfalse)), OpInfo(m_currentIndex + relativeOffset), condition);
2737             LAST_OPCODE(op_jfalse);
2738         }
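        // Branch takes its targets as (taken, notTaken), which is why op_jtrue
        // above passes the jump offset first while op_jfalse passes the
        // fall-through offset (m_currentIndex + OPCODE_LENGTH) first.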
2739
2740         case op_loop_if_true: {
2741             unsigned relativeOffset = currentInstruction[2].u.operand;
2742             NodeIndex condition = get(currentInstruction[1].u.operand);
2743             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_true)), condition);
2744             LAST_OPCODE(op_loop_if_true);
2745         }
2746
2747         case op_loop_if_false: {
2748             unsigned relativeOffset = currentInstruction[2].u.operand;
2749             NodeIndex condition = get(currentInstruction[1].u.operand);
2750             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_false)), OpInfo(m_currentIndex + relativeOffset), condition);
2751             LAST_OPCODE(op_loop_if_false);
2752         }
2753
2754         case op_jeq_null: {
2755             unsigned relativeOffset = currentInstruction[2].u.operand;
2756             NodeIndex value = get(currentInstruction[1].u.operand);
2757             NodeIndex condition = addToGraph(CompareEq, value, constantNull());
2758             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jeq_null)), condition);
2759             LAST_OPCODE(op_jeq_null);
2760         }
2761
2762         case op_jneq_null: {
2763             unsigned relativeOffset = currentInstruction[2].u.operand;
2764             NodeIndex value = get(currentInstruction[1].u.operand);
2765             NodeIndex condition = addToGraph(CompareEq, value, constantNull());
2766             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_null)), OpInfo(m_currentIndex + relativeOffset), condition);
2767             LAST_OPCODE(op_jneq_null);
2768         }
2769
2770         case op_jless: {
2771             unsigned relativeOffset = currentInstruction[3].u.operand;
2772             NodeIndex op1 = get(currentInstruction[1].u.operand);
2773             NodeIndex op2 = get(currentInstruction[2].u.operand);
2774             NodeIndex condition = addToGraph(CompareLess, op1, op2);
2775             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jless)), condition);
2776             LAST_OPCODE(op_jless);
2777         }
2778
2779         case op_jlesseq: {
2780             unsigned relativeOffset = currentInstruction[3].u.operand;
2781             NodeIndex op1 = get(currentInstruction[1].u.operand);
2782             NodeIndex op2 = get(currentInstruction[2].u.operand);
2783             NodeIndex condition = addToGraph(CompareLessEq, op1, op2);
2784             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jlesseq)), condition);
2785             LAST_OPCODE(op_jlesseq);
2786         }
2787
2788         case op_jgreater: {
2789             unsigned relativeOffset = currentInstruction[3].u.operand;
2790             NodeIndex op1 = get(currentInstruction[1].u.operand);
2791             NodeIndex op2 = get(currentInstruction[2].u.operand);
2792             NodeIndex condition = addToGraph(CompareGreater, op1, op2);
2793             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreater)), condition);
2794             LAST_OPCODE(op_jgreater);
2795         }
2796
2797         case op_jgreatereq: {
2798             unsigned relativeOffset = currentInstruction[3].u.operand;
2799             NodeIndex op1 = get(currentInstruction[1].u.operand);
2800             NodeIndex op2 = get(currentInstruction[2].u.operand);
2801             NodeIndex condition = addToGraph(CompareGreaterEq, op1, op2);
2802             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreatereq)), condition);
2803             LAST_OPCODE(op_jgreatereq);
2804         }
2805
2806         case op_jnless: {
2807             unsigned relativeOffset = currentInstruction[3].u.operand;
2808             NodeIndex op1 = get(currentInstruction[1].u.operand);
2809             NodeIndex op2 = get(currentInstruction[2].u.operand);
2810             NodeIndex condition = addToGraph(CompareLess, op1, op2);
2811             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnless)), OpInfo(m_currentIndex + relativeOffset), condition);
2812             LAST_OPCODE(op_jnless);
2813         }
2814
2815         case op_jnlesseq: {
2816             unsigned relativeOffset = currentInstruction[3].u.operand;
2817             NodeIndex op1 = get(currentInstruction[1].u.operand);
2818             NodeIndex op2 = get(currentInstruction[2].u.operand);
2819             NodeIndex condition = addToGraph(CompareLessEq, op1, op2);
2820             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnlesseq)), OpInfo(m_currentIndex + relativeOffset), condition);
2821             LAST_OPCODE(op_jnlesseq);
2822         }
2823
2824         case op_jngreater: {
2825             unsigned relativeOffset = currentInstruction[3].u.operand;
2826             NodeIndex op1 = get(currentInstruction[1].u.operand);
2827             NodeIndex op2 = get(currentInstruction[2].u.operand);
2828             NodeIndex condition = addToGraph(CompareGreater, op1, op2);
2829             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreater)), OpInfo(m_currentIndex + relativeOffset), condition);
2830             LAST_OPCODE(op_jngreater);
2831         }
2832
2833         case op_jngreatereq: {
2834             unsigned relativeOffset = currentInstruction[3].u.operand;
2835             NodeIndex op1 = get(currentInstruction[1].u.operand);
2836             NodeIndex op2 = get(currentInstruction[2].u.operand);
2837             NodeIndex condition = addToGraph(CompareGreaterEq, op1, op2);
2838             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreatereq)), OpInfo(m_currentIndex + relativeOffset), condition);
2839             LAST_OPCODE(op_jngreatereq);
2840         }
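        // The op_jn* cases reuse the positive comparison and swap the branch
        // targets instead of emitting the opposite comparison. That matters for
        // NaN: !(a < b) is not the same as (a >= b) when the comparison is
        // unordered, so "not less" has to be the not-taken edge of CompareLess.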
2841
2842         case op_loop_if_less: {
2843             unsigned relativeOffset = currentInstruction[3].u.operand;
2844             NodeIndex op1 = get(currentInstruction[1].u.operand);
2845             NodeIndex op2 = get(currentInstruction[2].u.operand);
2846             NodeIndex condition = addToGraph(CompareLess, op1, op2);
2847             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_less)), condition);
2848             LAST_OPCODE(op_loop_if_less);
2849         }
2850
2851         case op_loop_if_lesseq: {
2852             unsigned relativeOffset = currentInstruction[3].u.operand;
2853             NodeIndex op1 = get(currentInstruction[1].u.operand);
2854             NodeIndex op2 = get(currentInstruction[2].u.operand);
2855             NodeIndex condition = addToGraph(CompareLessEq, op1, op2);
2856             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_lesseq)), condition);
2857             LAST_OPCODE(op_loop_if_lesseq);
2858         }
2859
2860         case op_loop_if_greater: {
2861             unsigned relativeOffset = currentInstruction[3].u.operand;
2862             NodeIndex op1 = get(currentInstruction[1].u.operand);
2863             NodeIndex op2 = get(currentInstruction[2].u.operand);
2864             NodeIndex condition = addToGraph(CompareGreater, op1, op2);
2865             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_greater)), condition);
2866             LAST_OPCODE(op_loop_if_greater);
2867         }
2868
2869         case op_loop_if_greatereq: {
2870             unsigned relativeOffset = currentInstruction[3].u.operand;
2871             NodeIndex op1 = get(currentInstruction[1].u.operand);
2872             NodeIndex op2 = get(currentInstruction[2].u.operand);
2873             NodeIndex condition = addToGraph(CompareGreaterEq, op1, op2);
2874             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_loop_if_greatereq)), condition);
2875             LAST_OPCODE(op_loop_if_greatereq);
2876         }
2877
2878         case op_ret:
2879             flushArgumentsAndCapturedVariables();
2880             if (m_inlineStackTop->m_inlineCallFrame) {
2881                 if (m_inlineStackTop->m_returnValue != InvalidVirtualRegister)
2882                     setDirect(m_inlineStackTop->m_returnValue, get(currentInstruction[1].u.operand));
2883                 m_inlineStackTop->m_didReturn = true;
2884                 if (m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
2885                     // If we're returning from the first block, then we're done parsing.
2886                     ASSERT(m_inlineStackTop->m_callsiteBlockHead == m_graph.m_blocks.size() - 1);
2887                     shouldContinueParsing = false;
2888                     LAST_OPCODE(op_ret);
2889                 } else {
2890                     // If inlining created blocks, and we're doing a return, then we need
2891                     // special linking: the successor is not simply the next bytecode block.
2892                     ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_blockIndex == m_graph.m_blocks.size() - 1);
2893                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsNormalLinking = false;
2894                 }
2895                 if (m_currentIndex + OPCODE_LENGTH(op_ret) != m_inlineStackTop->m_codeBlock->instructions().size() || m_inlineStackTop->m_didEarlyReturn) {
2896                     ASSERT(m_currentIndex + OPCODE_LENGTH(op_ret) <= m_inlineStackTop->m_codeBlock->instructions().size());
2897                     addToGraph(Jump, OpInfo(NoBlock));
2898                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsEarlyReturnLinking = true;
2899                     m_inlineStackTop->m_didEarlyReturn = true;
2900                 }
2901                 LAST_OPCODE(op_ret);
2902             }
2903             addToGraph(Return, get(currentInstruction[1].u.operand));
2904             LAST_OPCODE(op_ret);
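            // Recap of the inlined-return path above: the return value is
            // stored directly into the caller's result register via setDirect,
            // and a return that is not the last instruction of the inlined code
            // ends its block with Jump(NoBlock), to be patched later by
            // early-return linking.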
2905             
2906         case op_end:
2907             flushArgumentsAndCapturedVariables();
2908             ASSERT(!m_inlineStackTop->m_inlineCallFrame);
2909             addToGraph(Return, get(currentInstruction[1].u.operand));
2910             LAST_OPCODE(op_end);
2911
2912         case op_throw:
2913             flushArgumentsAndCapturedVariables();
2914             addToGraph(Throw, get(currentInstruction[1].u.operand));
2915             LAST_OPCODE(op_throw);
2916             
2917         case op_throw_static_error:
2918             flushArgumentsAndCapturedVariables();
2919             addToGraph(ThrowReferenceError);
2920             LAST_OPCODE(op_throw_static_error);
2921             
2922         case op_call:
2923             handleCall(interpreter, currentInstruction, Call, CodeForCall);
2924             NEXT_OPCODE(op_call);
2925             
2926         case op_construct:
2927             handleCall(interpreter, currentInstruction, Construct, CodeForConstruct);
2928             NEXT_OPCODE(op_construct);
2929             
2930         case op_call_varargs: {
2931             ASSERT(m_inlineStackTop->m_inlineCallFrame);
2932             ASSERT(currentInstruction[3].u.operand == m_inlineStackTop->m_codeBlock->argumentsRegister());
2933             ASSERT(!m_inlineStackTop->m_codeBlock->symbolTable()->slowArguments());
2934             // It would be cool to funnel this into handleCall() so that it can handle
2935             // inlining. But currently that won't be profitable anyway, since none of the
2936             // uses of call_varargs will be inlineable. So we set this up manually and
2937             // without inline/intrinsic detection.
2938             
2939             Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call_varargs);
2940             
2941             SpeculatedType prediction = SpecNone;
2942             if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
2943                 m_currentProfilingIndex = m_currentIndex + OPCODE_LENGTH(op_call_varargs);
2944                 prediction = getPrediction();
2945             }
2946             
2947             addToGraph(CheckArgumentsNotCreated);
2948             
2949             unsigned argCount = m_inlineStackTop->m_inlineCallFrame->arguments.size();
2950             if (JSStack::CallFrameHeaderSize + argCount > m_parameterSlots)
2951                 m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
2952             
2953             addVarArgChild(get(currentInstruction[1].u.operand)); // callee
2954             addVarArgChild(get(currentInstruction[2].u.operand)); // this
2955             for (unsigned argument = 1; argument < argCount; ++argument)
2956                 addVarArgChild(get(argumentToOperand(argument)));
2957             
2958             NodeIndex call = addToGraph(Node::VarArg, Call, OpInfo(0), OpInfo(prediction));
2959             if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result)
2960                 set(putInstruction[1].u.operand, call);
2961             
2962             NEXT_OPCODE(op_call_varargs);
2963         }
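        // Because op_call_varargs is only parsed here inside an inlined
        // function (see the ASSERTs above), the argument count is a known
        // constant from the inline call frame, letting a "varargs" call expand
        // into an ordinary variadic Call node. The loop starts at 1 presumably
        // because argument 0 is `this`, which was already added from the
        // instruction's own operand.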
2964             
2965         case op_call_put_result:
2966             NEXT_OPCODE(op_call_put_result);
2967             
2968         case op_jneq_ptr:
2969             // Statically speculate for now. It makes sense to let speculate-only jneq_ptr
2970             // support simmer for a while before making it more general, since it's
2971             // already gnarly enough as it is.
2972             ASSERT(pointerIsFunction(currentInstruction[2].u.specialPointer));
2973             addToGraph(
2974                 CheckFunction,
2975                 OpInfo(actualPointerFor(m_inlineStackTop->m_codeBlock, currentInstruction[2].u.specialPointer)),
2976                 get(currentInstruction[1].u.operand));
2977             addToGraph(Jump, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_ptr)));
2978             LAST_OPCODE(op_jneq_ptr);
2979
2980         case op_resolve:
2981         case op_resolve_global_property:
2982         case op_resolve_global_var:
2983         case op_resolve_scoped_var:
2984         case op_resolve_scoped_var_on_top_scope:
2985         case op_resolve_scoped_var_with_top_scope_check: {
2986             SpeculatedType prediction = getPrediction();
2987             
2988             unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
2989             unsigned operations = m_inlineStackTop->m_resolveOperationRemap[currentInstruction[3].u.operand];
2990             NodeIndex value = 0;
2991             if (parseResolveOperations(prediction, identifier, operations, 0, 0, &value)) {
2992                 set(currentInstruction[1].u.operand, value);
2993                 NEXT_OPCODE(op_resolve);
2994             }
2995
2996             NodeIndex resolve = addToGraph(Resolve, OpInfo(m_graph.m_resolveOperationsData.size()), OpInfo(prediction));
2997             m_graph.m_resolveOperationsData.append(ResolveOperationData());
2998             ResolveOperationData& data = m_graph.m_resolveOperationsData.last();
2999             data.identifierNumber = identifier;
3000             data.resolveOperationsIndex = operations;
3001
3002             set(currentInstruction[1].u.operand, resolve);
3003
3004             NEXT_OPCODE(op_resolve);
3005         }
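        // Resolution has two paths: parseResolveOperations() first tries to
        // lower the resolve into concrete graph nodes; only if that fails does
        // the parser fall back to an opaque Resolve node whose identifier and
        // operation indices live in m_graph.m_resolveOperationsData.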
3006
3007         case op_put_to_base_variable:
3008         case op_put_to_base: {
3009             unsigned base = currentInstruction[1].u.operand;
3010             unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3011             unsigned value = currentInstruction[3].u.operand;
3012             unsigned operation = m_inlineStackTop->m_putToBaseOperationRemap[currentInstruction[4].u.operand];
3013             PutToBaseOperation* putToBase = m_codeBlock->putToBaseOperation(operation);
3014
3015             if (putToBase->m_isDynamic) {
3016                 addToGraph(Phantom, get(base));
3017                 addToGraph(PutById, OpInfo(identifier), get(base), get(value));
3018                 NEXT_OPCODE(op_put_to_base);
3019             }
3020
3021             switch (putToBase->m_kind) {
3022             case PutToBaseOperation::Uninitialised:
3023                 addToGraph(Phantom, get(base));
3024                 addToGraph(ForceOSRExit);
3025                 break;
3026
3027             case PutToBaseOperation::GlobalVariablePutChecked: {
3028                 CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
3029                 JSGlobalObject* globalObject = codeBlock->globalObject();
3030                 SymbolTableEntry entry = globalObject->symbolTable()->get(m_codeBlock->identifier(identifier).impl());
3031                 if (entry.couldBeWatched()) {
3032                     addToGraph(PutGlobalVarCheck,
3033                                OpInfo(codeBlock->globalObject()->assertRegisterIsInThisObject(putToBase->m_registerAddress)),
3034                                OpInfo(identifier),
3035                                get(value));
3036                     break;
3037                 }
3038             }
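                // Otherwise the entry cannot be watched; fall through to the
                // unchecked store below.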
3039             case PutToBaseOperation::GlobalVariablePut:
3040                 addToGraph(PutGlobalVar,
3041                            OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(putToBase->m_registerAddress)),
3042                            get(value));
3043                 break;
3044             case PutToBaseOperation::VariablePut: {
3045                 addToGraph(Phantom, get(base));
3046                 NodeIndex getScope = addToGraph(GetScope, OpInfo(putToBase->m_scopeDepth));
3047                 NodeIndex getScopeRegisters = addToGraph(GetScopeRegisters, getScope);
3048                 addToGraph(PutScopedVar, OpInfo(putToBase->m_offset), getScope, getScopeRegisters, get(value));
3049                 break;
3050             }
3051             case PutToBaseOperation::GlobalPropertyPut: {
3052                 if (!putToBase->m_structure) {
3053                     addToGraph(Phantom, get(base));
3054                     addToGraph(ForceOSRExit);
3055                     NEXT_OPCODE(op_put_to_base);
3056                 }
3057                 NodeIndex baseNode = get(base);
3058                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putToBase->m_structure.get())), baseNode);
3059                 NodeIndex propertyStorage;
3060                 if (isInlineOffset(putToBase->m_offset))
3061                     propertyStorage = baseNode;
3062                 else
3063                     propertyStorage = addToGraph(GetButterfly, baseNode);
3064                 addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, baseNode, get(value));
3065
3066                 StorageAccessData storageAccessData;
3067                 storageAccessData.offset = indexRelativeToBase(putToBase->m_offset);
3068                 storageAccessData.identifierNumber = identifier;
3069                 m_graph.m_storageAccessData.append(storageAccessData);
3070                 break;
3071             }
3072             case PutToBaseOperation::Readonly:
3073             case PutToBaseOperation::Generic:
3074                 addToGraph(Phantom, get(base));
3075                 addToGraph(PutById, OpInfo(identifier), get(base), get(value));
3076             }
3077             NEXT_OPCODE(op_put_to_base);
3078         }
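        // The kinds above cover each place a base can live: global variables
        // (checked or unchecked), scoped variables (GetScope, GetScopeRegisters,
        // PutScopedVar), and global properties (CheckStructure plus PutByOffset),
        // with generic PutById as the escape hatch for the dynamic, readonly,
        // and generic cases, and ForceOSRExit for the uninitialised one.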
3079
3080         case op_resolve_base_to_global:
3081         case op_resolve_base_to_global_dynamic:
3082         case op_resolve_base_to_scope:
3083         case op_resolve_base_to_scope_with_top_scope_check:
3084         case op_resolve_base: {
3085             SpeculatedType prediction = getPrediction();
3086             
3087             unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3088             unsigned operations = m_inlineStackTop->m_resolveOperationRemap[currentInstruction[4].u.operand];
3089             unsigned putToBaseOperation = m_inlineStackTop->m_putToBaseOperationRemap[currentInstruction[5].u.operand];
3090
3091             NodeIndex base = 0;