DFG should inline new typedArray()
Source/JavaScriptCore/dfg/DFGByteCodeParser.cpp
1 /*
2  * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGByteCodeParser.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "ArrayConstructor.h"
32 #include "CallLinkStatus.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGArrayMode.h"
36 #include "DFGCapabilities.h"
37 #include "GetByIdStatus.h"
38 #include "Operations.h"
39 #include "PreciseJumpTargets.h"
40 #include "PutByIdStatus.h"
41 #include "StringConstructor.h"
42 #include <wtf/CommaPrinter.h>
43 #include <wtf/HashMap.h>
44 #include <wtf/MathExtras.h>
45
46 namespace JSC { namespace DFG {
47
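// Key identifying a constant buffer by (CodeBlock, index). The parser uses it to remap
// constant buffer indices across code blocks when inlining (see m_constantBufferCache below).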
48 class ConstantBufferKey {
49 public:
50     ConstantBufferKey()
51         : m_codeBlock(0)
52         , m_index(0)
53     {
54     }
55     
56     ConstantBufferKey(WTF::HashTableDeletedValueType)
57         : m_codeBlock(0)
58         , m_index(1)
59     {
60     }
61     
62     ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
63         : m_codeBlock(codeBlock)
64         , m_index(index)
65     {
66     }
67     
68     bool operator==(const ConstantBufferKey& other) const
69     {
70         return m_codeBlock == other.m_codeBlock
71             && m_index == other.m_index;
72     }
73     
74     unsigned hash() const
75     {
76         return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
77     }
78     
79     bool isHashTableDeletedValue() const
80     {
81         return !m_codeBlock && m_index;
82     }
83     
84     CodeBlock* codeBlock() const { return m_codeBlock; }
85     unsigned index() const { return m_index; }
86     
87 private:
88     CodeBlock* m_codeBlock;
89     unsigned m_index;
90 };
91
92 struct ConstantBufferKeyHash {
93     static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
94     static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
95     {
96         return a == b;
97     }
98     
99     static const bool safeToCompareToEmptyOrDeleted = true;
100 };
101
102 } } // namespace JSC::DFG
103
104 namespace WTF {
105
106 template<typename T> struct DefaultHash;
107 template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
108     typedef JSC::DFG::ConstantBufferKeyHash Hash;
109 };
110
111 template<typename T> struct HashTraits;
112 template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };
113
114 } // namespace WTF
115
116 namespace JSC { namespace DFG {
117
118 // === ByteCodeParser ===
119 //
120 // This class is used to compile the dataflow graph from a CodeBlock.
121 class ByteCodeParser {
122 public:
123     ByteCodeParser(Graph& graph)
124         : m_vm(&graph.m_vm)
125         , m_codeBlock(graph.m_codeBlock)
126         , m_profiledBlock(graph.m_profiledBlock)
127         , m_graph(graph)
128         , m_currentBlock(0)
129         , m_currentIndex(0)
130         , m_constantUndefined(UINT_MAX)
131         , m_constantNull(UINT_MAX)
132         , m_constantNaN(UINT_MAX)
133         , m_constant1(UINT_MAX)
134         , m_constants(m_codeBlock->numberOfConstantRegisters())
135         , m_numArguments(m_codeBlock->numParameters())
136         , m_numLocals(m_codeBlock->m_numCalleeRegisters)
137         , m_preservedVars(m_codeBlock->m_numVars)
138         , m_parameterSlots(0)
139         , m_numPassedVarArgs(0)
140         , m_inlineStackTop(0)
141         , m_haveBuiltOperandMaps(false)
142         , m_emptyJSValueIndex(UINT_MAX)
143         , m_currentInstruction(0)
144     {
145         ASSERT(m_profiledBlock);
146         
147         for (int i = 0; i < m_codeBlock->m_numVars; ++i)
148             m_preservedVars.set(i);
149     }
150     
151     // Parse a full CodeBlock of bytecode.
152     bool parse();
153     
154 private:
155     struct InlineStackEntry;
156
157     // Just parse from m_currentIndex to the end of the current CodeBlock.
158     void parseCodeBlock();
159
160     // Helper for min and max.
161     bool handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);
162     
163     // Handle calls. This resolves issues surrounding inlining and intrinsics.
164     void handleCall(Instruction* currentInstruction, NodeType op, CodeSpecializationKind);
165     void emitFunctionChecks(const CallLinkStatus&, Node* callTarget, int registerOffset, CodeSpecializationKind);
166     void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind);
167     // Handle inlining. Return true if it succeeded, false if we need to plant a call.
168     bool handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
169     // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
170     bool handleIntrinsic(int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
171     bool handleTypedArrayConstructor(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, TypedArrayType);
172     bool handleConstantInternalFunction(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
173     Node* handlePutByOffset(Node* base, unsigned identifier, PropertyOffset, Node* value);
174     Node* handleGetByOffset(SpeculatedType, Node* base, unsigned identifierNumber, PropertyOffset);
175     void handleGetByOffset(
176         int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
177         PropertyOffset);
178     void handleGetById(
179         int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
180         const GetByIdStatus&);
181
182     Node* getScope(bool skipTop, unsigned skipCount);
183     
184     // Prepare to parse a block.
185     void prepareToParseBlock();
186     // Parse a single basic block of bytecode instructions.
187     bool parseBlock(unsigned limit);
188     // Link block successors.
189     void linkBlock(BasicBlock*, Vector<BasicBlock*>& possibleTargets);
190     void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets);
191     
192     VariableAccessData* newVariableAccessData(int operand, bool isCaptured)
193     {
194         ASSERT(operand < FirstConstantRegisterIndex);
195         
196         m_graph.m_variableAccessData.append(VariableAccessData(static_cast<VirtualRegister>(operand), isCaptured));
197         return &m_graph.m_variableAccessData.last();
198     }
199     
200     // Get/Set the operands/result of a bytecode instruction.
201     Node* getDirect(int operand)
202     {
203         // Is this a constant?
204         if (operand >= FirstConstantRegisterIndex) {
205             unsigned constant = operand - FirstConstantRegisterIndex;
206             ASSERT(constant < m_constants.size());
207             return getJSConstant(constant);
208         }
209
210         ASSERT(operand != JSStack::Callee);
211         
212         // Is this an argument?
213         if (operandIsArgument(operand))
214             return getArgument(operand);
215
216         // Must be a local.
217         return getLocal((unsigned)operand);
218     }
219     Node* get(int operand)
220     {
221         if (operand == JSStack::Callee) {
222             if (inlineCallFrame() && inlineCallFrame()->callee)
223                 return cellConstant(inlineCallFrame()->callee.get());
224             
225             return getCallee();
226         }
227         
228         return getDirect(m_inlineStackTop->remapOperand(operand));
229     }
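    // NormalSet flushes the old value where setLocal/setArgument below require it
    // (captured locals, argument positions, non-'this' arguments) before the store;
    // SetOnEntry skips that flush.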
230     enum SetMode { NormalSet, SetOnEntry };
231     void setDirect(int operand, Node* value, SetMode setMode = NormalSet)
232     {
233         // Is this an argument?
234         if (operandIsArgument(operand)) {
235             setArgument(operand, value, setMode);
236             return;
237         }
238
239         // Must be a local.
240         setLocal((unsigned)operand, value, setMode);
241     }
242     void set(int operand, Node* value, SetMode setMode = NormalSet)
243     {
244         setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
245     }
246     
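    // Attach the lazy-operand value profile's prediction for this bytecode index and
    // local to the GetLocal's VariableAccessData.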
247     Node* injectLazyOperandSpeculation(Node* node)
248     {
249         ASSERT(node->op() == GetLocal);
250         ASSERT(node->codeOrigin.bytecodeIndex == m_currentIndex);
251         ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
252         LazyOperandValueProfileKey key(m_currentIndex, node->local());
253         SpeculatedType prediction = m_inlineStackTop->m_lazyOperands.prediction(locker, key);
254 #if DFG_ENABLE(DEBUG_VERBOSE)
255         dataLog("Lazy operand [@", node->index(), ", bc#", m_currentIndex, ", r", node->local(), "] prediction: ", SpeculationDump(prediction), "\n");
256 #endif
257         node->variableAccessData()->predict(prediction);
258         return node;
259     }
260
261     // Used in implementing get/set, above, where the operand is a local variable.
262     Node* getLocal(unsigned operand)
263     {
264         Node* node = m_currentBlock->variablesAtTail.local(operand);
265         bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
266         
267         // This has two goals: 1) link together variable access datas, and 2)
268         // try to avoid creating redundant GetLocals. (1) is required for
269         // correctness - no other phase will ensure that block-local variable
270         // access data unification is done correctly. (2) is purely opportunistic
271         // and is meant as a compile-time optimization only.
272         
273         VariableAccessData* variable;
274         
275         if (node) {
276             variable = node->variableAccessData();
277             variable->mergeIsCaptured(isCaptured);
278             
279             if (!isCaptured) {
280                 switch (node->op()) {
281                 case GetLocal:
282                     return node;
283                 case SetLocal:
284                     return node->child1().node();
285                 default:
286                     break;
287                 }
288             }
289         } else {
290             m_preservedVars.set(operand);
291             variable = newVariableAccessData(operand, isCaptured);
292         }
293         
294         node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
295         m_currentBlock->variablesAtTail.local(operand) = node;
296         return node;
297     }
298     void setLocal(unsigned operand, Node* value, SetMode setMode = NormalSet)
299     {
300         bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
301         
302         if (setMode == NormalSet) {
303             ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
304             if (isCaptured || argumentPosition)
305                 flushDirect(operand, argumentPosition);
306         }
307
308         VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
309         variableAccessData->mergeStructureCheckHoistingFailed(
310             m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
311         variableAccessData->mergeCheckArrayHoistingFailed(
312             m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
313         Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
314         m_currentBlock->variablesAtTail.local(operand) = node;
315     }
316
317     // Used in implementing get/set, above, where the operand is an argument.
318     Node* getArgument(unsigned operand)
319     {
320         unsigned argument = operandToArgument(operand);
321         ASSERT(argument < m_numArguments);
322         
323         Node* node = m_currentBlock->variablesAtTail.argument(argument);
324         bool isCaptured = m_codeBlock->isCaptured(operand);
325
326         VariableAccessData* variable;
327         
328         if (node) {
329             variable = node->variableAccessData();
330             variable->mergeIsCaptured(isCaptured);
331             
332             switch (node->op()) {
333             case GetLocal:
334                 return node;
335             case SetLocal:
336                 return node->child1().node();
337             default:
338                 break;
339             }
340         } else
341             variable = newVariableAccessData(operand, isCaptured);
342         
343         node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
344         m_currentBlock->variablesAtTail.argument(argument) = node;
345         return node;
346     }
347     void setArgument(int operand, Node* value, SetMode setMode = NormalSet)
348     {
349         unsigned argument = operandToArgument(operand);
350         ASSERT(argument < m_numArguments);
351         
352         bool isCaptured = m_codeBlock->isCaptured(operand);
353
354         VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
355
356         // Always flush arguments, except for 'this'. If 'this' is created by us,
357         // then make sure that it's never unboxed.
358         if (argument) {
359             if (setMode == NormalSet)
360                 flushDirect(operand);
361         } else if (m_codeBlock->specializationKind() == CodeForConstruct)
362             variableAccessData->mergeShouldNeverUnbox(true);
363         
364         variableAccessData->mergeStructureCheckHoistingFailed(
365             m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
366         variableAccessData->mergeCheckArrayHoistingFailed(
367             m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
368         Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
369         m_currentBlock->variablesAtTail.argument(argument) = node;
370     }
371     
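    // Locate the ArgumentPosition tracker for an operand. For true arguments this walks
    // out to the machine frame; for locals it returns a tracker only when the operand is
    // really an inlined call's argument, and 0 otherwise.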
372     ArgumentPosition* findArgumentPositionForArgument(int argument)
373     {
374         InlineStackEntry* stack = m_inlineStackTop;
375         while (stack->m_inlineCallFrame)
376             stack = stack->m_caller;
377         return stack->m_argumentPositions[argument];
378     }
379     
380     ArgumentPosition* findArgumentPositionForLocal(int operand)
381     {
382         for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
383             InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
384             if (!inlineCallFrame)
385                 break;
386             if (operand >= static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize))
387                 continue;
388             if (operand == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
389                 continue;
390             if (operand < static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize - inlineCallFrame->arguments.size()))
391                 continue;
392             int argument = operandToArgument(operand - inlineCallFrame->stackOffset);
393             return stack->m_argumentPositions[argument];
394         }
395         return 0;
396     }
397     
398     ArgumentPosition* findArgumentPosition(int operand)
399     {
400         if (operandIsArgument(operand))
401             return findArgumentPositionForArgument(operandToArgument(operand));
402         return findArgumentPositionForLocal(operand);
403     }
404
405     void addConstant(JSValue value)
406     {
407         unsigned constantIndex = m_codeBlock->addConstantLazily();
408         initializeLazyWriteBarrierForConstant(
409             m_graph.m_plan.writeBarriers,
410             m_codeBlock->constants()[constantIndex],
411             m_codeBlock,
412             constantIndex,
413             m_codeBlock->ownerExecutable(), 
414             value);
415     }
416     
417     void flush(int operand)
418     {
419         flushDirect(m_inlineStackTop->remapOperand(operand));
420     }
421     
422     void flushDirect(int operand)
423     {
424         flushDirect(operand, findArgumentPosition(operand));
425     }
426     
427     void flushDirect(int operand, ArgumentPosition* argumentPosition)
428     {
429         bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
430         
431         ASSERT(operand < FirstConstantRegisterIndex);
432         
433         if (!operandIsArgument(operand))
434             m_preservedVars.set(operand);
435         
436         Node* node = m_currentBlock->variablesAtTail.operand(operand);
437         
438         VariableAccessData* variable;
439         
440         if (node) {
441             variable = node->variableAccessData();
442             variable->mergeIsCaptured(isCaptured);
443         } else
444             variable = newVariableAccessData(operand, isCaptured);
445         
446         node = addToGraph(Flush, OpInfo(variable));
447         m_currentBlock->variablesAtTail.operand(operand) = node;
448         if (argumentPosition)
449             argumentPosition->addVariable(variable);
450     }
451
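    // Flush every argument (other than 'this') and every captured local belonging to the
    // given inline stack entry.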
452     void flush(InlineStackEntry* inlineStackEntry)
453     {
454         int numArguments;
455         if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame)
456             numArguments = inlineCallFrame->arguments.size();
457         else
458             numArguments = inlineStackEntry->m_codeBlock->numParameters();
459         for (unsigned argument = numArguments; argument-- > 1;)
460             flushDirect(inlineStackEntry->remapOperand(argumentToOperand(argument)));
461         for (int local = 0; local < inlineStackEntry->m_codeBlock->m_numVars; ++local) {
462             if (!inlineStackEntry->m_codeBlock->isCaptured(local))
463                 continue;
464             flushDirect(inlineStackEntry->remapOperand(local));
465         }
466     }
467
468     void flushAllArgumentsAndCapturedVariablesInInlineStack()
469     {
470         for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
471             flush(inlineStackEntry);
472     }
473
474     void flushArgumentsAndCapturedVariables()
475     {
476         flush(m_inlineStackTop);
477     }
478
479     // Get an operand, and perform a ToInt32/ToNumber conversion on it.
480     Node* getToInt32(int operand)
481     {
482         return toInt32(get(operand));
483     }
484
485     // Perform an ES5 ToInt32 operation - returns a node of type NodeResultInt32.
486     Node* toInt32(Node* node)
487     {
488         if (node->hasInt32Result())
489             return node;
490
491         if (node->op() == UInt32ToNumber)
492             return node->child1().node();
493
494         // Check for numeric constants boxed as JSValues.
495         if (canFold(node)) {
496             JSValue v = valueOfJSConstant(node);
497             if (v.isInt32())
498                 return getJSConstant(node->constantNumber());
499             if (v.isNumber())
500                 return getJSConstantForValue(JSValue(JSC::toInt32(v.asNumber())));
501         }
502
503         return addToGraph(ValueToInt32, node);
504     }
505
506     // NOTE: Only use this to construct constants that arise from non-speculative
507     // constant folding. I.e. creating constants using this if we had constant
508     // field inference would be a bad idea, since the bytecode parser's folding
509     // doesn't handle liveness preservation.
510     Node* getJSConstantForValue(JSValue constantValue)
511     {
512         unsigned constantIndex;
513         if (!m_codeBlock->findConstant(constantValue, constantIndex)) {
514             addConstant(constantValue);
515             m_constants.append(ConstantRecord());
516         }
517         
518         ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
519         
520         return getJSConstant(constantIndex);
521     }
522
523     Node* getJSConstant(unsigned constant)
524     {
525         Node* node = m_constants[constant].asJSValue;
526         if (node)
527             return node;
528
529         Node* result = addToGraph(JSConstant, OpInfo(constant));
530         m_constants[constant].asJSValue = result;
531         return result;
532     }
533
534     Node* getCallee()
535     {
536         return addToGraph(GetCallee);
537     }
538
539     // Helper functions to get/set the this value.
540     Node* getThis()
541     {
542         return get(m_inlineStackTop->m_codeBlock->thisRegister());
543     }
544     void setThis(Node* value)
545     {
546         set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
547     }
548
549     // Convenience methods for checking nodes for constants.
550     bool isJSConstant(Node* node)
551     {
552         return node->op() == JSConstant;
553     }
554     bool isInt32Constant(Node* node)
555     {
556         return isJSConstant(node) && valueOfJSConstant(node).isInt32();
557     }
558     // Convenience methods for getting constant values.
559     JSValue valueOfJSConstant(Node* node)
560     {
561         ASSERT(isJSConstant(node));
562         return m_codeBlock->getConstant(FirstConstantRegisterIndex + node->constantNumber());
563     }
564     int32_t valueOfInt32Constant(Node* node)
565     {
566         ASSERT(isInt32Constant(node));
567         return valueOfJSConstant(node).asInt32();
568     }
569     
570     // This method returns a JSConstant with the value 'undefined'.
571     Node* constantUndefined()
572     {
573         // Has m_constantUndefined been set up yet?
574         if (m_constantUndefined == UINT_MAX) {
575             // Search the constant pool for undefined, if we find it, we can just reuse this!
576             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
577             for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
578                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
579                 if (testMe.isUndefined())
580                     return getJSConstant(m_constantUndefined);
581             }
582
583             // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
584             ASSERT(m_constants.size() == numberOfConstants);
585             addConstant(jsUndefined());
586             m_constants.append(ConstantRecord());
587             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
588         }
589
590         // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
591         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
592         return getJSConstant(m_constantUndefined);
593     }
594
595     // This method returns a JSConstant with the value 'null'.
596     Node* constantNull()
597     {
598         // Has m_constantNull been set up yet?
599         if (m_constantNull == UINT_MAX) {
600             // Search the constant pool for null, if we find it, we can just reuse this!
601             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
602             for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
603                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
604                 if (testMe.isNull())
605                     return getJSConstant(m_constantNull);
606             }
607
608             // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
609             ASSERT(m_constants.size() == numberOfConstants);
610             addConstant(jsNull());
611             m_constants.append(ConstantRecord());
612             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
613         }
614
615         // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
616         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
617         return getJSConstant(m_constantNull);
618     }
619
620     // This method returns a JSConstant with the integer value 1.
621     Node* one()
622     {
623         // Has m_constant1 been set up yet?
624         if (m_constant1 == UINT_MAX) {
625             // Search the constant pool for the value 1, if we find it, we can just reuse this!
626             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
627             for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
628                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
629                 if (testMe.isInt32() && testMe.asInt32() == 1)
630                     return getJSConstant(m_constant1);
631             }
632
633             // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
634             ASSERT(m_constants.size() == numberOfConstants);
635             addConstant(jsNumber(1));
636             m_constants.append(ConstantRecord());
637             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
638         }
639
640         // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
641         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
642         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
643         return getJSConstant(m_constant1);
644     }
645     
646     // This method returns a JSConstant with the value NaN.
647     Node* constantNaN()
648     {
649         JSValue nan = jsNaN();
650         
651         // Has m_constantNaN been set up yet?
652         if (m_constantNaN == UINT_MAX) {
653             // Search the constant pool for the value NaN, if we find it, we can just reuse this!
654             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
655             for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
656                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
657                 if (JSValue::encode(testMe) == JSValue::encode(nan))
658                     return getJSConstant(m_constantNaN);
659             }
660
661             // Add the value nan to the CodeBlock's constants, and add a corresponding slot in m_constants.
662             ASSERT(m_constants.size() == numberOfConstants);
663             addConstant(nan);
664             m_constants.append(ConstantRecord());
665             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
666         }
667
668         // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value nan.
669         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
670         ASSERT(std::isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
671         return getJSConstant(m_constantNaN);
672     }
673     
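    // Return the WeakJSConstant node for the given cell, creating and caching it on first use.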
674     Node* cellConstant(JSCell* cell)
675     {
676         HashMap<JSCell*, Node*>::AddResult result = m_cellConstantNodes.add(cell, 0);
677         if (result.isNewEntry)
678             result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));
679         
680         return result.iterator->value;
681     }
682     
683     InlineCallFrame* inlineCallFrame()
684     {
685         return m_inlineStackTop->m_inlineCallFrame;
686     }
687
688     CodeOrigin currentCodeOrigin()
689     {
690         return CodeOrigin(m_currentIndex, inlineCallFrame());
691     }
692     
693     bool canFold(Node* node)
694     {
695         return node->isStronglyProvedConstantIn(inlineCallFrame());
696     }
697
698     // Our codegen for constant strict equality performs a bitwise comparison,
699     // so we can only select values that have a consistent bitwise identity.
700     bool isConstantForCompareStrictEq(Node* node)
701     {
702         if (!node->isConstant())
703             return false;
704         JSValue value = valueOfJSConstant(node);
705         return value.isBoolean() || value.isUndefinedOrNull();
706     }
707     
708     Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
709     {
710         Node* result = m_graph.addNode(
711             SpecNone, op, currentCodeOrigin(), Edge(child1), Edge(child2), Edge(child3));
712         ASSERT(op != Phi);
713         m_currentBlock->append(result);
714         return result;
715     }
716     Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
717     {
718         Node* result = m_graph.addNode(
719             SpecNone, op, currentCodeOrigin(), child1, child2, child3);
720         ASSERT(op != Phi);
721         m_currentBlock->append(result);
722         return result;
723     }
724     Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
725     {
726         Node* result = m_graph.addNode(
727             SpecNone, op, currentCodeOrigin(), info, Edge(child1), Edge(child2), Edge(child3));
728         ASSERT(op != Phi);
729         m_currentBlock->append(result);
730         return result;
731     }
732     Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
733     {
734         Node* result = m_graph.addNode(
735             SpecNone, op, currentCodeOrigin(), info1, info2,
736             Edge(child1), Edge(child2), Edge(child3));
737         ASSERT(op != Phi);
738         m_currentBlock->append(result);
739         return result;
740     }
741     
742     Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
743     {
744         Node* result = m_graph.addNode(
745             SpecNone, Node::VarArg, op, currentCodeOrigin(), info1, info2,
746             m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
747         ASSERT(op != Phi);
748         m_currentBlock->append(result);
749         
750         m_numPassedVarArgs = 0;
751         
752         return result;
753     }
754
755     void addVarArgChild(Node* child)
756     {
757         m_graph.m_varArgChildren.append(Edge(child));
758         m_numPassedVarArgs++;
759     }
760     
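    // Build a varargs Call/Construct node from the bytecode's callee, argument count, and
    // register offset operands, growing m_parameterSlots as needed and storing the result.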
761     Node* addCall(Instruction* currentInstruction, NodeType op)
762     {
763         SpeculatedType prediction = getPrediction();
764         
765         addVarArgChild(get(currentInstruction[2].u.operand));
766         int argCount = currentInstruction[3].u.operand;
767         if (JSStack::CallFrameHeaderSize + (unsigned)argCount > m_parameterSlots)
768             m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
769
770         int registerOffset = currentInstruction[4].u.operand;
771         int dummyThisArgument = op == Call ? 0 : 1;
772         for (int i = 0 + dummyThisArgument; i < argCount; ++i)
773             addVarArgChild(get(registerOffset + argumentToOperand(i)));
774
775         Node* call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
776         set(currentInstruction[1].u.operand, call);
777         return call;
778     }
779     
780     Node* cellConstantWithStructureCheck(JSCell* object, Structure* structure)
781     {
782         Node* objectNode = cellConstant(object);
783         addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
784         return objectNode;
785     }
786     
787     Node* cellConstantWithStructureCheck(JSCell* object)
788     {
789         return cellConstantWithStructureCheck(object, object->structure());
790     }
791
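    // Value-profile-based result predictions. getPrediction() additionally plants a
    // ForceOSRExit when the profile has no information.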
792     SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
793     {
794         ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
795         return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(locker, bytecodeIndex);
796     }
797
798     SpeculatedType getPrediction(unsigned bytecodeIndex)
799     {
800         SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);
801         
802         if (prediction == SpecNone) {
803             // We have no information about what values this node generates. Give up
804             // on executing this code, since we're likely to do more damage than good.
805             addToGraph(ForceOSRExit);
806         }
807         
808         return prediction;
809     }
810     
811     SpeculatedType getPredictionWithoutOSRExit()
812     {
813         return getPredictionWithoutOSRExit(m_currentIndex);
814     }
815     
816     SpeculatedType getPrediction()
817     {
818         return getPrediction(m_currentIndex);
819     }
820     
821     ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
822     {
823         ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
824         profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
825         return ArrayMode::fromObserved(locker, profile, action, false);
826     }
827     
828     ArrayMode getArrayMode(ArrayProfile* profile)
829     {
830         return getArrayMode(profile, Array::Read);
831     }
832     
833     ArrayMode getArrayModeConsideringSlowPath(ArrayProfile* profile, Array::Action action)
834     {
835         ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
836         
837         profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
838         
839 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
840         if (m_inlineStackTop->m_profiledBlock->numberOfRareCaseProfiles())
841             dataLogF("Slow case profile for bc#%u: %u\n", m_currentIndex, m_inlineStackTop->m_profiledBlock->rareCaseProfileForBytecodeOffset(m_currentIndex)->m_counter);
842         dataLogF("Array profile for bc#%u: %u %s%s\n", m_currentIndex, profile->observedArrayModes(locker), profile->structureIsPolymorphic(locker) ? " (polymorphic)" : "", profile->mayInterceptIndexedAccesses(locker) ? " (may intercept)" : "");
843 #endif
844         
845         bool makeSafe =
846             m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
847             || profile->outOfBounds(locker);
848         
849         ArrayMode result = ArrayMode::fromObserved(locker, profile, action, makeSafe);
850         
851         return result;
852     }
853     
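    // Conservatively add overflow/negative-zero flags to an arithmetic node when slow-case
    // profiling or prior OSR exits indicate the fast path is unsafe.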
854     Node* makeSafe(Node* node)
855     {
856         bool likelyToTakeSlowCase;
857         if (!isX86() && node->op() == ArithMod)
858             likelyToTakeSlowCase = false;
859         else
860             likelyToTakeSlowCase = m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex);
861         
862         if (!likelyToTakeSlowCase
863             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
864             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
865             return node;
866         
867         switch (node->op()) {
868         case UInt32ToNumber:
869         case ArithAdd:
870         case ArithSub:
871         case ArithNegate:
872         case ValueAdd:
873         case ArithMod: // for ArithMod, "MayOverflow" means we tried to divide by zero, or we saw a double.
874             node->mergeFlags(NodeMayOverflow);
875             break;
876             
877         case ArithMul:
878             if (m_inlineStackTop->m_profiledBlock->likelyToTakeDeepestSlowCase(m_currentIndex)
879                 || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)) {
880 #if DFG_ENABLE(DEBUG_VERBOSE)
881                 dataLogF("Making ArithMul @%u take deepest slow case.\n", node->index());
882 #endif
883                 node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
884             } else if (m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
885                        || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero)) {
886 #if DFG_ENABLE(DEBUG_VERBOSE)
887                 dataLogF("Making ArithMul @%u take faster slow case.\n", node->index());
888 #endif
889                 node->mergeFlags(NodeMayNegZero);
890             }
891             break;
892             
893         default:
894             RELEASE_ASSERT_NOT_REACHED();
895             break;
896         }
897         
898         return node;
899     }
900     
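    // Like makeSafe(), but for ArithDiv: keyed off the special fast case counter, which
    // tracks divisions whose result was not an integer.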
901     Node* makeDivSafe(Node* node)
902     {
903         ASSERT(node->op() == ArithDiv);
904         
905         // The main slow case counter for op_div in the old JIT counts only when
906         // the operands are not numbers. We don't care about that since we already
907         // have speculations in place that take care of that separately. We only
908         // care about when the outcome of the division is not an integer, which
909         // is what the special fast case counter tells us.
910         
911         if (!m_inlineStackTop->m_profiledBlock->couldTakeSpecialFastCase(m_currentIndex)
912             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
913             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
914             return node;
915         
916 #if DFG_ENABLE(DEBUG_VERBOSE)
917         dataLogF("Making %s @%u safe at bc#%u because special fast-case counter is at %u and exit profiles say %d, %d\n", Graph::opName(node->op()), node->index(), m_currentIndex, m_inlineStackTop->m_profiledBlock->specialFastCaseProfileForBytecodeOffset(m_currentIndex)->m_counter, m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow), m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero));
918 #endif
919         
920         // FIXME: It might be possible to make this more granular. The DFG certainly can
921         // distinguish between negative zero and overflow in its exit profiles.
922         node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
923         
924         return node;
925     }
926     
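    // Check that the prototype chain recorded for a non-direct put transition still matches
    // the current structures of the prototypes it traverses.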
927     bool structureChainIsStillValid(bool direct, Structure* previousStructure, StructureChain* chain)
928     {
929         if (direct)
930             return true;
931         
932         if (!previousStructure->storedPrototype().isNull() && previousStructure->storedPrototype().asCell()->structure() != chain->head()->get())
933             return false;
934         
935         for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
936             if (!(*it)->storedPrototype().isNull() && (*it)->storedPrototype().asCell()->structure() != it[1].get())
937                 return false;
938         }
939         
940         return true;
941     }
942     
943     void buildOperandMapsIfNecessary();
944     
945     VM* m_vm;
946     CodeBlock* m_codeBlock;
947     CodeBlock* m_profiledBlock;
948     Graph& m_graph;
949
950     // The current block being generated.
951     BasicBlock* m_currentBlock;
952     // The bytecode index of the current instruction being generated.
953     unsigned m_currentIndex;
954
955     // We use these values during code generation, and to avoid the need for
956     // special handling we make sure they are available as constants in the
957     // CodeBlock's constant pool. These variables are initialized to
958     // UINT_MAX, and lazily updated to hold an index into the CodeBlock's
959     // constant pool, as necessary.
960     unsigned m_constantUndefined;
961     unsigned m_constantNull;
962     unsigned m_constantNaN;
963     unsigned m_constant1;
964     HashMap<JSCell*, unsigned> m_cellConstants;
965     HashMap<JSCell*, Node*> m_cellConstantNodes;
966
967     // A constant in the constant pool may be represented by more than one
968     // node in the graph, depending on the context in which it is being used.
969     struct ConstantRecord {
970         ConstantRecord()
971             : asInt32(0)
972             , asNumeric(0)
973             , asJSValue(0)
974         {
975         }
976
977         Node* asInt32;
978         Node* asNumeric;
979         Node* asJSValue;
980     };
981
982     // One ConstantRecord per entry in the CodeBlock's constant pool, holding the
983     // graph nodes (if any) that currently represent each constant.
984     Vector<ConstantRecord, 16> m_constants;
985
986     // The number of arguments passed to the function.
987     unsigned m_numArguments;
988     // The number of locals (vars + temporaries) used in the function.
989     unsigned m_numLocals;
990     // The set of registers we need to preserve across BasicBlock boundaries;
991     // typically equal to the set of vars, but we expand this to cover all
992     // temporaries that persist across blocks (due to ?:, &&, ||, etc.).
993     BitVector m_preservedVars;
994     // The number of slots (in units of sizeof(Register)) that we need to
995     // preallocate for calls emanating from this frame. This includes the
996     // size of the CallFrame, only if this is not a leaf function.  (I.e.
997     // this is 0 if and only if this function is a leaf.)
998     unsigned m_parameterSlots;
999     // The number of var args passed to the next var arg node.
1000     unsigned m_numPassedVarArgs;
1001
1002     HashMap<ConstantBufferKey, unsigned> m_constantBufferCache;
1003     
1004     struct InlineStackEntry {
1005         ByteCodeParser* m_byteCodeParser;
1006         
1007         CodeBlock* m_codeBlock;
1008         CodeBlock* m_profiledBlock;
1009         InlineCallFrame* m_inlineCallFrame;
1010         
1011         ScriptExecutable* executable() { return m_codeBlock->ownerExecutable(); }
1012         
1013         QueryableExitProfile m_exitProfile;
1014         
1015         // Remapping of identifier and constant numbers from the code block being
1016         // inlined (inline callee) to the code block that we're inlining into
1017         // (the machine code block, which is the transitive, though not necessarily
1018         // direct, caller).
1019         Vector<unsigned> m_identifierRemap;
1020         Vector<unsigned> m_constantRemap;
1021         Vector<unsigned> m_constantBufferRemap;
1022         Vector<unsigned> m_switchRemap;
1023         
1024         // Blocks introduced by this code block, which need successor linking.
1025         // May include up to one basic block that includes the continuation after
1026         // the callsite in the caller. These must be appended in the order that they
1027         // are created, but their bytecodeBegin values need not be in order as they
1028         // are ignored.
1029         Vector<UnlinkedBlock> m_unlinkedBlocks;
1030         
1031         // Potential block linking targets. Must be sorted by bytecodeBegin, and
1032         // cannot have two blocks that have the same bytecodeBegin. For this very
1033         // reason, this is not equivalent to 
1034         Vector<BasicBlock*> m_blockLinkingTargets;
1035         
1036         // If the callsite's basic block was split into two, then this will be
1037         // the head of the callsite block. It needs its successors linked to the
1038         // m_unlinkedBlocks, but not the other way around: there's no way for
1039         // any blocks in m_unlinkedBlocks to jump back into this block.
1040         BasicBlock* m_callsiteBlockHead;
1041         
1042         // Does the callsite block head need linking? This is typically true
1043         // but will be false for the machine code block's inline stack entry
1044         // (since that one is not inlined) and for cases where an inline callee
1045         // did the linking for us.
1046         bool m_callsiteBlockHeadNeedsLinking;
1047         
1048         VirtualRegister m_returnValue;
1049         
1050         // Speculations about variable types collected from the profiled code block,
1051         // which are based on OSR exit profiles that past DFG compilations of this
1052         // code block had gathered.
1053         LazyOperandValueProfileParser m_lazyOperands;
1054         
1055         // Did we see any returns? We need to handle the (uncommon but necessary)
1056         // case where a procedure that does not return was inlined.
1057         bool m_didReturn;
1058         
1059         // Did we have any early returns?
1060         bool m_didEarlyReturn;
1061         
1062         // Pointers to the argument position trackers for this slice of code.
1063         Vector<ArgumentPosition*> m_argumentPositions;
1064         
1065         InlineStackEntry* m_caller;
1066         
1067         InlineStackEntry(
1068             ByteCodeParser*,
1069             CodeBlock*,
1070             CodeBlock* profiledBlock,
1071             BasicBlock* callsiteBlockHead,
1072             JSFunction* callee, // Null if this is a closure call.
1073             VirtualRegister returnValueVR,
1074             VirtualRegister inlineCallFrameStart,
1075             int argumentCountIncludingThis,
1076             CodeSpecializationKind);
1077         
1078         ~InlineStackEntry()
1079         {
1080             m_byteCodeParser->m_inlineStackTop = m_caller;
1081         }
1082         
1083         int remapOperand(int operand) const
1084         {
1085             if (!m_inlineCallFrame)
1086                 return operand;
1087             
1088             if (operand >= FirstConstantRegisterIndex) {
1089                 int result = m_constantRemap[operand - FirstConstantRegisterIndex];
1090                 ASSERT(result >= FirstConstantRegisterIndex);
1091                 return result;
1092             }
1093
1094             ASSERT(operand != JSStack::Callee);
1095
1096             return operand + m_inlineCallFrame->stackOffset;
1097         }
1098     };
1099     
1100     InlineStackEntry* m_inlineStackTop;
1101
1102     // Have we built operand maps? We initialize them lazily, and only when doing
1103     // inlining.
1104     bool m_haveBuiltOperandMaps;
1105     // Mapping between identifier names and numbers.
1106     BorrowedIdentifierMap m_identifierMap;
1107     // Mapping between values and constant numbers.
1108     JSValueMap m_jsValueMap;
1109     // Index of the empty value, or UINT_MAX if there is no mapping. This is a horrible
1110     // work-around for the fact that JSValueMap can't handle "empty" values.
1111     unsigned m_emptyJSValueIndex;
1112     
1113     Instruction* m_currentInstruction;
1114 };
1115
1116 #define NEXT_OPCODE(name) \
1117     m_currentIndex += OPCODE_LENGTH(name); \
1118     continue
1119
1120 #define LAST_OPCODE(name) \
1121     m_currentIndex += OPCODE_LENGTH(name); \
1122     return shouldContinueParsing
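// NEXT_OPCODE advances m_currentIndex past the current instruction and continues the parse
// loop; LAST_OPCODE does the same but returns from parseBlock, reporting whether parsing
// should continue.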
1123
1124
1125 void ByteCodeParser::handleCall(Instruction* currentInstruction, NodeType op, CodeSpecializationKind kind)
1126 {
1127     ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
1128     
1129     Node* callTarget = get(currentInstruction[2].u.operand);
1130     
1131     CallLinkStatus callLinkStatus;
1132
1133     if (m_graph.isConstant(callTarget))
1134         callLinkStatus = CallLinkStatus(m_graph.valueOfJSConstant(callTarget)).setIsProved(true);
1135     else {
1136         callLinkStatus = CallLinkStatus::computeFor(m_inlineStackTop->m_profiledBlock, m_currentIndex);
1137         callLinkStatus.setHasBadFunctionExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadFunction));
1138         callLinkStatus.setHasBadCacheExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
1139         callLinkStatus.setHasBadExecutableExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadExecutable));
1140     }
1141     
1142 #if DFG_ENABLE(DEBUG_VERBOSE)
1143     dataLog("For call at bc#", m_currentIndex, ": ", callLinkStatus, "\n");
1144 #endif
1145     
1146     if (!callLinkStatus.canOptimize()) {
1147         // Oddly, this conflates calls that haven't executed with calls that behaved sufficiently polymorphically
1148         // that we cannot optimize them.
1149         
1150         addCall(currentInstruction, op);
1151         return;
1152     }
1153     
1154     int argumentCountIncludingThis = currentInstruction[3].u.operand;
1155     int registerOffset = currentInstruction[4].u.operand;
1156
1157     int resultOperand = currentInstruction[1].u.operand;
1158     unsigned nextOffset = m_currentIndex + OPCODE_LENGTH(op_call);
1159     SpeculatedType prediction = getPrediction();
1160
1161     if (InternalFunction* function = callLinkStatus.internalFunction()) {
1162         if (handleConstantInternalFunction(resultOperand, function, registerOffset, argumentCountIncludingThis, prediction, kind)) {
1163             // This phantoming has to be *after* the code for the intrinsic, to signify that
1164             // the inputs must be kept alive whatever exits the intrinsic may do.
1165             addToGraph(Phantom, callTarget);
1166             emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
1167             return;
1168         }
1169         
1170         // Can only handle this using the generic call handler.
1171         addCall(currentInstruction, op);
1172         return;
1173     }
1174         
1175     Intrinsic intrinsic = callLinkStatus.intrinsicFor(kind);
1176     if (intrinsic != NoIntrinsic) {
1177         emitFunctionChecks(callLinkStatus, callTarget, registerOffset, kind);
1178             
1179         if (handleIntrinsic(resultOperand, intrinsic, registerOffset, argumentCountIncludingThis, prediction)) {
1180             // This phantoming has to be *after* the code for the intrinsic, to signify that
1181             // the inputs must be kept alive whatever exits the intrinsic may do.
1182             addToGraph(Phantom, callTarget);
1183             emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
1184             if (m_graph.compilation())
1185                 m_graph.compilation()->noticeInlinedCall();
1186             return;
1187         }
1188     } else if (handleInlining(callTarget, resultOperand, callLinkStatus, registerOffset, argumentCountIncludingThis, nextOffset, kind)) {
1189         if (m_graph.compilation())
1190             m_graph.compilation()->noticeInlinedCall();
1191         return;
1192     }
1193     
1194     addCall(currentInstruction, op);
1195 }
1196
1197 void ByteCodeParser::emitFunctionChecks(const CallLinkStatus& callLinkStatus, Node* callTarget, int registerOffset, CodeSpecializationKind kind)
1198 {
1199     Node* thisArgument;
1200     if (kind == CodeForCall)
1201         thisArgument = get(registerOffset + argumentToOperand(0));
1202     else
1203         thisArgument = 0;
1204
1205     if (callLinkStatus.isProved()) {
1206         addToGraph(Phantom, callTarget, thisArgument);
1207         return;
1208     }
1209     
1210     ASSERT(callLinkStatus.canOptimize());
1211     
1212     if (JSFunction* function = callLinkStatus.function())
1213         addToGraph(CheckFunction, OpInfo(function), callTarget, thisArgument);
1214     else {
1215         ASSERT(callLinkStatus.structure());
1216         ASSERT(callLinkStatus.executable());
1217         
1218         addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(callLinkStatus.structure())), callTarget);
1219         addToGraph(CheckExecutable, OpInfo(callLinkStatus.executable()), callTarget, thisArgument);
1220     }
1221 }
1222
1223 void ByteCodeParser::emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind kind)
1224 {
1225     for (int i = kind == CodeForCall ? 0 : 1; i < argumentCountIncludingThis; ++i)
1226         addToGraph(Phantom, get(registerOffset + argumentToOperand(i)));
1227 }
1228
1229 bool ByteCodeParser::handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus& callLinkStatus, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind kind)
1230 {
1231     // First, the really simple checks: do we have an actual JS function?
1232     if (!callLinkStatus.executable())
1233         return false;
1234     if (callLinkStatus.executable()->isHostFunction())
1235         return false;
1236     
1237     FunctionExecutable* executable = jsCast<FunctionExecutable*>(callLinkStatus.executable());
1238     
1239     // Does the number of arguments we're passing match the arity of the target? We currently
1240     // inline only if the number of arguments passed is greater than or equal to the number
1241     // of arguments expected.
1242     if (static_cast<int>(executable->parameterCount()) + 1 > argumentCountIncludingThis)
1243         return false;
1244     
1245     // Have we exceeded inline stack depth, or are we trying to inline a recursive call?
1246     // If either of these are detected, then don't inline.
1247     unsigned depth = 0;
1248     for (InlineStackEntry* entry = m_inlineStackTop; entry; entry = entry->m_caller) {
1249         ++depth;
1250         if (depth >= Options::maximumInliningDepth())
1251             return false; // Depth exceeded.
1252         
1253         if (entry->executable() == executable)
1254             return false; // Recursion detected.
1255     }
1256     
1257     // Do we have a code block, and does the code block's size match the heuristics/requirements for
1258     // being an inline candidate? We might not have a code block if code was thrown away or if we
1259     // simply hadn't actually made this call yet. We could still theoretically attempt to inline it
1260     // if we had a static proof of what was being called; this might happen for example if you call a
1261     // global function, where watchpointing gives us static information. Overall, it's a rare case
1262     // because we expect that any hot callees would have already been compiled.
1263     CodeBlock* codeBlock = executable->baselineCodeBlockFor(kind);
1264     if (!codeBlock)
1265         return false;
1266     if (!canInlineFunctionFor(codeBlock, kind, callLinkStatus.isClosureCall()))
1267         return false;
1268     
1269 #if DFG_ENABLE(DEBUG_VERBOSE)
1270     dataLogF("Inlining executable %p.\n", executable);
1271 #endif
1272     
1273     // Now we know without a doubt that we are committed to inlining. So begin the process
1274     // by checking the callee (if necessary) and making sure that arguments and the callee
1275     // are flushed.
1276     emitFunctionChecks(callLinkStatus, callTargetNode, registerOffset, kind);
1277     
1278     // FIXME: Don't flush constants!
1279     
1280     int inlineCallFrameStart = m_inlineStackTop->remapOperand(registerOffset) - JSStack::CallFrameHeaderSize;
1281     
1282     // Make sure that the area used by the call frame is reserved.
1283     for (int arg = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numVars; arg-- > inlineCallFrameStart;)
1284         m_preservedVars.set(arg);
1285     
1286     // Make sure that we have enough locals.
1287     unsigned newNumLocals = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters;
1288     if (newNumLocals > m_numLocals) {
1289         m_numLocals = newNumLocals;
1290         for (size_t i = 0; i < m_graph.numBlocks(); ++i)
1291             m_graph.block(i)->ensureLocals(newNumLocals);
1292     }
1293     
1294     size_t argumentPositionStart = m_graph.m_argumentPositions.size();
1295
1296     InlineStackEntry inlineStackEntry(
1297         this, codeBlock, codeBlock, m_graph.lastBlock(), callLinkStatus.function(),
1298         (VirtualRegister)m_inlineStackTop->remapOperand(resultOperand),
1299         (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind);
1300     
1301     // This is where the actual inlining really happens.
1302     unsigned oldIndex = m_currentIndex;
1303     m_currentIndex = 0;
1304
1305     addToGraph(InlineStart, OpInfo(argumentPositionStart));
1306     if (callLinkStatus.isClosureCall()) {
1307         addToGraph(SetCallee, callTargetNode);
1308         addToGraph(SetMyScope, addToGraph(GetScope, callTargetNode));
1309     }
1310     
1311     parseCodeBlock();
1312     
1313     m_currentIndex = oldIndex;
1314     
1315     // If the inlined code created some new basic blocks, then we have linking to do.
1316     if (inlineStackEntry.m_callsiteBlockHead != m_graph.lastBlock()) {
1317         
1318         ASSERT(!inlineStackEntry.m_unlinkedBlocks.isEmpty());
1319         if (inlineStackEntry.m_callsiteBlockHeadNeedsLinking)
1320             linkBlock(inlineStackEntry.m_callsiteBlockHead, inlineStackEntry.m_blockLinkingTargets);
1321         else
1322             ASSERT(inlineStackEntry.m_callsiteBlockHead->isLinked);
1323         
1324         // It's possible that the callsite block head is not owned by the caller.
1325         if (!inlineStackEntry.m_caller->m_unlinkedBlocks.isEmpty()) {
1326             // It's definitely owned by the caller, because the caller created new blocks.
1327             // Assert that this all adds up.
1328             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_block == inlineStackEntry.m_callsiteBlockHead);
1329             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking);
1330             inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking = false;
1331         } else {
1332             // It's definitely not owned by the caller. Tell the caller that it does not
1333             // need to link its callsite block head, because we already did it.
1334             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking);
1335             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHead == inlineStackEntry.m_callsiteBlockHead);
1336             inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking = false;
1337         }
1338         
1339         linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
1340     } else
1341         ASSERT(inlineStackEntry.m_unlinkedBlocks.isEmpty());
1342     
1343     BasicBlock* lastBlock = m_graph.lastBlock();
1344     // If there was a return, but no early returns, then we're done. We allow parsing of
1345     // the caller to continue in whatever basic block we're in right now.
1346     if (!inlineStackEntry.m_didEarlyReturn && inlineStackEntry.m_didReturn) {
1347         ASSERT(lastBlock->isEmpty() || !lastBlock->last()->isTerminal());
1348         
1349         // If we created new blocks then the last block needs linking, but in the
1350         // caller. It doesn't need to be linked to, but it needs outgoing links.
1351         if (!inlineStackEntry.m_unlinkedBlocks.isEmpty()) {
1352 #if DFG_ENABLE(DEBUG_VERBOSE)
1353             dataLogF("Reascribing bytecode index of block %p from bc#%u to bc#%u (inline return case).\n", lastBlock, lastBlock->bytecodeBegin, m_currentIndex);
1354 #endif
1355             // For debugging purposes, set the bytecodeBegin. Note that this doesn't matter
1356             // for release builds because this block will never serve as a potential target
1357             // in the linker's binary search.
1358             lastBlock->bytecodeBegin = m_currentIndex;
1359             m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.lastBlock()));
1360         }
1361         
1362         m_currentBlock = m_graph.lastBlock();
1363         
1364 #if DFG_ENABLE(DEBUG_VERBOSE)
1365         dataLogF("Done inlining executable %p, continuing code generation at epilogue.\n", executable);
1366 #endif
1367         return true;
1368     }
1369     
1370     // If we get to this point then all blocks must end in some sort of terminal.
1371     ASSERT(lastBlock->last()->isTerminal());
1372     
1373
1374     // Need to create a new basic block for the continuation at the caller.
1375     RefPtr<BasicBlock> block = adoptRef(new BasicBlock(nextOffset, m_numArguments, m_numLocals));
1376
1377 #if DFG_ENABLE(DEBUG_VERBOSE)
1378     dataLogF("Creating inline epilogue basic block %p, #%zu for %p bc#%u at inline depth %u.\n", block.get(), m_graph.numBlocks(), m_inlineStackTop->executable(), m_currentIndex, CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()));
1379 #endif
1380
1381     // Link the early returns to the basic block we're about to create.
1382     for (size_t i = 0; i < inlineStackEntry.m_unlinkedBlocks.size(); ++i) {
1383         if (!inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking)
1384             continue;
1385         BasicBlock* blockToLink = inlineStackEntry.m_unlinkedBlocks[i].m_block;
1386         ASSERT(!blockToLink->isLinked);
1387         Node* node = blockToLink->last();
1388         ASSERT(node->op() == Jump);
1389         ASSERT(node->takenBlock() == 0);
1390         node->setTakenBlock(block.get());
1391         inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking = false;
1392 #if !ASSERT_DISABLED
1393         blockToLink->isLinked = true;
1394 #endif
1395     }
1396     
1397     m_currentBlock = block.get();
1398     ASSERT(m_inlineStackTop->m_caller->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_caller->m_blockLinkingTargets.last()->bytecodeBegin < nextOffset);
1399     m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(block.get()));
1400     m_inlineStackTop->m_caller->m_blockLinkingTargets.append(block.get());
1401     m_graph.appendBlock(block);
1402     prepareToParseBlock();
1403     
1404     // At this point we return and continue to generate code for the caller, but
1405     // in the new basic block.
1406 #if DFG_ENABLE(DEBUG_VERBOSE)
1407     dataLogF("Done inlining executable %p, continuing code generation in new block.\n", executable);
1408 #endif
1409     return true;
1410 }
1411
1412 bool ByteCodeParser::handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis)
1413 {
1414     if (argumentCountIncludingThis == 1) { // Math.min()
1415         set(resultOperand, constantNaN());
1416         return true;
1417     }
1418      
1419     if (argumentCountIncludingThis == 2) { // Math.min(x)
1420         Node* result = get(registerOffset + argumentToOperand(1));
1421         addToGraph(Phantom, Edge(result, NumberUse));
1422         set(resultOperand, result);
1423         return true;
1424     }
1425     
1426     if (argumentCountIncludingThis == 3) { // Math.min(x, y)
1427         set(resultOperand, addToGraph(op, get(registerOffset + argumentToOperand(1)), get(registerOffset + argumentToOperand(2))));
1428         return true;
1429     }
1430     
1431     // Don't handle >=3 arguments for now.
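    // E.g. (editorial) Math.min(a, b, c) arrives here with
    // argumentCountIncludingThis == 4 and falls through to the generic call
    // path rather than being turned into ArithMin nodes.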
1432     return false;
1433 }
1434
1435 // FIXME: We dead-code-eliminate unused Math intrinsics, but that's invalid because
1436 // they need to perform the ToNumber conversion, which can have side-effects.
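// Illustrative example (editorial; touchTheWorld() is a stand-in for any
// effectful call):
//     Math.abs({ valueOf: function() { touchTheWorld(); return -1; } });
// Eliminating the unused ArithAbs node would also skip the valueOf() call and
// its side effect, which is why this elimination is not strictly valid.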
1437 bool ByteCodeParser::handleIntrinsic(int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction)
1438 {
1439     switch (intrinsic) {
1440     case AbsIntrinsic: {
1441         if (argumentCountIncludingThis == 1) { // Math.abs()
1442             set(resultOperand, constantNaN());
1443             return true;
1444         }
1445
1446         if (!MacroAssembler::supportsFloatingPointAbs())
1447             return false;
1448
1449         Node* node = addToGraph(ArithAbs, get(registerOffset + argumentToOperand(1)));
1450         if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
1451             node->mergeFlags(NodeMayOverflow);
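        // Editorial note: the one int32 overflow ArithAbs can hit is
        // abs(INT32_MIN), whose result 2^31 does not fit in int32; a prior
        // Overflow exit at this bytecode makes us flag the node conservatively.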
1452         set(resultOperand, node);
1453         return true;
1454     }
1455
1456     case MinIntrinsic:
1457         return handleMinMax(resultOperand, ArithMin, registerOffset, argumentCountIncludingThis);
1458         
1459     case MaxIntrinsic:
1460         return handleMinMax(resultOperand, ArithMax, registerOffset, argumentCountIncludingThis);
1461         
1462     case SqrtIntrinsic: {
1463         if (argumentCountIncludingThis == 1) { // Math.sqrt()
1464             set(resultOperand, constantNaN());
1465             return true;
1466         }
1467         
1468         if (!MacroAssembler::supportsFloatingPointSqrt())
1469             return false;
1470
1471         set(resultOperand, addToGraph(ArithSqrt, get(registerOffset + argumentToOperand(1))));
1472         return true;
1473     }
1474         
1475     case ArrayPushIntrinsic: {
1476         if (argumentCountIncludingThis != 2)
1477             return false;
1478         
1479         ArrayMode arrayMode = getArrayMode(m_currentInstruction[6].u.arrayProfile);
1480         if (!arrayMode.isJSArray())
1481             return false;
1482         switch (arrayMode.type()) {
1483         case Array::Undecided:
1484         case Array::Int32:
1485         case Array::Double:
1486         case Array::Contiguous:
1487         case Array::ArrayStorage: {
1488             Node* arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1489             set(resultOperand, arrayPush);
1490             
1491             return true;
1492         }
1493             
1494         default:
1495             return false;
1496         }
1497     }
1498         
1499     case ArrayPopIntrinsic: {
1500         if (argumentCountIncludingThis != 1)
1501             return false;
1502         
1503         ArrayMode arrayMode = getArrayMode(m_currentInstruction[6].u.arrayProfile);
1504         if (!arrayMode.isJSArray())
1505             return false;
1506         switch (arrayMode.type()) {
1507         case Array::Int32:
1508         case Array::Double:
1509         case Array::Contiguous:
1510         case Array::ArrayStorage: {
1511             Node* arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)));
1512             set(resultOperand, arrayPop);
1513             return true;
1514         }
1515             
1516         default:
1517             return false;
1518         }
1519     }
1520
1521     case CharCodeAtIntrinsic: {
1522         if (argumentCountIncludingThis != 2)
1523             return false;
1524
1525         int thisOperand = registerOffset + argumentToOperand(0);
1526         int indexOperand = registerOffset + argumentToOperand(1);
1527         Node* charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
1528
1529         set(resultOperand, charCode);
1530         return true;
1531     }
1532
1533     case CharAtIntrinsic: {
1534         if (argumentCountIncludingThis != 2)
1535             return false;
1536
1537         int thisOperand = registerOffset + argumentToOperand(0);
1538         int indexOperand = registerOffset + argumentToOperand(1);
1539         Node* charCode = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
1540
1541         set(resultOperand, charCode);
1542         return true;
1543     }
1544     case FromCharCodeIntrinsic: {
1545         if (argumentCountIncludingThis != 2)
1546             return false;
1547
1548         int indexOperand = registerOffset + argumentToOperand(1);
1549         Node* charCode = addToGraph(StringFromCharCode, getToInt32(indexOperand));
1550
1551         set(resultOperand, charCode);
1552
1553         return true;
1554     }
1555
1556     case RegExpExecIntrinsic: {
1557         if (argumentCountIncludingThis != 2)
1558             return false;
1559         
1560         Node* regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1561         set(resultOperand, regExpExec);
1562         
1563         return true;
1564     }
1565         
1566     case RegExpTestIntrinsic: {
1567         if (argumentCountIncludingThis != 2)
1568             return false;
1569         
1570         Node* regExpExec = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1571         set(resultOperand, regExpExec);
1572         
1573         return true;
1574     }
1575
1576     case IMulIntrinsic: {
1577         if (argumentCountIncludingThis != 3)
1578             return false;
1579         int leftOperand = registerOffset + argumentToOperand(1);
1580         int rightOperand = registerOffset + argumentToOperand(2);
1581         Node* left = getToInt32(leftOperand);
1582         Node* right = getToInt32(rightOperand);
1583         set(resultOperand, addToGraph(ArithIMul, left, right));
1584         return true;
1585     }
1586         
1587     default:
1588         return false;
1589     }
1590 }
1591
1592 bool ByteCodeParser::handleTypedArrayConstructor(
1593     int resultOperand, InternalFunction* function, int registerOffset,
1594     int argumentCountIncludingThis, TypedArrayType type)
1595 {
1596     if (!isTypedView(type))
1597         return false;
1598     
1599     if (function->classInfo() != constructorClassInfoForType(type))
1600         return false;
1601     
1602     if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
1603         return false;
1604     
1605     // We only have an intrinsic for the case where you say:
1606     //
1607     // new FooArray(blah);
1608     //
1609     // Of course, 'blah' could be any of the following:
1610     //
1611     // - Integer, indicating that you want to allocate an array of that length.
1612     //   This is the thing we're hoping for, and what we can actually do meaningful
1613     //   optimizations for.
1614     //
1615     // - Array buffer, indicating that you want to create a view onto that _entire_
1616     //   buffer.
1617     //
1618     // - Non-buffer object, indicating that you want to create a copy of that
1619     //   object by pretending that it quacks like an array.
1620     //
1621     // - Anything else, indicating that you want to have an exception thrown at
1622     //   you.
1623     //
1624     // The intrinsic, NewTypedArray, will behave as if it could do any of these
1625     // things up until we do Fixup. Thereafter, if child1 (i.e. 'blah') is
1626     // predicted Int32, then we lock it in as a normal typed array allocation.
1627     // Otherwise, NewTypedArray turns into a totally opaque function call that
1628     // may clobber the world - by virtue of it accessing properties on what could
1629     // be an object.
1630     //
1631     // Note that although the generic form of NewTypedArray sounds sort of awful,
1632     // it is actually quite likely to be more efficient than a fully generic
1633     // Construct. So, we might want to think about making NewTypedArray variadic,
1634     // or else making Construct not super slow.
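    //
    // A few illustrative call shapes (editorial, not exhaustive):
    //
    //     new Int32Array(100);       // integer length - the case we optimize
    //     new Int32Array(buffer);    // view onto an entire ArrayBuffer
    //     new Int32Array([1, 2, 3]); // copy of an array-like object
    //
    // All of them flow through the same NewTypedArray node until Fixup decides,
    // based on the prediction of 'blah', which behavior to lock in.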
1635     
1636     if (argumentCountIncludingThis != 2)
1637         return false;
1638     
1639     set(resultOperand,
1640         addToGraph(NewTypedArray, OpInfo(type), get(registerOffset + argumentToOperand(1))));
1641     return true;
1642 }
1643
1644 bool ByteCodeParser::handleConstantInternalFunction(
1645     int resultOperand, InternalFunction* function, int registerOffset,
1646     int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind kind)
1647 {
1648     // If we ever find that we have a lot of internal functions that we specialize for,
1649     // then we should probably have some sort of hashtable dispatch, or maybe even
1650     // dispatch straight through the MethodTable of the InternalFunction. But for now,
1651     // it seems that this case is hit infrequently enough, and the number of functions
1652     // we know about is small enough, that having just a linear cascade of if statements
1653     // is good enough.
1654     
1655     UNUSED_PARAM(prediction); // Remove this once we do more things.
1656     
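    // Editorial note on the Array constructor handling below: Array(10) (a
    // single argument) becomes NewArrayWithSize, i.e. an array of length 10
    // with undecided indexing, whereas Array(1, 2, 3) becomes a variadic
    // NewArray node whose children are the individual arguments.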
1657     if (function->classInfo() == ArrayConstructor::info()) {
1658         if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
1659             return false;
1660         
1661         if (argumentCountIncludingThis == 2) {
1662             set(resultOperand,
1663                 addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(registerOffset + argumentToOperand(1))));
1664             return true;
1665         }
1666         
1667         for (int i = 1; i < argumentCountIncludingThis; ++i)
1668             addVarArgChild(get(registerOffset + argumentToOperand(i)));
1669         set(resultOperand,
1670             addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
1671         return true;
1672     }
1673     
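    // Editorial note on the String constructor handling below: String(x)
    // yields the primitive string ToString(x) (or the empty string with no
    // arguments), while new String(x) additionally wraps that result in a
    // String object, hence the extra NewStringObject node for CodeForConstruct.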
1674     if (function->classInfo() == StringConstructor::info()) {
1675         Node* result;
1676         
1677         if (argumentCountIncludingThis <= 1)
1678             result = cellConstant(m_vm->smallStrings.emptyString());
1679         else
1680             result = addToGraph(ToString, get(registerOffset + argumentToOperand(1)));
1681         
1682         if (kind == CodeForConstruct)
1683             result = addToGraph(NewStringObject, OpInfo(function->globalObject()->stringObjectStructure()), result);
1684         
1685         set(resultOperand, result);
1686         return true;
1687     }
1688     
1689     for (unsigned typeIndex = 0; typeIndex < NUMBER_OF_TYPED_ARRAY_TYPES; ++typeIndex) {
1690         bool result = handleTypedArrayConstructor(
1691             resultOperand, function, registerOffset, argumentCountIncludingThis,
1692             indexToTypedArrayType(typeIndex));
1693         if (result)
1694             return true;
1695     }
1696     
1697     return false;
1698 }
1699
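// Editorial note: an "inline" offset means the property slot lives directly in
// the object cell, so the base itself serves as the property storage; an
// out-of-line offset lives in the separately allocated butterfly, which is why
// that path first loads it with GetButterfly.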
1700 Node* ByteCodeParser::handleGetByOffset(SpeculatedType prediction, Node* base, unsigned identifierNumber, PropertyOffset offset)
1701 {
1702     Node* propertyStorage;
1703     if (isInlineOffset(offset))
1704         propertyStorage = base;
1705     else
1706         propertyStorage = addToGraph(GetButterfly, base);
1707     Node* getByOffset = addToGraph(GetByOffset, OpInfo(m_graph.m_storageAccessData.size()), OpInfo(prediction), propertyStorage, base);
1708
1709     StorageAccessData storageAccessData;
1710     storageAccessData.offset = offset;
1711     storageAccessData.identifierNumber = identifierNumber;
1712     m_graph.m_storageAccessData.append(storageAccessData);
1713
1714     return getByOffset;
1715 }
1716
1717 void ByteCodeParser::handleGetByOffset(
1718     int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
1719     PropertyOffset offset)
1720 {
1721     set(destinationOperand, handleGetByOffset(prediction, base, identifierNumber, offset));
1722 }
1723
1724 Node* ByteCodeParser::handlePutByOffset(Node* base, unsigned identifier, PropertyOffset offset, Node* value)
1725 {
1726     Node* propertyStorage;
1727     if (isInlineOffset(offset))
1728         propertyStorage = base;
1729     else
1730         propertyStorage = addToGraph(GetButterfly, base);
1731     Node* result = addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, base, value);
1732     
1733     StorageAccessData storageAccessData;
1734     storageAccessData.offset = offset;
1735     storageAccessData.identifierNumber = identifier;
1736     m_graph.m_storageAccessData.append(storageAccessData);
1737
1738     return result;
1739 }
1740
1741 void ByteCodeParser::handleGetById(
1742     int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
1743     const GetByIdStatus& getByIdStatus)
1744 {
1745     if (!getByIdStatus.isSimple()
1746         || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
1747         || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache)) {
1748         set(destinationOperand,
1749             addToGraph(
1750                 getByIdStatus.makesCalls() ? GetByIdFlush : GetById,
1751                 OpInfo(identifierNumber), OpInfo(prediction), base));
1752         return;
1753     }
1754     
1755     ASSERT(getByIdStatus.structureSet().size());
1756                 
1757     // The implementation of GetByOffset does not know to terminate speculative
1758     // execution if it doesn't have a prediction, so we do it manually.
1759     if (prediction == SpecNone)
1760         addToGraph(ForceOSRExit);
1761     else if (m_graph.compilation())
1762         m_graph.compilation()->noticeInlinedGetById();
1763     
1764     Node* originalBaseForBaselineJIT = base;
1765                 
1766     addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(getByIdStatus.structureSet())), base);
1767     
1768     if (getByIdStatus.chain()) {
1769         m_graph.chains().addLazily(getByIdStatus.chain());
1770         Structure* currentStructure = getByIdStatus.structureSet().singletonStructure();
1771         JSObject* currentObject = 0;
1772         for (unsigned i = 0; i < getByIdStatus.chain()->size(); ++i) {
1773             currentObject = asObject(currentStructure->prototypeForLookup(m_inlineStackTop->m_codeBlock));
1774             currentStructure = getByIdStatus.chain()->at(i);
1775             base = cellConstantWithStructureCheck(currentObject, currentStructure);
1776         }
1777     }
1778     
1779     // Unless we want bugs like https://bugs.webkit.org/show_bug.cgi?id=88783, we need to
1780     // ensure that the base of the original get_by_id is kept alive until we're done with
1781     // all of the speculations. We only insert the Phantom if there had been a CheckStructure
1782     // on something other than the base following the CheckStructure on base, or if the
1783     // access was compiled to a WeakJSConstant specific value, in which case we might not
1784     // have any explicit use of the base at all.
1785     if (getByIdStatus.specificValue() || originalBaseForBaselineJIT != base)
1786         addToGraph(Phantom, originalBaseForBaselineJIT);
1787     
1788     if (getByIdStatus.specificValue()) {
1789         ASSERT(getByIdStatus.specificValue().isCell());
1790         
1791         set(destinationOperand, cellConstant(getByIdStatus.specificValue().asCell()));
1792         return;
1793     }
1794     
1795     handleGetByOffset(
1796         destinationOperand, prediction, base, identifierNumber, getByIdStatus.offset());
1797 }
1798
1799 void ByteCodeParser::prepareToParseBlock()
1800 {
1801     for (unsigned i = 0; i < m_constants.size(); ++i)
1802         m_constants[i] = ConstantRecord();
1803     m_cellConstantNodes.clear();
1804 }
1805
1806 Node* ByteCodeParser::getScope(bool skipTop, unsigned skipCount)
1807 {
1808     Node* localBase;
1809     if (inlineCallFrame() && !inlineCallFrame()->isClosureCall()) {
1810         ASSERT(inlineCallFrame()->callee);
1811         localBase = cellConstant(inlineCallFrame()->callee->scope());
1812     } else
1813         localBase = addToGraph(GetMyScope);
1814     if (skipTop) {
1815         ASSERT(!inlineCallFrame());
1816         localBase = addToGraph(SkipTopScope, localBase);
1817     }
1818     for (unsigned n = skipCount; n--;)
1819         localBase = addToGraph(SkipScope, localBase);
1820     return localBase;
1821 }
1822
1823 bool ByteCodeParser::parseBlock(unsigned limit)
1824 {
1825     bool shouldContinueParsing = true;
1826
1827     Interpreter* interpreter = m_vm->interpreter;
1828     Instruction* instructionsBegin = m_inlineStackTop->m_codeBlock->instructions().begin();
1829     unsigned blockBegin = m_currentIndex;
1830     
1831     // If we are the first basic block, introduce markers for arguments. This allows
1832     // us to track if a use of an argument may use the actual argument passed, as
1833     // opposed to using a value we set explicitly.
1834     if (m_currentBlock == m_graph.block(0) && !inlineCallFrame()) {
1835         m_graph.m_arguments.resize(m_numArguments);
1836         for (unsigned argument = 0; argument < m_numArguments; ++argument) {
1837             VariableAccessData* variable = newVariableAccessData(
1838                 argumentToOperand(argument), m_codeBlock->isCaptured(argumentToOperand(argument)));
1839             variable->mergeStructureCheckHoistingFailed(
1840                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
1841             variable->mergeCheckArrayHoistingFailed(
1842                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
1843             
1844             Node* setArgument = addToGraph(SetArgument, OpInfo(variable));
1845             m_graph.m_arguments[argument] = setArgument;
1846             m_currentBlock->variablesAtTail.setArgumentFirstTime(argument, setArgument);
1847         }
1848     }
1849
1850     while (true) {
1851         // Don't extend over jump destinations.
1852         if (m_currentIndex == limit) {
1853             // Ordinarily we want to plant a jump. But refuse to do this if the block is
1854             // empty. This is a special case for inlining, which might otherwise create
1855             // some empty blocks in some cases. When parseBlock() returns with an empty
1856             // block, it will get repurposed instead of creating a new one. Note that this
1857             // logic relies on every bytecode resulting in one or more nodes, which would
1858             // be true anyway except for op_loop_hint, which emits a Phantom to force this
1859             // to be true.
1860             if (!m_currentBlock->isEmpty())
1861                 addToGraph(Jump, OpInfo(m_currentIndex));
1862             else {
1863 #if DFG_ENABLE(DEBUG_VERBOSE)
1864                 dataLogF("Refusing to plant jump at limit %u because block %p is empty.\n", limit, m_currentBlock);
1865 #endif
1866             }
1867             return shouldContinueParsing;
1868         }
1869         
1870         // Switch on the current bytecode opcode.
1871         Instruction* currentInstruction = instructionsBegin + m_currentIndex;
1872         m_currentInstruction = currentInstruction; // Some methods want to use this, and we'd rather not thread it through calls.
1873         OpcodeID opcodeID = interpreter->getOpcodeID(currentInstruction->u.opcode);
1874         
1875         if (m_graph.compilation()) {
1876             addToGraph(CountExecution, OpInfo(m_graph.compilation()->executionCounterFor(
1877                 Profiler::OriginStack(*m_vm->m_perBytecodeProfiler, m_codeBlock, currentCodeOrigin()))));
1878         }
1879         
1880         switch (opcodeID) {
1881
1882         // === Function entry opcodes ===
1883
1884         case op_enter:
1885             // Initialize all locals to undefined.
1886             for (int i = 0; i < m_inlineStackTop->m_codeBlock->m_numVars; ++i)
1887                 set(i, constantUndefined(), SetOnEntry);
1888             NEXT_OPCODE(op_enter);
1889
1890         case op_to_this: {
1891             Node* op1 = getThis();
1892             if (op1->op() != ToThis) {
1893                 ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
1894                 ValueProfile* profile =
1895                     m_inlineStackTop->m_profiledBlock->valueProfileForBytecodeOffset(m_currentIndex);
1896                 profile->computeUpdatedPrediction(locker);
1897 #if DFG_ENABLE(DEBUG_VERBOSE)
1898                 dataLogF("[bc#%u]: profile %p: ", m_currentIndex, profile);
1899                 profile->dump(WTF::dataFile());
1900                 dataLogF("\n");
1901 #endif
1902                 if (profile->m_singletonValueIsTop
1903                     || !profile->m_singletonValue
1904                     || !profile->m_singletonValue.isCell()
1905                     || profile->m_singletonValue.asCell()->classInfo() != Structure::info())
1906                     setThis(addToGraph(ToThis, op1));
1907                 else {
1908                     addToGraph(
1909                         CheckStructure,
1910                         OpInfo(m_graph.addStructureSet(jsCast<Structure*>(profile->m_singletonValue.asCell()))),
1911                         op1);
1912                 }
1913             }
1914             NEXT_OPCODE(op_to_this);
1915         }
1916
1917         case op_create_this: {
1918             int calleeOperand = currentInstruction[2].u.operand;
1919             Node* callee = get(calleeOperand);
1920             bool alreadyEmitted = false;
1921             if (callee->op() == WeakJSConstant) {
1922                 JSCell* cell = callee->weakConstant();
1923                 ASSERT(cell->inherits(JSFunction::info()));
1924                 
1925                 JSFunction* function = jsCast<JSFunction*>(cell);
1926                 ObjectAllocationProfile* allocationProfile = function->tryGetAllocationProfile();
1927                 if (allocationProfile) {
1928                     addToGraph(AllocationProfileWatchpoint, OpInfo(function));
1929                     // The callee is still live up to this point.
1930                     addToGraph(Phantom, callee);
1931                     set(currentInstruction[1].u.operand,
1932                         addToGraph(NewObject, OpInfo(allocationProfile->structure())));
1933                     alreadyEmitted = true;
1934                 }
1935             }
1936             if (!alreadyEmitted)
1937                 set(currentInstruction[1].u.operand,
1938                     addToGraph(CreateThis, OpInfo(currentInstruction[3].u.operand), callee));
1939             NEXT_OPCODE(op_create_this);
1940         }
1941
1942         case op_new_object: {
1943             set(currentInstruction[1].u.operand,
1944                 addToGraph(NewObject,
1945                     OpInfo(currentInstruction[3].u.objectAllocationProfile->structure())));
1946             NEXT_OPCODE(op_new_object);
1947         }
1948             
1949         case op_new_array: {
1950             int startOperand = currentInstruction[2].u.operand;
1951             int numOperands = currentInstruction[3].u.operand;
1952             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
1953             for (int operandIdx = startOperand; operandIdx < startOperand + numOperands; ++operandIdx)
1954                 addVarArgChild(get(operandIdx));
1955             set(currentInstruction[1].u.operand, addToGraph(Node::VarArg, NewArray, OpInfo(profile->selectIndexingType()), OpInfo(0)));
1956             NEXT_OPCODE(op_new_array);
1957         }
1958             
1959         case op_new_array_with_size: {
1960             int lengthOperand = currentInstruction[2].u.operand;
1961             ArrayAllocationProfile* profile = currentInstruction[3].u.arrayAllocationProfile;
1962             set(currentInstruction[1].u.operand, addToGraph(NewArrayWithSize, OpInfo(profile->selectIndexingType()), get(lengthOperand)));
1963             NEXT_OPCODE(op_new_array_with_size);
1964         }
1965             
1966         case op_new_array_buffer: {
1967             int startConstant = currentInstruction[2].u.operand;
1968             int numConstants = currentInstruction[3].u.operand;
1969             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
1970             NewArrayBufferData data;
1971             data.startConstant = m_inlineStackTop->m_constantBufferRemap[startConstant];
1972             data.numConstants = numConstants;
1973             data.indexingType = profile->selectIndexingType();
1974
1975             // If this statement has never executed, we'll have the wrong indexing type in the profile.
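            // E.g. (editorial) a constant buffer holding [1, 2.5] must produce
            // at least double indexing even if a stale profile still claims
            // int32, so we widen the profiled type with each constant value.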
1976             for (int i = 0; i < numConstants; ++i) {
1977                 data.indexingType =
1978                     leastUpperBoundOfIndexingTypeAndValue(
1979                         data.indexingType,
1980                         m_codeBlock->constantBuffer(data.startConstant)[i]);
1981             }
1982             
1983             m_graph.m_newArrayBufferData.append(data);
1984             set(currentInstruction[1].u.operand, addToGraph(NewArrayBuffer, OpInfo(&m_graph.m_newArrayBufferData.last())));
1985             NEXT_OPCODE(op_new_array_buffer);
1986         }
1987             
1988         case op_new_regexp: {
1989             set(currentInstruction[1].u.operand, addToGraph(NewRegexp, OpInfo(currentInstruction[2].u.operand)));
1990             NEXT_OPCODE(op_new_regexp);
1991         }
1992             
1993         case op_get_callee: {
1994             ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
1995             ValueProfile* profile = currentInstruction[2].u.profile;
1996             profile->computeUpdatedPrediction(locker);
1997             if (profile->m_singletonValueIsTop
1998                 || !profile->m_singletonValue
1999                 || !profile->m_singletonValue.isCell())
2000                 set(currentInstruction[1].u.operand, get(JSStack::Callee));
2001             else {
2002                 ASSERT(profile->m_singletonValue.asCell()->inherits(JSFunction::info()));
2003                 Node* actualCallee = get(JSStack::Callee);
2004                 addToGraph(CheckFunction, OpInfo(profile->m_singletonValue.asCell()), actualCallee);
2005                 set(currentInstruction[1].u.operand, addToGraph(WeakJSConstant, OpInfo(profile->m_singletonValue.asCell())));
2006             }
2007             NEXT_OPCODE(op_get_callee);
2008         }
2009
2010         // === Bitwise operations ===
2011
2012         case op_bitand: {
2013             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2014             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2015             set(currentInstruction[1].u.operand, addToGraph(BitAnd, op1, op2));
2016             NEXT_OPCODE(op_bitand);
2017         }
2018
2019         case op_bitor: {
2020             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2021             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2022             set(currentInstruction[1].u.operand, addToGraph(BitOr, op1, op2));
2023             NEXT_OPCODE(op_bitor);
2024         }
2025
2026         case op_bitxor: {
2027             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2028             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2029             set(currentInstruction[1].u.operand, addToGraph(BitXor, op1, op2));
2030             NEXT_OPCODE(op_bitxor);
2031         }
2032
2033         case op_rshift: {
2034             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2035             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2036             Node* result;
2037             // Optimize out shifts by zero.
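            // E.g. (editorial) x >> 0 or x >> 32: the shift amount is masked
            // with 0x1f, so both are no-ops and we can reuse op1, which
            // getToInt32() has already converted to an int32 value.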
2038             if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2039                 result = op1;
2040             else
2041                 result = addToGraph(BitRShift, op1, op2);
2042             set(currentInstruction[1].u.operand, result);
2043             NEXT_OPCODE(op_rshift);
2044         }
2045
2046         case op_lshift: {
2047             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2048             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2049             Node* result;
2050             // Optimize out shifts by zero.
2051             if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2052                 result = op1;
2053             else
2054                 result = addToGraph(BitLShift, op1, op2);
2055             set(currentInstruction[1].u.operand, result);
2056             NEXT_OPCODE(op_lshift);
2057         }
2058
2059         case op_urshift: {
2060             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2061             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2062             Node* result;
2063             // The result of a zero-extending right shift is treated as an unsigned value.
2064             // This means that if the top bit is set, the result is not in the int32 range,
2065             // and as such must be stored as a double. If the shift amount is a constant,
2066             // we may be able to optimize.
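            // Illustrative example (editorial): (-1) >>> 1 is 2147483647 and
            // still fits in int32, whereas (-1) >>> 0 is 4294967295, which is
            // outside the int32 range and must be represented as a double.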
2067             if (isInt32Constant(op2)) {
2068                 // If we know we are shifting by a non-zero amount, then since the operation
2069                 // zero fills we know the top bit of the result must be zero, and as such the
2070                 // result must be within the int32 range. Conversely, if this is a shift by
2071                 // zero, then the result may be changed by the conversion to unsigned, but it
2072                 // is not necessary to perform the shift!
2073                 if (valueOfInt32Constant(op2) & 0x1f)
2074                     result = addToGraph(BitURShift, op1, op2);
2075                 else
2076                     result = makeSafe(addToGraph(UInt32ToNumber, op1));
2077             } else {
2078                 // Cannot optimize at this stage; shift & potentially rebox as a double.
2079                 result = addToGraph(BitURShift, op1, op2);
2080                 result = makeSafe(addToGraph(UInt32ToNumber, result));
2081             }
2082             set(currentInstruction[1].u.operand, result);
2083             NEXT_OPCODE(op_urshift);
2084         }
2085
2086         // === Increment/Decrement opcodes ===
2087
2088         case op_inc: {
2089             unsigned srcDst = currentInstruction[1].u.operand;
2090             Node* op = get(srcDst);
2091             set(srcDst, makeSafe(addToGraph(ArithAdd, op, one())));
2092             NEXT_OPCODE(op_inc);
2093         }
2094
2095         case op_dec: {
2096             unsigned srcDst = currentInstruction[1].u.operand;
2097             Node* op = get(srcDst);
2098             set(srcDst, makeSafe(addToGraph(ArithSub, op, one())));
2099             NEXT_OPCODE(op_dec);
2100         }
2101
2102         // === Arithmetic operations ===
2103
2104         case op_add: {
2105             Node* op1 = get(currentInstruction[2].u.operand);
2106             Node* op2 = get(currentInstruction[3].u.operand);
2107             if (op1->hasNumberResult() && op2->hasNumberResult())
2108                 set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithAdd, op1, op2)));
2109             else
2110                 set(currentInstruction[1].u.operand, makeSafe(addToGraph(ValueAdd, op1, op2)));
2111             NEXT_OPCODE(op_add);
2112         }
2113
2114         case op_sub: {
2115             Node* op1 = get(currentInstruction[2].u.operand);
2116             Node* op2 = get(currentInstruction[3].u.operand);
2117             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithSub, op1, op2)));
2118             NEXT_OPCODE(op_sub);
2119         }
2120
2121         case op_negate: {
2122             Node* op1 = get(currentInstruction[2].u.operand);
2123             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithNegate, op1)));
2124             NEXT_OPCODE(op_negate);
2125         }
2126
2127         case op_mul: {
2128             // Multiply requires that the inputs are not truncated, unfortunately.
2129             Node* op1 = get(currentInstruction[2].u.operand);
2130             Node* op2 = get(currentInstruction[3].u.operand);
2131             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMul, op1, op2)));
2132             NEXT_OPCODE(op_mul);
2133         }
2134
2135         case op_mod: {
2136             Node* op1 = get(currentInstruction[2].u.operand);
2137             Node* op2 = get(currentInstruction[3].u.operand);
2138             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMod, op1, op2)));
2139             NEXT_OPCODE(op_mod);
2140         }
2141
2142         case op_div: {
2143             Node* op1 = get(currentInstruction[2].u.operand);
2144             Node* op2 = get(currentInstruction[3].u.operand);
2145             set(currentInstruction[1].u.operand, makeDivSafe(addToGraph(ArithDiv, op1, op2)));
2146             NEXT_OPCODE(op_div);
2147         }
2148
2149         // === Misc operations ===
2150
2151 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2152         case op_debug:
2153             addToGraph(Breakpoint);
2154             NEXT_OPCODE(op_debug);
2155 #endif
2156         case op_mov: {
2157             Node* op = get(currentInstruction[2].u.operand);
2158             set(currentInstruction[1].u.operand, op);
2159             NEXT_OPCODE(op_mov);
2160         }
2161
2162         case op_check_has_instance:
2163             addToGraph(CheckHasInstance, get(currentInstruction[3].u.operand));
2164             NEXT_OPCODE(op_check_has_instance);
2165
2166         case op_instanceof: {
2167             Node* value = get(currentInstruction[2].u.operand);
2168             Node* prototype = get(currentInstruction[3].u.operand);
2169             set(currentInstruction[1].u.operand, addToGraph(InstanceOf, value, prototype));
2170             NEXT_OPCODE(op_instanceof);
2171         }
2172             
2173         case op_is_undefined: {
2174             Node* value = get(currentInstruction[2].u.operand);
2175             set(currentInstruction[1].u.operand, addToGraph(IsUndefined, value));
2176             NEXT_OPCODE(op_is_undefined);
2177         }
2178
2179         case op_is_boolean: {
2180             Node* value = get(currentInstruction[2].u.operand);
2181             set(currentInstruction[1].u.operand, addToGraph(IsBoolean, value));
2182             NEXT_OPCODE(op_is_boolean);
2183         }
2184
2185         case op_is_number: {
2186             Node* value = get(currentInstruction[2].u.operand);
2187             set(currentInstruction[1].u.operand, addToGraph(IsNumber, value));
2188             NEXT_OPCODE(op_is_number);
2189         }
2190
2191         case op_is_string: {
2192             Node* value = get(currentInstruction[2].u.operand);
2193             set(currentInstruction[1].u.operand, addToGraph(IsString, value));
2194             NEXT_OPCODE(op_is_string);
2195         }
2196
2197         case op_is_object: {
2198             Node* value = get(currentInstruction[2].u.operand);
2199             set(currentInstruction[1].u.operand, addToGraph(IsObject, value));
2200             NEXT_OPCODE(op_is_object);
2201         }
2202
2203         case op_is_function: {
2204             Node* value = get(currentInstruction[2].u.operand);
2205             set(currentInstruction[1].u.operand, addToGraph(IsFunction, value));
2206             NEXT_OPCODE(op_is_function);
2207         }
2208
2209         case op_not: {
2210             Node* value = get(currentInstruction[2].u.operand);
2211             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, value));
2212             NEXT_OPCODE(op_not);
2213         }
2214             
2215         case op_to_primitive: {
2216             Node* value = get(currentInstruction[2].u.operand);
2217             set(currentInstruction[1].u.operand, addToGraph(ToPrimitive, value));
2218             NEXT_OPCODE(op_to_primitive);
2219         }
2220             
2221         case op_strcat: {
2222             int startOperand = currentInstruction[2].u.operand;
2223             int numOperands = currentInstruction[3].u.operand;
2224 #if CPU(X86)
2225             // X86 doesn't have enough registers to compile MakeRope with three arguments.
2226             // Rather than try to be clever, we just make MakeRope dumber on this processor.
2227             const unsigned maxRopeArguments = 2;
2228 #else
2229             const unsigned maxRopeArguments = 3;
2230 #endif
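            // Editorial sketch of the chunking below: with maxRopeArguments == 3
            // and five operands a..e, the loop first emits MakeRope(a, b, c) and
            // then feeds that rope back in as the first child, so the final node
            // set at the end is MakeRope(MakeRope(a, b, c), d, e).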
2231             OwnArrayPtr<Node*> toStringNodes = adoptArrayPtr(new Node*[numOperands]);
2232             for (int i = 0; i < numOperands; i++)
2233                 toStringNodes[i] = addToGraph(ToString, get(startOperand + i));
2234
2235             for (int i = 0; i < numOperands; i++)
2236                 addToGraph(Phantom, toStringNodes[i]);
2237
2238             Node* operands[AdjacencyList::Size];
2239             unsigned indexInOperands = 0;
2240             for (unsigned i = 0; i < AdjacencyList::Size; ++i)
2241                 operands[i] = 0;
2242             for (int operandIdx = 0; operandIdx < numOperands; ++operandIdx) {
2243                 if (indexInOperands == maxRopeArguments) {
2244                     operands[0] = addToGraph(MakeRope, operands[0], operands[1], operands[2]);
2245                     for (unsigned i = 1; i < AdjacencyList::Size; ++i)
2246                         operands[i] = 0;
2247                     indexInOperands = 1;
2248                 }
2249                 
2250                 ASSERT(indexInOperands < AdjacencyList::Size);
2251                 ASSERT(indexInOperands < maxRopeArguments);
2252                 operands[indexInOperands++] = toStringNodes[operandIdx];
2253             }
2254             set(currentInstruction[1].u.operand,
2255                 addToGraph(MakeRope, operands[0], operands[1], operands[2]));
2256             NEXT_OPCODE(op_strcat);
2257         }
2258
2259         case op_less: {
2260             Node* op1 = get(currentInstruction[2].u.operand);
2261             Node* op2 = get(currentInstruction[3].u.operand);
2262             if (canFold(op1) && canFold(op2)) {
2263                 JSValue a = valueOfJSConstant(op1);
2264                 JSValue b = valueOfJSConstant(op2);
2265                 if (a.isNumber() && b.isNumber()) {
2266                     set(currentInstruction[1].u.operand,
2267                         getJSConstantForValue(jsBoolean(a.asNumber() < b.asNumber())));
2268                     NEXT_OPCODE(op_less);
2269                 }
2270             }
2271             set(currentInstruction[1].u.operand, addToGraph(CompareLess, op1, op2));
2272             NEXT_OPCODE(op_less);
2273         }
2274
2275         case op_lesseq: {
2276             Node* op1 = get(currentInstruction[2].u.operand);
2277             Node* op2 = get(currentInstruction[3].u.operand);
2278             if (canFold(op1) && canFold(op2)) {
2279                 JSValue a = valueOfJSConstant(op1);
2280                 JSValue b = valueOfJSConstant(op2);
2281                 if (a.isNumber() && b.isNumber()) {
2282                     set(currentInstruction[1].u.operand,
2283                         getJSConstantForValue(jsBoolean(a.asNumber() <= b.asNumber())));
2284                     NEXT_OPCODE(op_lesseq);
2285                 }
2286             }
2287             set(currentInstruction[1].u.operand, addToGraph(CompareLessEq, op1, op2));
2288             NEXT_OPCODE(op_lesseq);
2289         }
2290
2291         case op_greater: {
2292             Node* op1 = get(currentInstruction[2].u.operand);
2293             Node* op2 = get(currentInstruction[3].u.operand);
2294             if (canFold(op1) && canFold(op2)) {
2295                 JSValue a = valueOfJSConstant(op1);
2296                 JSValue b = valueOfJSConstant(op2);
2297                 if (a.isNumber() && b.isNumber()) {
2298                     set(currentInstruction[1].u.operand,
2299                         getJSConstantForValue(jsBoolean(a.asNumber() > b.asNumber())));
2300                     NEXT_OPCODE(op_greater);
2301                 }
2302             }
2303             set(currentInstruction[1].u.operand, addToGraph(CompareGreater, op1, op2));
2304             NEXT_OPCODE(op_greater);
2305         }
2306
2307         case op_greatereq: {
2308             Node* op1 = get(currentInstruction[2].u.operand);
2309             Node* op2 = get(currentInstruction[3].u.operand);
2310             if (canFold(op1) && canFold(op2)) {
2311                 JSValue a = valueOfJSConstant(op1);
2312                 JSValue b = valueOfJSConstant(op2);
2313                 if (a.isNumber() && b.isNumber()) {
2314                     set(currentInstruction[1].u.operand,
2315                         getJSConstantForValue(jsBoolean(a.asNumber() >= b.asNumber())));
2316                     NEXT_OPCODE(op_greatereq);
2317                 }
2318             }
2319             set(currentInstruction[1].u.operand, addToGraph(CompareGreaterEq, op1, op2));
2320             NEXT_OPCODE(op_greatereq);
2321         }
2322
2323         case op_eq: {
2324             Node* op1 = get(currentInstruction[2].u.operand);
2325             Node* op2 = get(currentInstruction[3].u.operand);
2326             if (canFold(op1) && canFold(op2)) {
2327                 JSValue a = valueOfJSConstant(op1);
2328                 JSValue b = valueOfJSConstant(op2);
2329                 set(currentInstruction[1].u.operand,
2330                     getJSConstantForValue(jsBoolean(JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
2331                 NEXT_OPCODE(op_eq);
2332             }
2333             set(currentInstruction[1].u.operand, addToGraph(CompareEq, op1, op2));
2334             NEXT_OPCODE(op_eq);
2335         }
2336
2337         case op_eq_null: {
2338             Node* value = get(currentInstruction[2].u.operand);
2339             set(currentInstruction[1].u.operand, addToGraph(CompareEqConstant, value, constantNull()));
2340             NEXT_OPCODE(op_eq_null);
2341         }
2342
2343         case op_stricteq: {
2344             Node* op1 = get(currentInstruction[2].u.operand);
2345             Node* op2 = get(currentInstruction[3].u.operand);
2346             if (canFold(op1) && canFold(op2)) {
2347                 JSValue a = valueOfJSConstant(op1);
2348                 JSValue b = valueOfJSConstant(op2);
2349                 set(currentInstruction[1].u.operand,
2350                     getJSConstantForValue(jsBoolean(JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
2351                 NEXT_OPCODE(op_stricteq);
2352             }
2353             if (isConstantForCompareStrictEq(op1))
2354                 set(currentInstruction[1].u.operand, addToGraph(CompareStrictEqConstant, op2, op1));
2355             else if (isConstantForCompareStrictEq(op2))
2356                 set(currentInstruction[1].u.operand, addToGraph(CompareStrictEqConstant, op1, op2));
2357             else
2358                 set(currentInstruction[1].u.operand, addToGraph(CompareStrictEq, op1, op2));
2359             NEXT_OPCODE(op_stricteq);
2360         }
2361
2362         case op_neq: {
2363             Node* op1 = get(currentInstruction[2].u.operand);
2364             Node* op2 = get(currentInstruction[3].u.operand);
2365             if (canFold(op1) && canFold(op2)) {
2366                 JSValue a = valueOfJSConstant(op1);
2367                 JSValue b = valueOfJSConstant(op2);
2368                 set(currentInstruction[1].u.operand,
2369                     getJSConstantForValue(jsBoolean(!JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
2370                 NEXT_OPCODE(op_neq);
2371             }
2372             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEq, op1, op2)));
2373             NEXT_OPCODE(op_neq);
2374         }
2375
2376         case op_neq_null: {
2377             Node* value = get(currentInstruction[2].u.operand);
2378             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEqConstant, value, constantNull())));
2379             NEXT_OPCODE(op_neq_null);
2380         }
2381
2382         case op_nstricteq: {
2383             Node* op1 = get(currentInstruction[2].u.operand);
2384             Node* op2 = get(currentInstruction[3].u.operand);
2385             if (canFold(op1) && canFold(op2)) {
2386                 JSValue a = valueOfJSConstant(op1);
2387                 JSValue b = valueOfJSConstant(op2);
2388                 set(currentInstruction[1].u.operand,
2389                     getJSConstantForValue(jsBoolean(!JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
2390                 NEXT_OPCODE(op_nstricteq);
2391             }
2392             Node* invertedResult;
2393             if (isConstantForCompareStrictEq(op1))
2394                 invertedResult = addToGraph(CompareStrictEqConstant, op2, op1);
2395             else if (isConstantForCompareStrictEq(op2))
2396                 invertedResult = addToGraph(CompareStrictEqConstant, op1, op2);
2397             else
2398                 invertedResult = addToGraph(CompareStrictEq, op1, op2);
2399             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, invertedResult));
2400             NEXT_OPCODE(op_nstricteq);
2401         }
2402
2403         // === Property access operations ===
2404
2405         case op_get_by_val: {
2406             SpeculatedType prediction = getPrediction();
2407             
2408             Node* base = get(currentInstruction[2].u.operand);
2409             ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Read);
2410             Node* property = get(currentInstruction[3].u.operand);
2411             Node* getByVal = addToGraph(GetByVal, OpInfo(arrayMode.asWord()), OpInfo(prediction), base, property);
2412             set(currentInstruction[1].u.operand, getByVal);
2413
2414             NEXT_OPCODE(op_get_by_val);
2415         }
2416
2417         case op_put_by_val: {
2418             Node* base = get(currentInstruction[1].u.operand);
2419
2420             ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Write);
2421             
2422             Node* property = get(currentInstruction[2].u.operand);
2423             Node* value = get(currentInstruction[3].u.operand);
2424             
2425             addVarArgChild(base);
2426             addVarArgChild(property);
2427             addVarArgChild(value);
2428             addVarArgChild(0); // Leave room for property storage.
2429             addToGraph(Node::VarArg, PutByVal, OpInfo(arrayMode.asWord()), OpInfo(0));
2430
2431             NEXT_OPCODE(op_put_by_val);
2432         }
2433             
2434         case op_get_by_id:
2435         case op_get_by_id_out_of_line:
2436         case op_get_array_length: {
2437             SpeculatedType prediction = getPrediction();
2438             
2439             Node* base = get(currentInstruction[2].u.operand);
2440             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
2441             
2442             StringImpl* uid = m_graph.identifiers()[identifierNumber];
2443             GetByIdStatus getByIdStatus = GetByIdStatus::computeFor(
2444                 m_inlineStackTop->m_profiledBlock, m_currentIndex, uid);
2445             
2446             handleGetById(
2447                 currentInstruction[1].u.operand, prediction, base, identifierNumber, getByIdStatus);
2448
2449             NEXT_OPCODE(op_get_by_id);
2450         }
2451         case op_put_by_id:
2452         case op_put_by_id_out_of_line:
2453         case op_put_by_id_transition_direct:
2454         case op_put_by_id_transition_normal:
2455         case op_put_by_id_transition_direct_out_of_line:
2456         case op_put_by_id_transition_normal_out_of_line: {
2457             Node* value = get(currentInstruction[3].u.operand);
2458             Node* base = get(currentInstruction[1].u.operand);
2459             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
2460             bool direct = currentInstruction[8].u.operand;
2461
2462             PutByIdStatus putByIdStatus = PutByIdStatus::computeFor(
2463                 m_inlineStackTop->m_profiledBlock,
2464                 m_currentIndex,
2465                 m_graph.identifiers()[identifierNumber]);
2466             bool canCountAsInlined = true;
2467             if (!putByIdStatus.isSet()) {
2468                 addToGraph(ForceOSRExit);
2469                 canCountAsInlined = false;
2470             }
2471             
2472             bool hasExitSite =
2473                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
2474                 || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache);
2475             
2476             if (!hasExitSite && putByIdStatus.isSimpleReplace()) {
2477                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2478                 handlePutByOffset(base, identifierNumber, putByIdStatus.offset(), value);
2479             } else if (
2480                 !hasExitSite
2481                 && putByIdStatus.isSimpleTransition()
2482                 && (!putByIdStatus.structureChain()
2483                     || putByIdStatus.structureChain()->isStillValid())) {
2484                 
2485                 m_graph.chains().addLazily(putByIdStatus.structureChain());
2486                 
2487                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2488                 if (!direct) {
2489                     if (!putByIdStatus.oldStructure()->storedPrototype().isNull()) {
2490                         cellConstantWithStructureCheck(
2491                             putByIdStatus.oldStructure()->storedPrototype().asCell());
2492                     }
2493                     
2494                     for (unsigned i = 0; i < putByIdStatus.structureChain()->size(); ++i) {
2495                         JSValue prototype = putByIdStatus.structureChain()->at(i)->storedPrototype();
2496                         if (prototype.isNull())
2497                             continue;
2498                         cellConstantWithStructureCheck(prototype.asCell());
2499                     }
2500                 }
2501                 ASSERT(putByIdStatus.oldStructure()->transitionWatchpointSetHasBeenInvalidated());
2502                 
2503                 Node* propertyStorage;
2504                 StructureTransitionData* transitionData =
2505                     m_graph.addStructureTransitionData(
2506                         StructureTransitionData(
2507                             putByIdStatus.oldStructure(),
2508                             putByIdStatus.newStructure()));
2509
2510                 if (putByIdStatus.oldStructure()->outOfLineCapacity()
2511                     != putByIdStatus.newStructure()->outOfLineCapacity()) {
2512                     
2513                     // If we're growing the property storage then it must be because we're
2514                     // storing into the out-of-line storage.
2515                     ASSERT(!isInlineOffset(putByIdStatus.offset()));
2516                     
2517                     if (!putByIdStatus.oldStructure()->outOfLineCapacity()) {
2518                         propertyStorage = addToGraph(
2519                             AllocatePropertyStorage, OpInfo(transitionData), base);
2520                     } else {
2521                         propertyStorage = addToGraph(
2522                             ReallocatePropertyStorage, OpInfo(transitionData),
2523                             base, addToGraph(GetButterfly, base));
2524                     }
2525                 } else {
2526                     if (isInlineOffset(putByIdStatus.offset()))
2527                         propertyStorage = base;
2528                     else
2529                         propertyStorage = addToGraph(GetButterfly, base);
2530                 }
2531                 
2532                 addToGraph(PutStructure, OpInfo(transitionData), base);
2533                 
2534                 addToGraph(
2535                     PutByOffset,
2536                     OpInfo(m_graph.m_storageAccessData.size()),
2537                     propertyStorage,
2538                     base,
2539                     value);
2540                 
2541                 StorageAccessData storageAccessData;
2542                 storageAccessData.offset = putByIdStatus.offset();
2543                 storageAccessData.identifierNumber = identifierNumber;
2544                 m_graph.m_storageAccessData.append(storageAccessData);
2545             } else {
2546                 if (direct)
2547                     addToGraph(PutByIdDirect, OpInfo(identifierNumber), base, value);
2548                 else
2549                     addToGraph(PutById, OpInfo(identifierNumber), base, value);
2550                 canCountAsInlined = false;
2551             }
2552             
2553             if (canCountAsInlined && m_graph.compilation())
2554                 m_graph.compilation()->noticeInlinedPutById();
2555
2556             NEXT_OPCODE(op_put_by_id);
2557         }
2558
2559         case op_init_global_const_nop: {
2560             NEXT_OPCODE(op_init_global_const_nop);
2561         }
2562
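        // init_global_const writes straight into the global object's register storage; the
        // assertRegisterIsInThisObject() call is a debug check that the register pointer
        // really does live inside this global object.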
2563         case op_init_global_const: {
2564             Node* value = get(currentInstruction[2].u.operand);
2565             addToGraph(
2566                 PutGlobalVar,
2567                 OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2568                 value);
2569             NEXT_OPCODE(op_init_global_const);
2570         }
2571
2572         // === Block terminators. ===
2573
2574         case op_jmp: {
2575             unsigned relativeOffset = currentInstruction[1].u.operand;
2576             addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2577             LAST_OPCODE(op_jmp);
2578         }
2579
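        // Conditional jumps on a known constant are resolved at parse time: a branch that
        // must be taken becomes an unconditional Jump, and one that can never be taken
        // becomes a Phantom placeholder followed by fall-through to the next bytecode.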
2580         case op_jtrue: {
2581             unsigned relativeOffset = currentInstruction[2].u.operand;
2582             Node* condition = get(currentInstruction[1].u.operand);
2583             if (canFold(condition)) {
2584                 TriState state = valueOfJSConstant(condition).pureToBoolean();
2585                 if (state == TrueTriState) {
2586                     addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2587                     LAST_OPCODE(op_jtrue);
2588                 } else if (state == FalseTriState) {
2589                     // Emit a placeholder for this bytecode operation but otherwise
2590                     // just fall through.
2591                     addToGraph(Phantom);
2592                     NEXT_OPCODE(op_jtrue);
2593                 }
2594             }
2595             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jtrue)), condition);
2596             LAST_OPCODE(op_jtrue);
2597         }
2598
2599         case op_jfalse: {
2600             unsigned relativeOffset = currentInstruction[2].u.operand;
2601             Node* condition = get(currentInstruction[1].u.operand);
2602             if (canFold(condition)) {
2603                 TriState state = valueOfJSConstant(condition).pureToBoolean();
2604                 if (state == FalseTriState) {
2605                     addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2606                     LAST_OPCODE(op_jfalse);
2607                 } else if (state == TrueTriState) {
2608                     // Emit a placeholder for this bytecode operation but otherwise
2609                     // just fall through.
2610                     addToGraph(Phantom);
2611                     NEXT_OPCODE(op_jfalse);
2612                 }
2613             }
2614             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jfalse)), OpInfo(m_currentIndex + relativeOffset), condition);
2615             LAST_OPCODE(op_jfalse);
2616         }
2617
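        // jeq_null and jneq_null both compare against the null constant; the negated form
        // simply swaps the two branch targets.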
2618         case op_jeq_null: {
2619             unsigned relativeOffset = currentInstruction[2].u.operand;
2620             Node* value = get(currentInstruction[1].u.operand);
2621             Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2622             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jeq_null)), condition);
2623             LAST_OPCODE(op_jeq_null);
2624         }
2625
2626         case op_jneq_null: {
2627             unsigned relativeOffset = currentInstruction[2].u.operand;
2628             Node* value = get(currentInstruction[1].u.operand);
2629             Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2630             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_null)), OpInfo(m_currentIndex + relativeOffset), condition);
2631             LAST_OPCODE(op_jneq_null);
2632         }
2633
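        // The fused compare-and-jump opcodes (jless through jngreatereq) share one pattern:
        // if both operands are numeric constants, the comparison is folded to a Jump or a
        // Phantom fall-through; otherwise a Compare* node feeds a Branch. The negated forms
        // (jnless, jngreater, ...) reuse the positive comparison and swap the branch targets.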
2634         case op_jless: {
2635             unsigned relativeOffset = currentInstruction[3].u.operand;
2636             Node* op1 = get(currentInstruction[1].u.operand);
2637             Node* op2 = get(currentInstruction[2].u.operand);
2638             if (canFold(op1) && canFold(op2)) {
2639                 JSValue aValue = valueOfJSConstant(op1);
2640                 JSValue bValue = valueOfJSConstant(op2);
2641                 if (aValue.isNumber() && bValue.isNumber()) {
2642                     double a = aValue.asNumber();
2643                     double b = bValue.asNumber();
2644                     if (a < b) {
2645                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2646                         LAST_OPCODE(op_jless);
2647                     } else {
2648                         // Emit a placeholder for this bytecode operation but otherwise
2649                         // just fall through.
2650                         addToGraph(Phantom);
2651                         NEXT_OPCODE(op_jless);
2652                     }
2653                 }
2654             }
2655             Node* condition = addToGraph(CompareLess, op1, op2);
2656             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jless)), condition);
2657             LAST_OPCODE(op_jless);
2658         }
2659
2660         case op_jlesseq: {
2661             unsigned relativeOffset = currentInstruction[3].u.operand;
2662             Node* op1 = get(currentInstruction[1].u.operand);
2663             Node* op2 = get(currentInstruction[2].u.operand);
2664             if (canFold(op1) && canFold(op2)) {
2665                 JSValue aValue = valueOfJSConstant(op1);
2666                 JSValue bValue = valueOfJSConstant(op2);
2667                 if (aValue.isNumber() && bValue.isNumber()) {
2668                     double a = aValue.asNumber();
2669                     double b = bValue.asNumber();
2670                     if (a <= b) {
2671                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2672                         LAST_OPCODE(op_jlesseq);
2673                     } else {
2674                         // Emit a placeholder for this bytecode operation but otherwise
2675                         // just fall through.
2676                         addToGraph(Phantom);
2677                         NEXT_OPCODE(op_jlesseq);
2678                     }
2679                 }
2680             }
2681             Node* condition = addToGraph(CompareLessEq, op1, op2);
2682             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jlesseq)), condition);
2683             LAST_OPCODE(op_jlesseq);
2684         }
2685
2686         case op_jgreater: {
2687             unsigned relativeOffset = currentInstruction[3].u.operand;
2688             Node* op1 = get(currentInstruction[1].u.operand);
2689             Node* op2 = get(currentInstruction[2].u.operand);
2690             if (canFold(op1) && canFold(op2)) {
2691                 JSValue aValue = valueOfJSConstant(op1);
2692                 JSValue bValue = valueOfJSConstant(op2);
2693                 if (aValue.isNumber() && bValue.isNumber()) {
2694                     double a = aValue.asNumber();
2695                     double b = bValue.asNumber();
2696                     if (a > b) {
2697                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2698                         LAST_OPCODE(op_jgreater);
2699                     } else {
2700                         // Emit a placeholder for this bytecode operation but otherwise
2701                         // just fall through.
2702                         addToGraph(Phantom);
2703                         NEXT_OPCODE(op_jgreater);
2704                     }
2705                 }
2706             }
2707             Node* condition = addToGraph(CompareGreater, op1, op2);
2708             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreater)), condition);
2709             LAST_OPCODE(op_jgreater);
2710         }
2711
2712         case op_jgreatereq: {
2713             unsigned relativeOffset = currentInstruction[3].u.operand;
2714             Node* op1 = get(currentInstruction[1].u.operand);
2715             Node* op2 = get(currentInstruction[2].u.operand);
2716             if (canFold(op1) && canFold(op2)) {
2717                 JSValue aValue = valueOfJSConstant(op1);
2718                 JSValue bValue = valueOfJSConstant(op2);
2719                 if (aValue.isNumber() && bValue.isNumber()) {
2720                     double a = aValue.asNumber();
2721                     double b = bValue.asNumber();
2722                     if (a >= b) {
2723                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2724                         LAST_OPCODE(op_jgreatereq);
2725                     } else {
2726                         // Emit a placeholder for this bytecode operation but otherwise
2727                         // just fall through.
2728                         addToGraph(Phantom);
2729                         NEXT_OPCODE(op_jgreatereq);
2730                     }
2731                 }
2732             }
2733             Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2734             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreatereq)), condition);
2735             LAST_OPCODE(op_jgreatereq);
2736         }
2737
2738         case op_jnless: {
2739             unsigned relativeOffset = currentInstruction[3].u.operand;
2740             Node* op1 = get(currentInstruction[1].u.operand);
2741             Node* op2 = get(currentInstruction[2].u.operand);
2742             if (canFold(op1) && canFold(op2)) {
2743                 JSValue aValue = valueOfJSConstant(op1);
2744                 JSValue bValue = valueOfJSConstant(op2);
2745                 if (aValue.isNumber() && bValue.isNumber()) {
2746                     double a = aValue.asNumber();
2747                     double b = bValue.asNumber();
2748                     if (a < b) {
2749                         // Emit a placeholder for this bytecode operation but otherwise
2750                         // just fall through.
2751                         addToGraph(Phantom);
2752                         NEXT_OPCODE(op_jnless);
2753                     } else {
2754                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2755                         LAST_OPCODE(op_jnless);
2756                     }
2757                 }
2758             }
2759             Node* condition = addToGraph(CompareLess, op1, op2);
2760             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnless)), OpInfo(m_currentIndex + relativeOffset), condition);
2761             LAST_OPCODE(op_jnless);
2762         }
2763
2764         case op_jnlesseq: {
2765             unsigned relativeOffset = currentInstruction[3].u.operand;
2766             Node* op1 = get(currentInstruction[1].u.operand);
2767             Node* op2 = get(currentInstruction[2].u.operand);
2768             if (canFold(op1) && canFold(op2)) {
2769                 JSValue aValue = valueOfJSConstant(op1);
2770                 JSValue bValue = valueOfJSConstant(op2);
2771                 if (aValue.isNumber() && bValue.isNumber()) {
2772                     double a = aValue.asNumber();
2773                     double b = bValue.asNumber();
2774                     if (a <= b) {
2775                         // Emit a placeholder for this bytecode operation but otherwise
2776                         // just fall through.
2777                         addToGraph(Phantom);
2778                         NEXT_OPCODE(op_jnlesseq);
2779                     } else {
2780                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2781                         LAST_OPCODE(op_jnlesseq);
2782                     }
2783                 }
2784             }
2785             Node* condition = addToGraph(CompareLessEq, op1, op2);
2786             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnlesseq)), OpInfo(m_currentIndex + relativeOffset), condition);
2787             LAST_OPCODE(op_jnlesseq);
2788         }
2789
2790         case op_jngreater: {
2791             unsigned relativeOffset = currentInstruction[3].u.operand;
2792             Node* op1 = get(currentInstruction[1].u.operand);
2793             Node* op2 = get(currentInstruction[2].u.operand);
2794             if (canFold(op1) && canFold(op2)) {
2795                 JSValue aValue = valueOfJSConstant(op1);
2796                 JSValue bValue = valueOfJSConstant(op2);
2797                 if (aValue.isNumber() && bValue.isNumber()) {
2798                     double a = aValue.asNumber();
2799                     double b = bValue.asNumber();
2800                     if (a > b) {
2801                         // Emit a placeholder for this bytecode operation but otherwise
2802                         // just fall through.
2803                         addToGraph(Phantom);
2804                         NEXT_OPCODE(op_jngreater);
2805                     } else {
2806                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2807                         LAST_OPCODE(op_jngreater);
2808                     }
2809                 }
2810             }
2811             Node* condition = addToGraph(CompareGreater, op1, op2);
2812             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreater)), OpInfo(m_currentIndex + relativeOffset), condition);
2813             LAST_OPCODE(op_jngreater);
2814         }
2815
2816         case op_jngreatereq: {
2817             unsigned relativeOffset = currentInstruction[3].u.operand;
2818             Node* op1 = get(currentInstruction[1].u.operand);
2819             Node* op2 = get(currentInstruction[2].u.operand);
2820             if (canFold(op1) && canFold(op2)) {
2821                 JSValue aValue = valueOfJSConstant(op1);
2822                 JSValue bValue = valueOfJSConstant(op2);
2823                 if (aValue.isNumber() && bValue.isNumber()) {
2824                     double a = aValue.asNumber();
2825                     double b = bValue.asNumber();
2826                     if (a >= b) {
2827                         // Emit a placeholder for this bytecode operation but otherwise
2828                         // just fall through.
2829                         addToGraph(Phantom);
2830                         NEXT_OPCODE(op_jngreatereq);
2831                     } else {
2832                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2833                         LAST_OPCODE(op_jngreatereq);
2834                     }
2835                 }
2836             }
2837             Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2838             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreatereq)), OpInfo(m_currentIndex + relativeOffset), condition);
2839             LAST_OPCODE(op_jngreatereq);
2840         }
2841             
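        // Each switch opcode builds a SwitchData: the fall-through target comes from the
        // bytecode's default offset, and every reachable jump-table entry that does not land
        // on the fall-through becomes a SwitchCase. A single Switch node then consumes the
        // scrutinee.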
2842         case op_switch_imm: {
2843             SwitchData data;
2844             data.kind = SwitchImm;
2845             data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
2846             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2847             SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
2848             for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
2849                 if (!table.branchOffsets[i])
2850                     continue;
2851                 unsigned target = m_currentIndex + table.branchOffsets[i];
2852                 if (target == data.fallThroughBytecodeIndex())
2853                     continue;
2854                 data.cases.append(SwitchCase::withBytecodeIndex(jsNumber(table.min + i), target));
2855             }
2856             m_graph.m_switchData.append(data);
2857             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(currentInstruction[3].u.operand));
2858             LAST_OPCODE(op_switch_imm);
2859         }
2860             
2861         case op_switch_char: {
2862             SwitchData data;
2863             data.kind = SwitchChar;
2864             data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
2865             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2866             SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
2867             for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
2868                 if (!table.branchOffsets[i])
2869                     continue;
2870                 unsigned target = m_currentIndex + table.branchOffsets[i];
2871                 if (target == data.fallThroughBytecodeIndex())
2872                     continue;
2873                 data.cases.append(
2874                     SwitchCase::withBytecodeIndex(LazyJSValue::singleCharacterString(table.min + i), target));
2875             }
2876             m_graph.m_switchData.append(data);
2877             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(currentInstruction[3].u.operand));
2878             LAST_OPCODE(op_switch_char);
2879         }
2880
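        // op_switch_string follows the same shape, but keys its cases off interned
        // StringImpls via LazyJSValue::knownStringImpl() rather than immediates or single
        // characters.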
2881         case op_switch_string: {
2882             SwitchData data;
2883             data.kind = SwitchString;
2884             data.switchTableIndex = currentInstruction[1].u.operand;
2885             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2886             StringJumpTable& table = m_codeBlock->stringSwitchJumpTable(data.switchTableIndex);
2887             StringJumpTable::StringOffsetTable::iterator iter;
2888             StringJumpTable::StringOffsetTable::iterator end = table.offsetTable.end();
2889             for (iter = table.offsetTable.begin(); iter != end; ++iter) {
2890                 unsigned target = m_currentIndex + iter->value.branchOffset;
2891                 if (target == data.fallThroughBytecodeIndex())
2892                     continue;
2893                 data.cases.append(
2894                     SwitchCase::withBytecodeIndex(LazyJSValue::knownStringImpl(iter->key.get()), target));
2895             }
2896             m_graph.m_switchData.append(data);
2897             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(currentInstruction[3].u.operand));
2898             LAST_OPCODE(op_switch_string);
2899         }
2900
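        // Returns are the interesting terminator when inlining: for an inlined frame the
        // result is stored into the caller's return-value register and the block is flagged
        // for return linking (early-return linking if this is not the last instruction of
        // the inlined code block); only the outermost frame emits an actual Return node.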
2901         case op_ret:
2902             flushArgumentsAndCapturedVariables();
2903             if (inlineCallFrame()) {
2904                 ASSERT(m_inlineStackTop->m_returnValue != InvalidVirtualRegister);
2905                 setDirect(m_inlineStackTop->m_returnValue, get(currentInstruction[1].u.operand));
2906                 m_inlineStackTop->m_didReturn = true;
2907                 if (m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
2908                     // If we're returning from the first block, then we're done parsing.
2909                     ASSERT(m_inlineStackTop->m_callsiteBlockHead == m_graph.lastBlock());
2910                     shouldContinueParsing = false;
2911                     LAST_OPCODE(op_ret);
2912                 } else {
2913                     // If inlining created blocks, and we're doing a return, then we need some
2914                     // special linking.
2915                     ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_block == m_graph.lastBlock());
2916                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsNormalLinking = false;
2917                 }
2918                 if (m_currentIndex + OPCODE_LENGTH(op_ret) != m_inlineStackTop->m_codeBlock->instructions().size() || m_inlineStackTop->m_didEarlyReturn) {
2919                     ASSERT(m_currentIndex + OPCODE_LENGTH(op_ret) <= m_inlineStackTop->m_codeBlock->instructions().size());
2920                     addToGraph(Jump, OpInfo(0));
2921                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsEarlyReturnLinking = true;
2922                     m_inlineStackTop->m_didEarlyReturn = true;
2923                 }
2924                 LAST_OPCODE(op_ret);
2925             }
2926             addToGraph(Return, get(currentInstruction[1].u.operand));
2927             LAST_OPCODE(op_ret);
2928             
2929         case op_end:
2930             flushArgumentsAndCapturedVariables();
2931             ASSERT(!inlineCallFrame());
2932             addToGraph(Return, get(currentInstruction[1].u.operand));
2933             LAST_OPCODE(op_end);
2934
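        // Throws flush arguments and captured variables for the entire inline stack,
        // presumably so the exception machinery sees a consistent view of every frame, and
        // then mark the rest of the block Unreachable.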
2935         case op_throw:
2936             addToGraph(Throw, get(currentInstruction[1].u.operand));
2937             flushAllArgumentsAndCapturedVariablesInInlineStack();
2938             addToGraph(Unreachable);
2939             LAST_OPCODE(op_throw);
2940             
2941         case op_throw_static_error:
2942             addToGraph(ThrowReferenceError);
2943             flushAllArgumentsAndCapturedVariablesInInlineStack();
2944             addToGraph(Unreachable);
2945             LAST_OPCODE(op_throw_static_error);
2946             
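        // Plain calls and constructs go through handleCall(), which is where inlining and
        // intrinsic handling live; see the op_call_varargs comment below for why varargs
        // calls bypass it.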
2947         case op_call:
2948             handleCall(currentInstruction, Call, CodeForCall);
2949             NEXT_OPCODE(op_call);
2950             
2951         case op_construct:
2952             handleCall(currentInstruction, Construct, CodeForConstruct);
2953             NEXT_OPCODE(op_construct);
2954             
2955         case op_call_varargs: {
2956             ASSERT(inlineCallFrame());
2957             ASSERT(currentInstruction[4].u.operand == m_inlineStackTop->m_codeBlock->argumentsRegister());
2958             ASSERT(!m_inlineStackTop->m_codeBlock->symbolTable()->slowArguments());
2959             // It would be cool to funnel this into handleCall() so that it can handle
2960             // inlining. But currently that won't be profitable anyway, since none of the
2961             // uses of call_varargs will be inlineable. So we set this up manually and
2962             // without inline/intrinsic detection.
2963             
2964             SpeculatedType prediction = getPrediction();
2965             
2966             addToGraph(CheckArgumentsNotCreated);
2967             
2968             unsigned argCount = inlineCallFrame()->arguments.size();
2969             if (JSStack::CallFrameHeaderSize + argCount > m_parameterSlots)
2970                 m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
2971             
2972             addVarArgChild(get(currentInstruction[2].u.operand)); // callee
2973             addVarArgChild(get(currentInstruction[3].u.operand)); // this
2974             for (unsigned argument = 1; argument < argCount; ++argument)
2975                 addVarArgChild(get(argumentToOperand(argument)));
2976             
2977             set(currentInstruction[1].u.operand,
2978                 addToGraph(Node::VarArg, Call, OpInfo(0), OpInfo(prediction)));
2979             
2980             NEXT_OPCODE(op_call_varargs);
2981         }
2982             
2983         case op_jneq_ptr:
2984             // Statically speculate for now. It makes sense to let speculate-only jneq_ptr
2985             // support simmer for a while before making it more general, since it's
2986             // already gnarly enough as it is.
2987             ASSERT(pointerIsFunction(currentInstruction[2].u.specialPointer));
2988             addToGraph(
2989                 CheckFunction,
2990                 OpInfo(actualPointerFor(m_inlineStackTop->m_codeBlock, currentInstruction[2].u.specialPointer)),
2991                 get(currentInstruction[1].u.operand));
2992             addToGraph(Jump, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_ptr)));
2993             LAST_OPCODE(op_jneq_ptr);
2994
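        // resolve_scope is folded at parse time: global resolutions become a constant for
        // the global object, closure resolutions walk to the scope at the recorded static
        // depth, and the *WithVarInjectionChecks variants are guarded by the
        // VarInjectionWatchpoint emitted just before the switch below.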
2995         case op_resolve_scope: {
2996             unsigned dst = currentInstruction[1].u.operand;
2997             ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);
2998             unsigned depth = currentInstruction[4].u.operand;
2999
3000             // get_from_scope and put_to_scope depend on this watchpoint forcing OSR exit, so they don't add their own watchpoints.
3001             if (needsVarInjectionChecks(resolveType))
3002                 addToGraph(VarInjectionWatchpoint);
3003
3004             switch (resolveType) {
3005             case GlobalProperty:
3006             case GlobalVar:
3007             case GlobalPropertyWithVarInjectionChecks:
3008             case GlobalVarWithVarInjectionChecks:
3009                 set(dst, cellConstant(m_inlineStackTop->m_codeBlock->globalObject()));
3010                 break;
3011             case ClosureVar:
3012             case ClosureVarWithVarInjectionChecks:
3013                 set(dst, getScope(m_inlineStackTop->m_codeBlock->needsActivation(), depth));
3014                 break;
3015             case Dynamic:
3016                 RELEASE_ASSERT_NOT_REACHED();
3017                 break;
3018             }
3019             NEXT_OPCODE(op_resolve_scope);
3020         }
3021
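        // get_from_scope specializes on the resolve type: GlobalProperty uses GetByIdStatus
        // plus a structure-checked global-object constant (falling back to GetByIdFlush when
        // the status takes the slow path); GlobalVar either loads with GetGlobalVar or, if
        // the variable's watchpoint is still valid, installs a GlobalVarWatchpoint and folds
        // the load to the current value; and ClosureVar loads out of the closure's register
        // storage.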
3022         case op_get_from_scope: {
3023             unsigned dst = currentInstruction[1].u.operand;
3024             unsigned scope = currentInstruction[2].u.operand;
3025             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
3026             StringImpl* uid = m_graph.identifiers()[identifierNumber];
3027             ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
3028
3029             Structure* structure;
3030             uintptr_t operand;
3031             {
3032                 ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
3033                 structure = currentInstruction[5].u.structure.get();
3034                 operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
3035             }
3036
3037             SpeculatedType prediction = getPrediction();
3038             JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
3039
3040             switch (resolveType) {
3041             case GlobalProperty:
3042             case GlobalPropertyWithVarInjectionChecks: {
3043                 GetByIdStatus status = GetByIdStatus::computeFor(*m_vm, structure, uid);
3044                 if (status.takesSlowPath()) {
3045                     set(dst, addToGraph(GetByIdFlush, OpInfo(identifierNumber), OpInfo(prediction), get(scope)));
3046                     break;
3047                 }
3048                 Node* base = cellConstantWithStructureCheck(globalObject, status.structureSet().singletonStructure());
3049                 if (JSValue specificValue = status.specificValue())
3050                     set(dst, cellConstant(specificValue.asCell()));
3051                 else
3052                     set(dst, handleGetByOffset(prediction, base, identifierNumber, operand));
3053                 break;
3054             }
3055             case GlobalVar:
3056             case GlobalVarWithVarInjectionChecks: {
3057                 SymbolTableEntry entry = globalObject->symbolTable()->get(uid);
3058                 if (!entry.couldBeWatched() || !m_graph.watchpoints().isStillValid(entry.watchpointSet())) {
3059                     set(dst, addToGraph(GetGlobalVar, OpInfo(operand), OpInfo(prediction)));
3060                     break;
3061                 }
3062
3063                 addToGraph(GlobalVarWatchpoint, OpInfo(operand), OpInfo(identifierNumber));
3064                 JSValue specificValue = globalObject->registerAt(entry.getIndex()).get();
3065                 set(dst, cellConstant(specificValue.asCell()));
3066                 break;
3067             }
3068             case ClosureVar:
3069             case ClosureVarWithVarInjectionChecks:
3070                 set(dst, 
3071                     addToGraph(GetClosureVar, OpInfo(operand), OpInfo(prediction), 
3072                         addToGraph(GetClosureRegisters, get(scope))));
3073                 break;
3074             case Dynamic:
3075                 RELEASE_ASSERT_NOT_REACHED();
3076                 break;
3077             }
3078             NEXT_OPCODE(op_get_from_scope);
3079         }
3080