/*
 * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGByteCodeParser.h"

#if ENABLE(DFG_JIT)

#include "ArrayConstructor.h"
#include "CallLinkStatus.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGArrayMode.h"
#include "DFGCapabilities.h"
#include "DFGJITCode.h"
#include "GetByIdStatus.h"
#include "Operations.h"
#include "PreciseJumpTargets.h"
#include "PutByIdStatus.h"
#include "StringConstructor.h"
#include <wtf/CommaPrinter.h>
#include <wtf/HashMap.h>
#include <wtf/MathExtras.h>
#include <wtf/StdLibExtras.h>

namespace JSC { namespace DFG {

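// A ConstantBufferKey identifies a constant buffer by the CodeBlock that owns it and
// the buffer's index within that block. It serves as the key of m_constantBufferCache
// below, so constant buffers referenced from inlined code blocks can be looked up in
// the machine code block without duplicating entries.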
class ConstantBufferKey {
public:
    ConstantBufferKey()
        : m_codeBlock(0)
        , m_index(0)
    {
    }
    
    ConstantBufferKey(WTF::HashTableDeletedValueType)
        : m_codeBlock(0)
        , m_index(1)
    {
    }
    
    ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
        : m_codeBlock(codeBlock)
        , m_index(index)
    {
    }
    
    bool operator==(const ConstantBufferKey& other) const
    {
        return m_codeBlock == other.m_codeBlock
            && m_index == other.m_index;
    }
    
    unsigned hash() const
    {
        return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
    }
    
    bool isHashTableDeletedValue() const
    {
        return !m_codeBlock && m_index;
    }
    
    CodeBlock* codeBlock() const { return m_codeBlock; }
    unsigned index() const { return m_index; }
    
private:
    CodeBlock* m_codeBlock;
    unsigned m_index;
};

struct ConstantBufferKeyHash {
    static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
    static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
    {
        return a == b;
    }
    
    static const bool safeToCompareToEmptyOrDeleted = true;
};

} } // namespace JSC::DFG

namespace WTF {

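// Teach WTF's hash tables how to hash and compare ConstantBufferKey so that it can be
// used as a HashMap key.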
template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
    typedef JSC::DFG::ConstantBufferKeyHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };

} // namespace WTF

namespace JSC { namespace DFG {

// === ByteCodeParser ===
//
// This class is used to compile the dataflow graph from a CodeBlock.
class ByteCodeParser {
public:
    ByteCodeParser(Graph& graph)
        : m_vm(&graph.m_vm)
        , m_codeBlock(graph.m_codeBlock)
        , m_profiledBlock(graph.m_profiledBlock)
        , m_graph(graph)
        , m_currentBlock(0)
        , m_currentIndex(0)
        , m_constantUndefined(UINT_MAX)
        , m_constantNull(UINT_MAX)
        , m_constantNaN(UINT_MAX)
        , m_constant1(UINT_MAX)
        , m_constants(m_codeBlock->numberOfConstantRegisters())
        , m_numArguments(m_codeBlock->numParameters())
        , m_numLocals(m_codeBlock->m_numCalleeRegisters)
        , m_preservedVars(m_codeBlock->m_numVars)
        , m_parameterSlots(0)
        , m_numPassedVarArgs(0)
        , m_inlineStackTop(0)
        , m_haveBuiltOperandMaps(false)
        , m_emptyJSValueIndex(UINT_MAX)
        , m_currentInstruction(0)
    {
        ASSERT(m_profiledBlock);
        
        for (int i = 0; i < m_codeBlock->m_numVars; ++i)
            m_preservedVars.set(i);
    }
    
    // Parse a full CodeBlock of bytecode.
    bool parse();
    
private:
    struct InlineStackEntry;

    // Just parse from m_currentIndex to the end of the current CodeBlock.
    void parseCodeBlock();

    // Helper for min and max.
    bool handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);
    
    // Handle calls. This resolves issues surrounding inlining and intrinsics.
    void handleCall(Instruction* currentInstruction, NodeType op, CodeSpecializationKind);
    void emitFunctionChecks(const CallLinkStatus&, Node* callTarget, int registerOffset, CodeSpecializationKind);
    void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind);
    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
    bool handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
    bool handleIntrinsic(int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
    bool handleTypedArrayConstructor(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, TypedArrayType);
    bool handleConstantInternalFunction(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
    Node* handlePutByOffset(Node* base, unsigned identifier, PropertyOffset, Node* value);
    Node* handleGetByOffset(SpeculatedType, Node* base, unsigned identifierNumber, PropertyOffset);
    void handleGetByOffset(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        PropertyOffset);
    void handleGetById(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        const GetByIdStatus&);

    Node* getScope(bool skipTop, unsigned skipCount);
    
    // Prepare to parse a block.
    void prepareToParseBlock();
    // Parse a single basic block of bytecode instructions.
    bool parseBlock(unsigned limit);
    // Link block successors.
    void linkBlock(BasicBlock*, Vector<BasicBlock*>& possibleTargets);
    void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets);
    
    VariableAccessData* newVariableAccessData(VirtualRegister operand, bool isCaptured)
    {
        ASSERT(!operand.isConstant());
        
        m_graph.m_variableAccessData.append(VariableAccessData(operand, isCaptured));
        return &m_graph.m_variableAccessData.last();
    }
    
    // Get/Set the operands/result of a bytecode instruction.
    Node* getDirect(VirtualRegister operand)
    {
        // Is this a constant?
        if (operand.isConstant()) {
            unsigned constant = operand.toConstantIndex();
            ASSERT(constant < m_constants.size());
            return getJSConstant(constant);
        }

        ASSERT(operand.offset() != JSStack::Callee);
        
        // Is this an argument?
        if (operand.isArgument())
            return getArgument(operand);

        // Must be a local.
        return getLocal(operand);
    }

    Node* get(VirtualRegister operand)
    {
        if (operand.offset() == JSStack::Callee) {
            if (inlineCallFrame() && inlineCallFrame()->callee)
                return cellConstant(inlineCallFrame()->callee.get());
            
            return getCallee();
        }
        
        return getDirect(m_inlineStackTop->remapOperand(operand));
    }
    
    enum SetMode { NormalSet, SetOnEntry };
    void setDirect(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        // Is this an argument?
        if (operand.isArgument()) {
            setArgument(operand, value, setMode);
            return;
        }

        // Must be a local.
        setLocal(operand, value, setMode);
    }

    void set(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
    }
    
    Node* injectLazyOperandSpeculation(Node* node)
    {
        ASSERT(node->op() == GetLocal);
        ASSERT(node->codeOrigin.bytecodeIndex == m_currentIndex);
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        LazyOperandValueProfileKey key(m_currentIndex, node->local());
        SpeculatedType prediction = m_inlineStackTop->m_lazyOperands.prediction(locker, key);
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLog("Lazy operand [@", node->index(), ", bc#", m_currentIndex, ", r", node->local(), "] prediction: ", SpeculationDump(prediction), "\n");
#endif
        node->variableAccessData()->predict(prediction);
        return node;
    }

    // Used in implementing get/set, above, where the operand is a local variable.
    Node* getLocal(VirtualRegister operand)
    {
        unsigned local = operand.toLocal();
        Node* node = m_currentBlock->variablesAtTail.local(local);
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        // This has two goals: 1) link together variable access datas, and 2)
        // try to avoid creating redundant GetLocals. (1) is required for
        // correctness - no other phase will ensure that block-local variable
        // access data unification is done correctly. (2) is purely opportunistic
        // and is meant as a compile-time optimization only.
        
        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
            
            if (!isCaptured) {
                switch (node->op()) {
                case GetLocal:
                    return node;
                case SetLocal:
                    return node->child1().node();
                default:
                    break;
                }
            }
        } else {
            m_preservedVars.set(local);
            variable = newVariableAccessData(operand, isCaptured);
        }
        
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.local(local) = node;
        return node;
    }

    void setLocal(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned local = operand.toLocal();
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        if (setMode == NormalSet) {
            ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
            if (isCaptured || argumentPosition)
                flushDirect(operand, argumentPosition);
        }

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.local(local) = node;
    }

    // Used in implementing get/set, above, where the operand is an argument.
    Node* getArgument(VirtualRegister operand)
    {
        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);
        
        Node* node = m_currentBlock->variablesAtTail.argument(argument);
        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
            
            switch (node->op()) {
            case GetLocal:
                return node;
            case SetLocal:
                return node->child1().node();
            default:
                break;
            }
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }
    void setArgument(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);
        
        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);

        // Always flush arguments, except for 'this'. If 'this' is created by us,
        // then make sure that it's never unboxed.
        if (argument) {
            if (setMode == NormalSet)
                flushDirect(operand);
        } else if (m_codeBlock->specializationKind() == CodeForConstruct)
            variableAccessData->mergeShouldNeverUnbox(true);
        
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.argument(argument) = node;
    }
    
    ArgumentPosition* findArgumentPositionForArgument(int argument)
    {
        InlineStackEntry* stack = m_inlineStackTop;
        while (stack->m_inlineCallFrame)
            stack = stack->m_caller;
        return stack->m_argumentPositions[argument];
    }
    
    ArgumentPosition* findArgumentPositionForLocal(VirtualRegister operand)
    {
        for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
            InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
            if (!inlineCallFrame)
                break;
            if (operand.offset() <= static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize))
                continue;
            if (operand.offset() == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
                continue;
            if (operand.offset() > static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize + inlineCallFrame->arguments.size()))
                continue;
            int argument = VirtualRegister(operand.offset() - inlineCallFrame->stackOffset).toArgument();
            return stack->m_argumentPositions[argument];
        }
        return 0;
    }
    
    ArgumentPosition* findArgumentPosition(VirtualRegister operand)
    {
        if (operand.isArgument())
            return findArgumentPositionForArgument(operand.toArgument());
        return findArgumentPositionForLocal(operand);
    }

    void addConstant(JSValue value)
    {
        unsigned constantIndex = m_codeBlock->addConstantLazily();
        initializeLazyWriteBarrierForConstant(
            m_graph.m_plan.writeBarriers,
            m_codeBlock->constants()[constantIndex],
            m_codeBlock,
            constantIndex,
            m_codeBlock->ownerExecutable(),
            value);
    }
    
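    // Flushing a variable plants a Flush node for it. This keeps the variable's current
    // value observable from outside the DFG (for example across OSR exit or by an
    // inlined caller reading the stack slot), so stores to it are not treated as dead.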
    void flush(VirtualRegister operand)
    {
        flushDirect(m_inlineStackTop->remapOperand(operand));
    }
    
    void flushDirect(VirtualRegister operand)
    {
        flushDirect(operand, findArgumentPosition(operand));
    }
    
    void flushDirect(VirtualRegister operand, ArgumentPosition* argumentPosition)
    {
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        ASSERT(!operand.isConstant());
        
        if (operand.isLocal())
            m_preservedVars.set(operand.toLocal());
        
        Node* node = m_currentBlock->variablesAtTail.operand(operand);
        
        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = addToGraph(Flush, OpInfo(variable));
        m_currentBlock->variablesAtTail.operand(operand) = node;
        if (argumentPosition)
            argumentPosition->addVariable(variable);
    }

    void flush(InlineStackEntry* inlineStackEntry)
    {
        int numArguments;
        if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame)
            numArguments = inlineCallFrame->arguments.size();
        else
            numArguments = inlineStackEntry->m_codeBlock->numParameters();
        for (unsigned argument = numArguments; argument-- > 1;)
            flushDirect(inlineStackEntry->remapOperand(virtualRegisterForArgument(argument)));
        for (int local = 0; local < inlineStackEntry->m_codeBlock->m_numVars; ++local) {
            if (!inlineStackEntry->m_codeBlock->isCaptured(virtualRegisterForLocal(local)))
                continue;
            flushDirect(inlineStackEntry->remapOperand(virtualRegisterForLocal(local)));
        }
    }

    void flushAllArgumentsAndCapturedVariablesInInlineStack()
    {
        for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
            flush(inlineStackEntry);
    }

    void flushArgumentsAndCapturedVariables()
    {
        flush(m_inlineStackTop);
    }

    // Get an operand, and perform a ToInt32/ToNumber conversion on it.
    Node* getToInt32(int operand)
    {
        return toInt32(get(VirtualRegister(operand)));
    }

    // Perform an ES5 ToInt32 operation - returns a node of type NodeResultInt32.
    Node* toInt32(Node* node)
    {
        if (node->hasInt32Result())
            return node;

        if (node->op() == UInt32ToNumber)
            return node->child1().node();

        // Check for numeric constants boxed as JSValues.
        if (canFold(node)) {
            JSValue v = valueOfJSConstant(node);
            if (v.isInt32())
                return getJSConstant(node->constantNumber());
            if (v.isNumber())
                return getJSConstantForValue(JSValue(JSC::toInt32(v.asNumber())));
        }

        return addToGraph(ValueToInt32, node);
    }

    // NOTE: Only use this to construct constants that arise from non-speculative
    // constant folding. I.e. creating constants using this if we had constant
    // field inference would be a bad idea, since the bytecode parser's folding
    // doesn't handle liveness preservation.
    Node* getJSConstantForValue(JSValue constantValue)
    {
        unsigned constantIndex;
        if (!m_codeBlock->findConstant(constantValue, constantIndex)) {
            addConstant(constantValue);
            m_constants.append(ConstantRecord());
        }
        
        ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        
        return getJSConstant(constantIndex);
    }

    Node* getJSConstant(unsigned constant)
    {
        Node* node = m_constants[constant].asJSValue;
        if (node)
            return node;

        Node* result = addToGraph(JSConstant, OpInfo(constant));
        m_constants[constant].asJSValue = result;
        return result;
    }

    Node* getCallee()
    {
        return addToGraph(GetCallee);
    }

    // Helper functions to get/set the this value.
    Node* getThis()
    {
        return get(m_inlineStackTop->m_codeBlock->thisRegister());
    }

    void setThis(Node* value)
    {
        set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
    }

    // Convenience methods for checking nodes for constants.
    bool isJSConstant(Node* node)
    {
        return node->op() == JSConstant;
    }
    bool isInt32Constant(Node* node)
    {
        return isJSConstant(node) && valueOfJSConstant(node).isInt32();
    }
    // Convenience methods for getting constant values.
    JSValue valueOfJSConstant(Node* node)
    {
        ASSERT(isJSConstant(node));
        return m_codeBlock->getConstant(FirstConstantRegisterIndex + node->constantNumber());
    }
    int32_t valueOfInt32Constant(Node* node)
    {
        ASSERT(isInt32Constant(node));
        return valueOfJSConstant(node).asInt32();
    }
    
    // This method returns a JSConstant with the value 'undefined'.
    Node* constantUndefined()
    {
        // Has m_constantUndefined been set up yet?
        if (m_constantUndefined == UINT_MAX) {
            // Search the constant pool for undefined, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
                if (testMe.isUndefined())
                    return getJSConstant(m_constantUndefined);
            }

            // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsUndefined());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
        return getJSConstant(m_constantUndefined);
    }

    // This method returns a JSConstant with the value 'null'.
    Node* constantNull()
    {
        // Has m_constantNull been set up yet?
        if (m_constantNull == UINT_MAX) {
            // Search the constant pool for null, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
                if (testMe.isNull())
                    return getJSConstant(m_constantNull);
            }

            // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsNull());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
        return getJSConstant(m_constantNull);
    }

    // This method returns a DoubleConstant with the value 1.
    Node* one()
    {
        // Has m_constant1 been set up yet?
        if (m_constant1 == UINT_MAX) {
            // Search the constant pool for the value 1, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
                if (testMe.isInt32() && testMe.asInt32() == 1)
                    return getJSConstant(m_constant1);
            }

            // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsNumber(1));
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
        return getJSConstant(m_constant1);
    }
    
    // This method returns a DoubleConstant with the value NaN.
    Node* constantNaN()
    {
        JSValue nan = jsNaN();
        
        // Has m_constantNaN been set up yet?
        if (m_constantNaN == UINT_MAX) {
            // Search the constant pool for the value NaN, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
                if (JSValue::encode(testMe) == JSValue::encode(nan))
                    return getJSConstant(m_constantNaN);
            }

            // Add the value nan to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(nan);
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value nan.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
        ASSERT(std::isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
        return getJSConstant(m_constantNaN);
    }
    
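    // Returns a WeakJSConstant node for the given cell, memoized in m_cellConstantNodes
    // so that the same cell is always represented by the same node.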
    Node* cellConstant(JSCell* cell)
    {
        HashMap<JSCell*, Node*>::AddResult result = m_cellConstantNodes.add(cell, nullptr);
        if (result.isNewEntry)
            result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));
        
        return result.iterator->value;
    }
    
    InlineCallFrame* inlineCallFrame()
    {
        return m_inlineStackTop->m_inlineCallFrame;
    }

    CodeOrigin currentCodeOrigin()
    {
        return CodeOrigin(m_currentIndex, inlineCallFrame());
    }
    
    bool canFold(Node* node)
    {
        return node->isStronglyProvedConstantIn(inlineCallFrame());
    }

    // Our codegen for constant strict equality performs a bitwise comparison,
    // so we can only select values that have a consistent bitwise identity.
    bool isConstantForCompareStrictEq(Node* node)
    {
        if (!node->isConstant())
            return false;
        JSValue value = valueOfJSConstant(node);
        return value.isBoolean() || value.isUndefinedOrNull();
    }
    
    Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), child1, child2, child3);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info, Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info1, info2,
            Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    
    Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
    {
        Node* result = m_graph.addNode(
            SpecNone, Node::VarArg, op, currentCodeOrigin(), info1, info2,
            m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        
        m_numPassedVarArgs = 0;
        
        return result;
    }

    void addVarArgChild(Node* child)
    {
        m_graph.m_varArgChildren.append(Edge(child));
        m_numPassedVarArgs++;
    }
    
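    // Builds a var-arg Call or Construct node for the current call-like instruction:
    // the callee and arguments become var-arg children, the profiled result type becomes
    // the node's prediction, and the result is stored into the destination operand.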
    Node* addCall(Instruction* currentInstruction, NodeType op)
    {
        SpeculatedType prediction = getPrediction();
        
        addVarArgChild(get(VirtualRegister(currentInstruction[2].u.operand)));
        int argCount = currentInstruction[3].u.operand;
        if (JSStack::CallFrameHeaderSize + (unsigned)argCount > m_parameterSlots)
            m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;

        int registerOffset = -currentInstruction[4].u.operand;
        int dummyThisArgument = op == Call ? 0 : 1;
        for (int i = 0 + dummyThisArgument; i < argCount; ++i)
            addVarArgChild(get(virtualRegisterForArgument(i, registerOffset)));

        Node* call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
        set(VirtualRegister(currentInstruction[1].u.operand), call);
        return call;
    }
    
    Node* cellConstantWithStructureCheck(JSCell* object, Structure* structure)
    {
        Node* objectNode = cellConstant(object);
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
        return objectNode;
    }
    
    Node* cellConstantWithStructureCheck(JSCell* object)
    {
        return cellConstantWithStructureCheck(object, object->structure());
    }

    SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(locker, bytecodeIndex);
    }

    SpeculatedType getPrediction(unsigned bytecodeIndex)
    {
        SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);
        
        if (prediction == SpecNone) {
            // We have no information about what values this node generates. Give up
            // on executing this code, since we're likely to do more damage than good.
            addToGraph(ForceOSRExit);
        }
        
        return prediction;
    }
    
    SpeculatedType getPredictionWithoutOSRExit()
    {
        return getPredictionWithoutOSRExit(m_currentIndex);
    }
    
    SpeculatedType getPrediction()
    {
        return getPrediction(m_currentIndex);
    }
    
    ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
        return ArrayMode::fromObserved(locker, profile, action, false);
    }
    
    ArrayMode getArrayMode(ArrayProfile* profile)
    {
        return getArrayMode(profile, Array::Read);
    }
    
    ArrayMode getArrayModeConsideringSlowPath(ArrayProfile* profile, Array::Action action)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        
        profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
        
#if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
        if (m_inlineStackTop->m_profiledBlock->numberOfRareCaseProfiles())
            dataLogF("Slow case profile for bc#%u: %u\n", m_currentIndex, m_inlineStackTop->m_profiledBlock->rareCaseProfileForBytecodeOffset(m_currentIndex)->m_counter);
        dataLogF("Array profile for bc#%u: %u %s%s\n", m_currentIndex, profile->observedArrayModes(locker), profile->structureIsPolymorphic(locker) ? " (polymorphic)" : "", profile->mayInterceptIndexedAccesses(locker) ? " (may intercept)" : "");
#endif
        
        bool makeSafe =
            m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
            || profile->outOfBounds(locker);
        
        ArrayMode result = ArrayMode::fromObserved(locker, profile, action, makeSafe);
        
        return result;
    }
    
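    // Conservatively flags an arithmetic node when value profiling or prior OSR exits
    // suggest its fast path cannot be trusted, marking it as possibly overflowing and/or
    // possibly producing negative zero so later phases emit the safer code path.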
    Node* makeSafe(Node* node)
    {
        bool likelyToTakeSlowCase;
        if (!isX86() && node->op() == ArithMod)
            likelyToTakeSlowCase = false;
        else
            likelyToTakeSlowCase = m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex);
        
        if (!likelyToTakeSlowCase
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            return node;
        
        switch (node->op()) {
        case UInt32ToNumber:
        case ArithAdd:
        case ArithSub:
        case ValueAdd:
        case ArithMod: // for ArithMod "MayOverflow" means we tried to divide by zero, or we saw double.
            node->mergeFlags(NodeMayOverflow);
            break;
            
        case ArithNegate:
            // Currently we can't tell the difference between a negation overflowing
            // (i.e. -(1 << 31)) and generating negative zero (i.e. -0). If it took the
            // slow path then we assume that it did both of those things.
            node->mergeFlags(NodeMayOverflow);
            node->mergeFlags(NodeMayNegZero);
            break;

        case ArithMul:
            if (m_inlineStackTop->m_profiledBlock->likelyToTakeDeepestSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)) {
#if DFG_ENABLE(DEBUG_VERBOSE)
                dataLogF("Making ArithMul @%u take deepest slow case.\n", node->index());
#endif
                node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
            } else if (m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
                       || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero)) {
#if DFG_ENABLE(DEBUG_VERBOSE)
                dataLogF("Making ArithMul @%u take faster slow case.\n", node->index());
#endif
                node->mergeFlags(NodeMayNegZero);
            }
            break;
            
        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
        
        return node;
    }
    
    Node* makeDivSafe(Node* node)
    {
        ASSERT(node->op() == ArithDiv);
        
        // The main slow case counter for op_div in the old JIT counts only when
        // the operands are not numbers. We don't care about that since we already
        // have speculations in place that take care of that separately. We only
        // care about when the outcome of the division is not an integer, which
        // is what the special fast case counter tells us.
        
        if (!m_inlineStackTop->m_profiledBlock->couldTakeSpecialFastCase(m_currentIndex)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            return node;
        
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLogF("Making %s @%u safe at bc#%u because special fast-case counter is at %u and exit profiles say %d, %d\n", Graph::opName(node->op()), node->index(), m_currentIndex, m_inlineStackTop->m_profiledBlock->specialFastCaseProfileForBytecodeOffset(m_currentIndex)->m_counter, m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow), m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero));
#endif
        
        // FIXME: It might be possible to make this more granular. The DFG certainly can
        // distinguish between negative zero and overflow in its exit profiles.
        node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
        
        return node;
    }
    
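    // For a non-direct put transition, verifies that the prototype chain recorded in the
    // StructureChain still matches the structures reachable from the old structure's
    // prototype. If it does not, the cached transition can no longer be relied upon.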
    bool structureChainIsStillValid(bool direct, Structure* previousStructure, StructureChain* chain)
    {
        if (direct)
            return true;
        
        if (!previousStructure->storedPrototype().isNull() && previousStructure->storedPrototype().asCell()->structure() != chain->head()->get())
            return false;
        
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
            if (!(*it)->storedPrototype().isNull() && (*it)->storedPrototype().asCell()->structure() != it[1].get())
                return false;
        }
        
        return true;
    }
    
    void buildOperandMapsIfNecessary();
    
    VM* m_vm;
    CodeBlock* m_codeBlock;
    CodeBlock* m_profiledBlock;
    Graph& m_graph;

    // The current block being generated.
    BasicBlock* m_currentBlock;
    // The bytecode index of the current instruction being generated.
    unsigned m_currentIndex;

    // We use these values during code generation, and to avoid the need for
    // special handling we make sure they are available as constants in the
    // CodeBlock's constant pool. These variables are initialized to
    // UINT_MAX, and lazily updated to hold an index into the CodeBlock's
    // constant pool, as necessary.
    unsigned m_constantUndefined;
    unsigned m_constantNull;
    unsigned m_constantNaN;
    unsigned m_constant1;
    HashMap<JSCell*, unsigned> m_cellConstants;
    HashMap<JSCell*, Node*> m_cellConstantNodes;

    // A constant in the constant pool may be represented by more than one
    // node in the graph, depending on the context in which it is being used.
    struct ConstantRecord {
        ConstantRecord()
            : asInt32(0)
            , asNumeric(0)
            , asJSValue(0)
        {
        }

        Node* asInt32;
        Node* asNumeric;
        Node* asJSValue;
    };

    // Track the index of the node whose result is the current value for every
    // register value in the bytecode - argument, local, and temporary.
    Vector<ConstantRecord, 16> m_constants;

    // The number of arguments passed to the function.
    unsigned m_numArguments;
    // The number of locals (vars + temporaries) used in the function.
    unsigned m_numLocals;
    // The set of registers we need to preserve across BasicBlock boundaries;
    // typically equal to the set of vars, but we expand this to cover all
    // temporaries that persist across blocks (due to ?:, &&, ||, etc.).
    BitVector m_preservedVars;
    // The number of slots (in units of sizeof(Register)) that we need to
    // preallocate for calls emanating from this frame. This includes the
    // size of the CallFrame, only if this is not a leaf function.  (I.e.
    // this is 0 if and only if this function is a leaf.)
    unsigned m_parameterSlots;
    // The number of var args passed to the next var arg node.
    unsigned m_numPassedVarArgs;

    HashMap<ConstantBufferKey, unsigned> m_constantBufferCache;
    
    struct InlineStackEntry {
        ByteCodeParser* m_byteCodeParser;
        
        CodeBlock* m_codeBlock;
        CodeBlock* m_profiledBlock;
        InlineCallFrame* m_inlineCallFrame;
        
        ScriptExecutable* executable() { return m_codeBlock->ownerExecutable(); }
        
        QueryableExitProfile m_exitProfile;
        
        // Remapping of identifier and constant numbers from the code block being
        // inlined (inline callee) to the code block that we're inlining into
        // (the machine code block, which is the transitive, though not necessarily
        // direct, caller).
        Vector<unsigned> m_identifierRemap;
        Vector<unsigned> m_constantRemap;
        Vector<unsigned> m_constantBufferRemap;
        Vector<unsigned> m_switchRemap;
        
        // Blocks introduced by this code block, which need successor linking.
        // May include up to one basic block that includes the continuation after
        // the callsite in the caller. These must be appended in the order that they
        // are created, but their bytecodeBegin values need not be in order as they
        // are ignored.
        Vector<UnlinkedBlock> m_unlinkedBlocks;
        
        // Potential block linking targets. Must be sorted by bytecodeBegin, and
        // cannot have two blocks that have the same bytecodeBegin. For this very
        // reason, this is not equivalent to 
        Vector<BasicBlock*> m_blockLinkingTargets;
        
        // If the callsite's basic block was split into two, then this will be
        // the head of the callsite block. It needs its successors linked to the
        // m_unlinkedBlocks, but not the other way around: there's no way for
        // any blocks in m_unlinkedBlocks to jump back into this block.
        BasicBlock* m_callsiteBlockHead;
        
        // Does the callsite block head need linking? This is typically true
        // but will be false for the machine code block's inline stack entry
        // (since that one is not inlined) and for cases where an inline callee
        // did the linking for us.
        bool m_callsiteBlockHeadNeedsLinking;
        
        VirtualRegister m_returnValue;
        
        // Speculations about variable types collected from the profiled code block,
        // which are based on OSR exit profiles that past DFG compilations of this
        // code block had gathered.
        LazyOperandValueProfileParser m_lazyOperands;
        
        // Did we see any returns? We need to handle the (uncommon but necessary)
        // case where a procedure that does not return was inlined.
        bool m_didReturn;
        
        // Did we have any early returns?
        bool m_didEarlyReturn;
        
        // Pointers to the argument position trackers for this slice of code.
        Vector<ArgumentPosition*> m_argumentPositions;
        
        InlineStackEntry* m_caller;
        
        InlineStackEntry(
            ByteCodeParser*,
            CodeBlock*,
            CodeBlock* profiledBlock,
            BasicBlock* callsiteBlockHead,
            JSFunction* callee, // Null if this is a closure call.
            VirtualRegister returnValueVR,
            VirtualRegister inlineCallFrameStart,
            int argumentCountIncludingThis,
            CodeSpecializationKind);
        
        ~InlineStackEntry()
        {
            m_byteCodeParser->m_inlineStackTop = m_caller;
        }
        
        VirtualRegister remapOperand(VirtualRegister operand) const
        {
            if (!m_inlineCallFrame)
                return operand;
            
            if (operand.isConstant()) {
                VirtualRegister result = VirtualRegister(m_constantRemap[operand.toConstantIndex()]);
                ASSERT(result.isConstant());
                return result;
            }

            ASSERT(operand.offset() != JSStack::Callee);

            return VirtualRegister(operand.offset() + m_inlineCallFrame->stackOffset);
        }
    };
    
    InlineStackEntry* m_inlineStackTop;

    // Have we built operand maps? We initialize them lazily, and only when doing
    // inlining.
    bool m_haveBuiltOperandMaps;
    // Mapping between identifier names and numbers.
    BorrowedIdentifierMap m_identifierMap;
    // Mapping between values and constant numbers.
    JSValueMap m_jsValueMap;
    // Index of the empty value, or UINT_MAX if there is no mapping. This is a horrible
    // work-around for the fact that JSValueMap can't handle "empty" values.
    unsigned m_emptyJSValueIndex;
    
    Instruction* m_currentInstruction;
};

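// These macros advance m_currentIndex past the opcode that was just handled.
// NEXT_OPCODE continues the parse loop with the following instruction; LAST_OPCODE is
// used for block-terminating opcodes and returns from parseBlock, indicating whether
// parsing should continue.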
#define NEXT_OPCODE(name) \
    m_currentIndex += OPCODE_LENGTH(name); \
    continue

#define LAST_OPCODE(name) \
    m_currentIndex += OPCODE_LENGTH(name); \
    return shouldContinueParsing


void ByteCodeParser::handleCall(Instruction* currentInstruction, NodeType op, CodeSpecializationKind kind)
{
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    
    Node* callTarget = get(VirtualRegister(currentInstruction[2].u.operand));
    
    CallLinkStatus callLinkStatus;

    if (m_graph.isConstant(callTarget))
        callLinkStatus = CallLinkStatus(m_graph.valueOfJSConstant(callTarget)).setIsProved(true);
    else {
        callLinkStatus = CallLinkStatus::computeFor(m_inlineStackTop->m_profiledBlock, m_currentIndex);
        callLinkStatus.setHasBadFunctionExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadFunction));
        callLinkStatus.setHasBadCacheExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        callLinkStatus.setHasBadExecutableExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadExecutable));
    }
    
#if DFG_ENABLE(DEBUG_VERBOSE)
    dataLog("For call at bc#", m_currentIndex, ": ", callLinkStatus, "\n");
#endif
    
    if (!callLinkStatus.canOptimize()) {
        // Oddly, this conflates calls that haven't executed with calls that behaved sufficiently polymorphically
        // that we cannot optimize them.
        
        addCall(currentInstruction, op);
        return;
    }
    
    int argumentCountIncludingThis = currentInstruction[3].u.operand;
    int registerOffset = -currentInstruction[4].u.operand;

    int resultOperand = currentInstruction[1].u.operand;
    unsigned nextOffset = m_currentIndex + OPCODE_LENGTH(op_call);
    SpeculatedType prediction = getPrediction();

    if (InternalFunction* function = callLinkStatus.internalFunction()) {
        if (handleConstantInternalFunction(resultOperand, function, registerOffset, argumentCountIncludingThis, prediction, kind)) {
            // This phantoming has to be *after* the code for the intrinsic, to signify that
            // the inputs must be kept alive whatever exits the intrinsic may do.
            addToGraph(Phantom, callTarget);
            emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
            return;
        }
        
        // Can only handle this using the generic call handler.
        addCall(currentInstruction, op);
        return;
    }
        
    Intrinsic intrinsic = callLinkStatus.intrinsicFor(kind);
    if (intrinsic != NoIntrinsic) {
        emitFunctionChecks(callLinkStatus, callTarget, registerOffset, kind);
            
        if (handleIntrinsic(resultOperand, intrinsic, registerOffset, argumentCountIncludingThis, prediction)) {
            // This phantoming has to be *after* the code for the intrinsic, to signify that
            // the inputs must be kept alive whatever exits the intrinsic may do.
            addToGraph(Phantom, callTarget);
            emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
            if (m_graph.compilation())
                m_graph.compilation()->noticeInlinedCall();
            return;
        }
    } else if (handleInlining(callTarget, resultOperand, callLinkStatus, registerOffset, argumentCountIncludingThis, nextOffset, kind)) {
        if (m_graph.compilation())
            m_graph.compilation()->noticeInlinedCall();
        return;
    }
    
    addCall(currentInstruction, op);
}

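// Emits the checks that guarantee the runtime call target matches what the
// CallLinkStatus observed: a CheckFunction against a known JSFunction, or a
// CheckStructure plus CheckExecutable for a closure call. If the status is already
// proved, only Phantom uses of the callee and 'this' are planted.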
void ByteCodeParser::emitFunctionChecks(const CallLinkStatus& callLinkStatus, Node* callTarget, int registerOffset, CodeSpecializationKind kind)
{
    Node* thisArgument;
    if (kind == CodeForCall)
        thisArgument = get(virtualRegisterForArgument(0, registerOffset));
    else
        thisArgument = 0;

    if (callLinkStatus.isProved()) {
        addToGraph(Phantom, callTarget, thisArgument);
        return;
    }
    
    ASSERT(callLinkStatus.canOptimize());
    
    if (JSFunction* function = callLinkStatus.function())
        addToGraph(CheckFunction, OpInfo(function), callTarget, thisArgument);
    else {
        ASSERT(callLinkStatus.structure());
        ASSERT(callLinkStatus.executable());
        
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(callLinkStatus.structure())), callTarget);
        addToGraph(CheckExecutable, OpInfo(callLinkStatus.executable()), callTarget, thisArgument);
    }
}

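// Keeps the arguments of an inlined intrinsic or internal function alive by planting
// Phantom uses of them; for construct calls the 'this' slot is skipped.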
void ByteCodeParser::emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind kind)
{
    for (int i = kind == CodeForCall ? 0 : 1; i < argumentCountIncludingThis; ++i)
        addToGraph(Phantom, get(virtualRegisterForArgument(i, registerOffset)));
}

bool ByteCodeParser::handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus& callLinkStatus, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind kind)
{
    // First, the really simple checks: do we have an actual JS function?
    if (!callLinkStatus.executable())
        return false;
    if (callLinkStatus.executable()->isHostFunction())
        return false;
    
    FunctionExecutable* executable = jsCast<FunctionExecutable*>(callLinkStatus.executable());
    
    // Does the number of arguments we're passing match the arity of the target? We currently
    // inline only if the number of arguments passed is greater than or equal to the number
    // of arguments expected.
1258     if (static_cast<int>(executable->parameterCount()) + 1 > argumentCountIncludingThis)
1259         return false;
1260     
1261     // Have we exceeded inline stack depth, or are we trying to inline a recursive call?
1262     // If either of these are detected, then don't inline.
1263     unsigned depth = 0;
1264     for (InlineStackEntry* entry = m_inlineStackTop; entry; entry = entry->m_caller) {
1265         ++depth;
1266         if (depth >= Options::maximumInliningDepth())
1267             return false; // Depth exceeded.
1268         
1269         if (entry->executable() == executable)
1270             return false; // Recursion detected.
1271     }
1272     
1273     // Do we have a code block, and does the code block's size match the heuristics/requirements for
1274     // being an inline candidate? We might not have a code block if code was thrown away or if we
1275     // simply hadn't actually made this call yet. We could still theoretically attempt to inline it
1276     // if we had a static proof of what was being called; this might happen for example if you call a
1277     // global function, where watchpointing gives us static information. Overall, it's a rare case
1278     // because we expect that any hot callees would have already been compiled.
1279     CodeBlock* codeBlock = executable->baselineCodeBlockFor(kind);
1280     if (!codeBlock)
1281         return false;
1282     if (!canInlineFunctionFor(codeBlock, kind, callLinkStatus.isClosureCall()))
1283         return false;
1284     
1285 #if DFG_ENABLE(DEBUG_VERBOSE)
1286     dataLogF("Inlining executable %p.\n", executable);
1287 #endif
1288     
1289     // Now we know without a doubt that we are committed to inlining. So begin the process
1290     // by checking the callee (if necessary) and making sure that arguments and the callee
1291     // are flushed.
1292     emitFunctionChecks(callLinkStatus, callTargetNode, registerOffset, kind);
1293     
1294     // FIXME: Don't flush constants!
1295     
1296     int inlineCallFrameStart = m_inlineStackTop->remapOperand(VirtualRegister(registerOffset)).offset() + JSStack::CallFrameHeaderSize;
1297     
1298     // Make sure that the area used by the call frame is reserved.
1299     for (int arg = VirtualRegister(inlineCallFrameStart).toLocal() + JSStack::CallFrameHeaderSize + codeBlock->m_numVars; arg-- > VirtualRegister(inlineCallFrameStart).toLocal();)
1300         m_preservedVars.set(arg);
1301     
1302     // Make sure that we have enough locals.
1303     unsigned newNumLocals = VirtualRegister(inlineCallFrameStart).toLocal() + JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters;
1304     if (newNumLocals > m_numLocals) {
1305         m_numLocals = newNumLocals;
1306         for (size_t i = 0; i < m_graph.numBlocks(); ++i)
1307             m_graph.block(i)->ensureLocals(newNumLocals);
1308     }
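    // A rough sketch of the accounting above: the inlined callee's frame (a CallFrameHeader plus
    // its own registers) is carved out of the caller's local space starting at inlineCallFrameStart,
    // so those slots are marked preserved and m_numLocals is grown to cover them.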
1309     
1310     size_t argumentPositionStart = m_graph.m_argumentPositions.size();
1311
1312     InlineStackEntry inlineStackEntry(
1313         this, codeBlock, codeBlock, m_graph.lastBlock(), callLinkStatus.function(),
1314         m_inlineStackTop->remapOperand(VirtualRegister(resultOperand)),
1315         (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind);
1316     
1317     // This is where the actual inlining really happens.
1318     unsigned oldIndex = m_currentIndex;
1319     m_currentIndex = 0;
1320
1321     addToGraph(InlineStart, OpInfo(argumentPositionStart));
1322     if (callLinkStatus.isClosureCall()) {
1323         addToGraph(SetCallee, callTargetNode);
1324         addToGraph(SetMyScope, addToGraph(GetScope, callTargetNode));
1325     }
1326     
1327     parseCodeBlock();
1328     
1329     m_currentIndex = oldIndex;
1330     
1331     // If the inlined code created some new basic blocks, then we have linking to do.
1332     if (inlineStackEntry.m_callsiteBlockHead != m_graph.lastBlock()) {
1333         
1334         ASSERT(!inlineStackEntry.m_unlinkedBlocks.isEmpty());
1335         if (inlineStackEntry.m_callsiteBlockHeadNeedsLinking)
1336             linkBlock(inlineStackEntry.m_callsiteBlockHead, inlineStackEntry.m_blockLinkingTargets);
1337         else
1338             ASSERT(inlineStackEntry.m_callsiteBlockHead->isLinked);
1339         
1340         // It's possible that the callsite block head is not owned by the caller.
1341         if (!inlineStackEntry.m_caller->m_unlinkedBlocks.isEmpty()) {
1342             // It's definitely owned by the caller, because the caller created new blocks.
1343             // Assert that this all adds up.
1344             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_block == inlineStackEntry.m_callsiteBlockHead);
1345             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking);
1346             inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking = false;
1347         } else {
1348             // It's definitely not owned by the caller. Tell the caller that it does not
1349             // need to link its callsite block head, because we already did that here.
1350             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking);
1351             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHead == inlineStackEntry.m_callsiteBlockHead);
1352             inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking = false;
1353         }
1354         
1355         linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
1356     } else
1357         ASSERT(inlineStackEntry.m_unlinkedBlocks.isEmpty());
1358     
1359     BasicBlock* lastBlock = m_graph.lastBlock();
1360     // If there was a return, but no early returns, then we're done. We allow parsing of
1361     // the caller to continue in whatever basic block we're in right now.
1362     if (!inlineStackEntry.m_didEarlyReturn && inlineStackEntry.m_didReturn) {
1363         ASSERT(lastBlock->isEmpty() || !lastBlock->last()->isTerminal());
1364         
1365         // If we created new blocks then the last block needs linking, but in the
1366         // caller. It doesn't need to be linked to, but it needs outgoing links.
1367         if (!inlineStackEntry.m_unlinkedBlocks.isEmpty()) {
1368 #if DFG_ENABLE(DEBUG_VERBOSE)
1369             dataLogF("Reascribing bytecode index of block %p from bc#%u to bc#%u (inline return case).\n", lastBlock, lastBlock->bytecodeBegin, m_currentIndex);
1370 #endif
1371             // For debugging purposes, set the bytecodeBegin. Note that this doesn't matter
1372             // for release builds because this block will never serve as a potential target
1373             // in the linker's binary search.
1374             lastBlock->bytecodeBegin = m_currentIndex;
1375             m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.lastBlock()));
1376         }
1377         
1378         m_currentBlock = m_graph.lastBlock();
1379         
1380 #if DFG_ENABLE(DEBUG_VERBOSE)
1381         dataLogF("Done inlining executable %p, continuing code generation at epilogue.\n", executable);
1382 #endif
1383         return true;
1384     }
1385     
1386     // If we get to this point then all blocks must end in some sort of terminal.
1387     ASSERT(lastBlock->last()->isTerminal());
1388     
1389
1390     // Need to create a new basic block for the continuation at the caller.
1391     RefPtr<BasicBlock> block = adoptRef(new BasicBlock(nextOffset, m_numArguments, m_numLocals));
1392
1393 #if DFG_ENABLE(DEBUG_VERBOSE)
1394     dataLogF("Creating inline epilogue basic block %p, #%zu for %p bc#%u at inline depth %u.\n", block.get(), m_graph.numBlocks(), m_inlineStackTop->executable(), m_currentIndex, CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()));
1395 #endif
1396
1397     // Link the early returns to the basic block we're about to create.
1398     for (size_t i = 0; i < inlineStackEntry.m_unlinkedBlocks.size(); ++i) {
1399         if (!inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking)
1400             continue;
1401         BasicBlock* blockToLink = inlineStackEntry.m_unlinkedBlocks[i].m_block;
1402         ASSERT(!blockToLink->isLinked);
1403         Node* node = blockToLink->last();
1404         ASSERT(node->op() == Jump);
1405         ASSERT(node->takenBlock() == 0);
1406         node->setTakenBlock(block.get());
1407         inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking = false;
1408 #if !ASSERT_DISABLED
1409         blockToLink->isLinked = true;
1410 #endif
1411     }
1412     
1413     m_currentBlock = block.get();
1414     ASSERT(m_inlineStackTop->m_caller->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_caller->m_blockLinkingTargets.last()->bytecodeBegin < nextOffset);
1415     m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(block.get()));
1416     m_inlineStackTop->m_caller->m_blockLinkingTargets.append(block.get());
1417     m_graph.appendBlock(block);
1418     prepareToParseBlock();
1419     
1420     // At this point we return and continue to generate code for the caller, but
1421     // in the new basic block.
1422 #if DFG_ENABLE(DEBUG_VERBOSE)
1423     dataLogF("Done inlining executable %p, continuing code generation in new block.\n", executable);
1424 #endif
1425     return true;
1426 }
1427
1428 bool ByteCodeParser::handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis)
1429 {
1430     if (argumentCountIncludingThis == 1) { // Math.min()
1431         set(VirtualRegister(resultOperand), constantNaN());
1432         return true;
1433     }
1434      
1435     if (argumentCountIncludingThis == 2) { // Math.min(x)
1436         Node* result = get(virtualRegisterForArgument(1, registerOffset));
1437         addToGraph(Phantom, Edge(result, NumberUse));
1438         set(VirtualRegister(resultOperand), result);
1439         return true;
1440     }
1441     
1442     if (argumentCountIncludingThis == 3) { // Math.min(x, y)
1443         set(VirtualRegister(resultOperand), addToGraph(op, get(virtualRegisterForArgument(1, registerOffset)), get(virtualRegisterForArgument(2, registerOffset))));
1444         return true;
1445     }
1446     
1447     // Don't handle >=3 arguments for now.
1448     return false;
1449 }
1450
1451 // FIXME: We dead-code-eliminate unused Math intrinsics, but that's invalid because
1452 // they need to perform the ToNumber conversion, which can have side-effects.
1453 bool ByteCodeParser::handleIntrinsic(int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction)
1454 {
1455     switch (intrinsic) {
1456     case AbsIntrinsic: {
1457         if (argumentCountIncludingThis == 1) { // Math.abs()
1458             set(VirtualRegister(resultOperand), constantNaN());
1459             return true;
1460         }
1461
1462         if (!MacroAssembler::supportsFloatingPointAbs())
1463             return false;
1464
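        // Math.abs of the most negative int32, e.g. Math.abs(-2147483648), cannot be represented
        // as an int32, so if this site has already exited with Overflow we flag the ArithAbs node
        // below as possibly overflowing.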
1465         Node* node = addToGraph(ArithAbs, get(virtualRegisterForArgument(1, registerOffset)));
1466         if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
1467             node->mergeFlags(NodeMayOverflow);
1468         set(VirtualRegister(resultOperand), node);
1469         return true;
1470     }
1471
1472     case MinIntrinsic:
1473         return handleMinMax(resultOperand, ArithMin, registerOffset, argumentCountIncludingThis);
1474         
1475     case MaxIntrinsic:
1476         return handleMinMax(resultOperand, ArithMax, registerOffset, argumentCountIncludingThis);
1477         
1478     case SqrtIntrinsic: {
1479         if (argumentCountIncludingThis == 1) { // Math.sqrt()
1480             set(VirtualRegister(resultOperand), constantNaN());
1481             return true;
1482         }
1483         
1484         if (!MacroAssembler::supportsFloatingPointSqrt())
1485             return false;
1486
1487         set(VirtualRegister(resultOperand), addToGraph(ArithSqrt, get(virtualRegisterForArgument(1, registerOffset))));
1488         return true;
1489     }
1490         
1491     case ArrayPushIntrinsic: {
1492         if (argumentCountIncludingThis != 2)
1493             return false;
1494         
1495         ArrayMode arrayMode = getArrayMode(m_currentInstruction[6].u.arrayProfile);
1496         if (!arrayMode.isJSArray())
1497             return false;
1498         switch (arrayMode.type()) {
1499         case Array::Undecided:
1500         case Array::Int32:
1501         case Array::Double:
1502         case Array::Contiguous:
1503         case Array::ArrayStorage: {
1504             Node* arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
1505             set(VirtualRegister(resultOperand), arrayPush);
1506             
1507             return true;
1508         }
1509             
1510         default:
1511             return false;
1512         }
1513     }
1514         
1515     case ArrayPopIntrinsic: {
1516         if (argumentCountIncludingThis != 1)
1517             return false;
1518         
1519         ArrayMode arrayMode = getArrayMode(m_currentInstruction[6].u.arrayProfile);
1520         if (!arrayMode.isJSArray())
1521             return false;
1522         switch (arrayMode.type()) {
1523         case Array::Int32:
1524         case Array::Double:
1525         case Array::Contiguous:
1526         case Array::ArrayStorage: {
1527             Node* arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)));
1528             set(VirtualRegister(resultOperand), arrayPop);
1529             return true;
1530         }
1531             
1532         default:
1533             return false;
1534         }
1535     }
1536
1537     case CharCodeAtIntrinsic: {
1538         if (argumentCountIncludingThis != 2)
1539             return false;
1540
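        // Illustrative: "abc".charCodeAt(1) === 98. The StringCharCodeAt node below loads the
        // character code directly under the Array::String speculation on |this|, with the index
        // converted to int32.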
1541         int thisOperand = virtualRegisterForArgument(0, registerOffset).offset();
1542         int indexOperand = virtualRegisterForArgument(1, registerOffset).offset();
1543         Node* charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(VirtualRegister(thisOperand)), getToInt32(indexOperand));
1544
1545         set(VirtualRegister(resultOperand), charCode);
1546         return true;
1547     }
1548
1549     case CharAtIntrinsic: {
1550         if (argumentCountIncludingThis != 2)
1551             return false;
1552
1553         int thisOperand = virtualRegisterForArgument(0, registerOffset).offset();
1554         int indexOperand = virtualRegisterForArgument(1, registerOffset).offset();
1555         Node* charCode = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(VirtualRegister(thisOperand)), getToInt32(indexOperand));
1556
1557         set(VirtualRegister(resultOperand), charCode);
1558         return true;
1559     }
1560     case FromCharCodeIntrinsic: {
1561         if (argumentCountIncludingThis != 2)
1562             return false;
1563
1564         int indexOperand = virtualRegisterForArgument(1, registerOffset).offset();
1565         Node* charCode = addToGraph(StringFromCharCode, getToInt32(indexOperand));
1566
1567         set(VirtualRegister(resultOperand), charCode);
1568
1569         return true;
1570     }
1571
1572     case RegExpExecIntrinsic: {
1573         if (argumentCountIncludingThis != 2)
1574             return false;
1575         
1576         Node* regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
1577         set(VirtualRegister(resultOperand), regExpExec);
1578         
1579         return true;
1580     }
1581         
1582     case RegExpTestIntrinsic: {
1583         if (argumentCountIncludingThis != 2)
1584             return false;
1585         
1586         Node* regExpExec = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
1587         set(VirtualRegister(resultOperand), regExpExec);
1588         
1589         return true;
1590     }
1591
1592     case IMulIntrinsic: {
1593         if (argumentCountIncludingThis != 3)
1594             return false;
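        // Math.imul(a, b) is specified as int32 multiplication with wraparound, so both operands
        // go through getToInt32() and the multiply is emitted as ArithIMul rather than ArithMul.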
1595         int leftOperand = virtualRegisterForArgument(1, registerOffset).offset();
1596         int rightOperand = virtualRegisterForArgument(2, registerOffset).offset();
1597         Node* left = getToInt32(leftOperand);
1598         Node* right = getToInt32(rightOperand);
1599         set(VirtualRegister(resultOperand), addToGraph(ArithIMul, left, right));
1600         return true;
1601     }
1602         
1603     default:
1604         return false;
1605     }
1606 }
1607
1608 bool ByteCodeParser::handleTypedArrayConstructor(
1609     int resultOperand, InternalFunction* function, int registerOffset,
1610     int argumentCountIncludingThis, TypedArrayType type)
1611 {
1612     if (!isTypedView(type))
1613         return false;
1614     
1615     if (function->classInfo() != constructorClassInfoForType(type))
1616         return false;
1617     
1618     if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
1619         return false;
1620     
1621     // We only have an intrinsic for the case where you say:
1622     //
1623     // new FooArray(blah);
1624     //
1625     // Of course, 'blah' could be any of the following:
1626     //
1627     // - Integer, indicating that you want to allocate an array of that length.
1628     //   This is the thing we're hoping for, and what we can actually do meaningful
1629     //   optimizations for.
1630     //
1631     // - Array buffer, indicating that you want to create a view onto that _entire_
1632     //   buffer.
1633     //
1634     // - Non-buffer object, indicating that you want to create a copy of that
1635     //   object by pretending that it quacks like an array.
1636     //
1637     // - Anything else, indicating that you want to have an exception thrown at
1638     //   you.
1639     //
1640     // The intrinsic, NewTypedArray, will behave as if it could do any of these
1641     // things up until we do Fixup. Thereafter, if child1 (i.e. 'blah') is
1642     // predicted Int32, then we lock it in as a normal typed array allocation.
1643     // Otherwise, NewTypedArray turns into a totally opaque function call that
1644     // may clobber the world - by virtue of it accessing properties on what could
1645     // be an object.
1646     //
1647     // Note that although the generic form of NewTypedArray sounds sort of awful,
1648     // it is actually quite likely to be more efficient than a fully generic
1649     // Construct. So, we might want to think about making NewTypedArray variadic,
1650     // or else making Construct not super slow.
1651     
1652     if (argumentCountIncludingThis != 2)
1653         return false;
1654     
1655     set(VirtualRegister(resultOperand),
1656         addToGraph(NewTypedArray, OpInfo(type), get(virtualRegisterForArgument(1, registerOffset))));
1657     return true;
1658 }
1659
1660 bool ByteCodeParser::handleConstantInternalFunction(
1661     int resultOperand, InternalFunction* function, int registerOffset,
1662     int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind kind)
1663 {
1664     // If we ever find that we have a lot of internal functions that we specialize for,
1665     // then we should probably have some sort of hashtable dispatch, or maybe even
1666     // dispatch straight through the MethodTable of the InternalFunction. But for now,
1667     // it seems that this case is hit infrequently enough, and the number of functions
1668     // we know about is small enough, that having just a linear cascade of if statements
1669     // is good enough.
1670     
1671     UNUSED_PARAM(prediction); // Remove this once we do more things.
1672     
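    // Array is special-cased first: new Array(len) with a single argument allocates by length
    // (NewArrayWithSize below), while new Array(a, b, ...) materializes the elements through a
    // variadic NewArray node.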
1673     if (function->classInfo() == ArrayConstructor::info()) {
1674         if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
1675             return false;
1676         
1677         if (argumentCountIncludingThis == 2) {
1678             set(VirtualRegister(resultOperand),
1679                 addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(virtualRegisterForArgument(1, registerOffset))));
1680             return true;
1681         }
1682         
1683         for (int i = 1; i < argumentCountIncludingThis; ++i)
1684             addVarArgChild(get(virtualRegisterForArgument(i, registerOffset)));
1685         set(VirtualRegister(resultOperand),
1686             addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
1687         return true;
1688     }
1689     
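    // String handling below: with no argument we fold to the shared empty string; String(x)
    // becomes ToString(x); and for 'new String(x)' the primitive result is additionally wrapped
    // in a StringObject via NewStringObject.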
1690     if (function->classInfo() == StringConstructor::info()) {
1691         Node* result;
1692         
1693         if (argumentCountIncludingThis <= 1)
1694             result = cellConstant(m_vm->smallStrings.emptyString());
1695         else
1696             result = addToGraph(ToString, get(virtualRegisterForArgument(1, registerOffset)));
1697         
1698         if (kind == CodeForConstruct)
1699             result = addToGraph(NewStringObject, OpInfo(function->globalObject()->stringObjectStructure()), result);
1700         
1701         set(VirtualRegister(resultOperand), result);
1702         return true;
1703     }
1704     
1705     for (unsigned typeIndex = 0; typeIndex < NUMBER_OF_TYPED_ARRAY_TYPES; ++typeIndex) {
1706         bool result = handleTypedArrayConstructor(
1707             resultOperand, function, registerOffset, argumentCountIncludingThis,
1708             indexToTypedArrayType(typeIndex));
1709         if (result)
1710             return true;
1711     }
1712     
1713     return false;
1714 }
1715
1716 Node* ByteCodeParser::handleGetByOffset(SpeculatedType prediction, Node* base, unsigned identifierNumber, PropertyOffset offset)
1717 {
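    // Inline property offsets live directly in the object cell, so the base itself acts as the
    // storage; out-of-line properties live in the butterfly, which has to be loaded first
    // (GetButterfly) before the GetByOffset can read from it.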
1718     Node* propertyStorage;
1719     if (isInlineOffset(offset))
1720         propertyStorage = base;
1721     else
1722         propertyStorage = addToGraph(GetButterfly, base);
1723     Node* getByOffset = addToGraph(GetByOffset, OpInfo(m_graph.m_storageAccessData.size()), OpInfo(prediction), propertyStorage, base);
1724
1725     StorageAccessData storageAccessData;
1726     storageAccessData.offset = offset;
1727     storageAccessData.identifierNumber = identifierNumber;
1728     m_graph.m_storageAccessData.append(storageAccessData);
1729
1730     return getByOffset;
1731 }
1732
1733 void ByteCodeParser::handleGetByOffset(
1734     int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
1735     PropertyOffset offset)
1736 {
1737     set(VirtualRegister(destinationOperand), handleGetByOffset(prediction, base, identifierNumber, offset));
1738 }
1739
1740 Node* ByteCodeParser::handlePutByOffset(Node* base, unsigned identifier, PropertyOffset offset, Node* value)
1741 {
1742     Node* propertyStorage;
1743     if (isInlineOffset(offset))
1744         propertyStorage = base;
1745     else
1746         propertyStorage = addToGraph(GetButterfly, base);
1747     Node* result = addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, base, value);
1748     
1749     StorageAccessData storageAccessData;
1750     storageAccessData.offset = offset;
1751     storageAccessData.identifierNumber = identifier;
1752     m_graph.m_storageAccessData.append(storageAccessData);
1753
1754     return result;
1755 }
1756
1757 void ByteCodeParser::handleGetById(
1758     int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
1759     const GetByIdStatus& getByIdStatus)
1760 {
1761     if (!getByIdStatus.isSimple()
1762         || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
1763         || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache)) {
1764         set(VirtualRegister(destinationOperand),
1765             addToGraph(
1766                 getByIdStatus.makesCalls() ? GetByIdFlush : GetById,
1767                 OpInfo(identifierNumber), OpInfo(prediction), base));
1768         return;
1769     }
1770     
1771     ASSERT(getByIdStatus.structureSet().size());
1772                 
1773     // The implementation of GetByOffset does not know to terminate speculative
1774     // execution if it doesn't have a prediction, so we do it manually.
1775     if (prediction == SpecNone)
1776         addToGraph(ForceOSRExit);
1777     else if (m_graph.compilation())
1778         m_graph.compilation()->noticeInlinedGetById();
1779     
1780     Node* originalBaseForBaselineJIT = base;
1781                 
1782     addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(getByIdStatus.structureSet())), base);
1783     
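    // If the property was found somewhere on the prototype chain, walk the chain here: each step
    // replaces |base| with the next prototype as a structure-checked constant, so the load at the
    // end reads from the object that actually holds the property.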
1784     if (getByIdStatus.chain()) {
1785         m_graph.chains().addLazily(getByIdStatus.chain());
1786         Structure* currentStructure = getByIdStatus.structureSet().singletonStructure();
1787         JSObject* currentObject = 0;
1788         for (unsigned i = 0; i < getByIdStatus.chain()->size(); ++i) {
1789             currentObject = asObject(currentStructure->prototypeForLookup(m_inlineStackTop->m_codeBlock));
1790             currentStructure = getByIdStatus.chain()->at(i);
1791             base = cellConstantWithStructureCheck(currentObject, currentStructure);
1792         }
1793     }
1794     
1795     // Unless we want bugs like https://bugs.webkit.org/show_bug.cgi?id=88783, we need to
1796     // ensure that the base of the original get_by_id is kept alive until we're done with
1797     // all of the speculations. We only insert the Phantom if there had been a CheckStructure
1798     // on something other than the base following the CheckStructure on base, or if the
1799     // access was compiled to a WeakJSConstant specific value, in which case we might not
1800     // have any explicit use of the base at all.
1801     if (getByIdStatus.specificValue() || originalBaseForBaselineJIT != base)
1802         addToGraph(Phantom, originalBaseForBaselineJIT);
1803     
1804     if (getByIdStatus.specificValue()) {
1805         ASSERT(getByIdStatus.specificValue().isCell());
1806         
1807         set(VirtualRegister(destinationOperand), cellConstant(getByIdStatus.specificValue().asCell()));
1808         return;
1809     }
1810     
1811     handleGetByOffset(
1812         destinationOperand, prediction, base, identifierNumber, getByIdStatus.offset());
1813 }
1814
1815 void ByteCodeParser::prepareToParseBlock()
1816 {
1817     for (unsigned i = 0; i < m_constants.size(); ++i)
1818         m_constants[i] = ConstantRecord();
1819     m_cellConstantNodes.clear();
1820 }
1821
1822 Node* ByteCodeParser::getScope(bool skipTop, unsigned skipCount)
1823 {
1824     Node* localBase;
1825     if (inlineCallFrame() && !inlineCallFrame()->isClosureCall()) {
1826         ASSERT(inlineCallFrame()->callee);
1827         localBase = cellConstant(inlineCallFrame()->callee->scope());
1828     } else
1829         localBase = addToGraph(GetMyScope);
1830     if (skipTop) {
1831         ASSERT(!inlineCallFrame());
1832         localBase = addToGraph(SkipTopScope, localBase);
1833     }
1834     for (unsigned n = skipCount; n--;)
1835         localBase = addToGraph(SkipScope, localBase);
1836     return localBase;
1837 }
1838
1839 bool ByteCodeParser::parseBlock(unsigned limit)
1840 {
1841     bool shouldContinueParsing = true;
1842
1843     Interpreter* interpreter = m_vm->interpreter;
1844     Instruction* instructionsBegin = m_inlineStackTop->m_codeBlock->instructions().begin();
1845     unsigned blockBegin = m_currentIndex;
1846     
1847     // If we are the first basic block, introduce markers for arguments. This allows
1848     // us to track if a use of an argument may use the actual argument passed, as
1849     // opposed to using a value we set explicitly.
1850     if (m_currentBlock == m_graph.block(0) && !inlineCallFrame()) {
1851         m_graph.m_arguments.resize(m_numArguments);
1852         for (unsigned argument = 0; argument < m_numArguments; ++argument) {
1853             VariableAccessData* variable = newVariableAccessData(
1854                 virtualRegisterForArgument(argument), m_codeBlock->isCaptured(virtualRegisterForArgument(argument)));
1855             variable->mergeStructureCheckHoistingFailed(
1856                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
1857             variable->mergeCheckArrayHoistingFailed(
1858                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
1859             
1860             Node* setArgument = addToGraph(SetArgument, OpInfo(variable));
1861             m_graph.m_arguments[argument] = setArgument;
1862             m_currentBlock->variablesAtTail.setArgumentFirstTime(argument, setArgument);
1863         }
1864     }
1865
1866     while (true) {
1867         // Don't extend over jump destinations.
1868         if (m_currentIndex == limit) {
1869             // Ordinarily we want to plant a jump. But refuse to do this if the block is
1870             // empty. This is a special case for inlining, which might otherwise create
1871             // some empty blocks in some cases. When parseBlock() returns with an empty
1872             // block, it will get repurposed instead of creating a new one. Note that this
1873             // logic relies on every bytecode resulting in one or more nodes, which would
1874             // be true anyway except for op_loop_hint, which emits a Phantom to force this
1875             // to be true.
1876             if (!m_currentBlock->isEmpty())
1877                 addToGraph(Jump, OpInfo(m_currentIndex));
1878             else {
1879 #if DFG_ENABLE(DEBUG_VERBOSE)
1880                 dataLogF("Refusing to plant jump at limit %u because block %p is empty.\n", limit, m_currentBlock);
1881 #endif
1882             }
1883             return shouldContinueParsing;
1884         }
1885         
1886         // Switch on the current bytecode opcode.
1887         Instruction* currentInstruction = instructionsBegin + m_currentIndex;
1888         m_currentInstruction = currentInstruction; // Some methods want to use this, and we'd rather not thread it through calls.
1889         OpcodeID opcodeID = interpreter->getOpcodeID(currentInstruction->u.opcode);
1890         
1891         if (m_graph.compilation()) {
1892             addToGraph(CountExecution, OpInfo(m_graph.compilation()->executionCounterFor(
1893                 Profiler::OriginStack(*m_vm->m_perBytecodeProfiler, m_codeBlock, currentCodeOrigin()))));
1894         }
1895         
1896         switch (opcodeID) {
1897
1898         // === Function entry opcodes ===
1899
1900         case op_enter:
1901             // Initialize all locals to undefined.
1902             for (int i = 0; i < m_inlineStackTop->m_codeBlock->m_numVars; ++i)
1903                 set(virtualRegisterForLocal(i), constantUndefined(), SetOnEntry);
1904             NEXT_OPCODE(op_enter);
1905
1906         case op_to_this: {
1907             Node* op1 = getThis();
1908             if (op1->op() != ToThis) {
1909                 Structure* cachedStructure = currentInstruction[2].u.structure.get();
1910                 if (!cachedStructure
1911                     || cachedStructure->classInfo()->methodTable.toThis != JSObject::info()->methodTable.toThis
1912                     || m_inlineStackTop->m_profiledBlock->couldTakeSlowCase(m_currentIndex)
1913                     || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)) {
1914                     setThis(addToGraph(ToThis, op1));
1915                 } else {
1916                     addToGraph(
1917                         CheckStructure,
1918                         OpInfo(m_graph.addStructureSet(cachedStructure)),
1919                         op1);
1920                 }
1921             }
1922             NEXT_OPCODE(op_to_this);
1923         }
1924
1925         case op_create_this: {
1926             int calleeOperand = currentInstruction[2].u.operand;
1927             Node* callee = get(VirtualRegister(calleeOperand));
1928             bool alreadyEmitted = false;
1929             if (callee->op() == WeakJSConstant) {
1930                 JSCell* cell = callee->weakConstant();
1931                 ASSERT(cell->inherits(JSFunction::info()));
1932                 
1933                 JSFunction* function = jsCast<JSFunction*>(cell);
1934                 ObjectAllocationProfile* allocationProfile = function->tryGetAllocationProfile();
1935                 if (allocationProfile) {
1936                     addToGraph(AllocationProfileWatchpoint, OpInfo(function));
1937                     // The callee is still live up to this point.
1938                     addToGraph(Phantom, callee);
1939                     set(VirtualRegister(currentInstruction[1].u.operand),
1940                         addToGraph(NewObject, OpInfo(allocationProfile->structure())));
1941                     alreadyEmitted = true;
1942                 }
1943             }
1944             if (!alreadyEmitted)
1945                 set(VirtualRegister(currentInstruction[1].u.operand),
1946                     addToGraph(CreateThis, OpInfo(currentInstruction[3].u.operand), callee));
1947             NEXT_OPCODE(op_create_this);
1948         }
1949
1950         case op_new_object: {
1951             set(VirtualRegister(currentInstruction[1].u.operand),
1952                 addToGraph(NewObject,
1953                     OpInfo(currentInstruction[3].u.objectAllocationProfile->structure())));
1954             NEXT_OPCODE(op_new_object);
1955         }
1956             
1957         case op_new_array: {
1958             int startOperand = currentInstruction[2].u.operand;
1959             int numOperands = currentInstruction[3].u.operand;
1960             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
1961             for (int operandIdx = startOperand; operandIdx > startOperand - numOperands; --operandIdx)
1962                 addVarArgChild(get(VirtualRegister(operandIdx)));
1963             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(Node::VarArg, NewArray, OpInfo(profile->selectIndexingType()), OpInfo(0)));
1964             NEXT_OPCODE(op_new_array);
1965         }
1966             
1967         case op_new_array_with_size: {
1968             int lengthOperand = currentInstruction[2].u.operand;
1969             ArrayAllocationProfile* profile = currentInstruction[3].u.arrayAllocationProfile;
1970             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewArrayWithSize, OpInfo(profile->selectIndexingType()), get(VirtualRegister(lengthOperand))));
1971             NEXT_OPCODE(op_new_array_with_size);
1972         }
1973             
1974         case op_new_array_buffer: {
1975             int startConstant = currentInstruction[2].u.operand;
1976             int numConstants = currentInstruction[3].u.operand;
1977             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
1978             NewArrayBufferData data;
1979             data.startConstant = m_inlineStackTop->m_constantBufferRemap[startConstant];
1980             data.numConstants = numConstants;
1981             data.indexingType = profile->selectIndexingType();
1982
1983             // If this statement has never executed, we'll have the wrong indexing type in the profile.
1984             for (int i = 0; i < numConstants; ++i) {
1985                 data.indexingType =
1986                     leastUpperBoundOfIndexingTypeAndValue(
1987                         data.indexingType,
1988                         m_codeBlock->constantBuffer(data.startConstant)[i]);
1989             }
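            // Illustrative example: a buffer of constants like [1, 2.5] whose profile still says
            // Int32 (because this statement never executed) is widened here to at least a Double
            // indexing type before the NewArrayBuffer node is emitted.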
1990             
1991             m_graph.m_newArrayBufferData.append(data);
1992             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewArrayBuffer, OpInfo(&m_graph.m_newArrayBufferData.last())));
1993             NEXT_OPCODE(op_new_array_buffer);
1994         }
1995             
1996         case op_new_regexp: {
1997             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewRegexp, OpInfo(currentInstruction[2].u.operand)));
1998             NEXT_OPCODE(op_new_regexp);
1999         }
2000             
2001         case op_get_callee: {
2002             JSCell* cachedFunction = currentInstruction[2].u.jsCell.get();
2003             if (!cachedFunction 
2004                 || m_inlineStackTop->m_profiledBlock->couldTakeSlowCase(m_currentIndex)
2005                 || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadFunction)) {
2006                 set(VirtualRegister(currentInstruction[1].u.operand), get(VirtualRegister(JSStack::Callee)));
2007             } else {
2008                 ASSERT(cachedFunction->inherits(JSFunction::info()));
2009                 Node* actualCallee = get(VirtualRegister(JSStack::Callee));
2010                 addToGraph(CheckFunction, OpInfo(cachedFunction), actualCallee);
2011                 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(WeakJSConstant, OpInfo(cachedFunction)));
2012             }
2013             NEXT_OPCODE(op_get_callee);
2014         }
2015
2016         // === Bitwise operations ===
2017
2018         case op_bitand: {
2019             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2020             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2021             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitAnd, op1, op2));
2022             NEXT_OPCODE(op_bitand);
2023         }
2024
2025         case op_bitor: {
2026             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2027             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2028             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitOr, op1, op2));
2029             NEXT_OPCODE(op_bitor);
2030         }
2031
2032         case op_bitxor: {
2033             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2034             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2035             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitXor, op1, op2));
2036             NEXT_OPCODE(op_bitxor);
2037         }
2038
2039         case op_rshift: {
2040             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2041             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2042             Node* result;
2043             // Optimize out shifts by zero.
2044             if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2045                 result = op1;
2046             else
2047                 result = addToGraph(BitRShift, op1, op2);
2048             set(VirtualRegister(currentInstruction[1].u.operand), result);
2049             NEXT_OPCODE(op_rshift);
2050         }
2051
2052         case op_lshift: {
2053             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2054             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2055             Node* result;
2056             // Optimize out shifts by zero.
2057             if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2058                 result = op1;
2059             else
2060                 result = addToGraph(BitLShift, op1, op2);
2061             set(VirtualRegister(currentInstruction[1].u.operand), result);
2062             NEXT_OPCODE(op_lshift);
2063         }
2064
2065         case op_urshift: {
2066             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2067             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2068             Node* result;
2069             // The result of a zero-extending right shift is treated as an unsigned value.
2070             // This means that if the top bit is set, the result is not in the int32 range,
2071             // and as such must be stored as a double. If the shift amount is a constant,
2072             // we may be able to optimize.
2073             if (isInt32Constant(op2)) {
2074                 // If we know we are shifting by a non-zero amount, then since the operation
2075                 // zero fills we know the top bit of the result must be zero, and as such the
2076                 // result must be within the int32 range. Conversely, if this is a shift by
2077                 // zero, then the result may be changed by the conversion to unsigned, but it
2078                 // is not necessary to perform the shift!
2079                 if (valueOfInt32Constant(op2) & 0x1f)
2080                     result = addToGraph(BitURShift, op1, op2);
2081                 else
2082                     result = makeSafe(addToGraph(UInt32ToNumber, op1));
2083             } else {
2084                 // Cannot optimize at this stage; shift & potentially rebox as a double.
2085                 result = addToGraph(BitURShift, op1, op2);
2086                 result = makeSafe(addToGraph(UInt32ToNumber, result));
2087             }
2088             set(VirtualRegister(currentInstruction[1].u.operand), result);
2089             NEXT_OPCODE(op_urshift);
2090         }
2091
2092         // === Increment/Decrement opcodes ===
2093
2094         case op_inc: {
2095             int srcDst = currentInstruction[1].u.operand;
2096             VirtualRegister srcDstVirtualRegister = VirtualRegister(srcDst);
2097             Node* op = get(srcDstVirtualRegister);
2098             set(srcDstVirtualRegister, makeSafe(addToGraph(ArithAdd, op, one())));
2099             NEXT_OPCODE(op_inc);
2100         }
2101
2102         case op_dec: {
2103             int srcDst = currentInstruction[1].u.operand;
2104             VirtualRegister srcDstVirtualRegister = VirtualRegister(srcDst);
2105             Node* op = get(srcDstVirtualRegister);
2106             set(srcDstVirtualRegister, makeSafe(addToGraph(ArithSub, op, one())));
2107             NEXT_OPCODE(op_dec);
2108         }
2109
2110         // === Arithmetic operations ===
2111
2112         case op_add: {
2113             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2114             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2115             if (op1->hasNumberResult() && op2->hasNumberResult())
2116                 set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithAdd, op1, op2)));
2117             else
2118                 set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ValueAdd, op1, op2)));
2119             NEXT_OPCODE(op_add);
2120         }
2121
2122         case op_sub: {
2123             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2124             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2125             set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithSub, op1, op2)));
2126             NEXT_OPCODE(op_sub);
2127         }
2128
2129         case op_negate: {
2130             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2131             set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithNegate, op1)));
2132             NEXT_OPCODE(op_negate);
2133         }
2134
2135         case op_mul: {
2136             // Multiply requires that the inputs are not truncated, unfortunately.
2137             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2138             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2139             set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithMul, op1, op2)));
2140             NEXT_OPCODE(op_mul);
2141         }
2142
2143         case op_mod: {
2144             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2145             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2146             set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithMod, op1, op2)));
2147             NEXT_OPCODE(op_mod);
2148         }
2149
2150         case op_div: {
2151             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2152             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2153             set(VirtualRegister(currentInstruction[1].u.operand), makeDivSafe(addToGraph(ArithDiv, op1, op2)));
2154             NEXT_OPCODE(op_div);
2155         }
2156
2157         // === Misc operations ===
2158
2159 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2160         case op_debug:
2161             addToGraph(Breakpoint);
2162             NEXT_OPCODE(op_debug);
2163 #endif
2164         case op_mov: {
2165             Node* op = get(VirtualRegister(currentInstruction[2].u.operand));
2166             set(VirtualRegister(currentInstruction[1].u.operand), op);
2167             NEXT_OPCODE(op_mov);
2168         }
2169
2170         case op_check_has_instance:
2171             addToGraph(CheckHasInstance, get(VirtualRegister(currentInstruction[3].u.operand)));
2172             NEXT_OPCODE(op_check_has_instance);
2173
2174         case op_instanceof: {
2175             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2176             Node* prototype = get(VirtualRegister(currentInstruction[3].u.operand));
2177             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(InstanceOf, value, prototype));
2178             NEXT_OPCODE(op_instanceof);
2179         }
2180             
2181         case op_is_undefined: {
2182             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2183             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsUndefined, value));
2184             NEXT_OPCODE(op_is_undefined);
2185         }
2186
2187         case op_is_boolean: {
2188             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2189             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsBoolean, value));
2190             NEXT_OPCODE(op_is_boolean);
2191         }
2192
2193         case op_is_number: {
2194             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2195             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsNumber, value));
2196             NEXT_OPCODE(op_is_number);
2197         }
2198
2199         case op_is_string: {
2200             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2201             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsString, value));
2202             NEXT_OPCODE(op_is_string);
2203         }
2204
2205         case op_is_object: {
2206             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2207             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsObject, value));
2208             NEXT_OPCODE(op_is_object);
2209         }
2210
2211         case op_is_function: {
2212             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2213             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsFunction, value));
2214             NEXT_OPCODE(op_is_function);
2215         }
2216
2217         case op_not: {
2218             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2219             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, value));
2220             NEXT_OPCODE(op_not);
2221         }
2222             
2223         case op_to_primitive: {
2224             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2225             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(ToPrimitive, value));
2226             NEXT_OPCODE(op_to_primitive);
2227         }
2228             
2229         case op_strcat: {
2230             int startOperand = currentInstruction[2].u.operand;
2231             int numOperands = currentInstruction[3].u.operand;
2232 #if CPU(X86)
2233             // X86 doesn't have enough registers to compile MakeRope with three arguments.
2234             // Rather than try to be clever, we just make MakeRope dumber on this processor.
2235             const unsigned maxRopeArguments = 2;
2236 #else
2237             const unsigned maxRopeArguments = 3;
2238 #endif
2239             auto toStringNodes = std::make_unique<Node*[]>(numOperands);
2240             for (int i = 0; i < numOperands; i++)
2241                 toStringNodes[i] = addToGraph(ToString, get(VirtualRegister(startOperand - i)));
2242
2243             for (int i = 0; i < numOperands; i++)
2244                 addToGraph(Phantom, toStringNodes[i]);
2245
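            // The loop below folds the strings into ropes at most maxRopeArguments at a time; for
            // example (non-X86, limit 3), five operands end up as MakeRope(MakeRope(s0, s1, s2), s3, s4).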
2246             Node* operands[AdjacencyList::Size];
2247             unsigned indexInOperands = 0;
2248             for (unsigned i = 0; i < AdjacencyList::Size; ++i)
2249                 operands[i] = 0;
2250             for (int operandIdx = 0; operandIdx < numOperands; ++operandIdx) {
2251                 if (indexInOperands == maxRopeArguments) {
2252                     operands[0] = addToGraph(MakeRope, operands[0], operands[1], operands[2]);
2253                     for (unsigned i = 1; i < AdjacencyList::Size; ++i)
2254                         operands[i] = 0;
2255                     indexInOperands = 1;
2256                 }
2257                 
2258                 ASSERT(indexInOperands < AdjacencyList::Size);
2259                 ASSERT(indexInOperands < maxRopeArguments);
2260                 operands[indexInOperands++] = toStringNodes[operandIdx];
2261             }
2262             set(VirtualRegister(currentInstruction[1].u.operand),
2263                 addToGraph(MakeRope, operands[0], operands[1], operands[2]));
2264             NEXT_OPCODE(op_strcat);
2265         }
2266
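        // For the comparison opcodes below: when both operands are foldable constants the result
        // is computed at parse time and emitted as a boolean constant (the relational ops also
        // require both values to be numbers); otherwise the corresponding Compare* node is used.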
2267         case op_less: {
2268             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2269             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2270             if (canFold(op1) && canFold(op2)) {
2271                 JSValue a = valueOfJSConstant(op1);
2272                 JSValue b = valueOfJSConstant(op2);
2273                 if (a.isNumber() && b.isNumber()) {
2274                     set(VirtualRegister(currentInstruction[1].u.operand),
2275                         getJSConstantForValue(jsBoolean(a.asNumber() < b.asNumber())));
2276                     NEXT_OPCODE(op_less);
2277                 }
2278             }
2279             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareLess, op1, op2));
2280             NEXT_OPCODE(op_less);
2281         }
2282
2283         case op_lesseq: {
2284             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2285             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2286             if (canFold(op1) && canFold(op2)) {
2287                 JSValue a = valueOfJSConstant(op1);
2288                 JSValue b = valueOfJSConstant(op2);
2289                 if (a.isNumber() && b.isNumber()) {
2290                     set(VirtualRegister(currentInstruction[1].u.operand),
2291                         getJSConstantForValue(jsBoolean(a.asNumber() <= b.asNumber())));
2292                     NEXT_OPCODE(op_lesseq);
2293                 }
2294             }
2295             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareLessEq, op1, op2));
2296             NEXT_OPCODE(op_lesseq);
2297         }
2298
2299         case op_greater: {
2300             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2301             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2302             if (canFold(op1) && canFold(op2)) {
2303                 JSValue a = valueOfJSConstant(op1);
2304                 JSValue b = valueOfJSConstant(op2);
2305                 if (a.isNumber() && b.isNumber()) {
2306                     set(VirtualRegister(currentInstruction[1].u.operand),
2307                         getJSConstantForValue(jsBoolean(a.asNumber() > b.asNumber())));
2308                     NEXT_OPCODE(op_greater);
2309                 }
2310             }
2311             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareGreater, op1, op2));
2312             NEXT_OPCODE(op_greater);
2313         }
2314
2315         case op_greatereq: {
2316             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2317             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2318             if (canFold(op1) && canFold(op2)) {
2319                 JSValue a = valueOfJSConstant(op1);
2320                 JSValue b = valueOfJSConstant(op2);
2321                 if (a.isNumber() && b.isNumber()) {
2322                     set(VirtualRegister(currentInstruction[1].u.operand),
2323                         getJSConstantForValue(jsBoolean(a.asNumber() >= b.asNumber())));
2324                     NEXT_OPCODE(op_greatereq);
2325                 }
2326             }
2327             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareGreaterEq, op1, op2));
2328             NEXT_OPCODE(op_greatereq);
2329         }
2330
2331         case op_eq: {
2332             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2333             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2334             if (canFold(op1) && canFold(op2)) {
2335                 JSValue a = valueOfJSConstant(op1);
2336                 JSValue b = valueOfJSConstant(op2);
2337                 set(VirtualRegister(currentInstruction[1].u.operand),
2338                     getJSConstantForValue(jsBoolean(JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
2339                 NEXT_OPCODE(op_eq);
2340             }
2341             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareEq, op1, op2));
2342             NEXT_OPCODE(op_eq);
2343         }
2344
2345         case op_eq_null: {
2346             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2347             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareEqConstant, value, constantNull()));
2348             NEXT_OPCODE(op_eq_null);
2349         }
2350
2351         case op_stricteq: {
2352             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2353             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2354             if (canFold(op1) && canFold(op2)) {
2355                 JSValue a = valueOfJSConstant(op1);
2356                 JSValue b = valueOfJSConstant(op2);
2357                 set(VirtualRegister(currentInstruction[1].u.operand),
2358                     getJSConstantForValue(jsBoolean(JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
2359                 NEXT_OPCODE(op_stricteq);
2360             }
2361             if (isConstantForCompareStrictEq(op1))
2362                 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareStrictEqConstant, op2, op1));
2363             else if (isConstantForCompareStrictEq(op2))
2364                 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareStrictEqConstant, op1, op2));
2365             else
2366                 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareStrictEq, op1, op2));
2367             NEXT_OPCODE(op_stricteq);
2368         }
2369
2370         case op_neq: {
2371             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2372             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2373             if (canFold(op1) && canFold(op2)) {
2374                 JSValue a = valueOfJSConstant(op1);
2375                 JSValue b = valueOfJSConstant(op2);
2376                 set(VirtualRegister(currentInstruction[1].u.operand),
2377                     getJSConstantForValue(jsBoolean(!JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
2378                 NEXT_OPCODE(op_neq);
2379             }
2380             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, addToGraph(CompareEq, op1, op2)));
2381             NEXT_OPCODE(op_neq);
2382         }
2383
2384         case op_neq_null: {
2385             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2386             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, addToGraph(CompareEqConstant, value, constantNull())));
2387             NEXT_OPCODE(op_neq_null);
2388         }
2389
2390         case op_nstricteq: {
2391             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2392             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2393             if (canFold(op1) && canFold(op2)) {
2394                 JSValue a = valueOfJSConstant(op1);
2395                 JSValue b = valueOfJSConstant(op2);
2396                 set(VirtualRegister(currentInstruction[1].u.operand),
2397                     getJSConstantForValue(jsBoolean(!JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
2398                 NEXT_OPCODE(op_nstricteq);
2399             }
2400             Node* invertedResult;
2401             if (isConstantForCompareStrictEq(op1))
2402                 invertedResult = addToGraph(CompareStrictEqConstant, op2, op1);
2403             else if (isConstantForCompareStrictEq(op2))
2404                 invertedResult = addToGraph(CompareStrictEqConstant, op1, op2);
2405             else
2406                 invertedResult = addToGraph(CompareStrictEq, op1, op2);
2407             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, invertedResult));
2408             NEXT_OPCODE(op_nstricteq);
2409         }
2410
2411         // === Property access operations ===
2412
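             // get_by_val: the array profile picks an ArrayMode for the access and the value
             // profile supplies the result prediction.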
2413         case op_get_by_val: {
2414             SpeculatedType prediction = getPrediction();
2415             
2416             Node* base = get(VirtualRegister(currentInstruction[2].u.operand));
2417             ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Read);
2418             Node* property = get(VirtualRegister(currentInstruction[3].u.operand));
2419             Node* getByVal = addToGraph(GetByVal, OpInfo(arrayMode.asWord()), OpInfo(prediction), base, property);
2420             set(VirtualRegister(currentInstruction[1].u.operand), getByVal);
2421
2422             NEXT_OPCODE(op_get_by_val);
2423         }
2424
2425         case op_put_by_val: {
2426             Node* base = get(VirtualRegister(currentInstruction[1].u.operand));
2427
2428             ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Write);
2429             
2430             Node* property = get(VirtualRegister(currentInstruction[2].u.operand));
2431             Node* value = get(VirtualRegister(currentInstruction[3].u.operand));
2432             
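                 // PutByVal is a varargs node: its children are base, property, and value, plus
                 // a reserved slot that later phases may fill in with the property storage.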
2433             addVarArgChild(base);
2434             addVarArgChild(property);
2435             addVarArgChild(value);
2436             addVarArgChild(0); // Leave room for property storage.
2437             addToGraph(Node::VarArg, PutByVal, OpInfo(arrayMode.asWord()), OpInfo(0));
2438
2439             NEXT_OPCODE(op_put_by_val);
2440         }
2441             
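             // These three opcodes share one parse path. The baseline GetByIdStatus decides
             // whether handleGetById() can inline the access as a checked load or must fall
             // back to a generic GetById.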
2442         case op_get_by_id:
2443         case op_get_by_id_out_of_line:
2444         case op_get_array_length: {
2445             SpeculatedType prediction = getPrediction();
2446             
2447             Node* base = get(VirtualRegister(currentInstruction[2].u.operand));
2448             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
2449             
2450             StringImpl* uid = m_graph.identifiers()[identifierNumber];
2451             GetByIdStatus getByIdStatus = GetByIdStatus::computeFor(
2452                 m_inlineStackTop->m_profiledBlock, m_currentIndex, uid);
2453             
2454             handleGetById(
2455                 currentInstruction[1].u.operand, prediction, base, identifierNumber, getByIdStatus);
2456
2457             NEXT_OPCODE(op_get_by_id);
2458         }
2459         case op_put_by_id:
2460         case op_put_by_id_out_of_line:
2461         case op_put_by_id_transition_direct:
2462         case op_put_by_id_transition_normal:
2463         case op_put_by_id_transition_direct_out_of_line:
2464         case op_put_by_id_transition_normal_out_of_line: {
2465             Node* value = get(VirtualRegister(currentInstruction[3].u.operand));
2466             Node* base = get(VirtualRegister(currentInstruction[1].u.operand));
2467             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
2468             bool direct = currentInstruction[8].u.operand;
2469
2470             PutByIdStatus putByIdStatus = PutByIdStatus::computeFor(
2471                 m_inlineStackTop->m_profiledBlock,
2472                 m_currentIndex,
2473                 m_graph.identifiers()[identifierNumber]);
2474             bool canCountAsInlined = true;
2475             if (!putByIdStatus.isSet()) {
2476                 addToGraph(ForceOSRExit);
2477                 canCountAsInlined = false;
2478             }
2479             
2480             bool hasExitSite =
2481                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
2482                 || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache);
2483             
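                 // Three outcomes, from cheapest to most generic: a simple replace becomes
                 // CheckStructure + PutByOffset; a simple transition additionally checks the
                 // prototype chain, (re)allocates out-of-line storage if needed, and emits
                 // PutStructure; anything else falls back to PutById / PutByIdDirect.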
2484             if (!hasExitSite && putByIdStatus.isSimpleReplace()) {
2485                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2486                 handlePutByOffset(base, identifierNumber, putByIdStatus.offset(), value);
2487             } else if (
2488                 !hasExitSite
2489                 && putByIdStatus.isSimpleTransition()
2490                 && (!putByIdStatus.structureChain()
2491                     || putByIdStatus.structureChain()->isStillValid())) {
2492                 
2493                 m_graph.chains().addLazily(putByIdStatus.structureChain());
2494                 
2495                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2496                 if (!direct) {
2497                     if (!putByIdStatus.oldStructure()->storedPrototype().isNull()) {
2498                         cellConstantWithStructureCheck(
2499                             putByIdStatus.oldStructure()->storedPrototype().asCell());
2500                     }
2501                     
2502                     for (unsigned i = 0; i < putByIdStatus.structureChain()->size(); ++i) {
2503                         JSValue prototype = putByIdStatus.structureChain()->at(i)->storedPrototype();
2504                         if (prototype.isNull())
2505                             continue;
2506                         cellConstantWithStructureCheck(prototype.asCell());
2507                     }
2508                 }
2509                 ASSERT(putByIdStatus.oldStructure()->transitionWatchpointSetHasBeenInvalidated());
2510                 
2511                 Node* propertyStorage;
2512                 StructureTransitionData* transitionData =
2513                     m_graph.addStructureTransitionData(
2514                         StructureTransitionData(
2515                             putByIdStatus.oldStructure(),
2516                             putByIdStatus.newStructure()));
2517
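                     // Pick the property storage to write into: if the transition grows the
                     // out-of-line capacity we must allocate or reallocate the butterfly first;
                     // otherwise the store targets inline storage (the base cell itself) or the
                     // existing butterfly.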
2518                 if (putByIdStatus.oldStructure()->outOfLineCapacity()
2519                     != putByIdStatus.newStructure()->outOfLineCapacity()) {
2520                     
2521                     // If we're growing the property storage then it must be because we're
2522                     // storing into the out-of-line storage.
2523                     ASSERT(!isInlineOffset(putByIdStatus.offset()));
2524                     
2525                     if (!putByIdStatus.oldStructure()->outOfLineCapacity()) {
2526                         propertyStorage = addToGraph(
2527                             AllocatePropertyStorage, OpInfo(transitionData), base);
2528                     } else {
2529                         propertyStorage = addToGraph(
2530                             ReallocatePropertyStorage, OpInfo(transitionData),
2531                             base, addToGraph(GetButterfly, base));
2532                     }
2533                 } else {
2534                     if (isInlineOffset(putByIdStatus.offset()))
2535                         propertyStorage = base;
2536                     else
2537                         propertyStorage = addToGraph(GetButterfly, base);
2538                 }
2539                 
2540                 addToGraph(PutStructure, OpInfo(transitionData), base);
2541                 
2542                 addToGraph(
2543                     PutByOffset,
2544                     OpInfo(m_graph.m_storageAccessData.size()),
2545                     propertyStorage,
2546                     base,
2547                     value);
2548                 
2549                 StorageAccessData storageAccessData;
2550                 storageAccessData.offset = putByIdStatus.offset();
2551                 storageAccessData.identifierNumber = identifierNumber;
2552                 m_graph.m_storageAccessData.append(storageAccessData);
2553             } else {
2554                 if (direct)
2555                     addToGraph(PutByIdDirect, OpInfo(identifierNumber), base, value);
2556                 else
2557                     addToGraph(PutById, OpInfo(identifierNumber), base, value);
2558                 canCountAsInlined = false;
2559             }
2560             
2561             if (canCountAsInlined && m_graph.compilation())
2562                 m_graph.compilation()->noticeInlinedPutById();
2563
2564             NEXT_OPCODE(op_put_by_id);
2565         }
2566
2567         case op_init_global_const_nop: {
2568             NEXT_OPCODE(op_init_global_const_nop);
2569         }
2570
2571         case op_init_global_const: {
2572             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2573             addToGraph(
2574                 PutGlobalVar,
2575                 OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2576                 value);
2577             NEXT_OPCODE(op_init_global_const);
2578         }
2579
2580         // === Block terminators. ===
2581
2582         case op_jmp: {
2583             unsigned relativeOffset = currentInstruction[1].u.operand;
2584             addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2585             LAST_OPCODE(op_jmp);
2586         }
2587
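             // For the conditional jumps below, a constant condition folds the branch away: we
             // emit either an unconditional Jump to the target or a Phantom placeholder and keep
             // parsing the fall-through path. Otherwise a Branch node records the taken and
             // not-taken bytecode offsets, which linkBlock() later resolves to basic blocks.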
2588         case op_jtrue: {
2589             unsigned relativeOffset = currentInstruction[2].u.operand;
2590             Node* condition = get(VirtualRegister(currentInstruction[1].u.operand));
2591             if (canFold(condition)) {
2592                 TriState state = valueOfJSConstant(condition).pureToBoolean();
2593                 if (state == TrueTriState) {
2594                     addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2595                     LAST_OPCODE(op_jtrue);
2596                 } else if (state == FalseTriState) {
2597                     // Emit a placeholder for this bytecode operation but otherwise
2598                     // just fall through.
2599                     addToGraph(Phantom);
2600                     NEXT_OPCODE(op_jtrue);
2601                 }
2602             }
2603             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jtrue)), condition);
2604             LAST_OPCODE(op_jtrue);
2605         }
2606
2607         case op_jfalse: {
2608             unsigned relativeOffset = currentInstruction[2].u.operand;
2609             Node* condition = get(VirtualRegister(currentInstruction[1].u.operand));
2610             if (canFold(condition)) {
2611                 TriState state = valueOfJSConstant(condition).pureToBoolean();
2612                 if (state == FalseTriState) {
2613                     addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2614                     LAST_OPCODE(op_jfalse);
2615                 } else if (state == TrueTriState) {
2616                     // Emit a placeholder for this bytecode operation but otherwise
2617                     // just fall through.
2618                     addToGraph(Phantom);
2619                     NEXT_OPCODE(op_jfalse);
2620                 }
2621             }
2622             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jfalse)), OpInfo(m_currentIndex + relativeOffset), condition);
2623             LAST_OPCODE(op_jfalse);
2624         }
2625
2626         case op_jeq_null: {
2627             unsigned relativeOffset = currentInstruction[2].u.operand;
2628             Node* value = get(VirtualRegister(currentInstruction[1].u.operand));
2629             Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2630             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jeq_null)), condition);
2631             LAST_OPCODE(op_jeq_null);
2632         }
2633
2634         case op_jneq_null: {
2635             unsigned relativeOffset = currentInstruction[2].u.operand;
2636             Node* value = get(VirtualRegister(currentInstruction[1].u.operand));
2637             Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2638             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_null)), OpInfo(m_currentIndex + relativeOffset), condition);
2639             LAST_OPCODE(op_jneq_null);
2640         }
2641
2642         case op_jless: {
2643             unsigned relativeOffset = currentInstruction[3].u.operand;
2644             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2645             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2646             if (canFold(op1) && canFold(op2)) {
2647                 JSValue aValue = valueOfJSConstant(op1);
2648                 JSValue bValue = valueOfJSConstant(op2);
2649                 if (aValue.isNumber() && bValue.isNumber()) {
2650                     double a = aValue.asNumber();
2651                     double b = bValue.asNumber();
2652                     if (a < b) {
2653                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2654                         LAST_OPCODE(op_jless);
2655                     } else {
2656                         // Emit a placeholder for this bytecode operation but otherwise
2657                         // just fall through.
2658                         addToGraph(Phantom);
2659                         NEXT_OPCODE(op_jless);
2660                     }
2661                 }
2662             }
2663             Node* condition = addToGraph(CompareLess, op1, op2);
2664             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jless)), condition);
2665             LAST_OPCODE(op_jless);
2666         }
2667
2668         case op_jlesseq: {
2669             unsigned relativeOffset = currentInstruction[3].u.operand;
2670             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2671             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2672             if (canFold(op1) && canFold(op2)) {
2673                 JSValue aValue = valueOfJSConstant(op1);
2674                 JSValue bValue = valueOfJSConstant(op2);
2675                 if (aValue.isNumber() && bValue.isNumber()) {
2676                     double a = aValue.asNumber();
2677                     double b = bValue.asNumber();
2678                     if (a <= b) {
2679                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2680                         LAST_OPCODE(op_jlesseq);
2681                     } else {
2682                         // Emit a placeholder for this bytecode operation but otherwise
2683                         // just fall through.
2684                         addToGraph(Phantom);
2685                         NEXT_OPCODE(op_jlesseq);
2686                     }
2687                 }
2688             }
2689             Node* condition = addToGraph(CompareLessEq, op1, op2);
2690             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jlesseq)), condition);
2691             LAST_OPCODE(op_jlesseq);
2692         }
2693
2694         case op_jgreater: {
2695             unsigned relativeOffset = currentInstruction[3].u.operand;
2696             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2697             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2698             if (canFold(op1) && canFold(op2)) {
2699                 JSValue aValue = valueOfJSConstant(op1);
2700                 JSValue bValue = valueOfJSConstant(op2);
2701                 if (aValue.isNumber() && bValue.isNumber()) {
2702                     double a = aValue.asNumber();
2703                     double b = bValue.asNumber();
2704                     if (a > b) {
2705                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2706                         LAST_OPCODE(op_jgreater);
2707                     } else {
2708                         // Emit a placeholder for this bytecode operation but otherwise
2709                         // just fall through.
2710                         addToGraph(Phantom);
2711                         NEXT_OPCODE(op_jgreater);
2712                     }
2713                 }
2714             }
2715             Node* condition = addToGraph(CompareGreater, op1, op2);
2716             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreater)), condition);
2717             LAST_OPCODE(op_jgreater);
2718         }
2719
2720         case op_jgreatereq: {
2721             unsigned relativeOffset = currentInstruction[3].u.operand;
2722             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2723             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2724             if (canFold(op1) && canFold(op2)) {
2725                 JSValue aValue = valueOfJSConstant(op1);
2726                 JSValue bValue = valueOfJSConstant(op2);
2727                 if (aValue.isNumber() && bValue.isNumber()) {
2728                     double a = aValue.asNumber();
2729                     double b = bValue.asNumber();
2730                     if (a >= b) {
2731                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2732                         LAST_OPCODE(op_jgreatereq);
2733                     } else {
2734                         // Emit a placeholder for this bytecode operation but otherwise
2735                         // just fall through.
2736                         addToGraph(Phantom);
2737                         NEXT_OPCODE(op_jgreatereq);
2738                     }
2739                 }
2740             }
2741             Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2742             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreatereq)), condition);
2743             LAST_OPCODE(op_jgreatereq);
2744         }
2745
2746         case op_jnless: {
2747             unsigned relativeOffset = currentInstruction[3].u.operand;
2748             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2749             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2750             if (canFold(op1) && canFold(op2)) {
2751                 JSValue aValue = valueOfJSConstant(op1);
2752                 JSValue bValue = valueOfJSConstant(op2);
2753                 if (aValue.isNumber() && bValue.isNumber()) {
2754                     double a = aValue.asNumber();
2755                     double b = bValue.asNumber();
2756                     if (a < b) {
2757                         // Emit a placeholder for this bytecode operation but otherwise
2758                         // just fall through.
2759                         addToGraph(Phantom);
2760                         NEXT_OPCODE(op_jnless);
2761                     } else {
2762                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2763                         LAST_OPCODE(op_jnless);
2764                     }
2765                 }
2766             }
2767             Node* condition = addToGraph(CompareLess, op1, op2);
2768             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnless)), OpInfo(m_currentIndex + relativeOffset), condition);
2769             LAST_OPCODE(op_jnless);
2770         }
2771
2772         case op_jnlesseq: {
2773             unsigned relativeOffset = currentInstruction[3].u.operand;
2774             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2775             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2776             if (canFold(op1) && canFold(op2)) {
2777                 JSValue aValue = valueOfJSConstant(op1);
2778                 JSValue bValue = valueOfJSConstant(op2);
2779                 if (aValue.isNumber() && bValue.isNumber()) {
2780                     double a = aValue.asNumber();
2781                     double b = bValue.asNumber();
2782                     if (a <= b) {
2783                         // Emit a placeholder for this bytecode operation but otherwise
2784                         // just fall through.
2785                         addToGraph(Phantom);
2786                         NEXT_OPCODE(op_jnlesseq);
2787                     } else {
2788                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2789                         LAST_OPCODE(op_jnlesseq);
2790                     }
2791                 }
2792             }
2793             Node* condition = addToGraph(CompareLessEq, op1, op2);
2794             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnlesseq)), OpInfo(m_currentIndex + relativeOffset), condition);
2795             LAST_OPCODE(op_jnlesseq);
2796         }
2797
2798         case op_jngreater: {
2799             unsigned relativeOffset = currentInstruction[3].u.operand;
2800             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2801             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2802             if (canFold(op1) && canFold(op2)) {
2803                 JSValue aValue = valueOfJSConstant(op1);
2804                 JSValue bValue = valueOfJSConstant(op2);
2805                 if (aValue.isNumber() && bValue.isNumber()) {
2806                     double a = aValue.asNumber();
2807                     double b = bValue.asNumber();
2808                     if (a > b) {
2809                         // Emit a placeholder for this bytecode operation but otherwise
2810                         // just fall through.
2811                         addToGraph(Phantom);
2812                         NEXT_OPCODE(op_jngreater);
2813                     } else {
2814                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2815                         LAST_OPCODE(op_jngreater);
2816                     }
2817                 }
2818             }
2819             Node* condition = addToGraph(CompareGreater, op1, op2);
2820             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreater)), OpInfo(m_currentIndex + relativeOffset), condition);
2821             LAST_OPCODE(op_jngreater);
2822         }
2823
2824         case op_jngreatereq: {
2825             unsigned relativeOffset = currentInstruction[3].u.operand;
2826             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2827             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2828             if (canFold(op1) && canFold(op2)) {
2829                 JSValue aValue = valueOfJSConstant(op1);
2830                 JSValue bValue = valueOfJSConstant(op2);
2831                 if (aValue.isNumber() && bValue.isNumber()) {
2832                     double a = aValue.asNumber();
2833                     double b = bValue.asNumber();
2834                     if (a >= b) {
2835                         // Emit a placeholder for this bytecode operation but otherwise
2836                         // just fall through.
2837                         addToGraph(Phantom);
2838                         NEXT_OPCODE(op_jngreatereq);
2839                     } else {
2840                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2841                         LAST_OPCODE(op_jngreatereq);
2842                     }
2843                 }
2844             }
2845             Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2846             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreatereq)), OpInfo(m_currentIndex + relativeOffset), condition);
2847             LAST_OPCODE(op_jngreatereq);
2848         }
2849             
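             // The switch opcodes translate the baseline jump tables into SwitchData. Holes in
             // the table and cases that would land on the fall-through target are skipped, and
             // the resulting Switch node is a block terminator linked up later by linkBlock().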
2850         case op_switch_imm: {
2851             SwitchData data;
2852             data.kind = SwitchImm;
2853             data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
2854             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2855             SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
2856             for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
2857                 if (!table.branchOffsets[i])
2858                     continue;
2859                 unsigned target = m_currentIndex + table.branchOffsets[i];
2860                 if (target == data.fallThroughBytecodeIndex())
2861                     continue;
2862                 data.cases.append(SwitchCase::withBytecodeIndex(jsNumber(static_cast<int32_t>(table.min + i)), target));
2863             }
2864             m_graph.m_switchData.append(data);
2865             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(VirtualRegister(currentInstruction[3].u.operand)));
2866             LAST_OPCODE(op_switch_imm);
2867         }
2868             
2869         case op_switch_char: {
2870             SwitchData data;
2871             data.kind = SwitchChar;
2872             data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
2873             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2874             SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
2875             for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
2876                 if (!table.branchOffsets[i])
2877                     continue;
2878                 unsigned target = m_currentIndex + table.branchOffsets[i];
2879                 if (target == data.fallThroughBytecodeIndex())
2880                     continue;
2881                 data.cases.append(
2882                     SwitchCase::withBytecodeIndex(LazyJSValue::singleCharacterString(table.min + i), target));
2883             }
2884             m_graph.m_switchData.append(data);
2885             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(VirtualRegister(currentInstruction[3].u.operand)));
2886             LAST_OPCODE(op_switch_char);
2887         }
2888
2889         case op_switch_string: {
2890             SwitchData data;
2891             data.kind = SwitchString;
2892             data.switchTableIndex = currentInstruction[1].u.operand;
2893             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2894             StringJumpTable& table = m_codeBlock->stringSwitchJumpTable(data.switchTableIndex);
2895             StringJumpTable::StringOffsetTable::iterator iter;
2896             StringJumpTable::StringOffsetTable::iterator end = table.offsetTable.end();
2897             for (iter = table.offsetTable.begin(); iter != end; ++iter) {
2898                 unsigned target = m_currentIndex + iter->value.branchOffset;
2899                 if (target == data.fallThroughBytecodeIndex())
2900                     continue;
2901                 data.cases.append(
2902                     SwitchCase::withBytecodeIndex(LazyJSValue::knownStringImpl(iter->key.get()), target));
2903             }
2904             m_graph.m_switchData.append(data);
2905             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(VirtualRegister(currentInstruction[3].u.operand)));
2906             LAST_OPCODE(op_switch_string);
2907         }
2908
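             // Returning from an inlined frame does not emit Return: the value is stored into
             // the caller's return-value register and, unless this ret ends the inlinee's
             // bytecode, a Jump is emitted and the block is flagged for early-return linking.
             // Only the machine code block emits an actual Return node.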
2909         case op_ret:
2910             flushArgumentsAndCapturedVariables();
2911             if (inlineCallFrame()) {
2912                 ASSERT(m_inlineStackTop->m_returnValue.isValid());
2913                 setDirect(m_inlineStackTop->m_returnValue, get(VirtualRegister(currentInstruction[1].u.operand)));
2914                 m_inlineStackTop->m_didReturn = true;
2915                 if (m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
2916                     // If we're returning from the first block, then we're done parsing.
2917                     ASSERT(m_inlineStackTop->m_callsiteBlockHead == m_graph.lastBlock());
2918                     shouldContinueParsing = false;
2919                     LAST_OPCODE(op_ret);
2920                 } else {
2921                     // If inlining created blocks, and we're doing a return, then we need some
2922                     // special linking.
2923                     ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_block == m_graph.lastBlock());
2924                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsNormalLinking = false;
2925                 }
2926                 if (m_currentIndex + OPCODE_LENGTH(op_ret) != m_inlineStackTop->m_codeBlock->instructions().size() || m_inlineStackTop->m_didEarlyReturn) {
2927                     ASSERT(m_currentIndex + OPCODE_LENGTH(op_ret) <= m_inlineStackTop->m_codeBlock->instructions().size());
2928                     addToGraph(Jump, OpInfo(0));
2929                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsEarlyReturnLinking = true;
2930                     m_inlineStackTop->m_didEarlyReturn = true;
2931                 }
2932                 LAST_OPCODE(op_ret);
2933             }
2934             addToGraph(Return, get(VirtualRegister(currentInstruction[1].u.operand)));
2935             LAST_OPCODE(op_ret);
2936             
2937         case op_end:
2938             flushArgumentsAndCapturedVariables();
2939             ASSERT(!inlineCallFrame());
2940             addToGraph(Return, get(VirtualRegister(currentInstruction[1].u.operand)));
2941             LAST_OPCODE(op_end);
2942
2943         case op_throw:
2944             addToGraph(Throw, get(VirtualRegister(currentInstruction[1].u.operand)));
2945             flushAllArgumentsAndCapturedVariablesInInlineStack();
2946             addToGraph(Unreachable);
2947             LAST_OPCODE(op_throw);
2948             
2949         case op_throw_static_error:
2950             addToGraph(ThrowReferenceError);
2951             flushAllArgumentsAndCapturedVariablesInInlineStack();
2952             addToGraph(Unreachable);
2953             LAST_OPCODE(op_throw_static_error);
2954             
2955         case op_call:
2956             handleCall(currentInstruction, Call, CodeForCall);
2957             NEXT_OPCODE(op_call);
2958             
2959         case op_construct:
2960             handleCall(currentInstruction, Construct, CodeForConstruct);
2961             NEXT_OPCODE(op_construct);
2962             
2963         case op_call_varargs: {
2964             ASSERT(inlineCallFrame());
2965             ASSERT(currentInstruction[4].u.operand == m_inlineStackTop->m_codeBlock->argumentsRegister().offset());
2966             ASSERT(!m_inlineStackTop->m_codeBlock->symbolTable()->slowArguments());
2967             // It would be cool to funnel this into handleCall() so that it can handle
2968             // inlining. But currently that won't be profitable anyway, since none of the
2969             // uses of call_varargs will be inlineable. So we set this up manually and
2970             // without inline/intrinsic detection.
2971             
2972             SpeculatedType prediction = getPrediction();
2973             
2974             addToGraph(CheckArgumentsNotCreated);
2975             
2976             unsigned argCount = inlineCallFrame()->arguments.size();
2977             if (JSStack::CallFrameHeaderSize + argCount > m_parameterSlots)
2978                 m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
2979             
2980             addVarArgChild(get(VirtualRegister(currentInstruction[2].u.operand))); // callee
2981             addVarArgChild(get(VirtualRegister(currentInstruction[3].u.operand))); // this
2982             for (unsigned argument = 1; argument < argCount; ++argument)
2983                 addVarArgChild(get(virtualRegisterForArgument(argument)));
2984             
2985             set(VirtualRegister(currentInstruction[1].u.operand),
2986                 addToGraph(Node::VarArg, Call, OpInfo(0), OpInfo(prediction)));
2987             
2988             NEXT_OPCODE(op_call_varargs);
2989         }
2990             
2991         case op_jneq_ptr:
2992             // Statically speculate for now. It makes sense to let speculate-only jneq_ptr
2993             // support simmer for a while before making it more general, since it's
2994             // already gnarly enough as it is.
2995             ASSERT(pointerIsFunction(currentInstruction[2].u.specialPointer));
2996             addToGraph(
2997                 CheckFunction,
2998                 OpInfo(actualPointerFor(m_inlineStackTop->m_codeBlock, currentInstruction[2].u.specialPointer)),
2999                 get(VirtualRegister(currentInstruction[1].u.operand)));
3000             addToGraph(Jump, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_ptr)));
3001             LAST_OPCODE(op_jneq_ptr);
3002
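             // resolve_scope / get_from_scope / put_to_scope work as a trio: resolve_scope
             // produces the scope object itself (the global object for the Global* resolve
             // types, or the scope reached by skipping 'depth' levels for closure variables),
             // and the get/put opcodes then specialize on the same statically known ResolveType.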
3003         case op_resolve_scope: {
3004             int dst = currentInstruction[1].u.operand;
3005             ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);
3006             unsigned depth = currentInstruction[4].u.operand;
3007
3008             // get_from_scope and put_to_scope depend on this watchpoint forcing OSR exit, so they don't add their own watchpoints.
3009             if (needsVarInjectionChecks(resolveType))
3010                 addToGraph(VarInjectionWatchpoint);
3011
3012             switch (resolveType) {
3013             case GlobalProperty:
3014             case GlobalVar:
3015             case GlobalPropertyWithVarInjectionChecks:
3016             case GlobalVarWithVarInjectionChecks:
3017                 set(VirtualRegister(dst), cellConstant(m_inlineStackTop->m_codeBlock->globalObject()));
3018                 break;
3019             case ClosureVar:
3020             case ClosureVarWithVarInjectionChecks:
3021                 set(VirtualRegister(dst), getScope(m_inlineStackTop->m_codeBlock->needsActivation(), depth));
3022                 break;
3023             case Dynamic:
3024                 RELEASE_ASSERT_NOT_REACHED();
3025                 break;
3026             }
3027             NEXT_OPCODE(op_resolve_scope);
3028         }
3029
3030         case op_get_from_scope: {
3031             int dst = currentInstruction[1].u.operand;
3032             unsigned scope = currentInstruction[2].u.operand;
3033             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
3034             StringImpl* uid = m_graph.identifiers()[identifierNumber];
3035             ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
3036
3037             Structure* structure;
3038             uintptr_t operand;
3039             {
3040                 ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
3041                 structure = currentInstruction[5].u.structure.get();
3042                 operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
3043             }
3044
3045             SpeculatedType prediction = getPrediction();
3046             JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
3047
3048             switch (resolveType) {
3049             case GlobalProperty:
3050             case GlobalPropertyWithVarInjectionChecks: {
3051                 GetByIdStatus status = GetByIdStatus::computeFor(*m_vm, structure, uid);
3052                 if (status.takesSlowPath()) {
3053                     set(VirtualRegister(dst), addToGraph(GetByIdFlush, OpInfo(identifierNumber), OpInfo(prediction), get(VirtualRegister(scope))));
3054                     break;
3055                 }
3056                 Node* base = cellConstantWithStructureCheck(globalObject, status.structureSet().singletonStructure());
3057                 if (JSValue specificValue = status.specificValue())
3058                     set(VirtualRegister(dst), cellConstant(specificValue.asCell()));
3059                 else
3060                     set(VirtualRegister(dst), handleGetByOffset(prediction, base, identifierNumber, operand));
3061                 break;
3062             }
3063             case GlobalVar:
3064             case GlobalVarWithVarInjectionChecks: {
3065                 SymbolTableEntry entry = globalObject->symbolTable()->get(uid);
3066                 if (!entry.couldBeWatched() || !m_graph.watchpoints().isStillValid(entry.watchpointSet())) {
3067                     set(VirtualRegister(dst), addToGraph(GetGlobalVar, OpInfo(operand), OpInfo(prediction)));
3068                     break;
3069                 }
3070
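                     // The variable's watchpoint set is still intact, so make this compilation
                     // depend on it and fold the load to the cell value the global holds right
                     // now; a later store to the variable fires the watchpoint and jettisons
                     // this code.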
3071                 addToGraph(GlobalVarWatchpoint, OpInfo(operand), OpInfo(identifierNumber));
3072                 JSValue specificValue = globalObject->registerAt(entry.getIndex()).get();
3073                 set(VirtualRegister(dst), cellConstant(specificValue.asCell()));
3074                 break;
3075             }
3076             case ClosureVar:
3077             case ClosureVarWithVarInjectionChecks:
3078                 set(VirtualRegister(dst),
3079                     addToGraph(GetClosureVar, OpInfo(operand), OpInfo(prediction), 
3080                         addToGraph(GetClosureRegisters, get(VirtualRegister(scope)))));
3081                 break;
3082             case Dynamic:
3083                 RELEASE_ASSERT_NOT_REACHED();
3084                 break;
3085             }
3086             NEXT_OPCODE(op_get_from_scope);
3087         }
3088
3089         case op_put_to_scope: {
3090             unsigned scope = currentInstruction[1].u.operand;
3091             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3092             unsigned value = currentInstruction[3].u.operand;
3093             ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
3094             StringImpl* uid = m_graph.identifiers()[identifierNumber];
3095
3096             Structure* structure;
3097             uintptr_t operand;
3098             {
3099                 ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
3100                 structure = currentInstruction[5].u.structure.get();
3101                 operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
3102             }
3103
3104             JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
3105
3106             switch (resolveType) {
3107             case GlobalProperty:
3108             case GlobalPropertyWithVarInjectionChecks: {
3109                 PutByIdStatus status = PutByIdStatus::computeFor(*m_vm, globalObject, structure, uid, false);
3110                 if (!status.isSimpleReplace()) {
3111                     addToGraph(PutById, OpInfo(identifierNumber), get(VirtualRegister(scope)), get(VirtualRegister(value)));
3112                     break;
3113                 }
3114                 Node* base = cellConstantWithStructureCheck(globalObject, status.oldStructure());
3115                 handlePutByOffset(base, identifierNumber, static_cast<PropertyOffset>(operand), get(VirtualRegister(value)));
3116                 // Keep scope alive until after put.
3117                 addToGraph(Phantom, get(VirtualRegister(scope)));
3118                 break;
3119             }
3120             case GlobalVar:
3121             case GlobalVarWithVarInjectionChecks: {
3122                 SymbolTableEntry entry = globalObject->symbolTable()->get(uid);
3123                 ASSERT(!entry.couldBeWatched() || !m_graph.watchpoints().isStillValid(entry.watchpointSet()));
3124                 addToGraph(PutGlobalVar, OpInfo(operand), get(VirtualRegister(value)));
3125                 // Keep scope alive until after put.
3126                 addToGraph(Phantom, get(VirtualRegister(scope)));
3127                 break;
3128             }
3129             case ClosureVar:
3130             case ClosureVarWithVarInjectionChecks: {
3131                 Node* scopeNode = get(VirtualRegister(scope));
3132                 Node* scopeRegisters = addToGraph(GetClosureRegisters, scopeNode);
3133                 addToGraph(PutClosureVar, OpInfo(operand), scopeNode, scopeRegisters, get(VirtualRegister(value)));
3134                 break;
3135             }
3136             case Dynamic:
3137                 RELEASE_ASSERT_NOT_REACHED();
3138                 break;
3139             }
3140             NEXT_OPCODE(op_put_to_scope);
3141         }
3142
3143         case op_loop_hint: {
3144             // Baseline->DFG OSR enters the DFG at loop hints, and the DFG assumes that such
3145             // OSR can only happen at basic block boundaries. Assert that these two facts are
3146             // consistent, i.e. that this loop hint begins a basic block.
3147             RELEASE_ASSERT(m_currentIndex == blockBegin);
3148             
3149             // We never do OSR into an inlined code block. That could not happen, since OSR
3150             // looks up the code block that is the replacement for the baseline JIT code
3151             // block. Hence, machine code block = true code block = not inline code block.
3152             if (!m_inlineStackTop->m_caller)
3153                 m_currentBlock->isOSRTarget = true;
3154
3155             addToGraph(LoopHint);
3156             
3157             if (m_vm->watchdog.isEnabled())
3158                 addToGraph(CheckWatchdogTimer);
3159             
3160             NEXT_OPCODE(op_loop_hint);
3161         }
3162             
3163         case op_init_lazy_reg: {
3164             set(VirtualRegister(currentInstruction[1].u.operand), getJSConstantForValue(JSValue()));
3165             ASSERT(operandIsLocal(currentInstruction[1].u.operand));
3166             m_graph.m_lazyVars.set(VirtualRegister(currentInstruction[1].u.operand).toLocal());
3167             NEXT_OPCODE(op_init_lazy_reg);
3168         }
3169             
3170         case op_create_activation: {
3171             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CreateActivation, get(VirtualRegister(currentInstruction[1].u.operand))));
3172             NEXT_OPCODE(op_create_activation);
3173         }
3174             
3175         case op_create_arguments: {
3176             m_graph.m_hasArguments = true;
3177             Node* createArguments = addToGraph(CreateArguments, get(VirtualRegister(currentInstruction[1].u.operand)));
3178             set(VirtualRegister(currentInstruction[1].u.operand), createArguments);
3179             set(unmodifiedArgumentsRegister(VirtualRegister(currentInstruction[1].u.operand)), createArguments);
3180             NEXT_OPCODE(op_create_arguments);
3181         }
3182             
3183         case op_tear_off_activation: {
3184             addToGraph(TearOffActivation, get(VirtualRegister(currentInstruction[1].u.operand)));
3185             NEXT_OPCODE(op_tear_off_activation);
3186         }
3187
3188         case op_tear_off_arguments: {
3189             m_graph.m_hasArguments = true;
3190             addToGraph(TearOffArguments, get(unmodifiedArgumentsRegister(VirtualRegister(currentInstruction[1].u.operand))), get(VirtualRegister(currentInstruction[2].u.operand)));
3191             NEXT_OPCODE(op_tear_off_arguments);
3192         }
3193             
3194         case op_get_arguments_length: {
3195             m_graph.m_hasArguments = true;
3196             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(GetMyArgumentsLengthSafe));
3197             NEXT_OPCODE(op_get_arguments_length);
3198         }
3199             
3200         case op_get_argument_by_val: {
3201             m_graph.m_hasArguments = true;
3202             set(VirtualRegister(currentInstruction[1].u.operand),
3203                 addToGraph(
3204                     GetMyArgumentByValSafe, OpInfo(0), OpInfo(getPrediction()),
3205                     get(VirtualRegister(currentInstruction[3].u.operand))));
3206             NEXT_OPCODE(op_get_argument_by_val);
3207         }
3208             
3209         case op_new_func: {
3210             if (!currentInstruction[3].u.operand) {
3211                 set(VirtualRegister(currentInstruction[1].u.operand),
3212                     addToGraph(NewFunctionNoCheck, OpInfo(currentInstruction[2].u.operand)));
3213             } else {
3214                 set(VirtualRegister(currentInstruction[1].u.operand),
3215                     addToGraph(
3216                         NewFunction,
3217                         OpInfo(currentInstruction[2].u.operand),
3218                         get(VirtualRegister(currentInstruction[1].u.operand))));
3219             }
3220             NEXT_OPCODE(op_new_func);
3221         }
3222             
3223         case op_new_func_exp: {
3224             set(VirtualRegister(currentInstruction[1].u.operand),
3225                 addToGraph(NewFunctionExpression, OpInfo(currentInstruction[2].u.operand)));
3226             NEXT_OPCODE(op_new_func_exp);
3227         }
3228
3229         case op_typeof: {
3230             set(VirtualRegister(currentInstruction[1].u.operand),
3231                 addToGraph(TypeOf, get(VirtualRegister(currentInstruction[2].u.operand))));
3232             NEXT_OPCODE(op_typeof);
3233         }
3234
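             // to_number is parsed speculatively: an Identity node with a NumberUse edge checks
             // (OSR exiting on failure) that the operand is already a number rather than
             // performing a conversion.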
3235         case op_to_number: {
3236             set(VirtualRegister(currentInstruction[1].u.operand),
3237                 addToGraph(Identity, Edge(get(VirtualRegister(currentInstruction[2].u.operand)), NumberUse)));
3238             NEXT_OPCODE(op_to_number);
3239         }
3240             
3241         case op_in: {
3242             set(VirtualRegister(currentInstruction[1].u.operand),
3243                 addToGraph(In, get(VirtualRegister(currentInstruction[2].u.operand)), get(VirtualRegister(currentInstruction[3].u.operand))));
3244             NEXT_OPCODE(op_in);
3245         }
3246
3247         default:
3248             // Parse failed! This should not happen because the capabilities checker
3249             // should have caught it.
3250             RELEASE_ASSERT_NOT_REACHED();
3251             return false;
3252         }
3253     }
3254 }
3255
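     // Now that all basic blocks exist, resolve the bytecode-offset targets recorded on this
     // block's terminal node (Jump, Branch, or Switch) into pointers to the target blocks.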
3256 void ByteCodeParser::linkBlock(BasicBlock* block, Vector<BasicBlock*>& possibleTargets)
3257 {
3258     ASSERT(!block->isLinked);
3259     ASSERT(!block->isEmpty());
3260     Node* node = block->last();
3261     ASSERT(node->isTerminal());
3262     
3263     switch (node->op()) {
3264     case Jump:
3265         node->setTakenBlock(blockForBytecodeOffset(possibleTargets, node->takenBytecodeOffsetDuringParsing()));
3266 #if DFG_ENABLE(DEBUG_VERBOSE)
3267         dataLogF("Linked basic block %p to %p, #%u.\n", block, node->takenBlock(), node->takenBlock()->index);
3268 #endif
3269         break;
3270         
3271     case Branch:
3272         node->setTakenBlock(blockForBytecodeOffset(possibleTargets, node->takenBytecodeOffsetDuringParsing()));
3273         node->setNotTakenBlock(blockForBytecodeOffset(possibleTargets, node->notTakenBytecodeOffsetDuringParsing()));
3274 #if DFG_ENABLE(DEBUG_VERBOSE)
3275         dataLogF("Linked basic block %p to %p, #%u and %p, #%u.\n", block, node->takenBlock(), node->takenBlock()->index, node->notTakenBlock(), node->notTakenBlock()->index);
3276 #endif
3277         break;
3278         
3279     case Switch:
3280         for (unsigned i = node->switchData()->cases.size(); i--;)
3281             node->switchData()->cases[i].target = blockForBytecodeOffset(possibleTargets, node->switchData()->cases[i].targetBytecodeIndex());
3282         node->switchData()->fallThrough = blockForBytecodeOffset(possibleTargets, node->switchData()->fallThroughBytecodeIndex());
3283         break;
3284         
3285     default:
3286 #if DFG_ENABLE(DEBUG_VERBOSE)
3287         dataLogF("Marking basic block %p as linked.\n", block);
3288 #endif
3289         break;
3290     }
3291     
3292 #if !ASSERT_DISABLED
3293     block->isLinked = true;
3294 #endif
3295 }
3296
3297 void ByteCodeParser::linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets)
3298 {
3299     for (size_t i = 0; i < unlinkedBlocks.size(); ++i) {
3300         if (unlinkedBlocks[i].m_needsNormalLinking) {
3301             linkBlock(unlinkedBlocks[i].m_block, possibleTargets);
3302             unlinkedBlocks[i].m_needsNormalLinking = false;
3303         }
3304     }
3305 }
3306
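     // Lazily build reverse maps from identifiers and constant values to their indices in the
     // machine code block, so that inlined code blocks can reuse existing entries rather than
     // appending duplicates.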
3307 void ByteCodeParser::buildOperandMapsIfNecessary()
3308 {
3309     if (m_haveBuiltOperandMaps)
3310         return;
3311     
3312     for (size_t i = 0; i < m_codeBlock->numberOfIdentifiers(); ++i)
3313         m_identifierMap.add(m_codeBlock->identifier(i).impl(), i);
3314     for (size_t i = 0; i < m_codeBlock->numberOfConstantRegisters(); ++i) {
3315         JSValue value = m_codeBlock->getConstant(i + FirstConstantRegisterIndex);
3316         if (!value)
3317             m_emptyJSValueIndex = i + FirstConstantRegisterIndex;
3318         else
3319             m_jsValueMap.add(JSValue::encode(value), i + FirstConstantRegisterIndex);
3320     }
3321     
3322     m_haveBuiltOperandMaps = true;
3323 }
3324
3325 ByteCodeParser::InlineStackEntry::InlineStackEntry(
3326     ByteCodeParser* byteCodeParser,
3327     CodeBlock* codeBlock,
3328     CodeBlock* profiledBlock,
3329     BasicBlock* callsiteBlockHead,
3330     JSFunction* callee, // Null if this is a closure call.
3331     VirtualRegister returnValueVR,
3332     VirtualRegister inlineCallFrameStart,
3333     int argumentCountIncludingThis,
3334     CodeSpecializationKind kind)
3335     : m_byteCodeParser(byteCodeParser)
3336     , m_codeBlock(codeBlock)
3337     , m_profiledBlock(profiledBlock)
3338     , m_callsiteBlockHead(callsiteBlockHead)
3339     , m_returnValue(returnValueVR)
3340     , m_didReturn(false)
3341     , m_didEarlyReturn(false)
3342     , m_caller(byteCodeParser->m_inlineStackTop)
3343 {
3344     {
3345         ConcurrentJITLocker locker(m_profiledBlock->m_lock);
3346         m_lazyOperands.initialize(locker, m_profiledBlock->lazyOperandValueProfiles());
3347         m_exitProfile.initialize(locker, profiledBlock->exitProfile());
3348     }
3349     
3350     m_argumentPositions.resize(argumentCountIncludingThis);
3351     for (int i = 0; i < argumentCountIncludingThis; ++i) {
3352         byteCodeParser->m_graph.m_argumentPositions.append(ArgumentPosition());
3353         ArgumentPosition* argumentPosition = &byteCodeParser->m_graph.m_argumentPositions.last();
3354         m_argumentPositions[i] = argumentPosition;
3355     }
3356     
3357     // Track the code-block-global exit sites.
3358     if (m_exitProfile.hasExitSite(ArgumentsEscaped)) {
3359         byteCodeParser->m_graph.m_executablesWhoseArgumentsEscaped.add(
3360             codeBlock->ownerExecutable());
3361     }
3362         
3363     if (m_caller) {
3364         // Inline case.
3365         ASSERT(codeBlock != byteCodeParser->m_codeBlock);
3366         ASSERT(inlineCallFrameStart.isValid());
3367         ASSERT(callsiteBlockHead);
3368         
3369         m_inlineCallFrame = byteCodeParser->m_graph.m_inlineCallFrames->add();
3370         initializeLazyWriteBarrierForInlineCallFrameExecutable(
3371             byteCodeParser->m_graph.m_plan.writeBarriers,
3372             m_inlineCallFrame->executable,
3373             byteCodeParser->m_codeBlock,
3374             m_inlineCallFrame,
3375             byteCodeParser->m_codeBlock->ownerExecutable(), 
3376             codeBlock->ownerExecutable());
3377         m_inlineCallFrame->stackOffset = inlineCallFrameStart.offset() - JSStack::CallFrameHeaderSize;
3378         if (callee) {
3379             initializeLazyWriteBarrierForInlineCallFrameCallee(
3380                 byteCodeParser->m_graph.m_plan.writeBarriers,
3381                 m_inlineCallFrame->callee,
3382                 byteCodeParser->m_codeBlock,
3383                 m_inlineCallFrame,
3384                 byteCodeParser->m_codeBlock->ownerExecutable(), 
3385                 callee);
3386         }
3387         m_inlineCallFrame->caller = byteCodeParser->currentCodeOrigin();
3388         m_inlineCallFrame->arguments.resize(argumentCountIncludingThis); // Set the number of arguments including this, but don't configure the value recoveries, yet.
3389         m_inlineCallFrame->isCall = isCall(kind);
3390         
3391         if (m_inlineCallFrame->caller.inlineCallFrame)
3392             m_inlineCallFrame->capturedVars = m_inlineCallFrame->caller.inlineCallFrame->capturedVars;
3393         else {
3394             for (int i = byteCodeParser->m_codeBlock->m_numVars; i--;) {
3395                 if (byteCodeParser->m_codeBlock->isCaptured(virtualRegisterForLocal(i)))
3396                     m_inlineCallFrame->capturedVars.set(i);
3397             }
3398         }
3399
3400         for (int i = argumentCountIncludingThis; i--;) {
3401             VirtualRegister argument = virtualRegisterForArgument(i);
3402             if (codeBlock->isCaptured(argument))
3403                 m_inlineCallFrame->capturedVars.set(VirtualRegister(argument.offset() + m_inlineCallFrame->stackOffset).toLocal());
3404         }
3405         for (size_t i = codeBlock->m_numVars; i--;) {
3406             VirtualRegister local = virtualRegisterForLocal(i);
3407             if (codeBlock->isCaptured(local))
3408                 m_inlineCallFrame->capturedVars.set(VirtualRegister(local.offset() + m_inlineCallFrame->stackOffset).toLocal());
3409         }
3410
3411 #if DFG_ENABLE(DEBUG_VERBOSE)
3412         dataLogF("Current captured variables: ");
3413         m_inlineCallFrame->capturedVars.dump(WTF::dataFile());
3414         dataLogF("\n");
3415 #endif
3416         
3417         byteCodeParser->buildOperandMapsIfNecessary();
3418         
3419         m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
3420         m_constantRemap.resize(codeBlock->numberOfConstantRegisters());
3421         m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
3422         m_switchRemap.resize(codeBlock->numberOfSwitchJumpTables());
3423
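             // Merge the inlinee's identifiers, constants, constant buffers, and switch jump
             // tables into the machine code block, remembering the remapped indices so that
             // bytecode operands can be rewritten as they are parsed.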
3424         for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i) {
3425             StringImpl* rep = codeBlock->identifier(i).impl();
3426             BorrowedIdentifierMap::AddResult result = byteCodeParser->m_identifierMap.add(rep, byteCodeParser->m_graph.identifiers().numberOfIdentifiers());
3427             if (result.isNewEntry)
3428                 byteCodeParser->m_graph.identifiers().addLazily(rep);
3429             m_identifierRemap[i] = result.iterator->value;
3430         }
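             // The empty JSValue cannot be used as a key in m_jsValueMap, so it gets a single
             // shared constant slot (m_emptyJSValueIndex); every other constant is deduplicated
             // through the map.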
3431         for (size_t i = 0; i < codeBlock->numberOfConstantRegisters(); ++i) {
3432             JSValue value = codeBlock->getConstant(i + FirstConstantRegisterIndex);
3433             if (!value) {
3434                 if (byteCodeParser->m_emptyJSValueIndex == UINT_MAX) {
3435                     byteCodeParser->m_emptyJSValueIndex = byteCodeParser->m_codeBlock->numberOfConstantRegisters() + FirstConstantRegisterIndex;
3436                     byteCodeParser->addConstant(JSValue());
3437                     byteCodeParser->m_constants.append(ConstantRecord());
3438                 }
3439                 m_constantRemap[i] = byteCodeParser->m_emptyJSValueIndex;
3440                 continue;
3441             }
3442             JSValueMap::AddResult result = byteCodeParser->m_jsValueMap.add(JSValue::encode(value), byteCodeParser->m_codeBlock->numberOfConstantRegisters() + FirstConstantRegisterIndex);
3443             if (result.isNewEntry) {
3444                 byteCodeParser->addConstant(value);
3445                 byteCodeParser->m_constants.append(ConstantRecord());
3446             }
3447             m_constantRemap[i] = result.iterator->value;
3448         }
3449         for (unsigned i = 0; i < codeBlock->numberOfConstantBuffers(); ++i) {
3450             // If we inline the same code block multiple times, we don't want to needlessly
3451             // duplicate its constant buffers.
3452             HashMap<ConstantBufferKey, unsigned>::iterator iter =
3453                 byteCodeParser->m_constantBufferCache.find(ConstantBufferKey(codeBlock, i));
3454             if (iter != byteCodeParser->m_constantBufferCache.end()) {
3455                 m_constantBufferRemap[i] = iter->value;
3456                 continue;
3457             }
3458             Vector<JSValue>& buffer = codeBlock->constantBufferAsVector(i);
3459             unsigned newIndex = byteCodeParser->m_codeBlock->addConstantBuffer(buffer);
3460             m_constantBufferRemap[i] = newIndex;
3461             byteCodeParser->m_constantBufferCache.add(ConstantBufferKey(codeBlock, i), newIndex);
3462         }
3463         for (unsigned i = 0; i < codeBlock->numberOfSwitchJumpTables(); ++i) {
3464             m_switchRemap[i] = byteCodeParser->m_codeBlock->numberOfSwitchJumpTables();
3465             byteCodeParser->m_codeBlock->addSwitchJumpTable() = codeBlock->switchJumpTable(i);
3466         }
3467         m_callsiteBlockHeadNeedsLinking = true;
3468     } else {
3469         // Machine code block case.
3470         ASSERT(codeBlock == byteCodeParser->m_codeBlock);
3471         ASSERT(!callee);
3472         ASSERT(!returnValueVR.isValid());
3473         ASSERT(!inlineCallFrameStart.isValid());
3474         ASSERT(!callsiteBlockHead);
3475
3476         m_inlineCallFrame = 0;
3477
3478         m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
3479         m_constantRemap.resize(codeBlock->numberOfConstantRegisters());
3480         m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
3481         m_switchRemap.resize(codeBlock->numberOfSwitchJumpTables());
3482         for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i)
3483             m_identifierRemap[i] = i;
3484         for (size_t i = 0; i < codeBlock->numberOfConstantRegisters(); ++i)
3485             m_constantRemap[i] = i + FirstConstantRegisterIndex;
3486         for (size_t i = 0; i < codeBlock->numberOfConstantBuffers(); ++i)
3487             m_constantBufferRemap[i] = i;
3488         for (size_t i = 0; i < codeBlock->numberOfSwitchJumpTables(); ++i)
3489             m_switchRemap[i] = i;
3490         m_callsiteBlockHeadNeedsLinking = false;
3491     }
3492     
3493     for (size_t i = 0; i < m_constantRemap.size(); ++i)
3494         ASSERT(m_constantRemap[i] >= static_cast<unsigned>(FirstConstantRegisterIndex));
3495     
3496     byteCodeParser->m_inlineStackTop = this;
3497 }
3498
3499 void ByteCodeParser::parseCodeBlock()
3500 {
3501     CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
3502     
3503     if (m_graph.compilation()) {
3504         m_graph.compilation()->addProfiledBytecodes(
3505             *m_vm->m_perBytecodeProfiler, m_inlineStackTop->m_profiledBlock);
3506     }
3507     
3508     bool shouldDumpBytecode = Options::dumpBytecodeAtDFGTime();
3509 #if DFG_ENABLE(DEBUG_VERBOSE)
3510     shouldDumpBytecode |= true;
3511 #endif
3512     if (shouldDumpBytecode) {
3513         dataLog("Parsing ", *codeBlock);
3514         if (inlineCallFrame()) {
3515             dataLog(
3516                 " for inlining at ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT),
3517                 " ", inlineCallFrame()->caller);
3518         }
3519         dataLog(
3520             ": captureCount = ", codeBlock->symbolTable() ? codeBlock->symbolTable()->captureCount() : 0,
3521             ", needsFullScopeChain = ", codeBlock->needsFullScopeChain(),
3522             ", needsActivation = ", codeBlock->ownerExecutable()->needsActivation(),
3523             ", isStrictMode = ", codeBlock->ownerExecutable()->isStrictMode(), "\n");
3524         codeBlock->baselineVersion()->dumpBytecode();
3525     }
3526     
3527     Vector<unsigned, 32> jumpTargets;
3528     computePreciseJumpTargets(codeBlock, jumpTargets);
3529     if (Options::dumpBytecodeAtDFGTime()) {
3530         dataLog("Jump targets: ");
3531         CommaPrinter comma;
3532         for (unsigned i = 0; i < jumpTargets.size(); ++i)