1 /*
2  * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "DFGByteCodeParser.h"
28
29 #if ENABLE(DFG_JIT)
30
31 #include "ArrayConstructor.h"
32 #include "CallLinkStatus.h"
33 #include "CodeBlock.h"
34 #include "CodeBlockWithJITType.h"
35 #include "DFGArrayMode.h"
36 #include "DFGCapabilities.h"
37 #include "DFGJITCode.h"
38 #include "GetByIdStatus.h"
39 #include "Operations.h"
40 #include "PreciseJumpTargets.h"
41 #include "PutByIdStatus.h"
42 #include "StringConstructor.h"
43 #include <wtf/CommaPrinter.h>
44 #include <wtf/HashMap.h>
45 #include <wtf/MathExtras.h>
46 #include <wtf/StdLibExtras.h>
47
48 namespace JSC { namespace DFG {
49
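// Identifies a constant buffer by the CodeBlock that owns it and its index
// within that block. The parser uses this to cache and remap constant buffer
// indices when inlining (see m_constantBufferCache). The hash-table deleted
// value is a null CodeBlock with index 1, which can never collide with a
// real key.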
50 class ConstantBufferKey {
51 public:
52     ConstantBufferKey()
53         : m_codeBlock(0)
54         , m_index(0)
55     {
56     }
57     
58     ConstantBufferKey(WTF::HashTableDeletedValueType)
59         : m_codeBlock(0)
60         , m_index(1)
61     {
62     }
63     
64     ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
65         : m_codeBlock(codeBlock)
66         , m_index(index)
67     {
68     }
69     
70     bool operator==(const ConstantBufferKey& other) const
71     {
72         return m_codeBlock == other.m_codeBlock
73             && m_index == other.m_index;
74     }
75     
76     unsigned hash() const
77     {
78         return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
79     }
80     
81     bool isHashTableDeletedValue() const
82     {
83         return !m_codeBlock && m_index;
84     }
85     
86     CodeBlock* codeBlock() const { return m_codeBlock; }
87     unsigned index() const { return m_index; }
88     
89 private:
90     CodeBlock* m_codeBlock;
91     unsigned m_index;
92 };
93
94 struct ConstantBufferKeyHash {
95     static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
96     static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
97     {
98         return a == b;
99     }
100     
101     static const bool safeToCompareToEmptyOrDeleted = true;
102 };
103
104 } } // namespace JSC::DFG
105
106 namespace WTF {
107
108 template<typename T> struct DefaultHash;
109 template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
110     typedef JSC::DFG::ConstantBufferKeyHash Hash;
111 };
112
113 template<typename T> struct HashTraits;
114 template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };
115
116 } // namespace WTF
117
118 namespace JSC { namespace DFG {
119
120 // === ByteCodeParser ===
121 //
122 // This class is used to compile the dataflow graph from a CodeBlock.
123 class ByteCodeParser {
124 public:
125     ByteCodeParser(Graph& graph)
126         : m_vm(&graph.m_vm)
127         , m_codeBlock(graph.m_codeBlock)
128         , m_profiledBlock(graph.m_profiledBlock)
129         , m_graph(graph)
130         , m_currentBlock(0)
131         , m_currentIndex(0)
132         , m_constantUndefined(UINT_MAX)
133         , m_constantNull(UINT_MAX)
134         , m_constantNaN(UINT_MAX)
135         , m_constant1(UINT_MAX)
136         , m_constants(m_codeBlock->numberOfConstantRegisters())
137         , m_numArguments(m_codeBlock->numParameters())
138         , m_numLocals(m_codeBlock->m_numCalleeRegisters)
139         , m_preservedVars(m_codeBlock->m_numVars)
140         , m_parameterSlots(0)
141         , m_numPassedVarArgs(0)
142         , m_inlineStackTop(0)
143         , m_haveBuiltOperandMaps(false)
144         , m_emptyJSValueIndex(UINT_MAX)
145         , m_currentInstruction(0)
146     {
147         ASSERT(m_profiledBlock);
148         
149         for (int i = 0; i < m_codeBlock->m_numVars; ++i)
150             m_preservedVars.set(i);
151     }
152     
153     // Parse a full CodeBlock of bytecode.
154     bool parse();
155     
156 private:
157     struct InlineStackEntry;
158
159     // Just parse from m_currentIndex to the end of the current CodeBlock.
160     void parseCodeBlock();
161
162     // Helper for min and max.
163     bool handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);
164     
165     // Handle calls. This resolves issues surrounding inlining and intrinsics.
166     void handleCall(Instruction* currentInstruction, NodeType op, CodeSpecializationKind);
167     void emitFunctionChecks(const CallLinkStatus&, Node* callTarget, int registerOffset, CodeSpecializationKind);
168     void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind);
169     // Handle inlining. Return true if it succeeded, false if we need to plant a call.
170     bool handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
171     // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
172     bool handleIntrinsic(int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
173     bool handleTypedArrayConstructor(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, TypedArrayType);
174     bool handleConstantInternalFunction(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
175     Node* handlePutByOffset(Node* base, unsigned identifier, PropertyOffset, Node* value);
176     Node* handleGetByOffset(SpeculatedType, Node* base, unsigned identifierNumber, PropertyOffset);
177     void handleGetByOffset(
178         int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
179         PropertyOffset);
180     void handleGetById(
181         int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
182         const GetByIdStatus&);
183
184     Node* getScope(bool skipTop, unsigned skipCount);
185     
186     // Prepare to parse a block.
187     void prepareToParseBlock();
188     // Parse a single basic block of bytecode instructions.
189     bool parseBlock(unsigned limit);
190     // Link block successors.
191     void linkBlock(BasicBlock*, Vector<BasicBlock*>& possibleTargets);
192     void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets);
193     
194     VariableAccessData* newVariableAccessData(int operand, bool isCaptured)
195     {
196         ASSERT(operand < FirstConstantRegisterIndex);
197         
198         m_graph.m_variableAccessData.append(VariableAccessData(static_cast<VirtualRegister>(operand), isCaptured));
199         return &m_graph.m_variableAccessData.last();
200     }
201     
202     // Get/Set the operands/result of a bytecode instruction.
203     Node* getDirect(int operand)
204     {
205         // Is this a constant?
206         if (operand >= FirstConstantRegisterIndex) {
207             unsigned constant = operand - FirstConstantRegisterIndex;
208             ASSERT(constant < m_constants.size());
209             return getJSConstant(constant);
210         }
211
212         ASSERT(operand != JSStack::Callee);
213         
214         // Is this an argument?
215         if (operandIsArgument(operand))
216             return getArgument(operand);
217
218         // Must be a local.
219         return getLocal(operand);
220     }
221
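    // get() remaps an operand into the machine frame before fetching it. The
    // callee is special-cased: when parsing an inlined call with a known
    // callee we can return it as a constant; otherwise we emit GetCallee.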
222     Node* get(int operand)
223     {
224         if (operand == JSStack::Callee) {
225             if (inlineCallFrame() && inlineCallFrame()->callee)
226                 return cellConstant(inlineCallFrame()->callee.get());
227             
228             return getCallee();
229         }
230         
231         return getDirect(m_inlineStackTop->remapOperand(operand));
232     }
233
234     enum SetMode { NormalSet, SetOnEntry };
235     void setDirect(int operand, Node* value, SetMode setMode = NormalSet)
236     {
237         // Is this an argument?
238         if (operandIsArgument(operand)) {
239             setArgument(operand, value, setMode);
240             return;
241         }
242
243         // Must be a local.
244         setLocal(operand, value, setMode);
245     }
246
247     void set(int operand, Node* value, SetMode setMode = NormalSet)
248     {
249         setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
250     }
251     
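    // Attach the profiled code block's lazy operand value profile for this
    // (bytecode index, operand) pair to a freshly created GetLocal, so that
    // prediction propagation has a starting speculation for the variable.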
252     Node* injectLazyOperandSpeculation(Node* node)
253     {
254         ASSERT(node->op() == GetLocal);
255         ASSERT(node->codeOrigin.bytecodeIndex == m_currentIndex);
256         ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
257         LazyOperandValueProfileKey key(m_currentIndex, node->local());
258         SpeculatedType prediction = m_inlineStackTop->m_lazyOperands.prediction(locker, key);
259 #if DFG_ENABLE(DEBUG_VERBOSE)
260         dataLog("Lazy operand [@", node->index(), ", bc#", m_currentIndex, ", r", node->local(), "] prediction: ", SpeculationDump(prediction), "\n");
261 #endif
262         node->variableAccessData()->predict(prediction);
263         return node;
264     }
265
266     // Used in implementing get/set, above, where the operand is a local variable.
267     Node* getLocal(int operand)
268     {
269         unsigned local = operandToLocal(operand);
270         Node* node = m_currentBlock->variablesAtTail.local(local);
271         bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
272         
273         // This has two goals: 1) link together variable access datas, and 2)
274         // try to avoid creating redundant GetLocals. (1) is required for
275         // correctness - no other phase will ensure that block-local variable
276         // access data unification is done correctly. (2) is purely opportunistic
277         // and is meant as a compile-time optimization only.
278         
279         VariableAccessData* variable;
280         
281         if (node) {
282             variable = node->variableAccessData();
283             variable->mergeIsCaptured(isCaptured);
284             
285             if (!isCaptured) {
286                 switch (node->op()) {
287                 case GetLocal:
288                     return node;
289                 case SetLocal:
290                     return node->child1().node();
291                 default:
292                     break;
293                 }
294             }
295         } else {
296             m_preservedVars.set(local);
297             variable = newVariableAccessData(operand, isCaptured);
298         }
299         
300         node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
301         m_currentBlock->variablesAtTail.local(local) = node;
302         return node;
303     }
304
305     void setLocal(int operand, Node* value, SetMode setMode = NormalSet)
306     {
307         unsigned local = operandToLocal(operand);
308         bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
309         
310         if (setMode == NormalSet) {
311             ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
312             if (isCaptured || argumentPosition)
313                 flushDirect(operand, argumentPosition);
314         }
315
316         VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
317         variableAccessData->mergeStructureCheckHoistingFailed(
318             m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
319         variableAccessData->mergeCheckArrayHoistingFailed(
320             m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
321         Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
322         m_currentBlock->variablesAtTail.local(local) = node;
323     }
324
325     // Used in implementing get/set, above, where the operand is an argument.
326     Node* getArgument(unsigned operand)
327     {
328         unsigned argument = operandToArgument(operand);
329         ASSERT(argument < m_numArguments);
330         
331         Node* node = m_currentBlock->variablesAtTail.argument(argument);
332         bool isCaptured = m_codeBlock->isCaptured(operand);
333
334         VariableAccessData* variable;
335         
336         if (node) {
337             variable = node->variableAccessData();
338             variable->mergeIsCaptured(isCaptured);
339             
340             switch (node->op()) {
341             case GetLocal:
342                 return node;
343             case SetLocal:
344                 return node->child1().node();
345             default:
346                 break;
347             }
348         } else
349             variable = newVariableAccessData(operand, isCaptured);
350         
351         node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
352         m_currentBlock->variablesAtTail.argument(argument) = node;
353         return node;
354     }
355     void setArgument(int operand, Node* value, SetMode setMode = NormalSet)
356     {
357         unsigned argument = operandToArgument(operand);
358         ASSERT(argument < m_numArguments);
359         
360         bool isCaptured = m_codeBlock->isCaptured(operand);
361
362         VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
363
364         // Always flush arguments, except for 'this'. If 'this' is created by us,
365         // then make sure that it's never unboxed.
366         if (argument) {
367             if (setMode == NormalSet)
368                 flushDirect(operand);
369         } else if (m_codeBlock->specializationKind() == CodeForConstruct)
370             variableAccessData->mergeShouldNeverUnbox(true);
371         
372         variableAccessData->mergeStructureCheckHoistingFailed(
373             m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
374         variableAccessData->mergeCheckArrayHoistingFailed(
375             m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
376         Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
377         m_currentBlock->variablesAtTail.argument(argument) = node;
378     }
379     
380     ArgumentPosition* findArgumentPositionForArgument(int argument)
381     {
382         InlineStackEntry* stack = m_inlineStackTop;
383         while (stack->m_inlineCallFrame)
384             stack = stack->m_caller;
385         return stack->m_argumentPositions[argument];
386     }
387     
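    // If the given local is really an argument slot of one of the active
    // inline call frames (and not part of the frame header or the 'this'
    // slot), return the ArgumentPosition for that argument; otherwise
    // return 0.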
388     ArgumentPosition* findArgumentPositionForLocal(int operand)
389     {
390         for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
391             InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
392             if (!inlineCallFrame)
393                 break;
394             if (operand <= static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize))
395                 continue;
396             if (operand == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
397                 continue;
398             if (operand > static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize + inlineCallFrame->arguments.size()))
399                 continue;
400             int argument = operandToArgument(operand - inlineCallFrame->stackOffset);
401             return stack->m_argumentPositions[argument];
402         }
403         return 0;
404     }
405     
406     ArgumentPosition* findArgumentPosition(int operand)
407     {
408         if (operandIsArgument(operand))
409             return findArgumentPositionForArgument(operandToArgument(operand));
410         return findArgumentPositionForLocal(operand);
411     }
412
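    // Lazily append a new constant register to the CodeBlock. The write
    // barrier for the new value is registered with the compilation plan
    // rather than being executed here.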
413     void addConstant(JSValue value)
414     {
415         unsigned constantIndex = m_codeBlock->addConstantLazily();
416         initializeLazyWriteBarrierForConstant(
417             m_graph.m_plan.writeBarriers,
418             m_codeBlock->constants()[constantIndex],
419             m_codeBlock,
420             constantIndex,
421             m_codeBlock->ownerExecutable(), 
422             value);
423     }
424     
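    // Flushing emits a Flush node for an operand so that the value most
    // recently stored to it is kept alive in its bytecode register, e.g. for
    // OSR exit, captured variables, and the arguments of the current frame.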
425     void flush(int operand)
426     {
427         flushDirect(m_inlineStackTop->remapOperand(operand));
428     }
429     
430     void flushDirect(int operand)
431     {
432         flushDirect(operand, findArgumentPosition(operand));
433     }
434     
435     void flushDirect(int operand, ArgumentPosition* argumentPosition)
436     {
437         bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
438         
439         ASSERT(operand < FirstConstantRegisterIndex);
440         
441         if (!operandIsArgument(operand))
442             m_preservedVars.set(operandToLocal(operand));
443         
444         Node* node = m_currentBlock->variablesAtTail.operand(operand);
445         
446         VariableAccessData* variable;
447         
448         if (node) {
449             variable = node->variableAccessData();
450             variable->mergeIsCaptured(isCaptured);
451         } else
452             variable = newVariableAccessData(operand, isCaptured);
453         
454         node = addToGraph(Flush, OpInfo(variable));
455         m_currentBlock->variablesAtTail.operand(operand) = node;
456         if (argumentPosition)
457             argumentPosition->addVariable(variable);
458     }
459
460     void flush(InlineStackEntry* inlineStackEntry)
461     {
462         int numArguments;
463         if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame)
464             numArguments = inlineCallFrame->arguments.size();
465         else
466             numArguments = inlineStackEntry->m_codeBlock->numParameters();
467         for (unsigned argument = numArguments; argument-- > 1;)
468             flushDirect(inlineStackEntry->remapOperand(argumentToOperand(argument)));
469         for (int local = 0; local < inlineStackEntry->m_codeBlock->m_numVars; ++local) {
470             if (!inlineStackEntry->m_codeBlock->isCaptured(localToOperand(local)))
471                 continue;
472             flushDirect(inlineStackEntry->remapOperand(localToOperand(local)));
473         }
474     }
475
476     void flushAllArgumentsAndCapturedVariablesInInlineStack()
477     {
478         for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
479             flush(inlineStackEntry);
480     }
481
482     void flushArgumentsAndCapturedVariables()
483     {
484         flush(m_inlineStackTop);
485     }
486
487     // Get an operand, and perform a ToInt32 conversion on it.
488     Node* getToInt32(int operand)
489     {
490         return toInt32(get(operand));
491     }
492
493     // Perform an ES5 ToInt32 operation - returns a node of type NodeResultInt32.
494     Node* toInt32(Node* node)
495     {
496         if (node->hasInt32Result())
497             return node;
498
499         if (node->op() == UInt32ToNumber)
500             return node->child1().node();
501
502         // Check for numeric constants boxed as JSValues.
503         if (canFold(node)) {
504             JSValue v = valueOfJSConstant(node);
505             if (v.isInt32())
506                 return getJSConstant(node->constantNumber());
507             if (v.isNumber())
508                 return getJSConstantForValue(JSValue(JSC::toInt32(v.asNumber())));
509         }
510
511         return addToGraph(ValueToInt32, node);
512     }
513
514     // NOTE: Only use this to construct constants that arise from non-speculative
515     // constant folding. I.e. creating constants using this if we had constant
516     // field inference would be a bad idea, since the bytecode parser's folding
517     // doesn't handle liveness preservation.
518     Node* getJSConstantForValue(JSValue constantValue)
519     {
520         unsigned constantIndex;
521         if (!m_codeBlock->findConstant(constantValue, constantIndex)) {
522             addConstant(constantValue);
523             m_constants.append(ConstantRecord());
524         }
525         
526         ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
527         
528         return getJSConstant(constantIndex);
529     }
530
531     Node* getJSConstant(unsigned constant)
532     {
533         Node* node = m_constants[constant].asJSValue;
534         if (node)
535             return node;
536
537         Node* result = addToGraph(JSConstant, OpInfo(constant));
538         m_constants[constant].asJSValue = result;
539         return result;
540     }
541
542     Node* getCallee()
543     {
544         return addToGraph(GetCallee);
545     }
546
547     // Helper functions to get/set the this value.
548     Node* getThis()
549     {
550         return get(m_inlineStackTop->m_codeBlock->thisRegister());
551     }
552     void setThis(Node* value)
553     {
554         set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
555     }
556
557     // Convenience methods for checking nodes for constants.
558     bool isJSConstant(Node* node)
559     {
560         return node->op() == JSConstant;
561     }
562     bool isInt32Constant(Node* node)
563     {
564         return isJSConstant(node) && valueOfJSConstant(node).isInt32();
565     }
566     // Convenience methods for getting constant values.
567     JSValue valueOfJSConstant(Node* node)
568     {
569         ASSERT(isJSConstant(node));
570         return m_codeBlock->getConstant(FirstConstantRegisterIndex + node->constantNumber());
571     }
572     int32_t valueOfInt32Constant(Node* node)
573     {
574         ASSERT(isInt32Constant(node));
575         return valueOfJSConstant(node).asInt32();
576     }
577     
578     // This method returns a JSConstant with the value 'undefined'.
579     Node* constantUndefined()
580     {
581         // Has m_constantUndefined been set up yet?
582         if (m_constantUndefined == UINT_MAX) {
583             // Search the constant pool for undefined; if we find it, we can just reuse it!
584             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
585             for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
586                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
587                 if (testMe.isUndefined())
588                     return getJSConstant(m_constantUndefined);
589             }
590
591             // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
592             ASSERT(m_constants.size() == numberOfConstants);
593             addConstant(jsUndefined());
594             m_constants.append(ConstantRecord());
595             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
596         }
597
598         // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
599         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
600         return getJSConstant(m_constantUndefined);
601     }
602
603     // This method returns a JSConstant with the value 'null'.
604     Node* constantNull()
605     {
606         // Has m_constantNull been set up yet?
607         if (m_constantNull == UINT_MAX) {
608             // Search the constant pool for null; if we find it, we can just reuse it!
609             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
610             for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
611                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
612                 if (testMe.isNull())
613                     return getJSConstant(m_constantNull);
614             }
615
616             // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
617             ASSERT(m_constants.size() == numberOfConstants);
618             addConstant(jsNull());
619             m_constants.append(ConstantRecord());
620             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
621         }
622
623         // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
624         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
625         return getJSConstant(m_constantNull);
626     }
627
628     // This method returns a JSConstant with the integer value 1.
629     Node* one()
630     {
631         // Has m_constant1 been set up yet?
632         if (m_constant1 == UINT_MAX) {
633             // Search the constant pool for the value 1; if we find it, we can just reuse it!
634             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
635             for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
636                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
637                 if (testMe.isInt32() && testMe.asInt32() == 1)
638                     return getJSConstant(m_constant1);
639             }
640
641             // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
642             ASSERT(m_constants.size() == numberOfConstants);
643             addConstant(jsNumber(1));
644             m_constants.append(ConstantRecord());
645             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
646         }
647
648         // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
649         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
650         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
651         return getJSConstant(m_constant1);
652     }
653     
654     // This method returns a JSConstant with the value NaN.
655     Node* constantNaN()
656     {
657         JSValue nan = jsNaN();
658         
659         // Has m_constantNaN been set up yet?
660         if (m_constantNaN == UINT_MAX) {
661             // Search the constant pool for the value NaN; if we find it, we can just reuse it!
662             unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
663             for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
664                 JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
665                 if (JSValue::encode(testMe) == JSValue::encode(nan))
666                     return getJSConstant(m_constantNaN);
667             }
668
669             // Add the value NaN to the CodeBlock's constants, and add a corresponding slot in m_constants.
670             ASSERT(m_constants.size() == numberOfConstants);
671             addConstant(nan);
672             m_constants.append(ConstantRecord());
673             ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
674         }
675
676         // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value NaN.
677         ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
678         ASSERT(std::isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
679         return getJSConstant(m_constantNaN);
680     }
681     
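    // Return the WeakJSConstant node for the given cell, memoized so that
    // each cell is represented by a single node in the graph.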
682     Node* cellConstant(JSCell* cell)
683     {
684         HashMap<JSCell*, Node*>::AddResult result = m_cellConstantNodes.add(cell, nullptr);
685         if (result.isNewEntry)
686             result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));
687         
688         return result.iterator->value;
689     }
690     
691     InlineCallFrame* inlineCallFrame()
692     {
693         return m_inlineStackTop->m_inlineCallFrame;
694     }
695
696     CodeOrigin currentCodeOrigin()
697     {
698         return CodeOrigin(m_currentIndex, inlineCallFrame());
699     }
700     
701     bool canFold(Node* node)
702     {
703         return node->isStronglyProvedConstantIn(inlineCallFrame());
704     }
705
706     // Our codegen for constant strict equality performs a bitwise comparison,
707     // so we can only select values that have a consistent bitwise identity.
708     bool isConstantForCompareStrictEq(Node* node)
709     {
710         if (!node->isConstant())
711             return false;
712         JSValue value = valueOfJSConstant(node);
713         return value.isBoolean() || value.isUndefinedOrNull();
714     }
715     
716     Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
717     {
718         Node* result = m_graph.addNode(
719             SpecNone, op, currentCodeOrigin(), Edge(child1), Edge(child2), Edge(child3));
720         ASSERT(op != Phi);
721         m_currentBlock->append(result);
722         return result;
723     }
724     Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
725     {
726         Node* result = m_graph.addNode(
727             SpecNone, op, currentCodeOrigin(), child1, child2, child3);
728         ASSERT(op != Phi);
729         m_currentBlock->append(result);
730         return result;
731     }
732     Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
733     {
734         Node* result = m_graph.addNode(
735             SpecNone, op, currentCodeOrigin(), info, Edge(child1), Edge(child2), Edge(child3));
736         ASSERT(op != Phi);
737         m_currentBlock->append(result);
738         return result;
739     }
740     Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
741     {
742         Node* result = m_graph.addNode(
743             SpecNone, op, currentCodeOrigin(), info1, info2,
744             Edge(child1), Edge(child2), Edge(child3));
745         ASSERT(op != Phi);
746         m_currentBlock->append(result);
747         return result;
748     }
749     
750     Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
751     {
752         Node* result = m_graph.addNode(
753             SpecNone, Node::VarArg, op, currentCodeOrigin(), info1, info2,
754             m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
755         ASSERT(op != Phi);
756         m_currentBlock->append(result);
757         
758         m_numPassedVarArgs = 0;
759         
760         return result;
761     }
762
763     void addVarArgChild(Node* child)
764     {
765         m_graph.m_varArgChildren.append(Edge(child));
766         m_numPassedVarArgs++;
767     }
768     
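    // Build a varargs Call/Construct node from an op_call/op_construct
    // instruction. Operand 1 is the result, 2 the callee, 3 the argument
    // count including 'this', and 4 the register offset of the callee frame.
    // For Construct the 'this' slot is skipped. m_parameterSlots tracks the
    // largest outgoing call frame so that enough stack is reserved.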
769     Node* addCall(Instruction* currentInstruction, NodeType op)
770     {
771         SpeculatedType prediction = getPrediction();
772         
773         addVarArgChild(get(currentInstruction[2].u.operand));
774         int argCount = currentInstruction[3].u.operand;
775         if (JSStack::CallFrameHeaderSize + (unsigned)argCount > m_parameterSlots)
776             m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
777
778         int registerOffset = -currentInstruction[4].u.operand;
779         int dummyThisArgument = op == Call ? 0 : 1;
780         for (int i = 0 + dummyThisArgument; i < argCount; ++i)
781             addVarArgChild(get(registerOffset + argumentToOperand(i)));
782
783         Node* call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
784         set(currentInstruction[1].u.operand, call);
785         return call;
786     }
787     
788     Node* cellConstantWithStructureCheck(JSCell* object, Structure* structure)
789     {
790         Node* objectNode = cellConstant(object);
791         addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
792         return objectNode;
793     }
794     
795     Node* cellConstantWithStructureCheck(JSCell* object)
796     {
797         return cellConstantWithStructureCheck(object, object->structure());
798     }
799
800     SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
801     {
802         ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
803         return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(locker, bytecodeIndex);
804     }
805
806     SpeculatedType getPrediction(unsigned bytecodeIndex)
807     {
808         SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);
809         
810         if (prediction == SpecNone) {
811             // We have no information about what values this node generates. Give up
812             // on executing this code, since we're likely to do more damage than good.
813             addToGraph(ForceOSRExit);
814         }
815         
816         return prediction;
817     }
818     
819     SpeculatedType getPredictionWithoutOSRExit()
820     {
821         return getPredictionWithoutOSRExit(m_currentIndex);
822     }
823     
824     SpeculatedType getPrediction()
825     {
826         return getPrediction(m_currentIndex);
827     }
828     
829     ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
830     {
831         ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
832         profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
833         return ArrayMode::fromObserved(locker, profile, action, false);
834     }
835     
836     ArrayMode getArrayMode(ArrayProfile* profile)
837     {
838         return getArrayMode(profile, Array::Read);
839     }
840     
841     ArrayMode getArrayModeConsideringSlowPath(ArrayProfile* profile, Array::Action action)
842     {
843         ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
844         
845         profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
846         
847 #if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
848         if (m_inlineStackTop->m_profiledBlock->numberOfRareCaseProfiles())
849             dataLogF("Slow case profile for bc#%u: %u\n", m_currentIndex, m_inlineStackTop->m_profiledBlock->rareCaseProfileForBytecodeOffset(m_currentIndex)->m_counter);
850         dataLogF("Array profile for bc#%u: %u %s%s\n", m_currentIndex, profile->observedArrayModes(locker), profile->structureIsPolymorphic(locker) ? " (polymorphic)" : "", profile->mayInterceptIndexedAccesses(locker) ? " (may intercept)" : "");
851 #endif
852         
853         bool makeSafe =
854             m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
855             || profile->outOfBounds(locker);
856         
857         ArrayMode result = ArrayMode::fromObserved(locker, profile, action, makeSafe);
858         
859         return result;
860     }
861     
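    // Mark an arithmetic node as possibly overflowing or producing negative
    // zero if slow-case counters or prior OSR exits say its fast path has
    // failed before, so later phases generate code that handles those cases.
    // (On non-x86, ArithMod ignores the slow-case counter and relies on exit
    // profiles only.)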
862     Node* makeSafe(Node* node)
863     {
864         bool likelyToTakeSlowCase;
865         if (!isX86() && node->op() == ArithMod)
866             likelyToTakeSlowCase = false;
867         else
868             likelyToTakeSlowCase = m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex);
869         
870         if (!likelyToTakeSlowCase
871             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
872             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
873             return node;
874         
875         switch (node->op()) {
876         case UInt32ToNumber:
877         case ArithAdd:
878         case ArithSub:
879         case ValueAdd:
880         case ArithMod: // For ArithMod, "MayOverflow" means we tried to divide by zero, or we saw a double.
881             node->mergeFlags(NodeMayOverflow);
882             break;
883             
884         case ArithNegate:
885             // Currently we can't tell the difference between a negation overflowing
886             // (i.e. -(1 << 31)) and one generating negative zero (i.e. -0). If it took the
887             // slow path then we assume that it did both of those things.
888             node->mergeFlags(NodeMayOverflow);
889             node->mergeFlags(NodeMayNegZero);
890             break;
891
892         case ArithMul:
893             if (m_inlineStackTop->m_profiledBlock->likelyToTakeDeepestSlowCase(m_currentIndex)
894                 || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)) {
895 #if DFG_ENABLE(DEBUG_VERBOSE)
896                 dataLogF("Making ArithMul @%u take deepest slow case.\n", node->index());
897 #endif
898                 node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
899             } else if (m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
900                        || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero)) {
901 #if DFG_ENABLE(DEBUG_VERBOSE)
902                 dataLogF("Making ArithMul @%u take faster slow case.\n", node->index());
903 #endif
904                 node->mergeFlags(NodeMayNegZero);
905             }
906             break;
907             
908         default:
909             RELEASE_ASSERT_NOT_REACHED();
910             break;
911         }
912         
913         return node;
914     }
915     
916     Node* makeDivSafe(Node* node)
917     {
918         ASSERT(node->op() == ArithDiv);
919         
920         // The main slow case counter for op_div in the old JIT counts only when
921         // the operands are not numbers. We don't care about that since we already
922         // have speculations in place that take care of that separately. We only
923         // care about when the outcome of the division is not an integer, which
924         // is what the special fast case counter tells us.
925         
926         if (!m_inlineStackTop->m_profiledBlock->couldTakeSpecialFastCase(m_currentIndex)
927             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
928             && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
929             return node;
930         
931 #if DFG_ENABLE(DEBUG_VERBOSE)
932         dataLogF("Making %s @%u safe at bc#%u because special fast-case counter is at %u and exit profiles say %d, %d\n", Graph::opName(node->op()), node->index(), m_currentIndex, m_inlineStackTop->m_profiledBlock->specialFastCaseProfileForBytecodeOffset(m_currentIndex)->m_counter, m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow), m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero));
933 #endif
934         
935         // FIXME: It might be possible to make this more granular. The DFG certainly can
936         // distinguish between negative zero and overflow in its exit profiles.
937         node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
938         
939         return node;
940     }
941     
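    // For a non-direct put transition, check that the recorded prototype
    // chain still matches the current structures of the prototype objects.
    // A direct put does not consult the chain, so it is trivially valid.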
942     bool structureChainIsStillValid(bool direct, Structure* previousStructure, StructureChain* chain)
943     {
944         if (direct)
945             return true;
946         
947         if (!previousStructure->storedPrototype().isNull() && previousStructure->storedPrototype().asCell()->structure() != chain->head()->get())
948             return false;
949         
950         for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
951             if (!(*it)->storedPrototype().isNull() && (*it)->storedPrototype().asCell()->structure() != it[1].get())
952                 return false;
953         }
954         
955         return true;
956     }
957     
958     void buildOperandMapsIfNecessary();
959     
960     VM* m_vm;
961     CodeBlock* m_codeBlock;
962     CodeBlock* m_profiledBlock;
963     Graph& m_graph;
964
965     // The current block being generated.
966     BasicBlock* m_currentBlock;
967     // The bytecode index of the current instruction being generated.
968     unsigned m_currentIndex;
969
970     // We use these values during code generation, and to avoid the need for
971     // special handling we make sure they are available as constants in the
972     // CodeBlock's constant pool. These variables are initialized to
973     // UINT_MAX, and lazily updated to hold an index into the CodeBlock's
974     // constant pool, as necessary.
975     unsigned m_constantUndefined;
976     unsigned m_constantNull;
977     unsigned m_constantNaN;
978     unsigned m_constant1;
979     HashMap<JSCell*, unsigned> m_cellConstants;
980     HashMap<JSCell*, Node*> m_cellConstantNodes;
981
982     // A constant in the constant pool may be represented by more than one
983     // node in the graph, depending on the context in which it is being used.
984     struct ConstantRecord {
985         ConstantRecord()
986             : asInt32(0)
987             , asNumeric(0)
988             , asJSValue(0)
989         {
990         }
991
992         Node* asInt32;
993         Node* asNumeric;
994         Node* asJSValue;
995     };
996
997     // One ConstantRecord per constant register in the CodeBlock, tracking the
998     // nodes created so far for that constant in each of its representations.
999     Vector<ConstantRecord, 16> m_constants;
1000
1001     // The number of arguments passed to the function.
1002     unsigned m_numArguments;
1003     // The number of locals (vars + temporaries) used in the function.
1004     unsigned m_numLocals;
1005     // The set of registers we need to preserve across BasicBlock boundaries;
1006     // typically equal to the set of vars, but we expand this to cover all
1007     // temporaries that persist across blocks (due to ?:, &&, ||, etc.).
1008     BitVector m_preservedVars;
1009     // The number of slots (in units of sizeof(Register)) that we need to
1010     // preallocate for calls emanating from this frame. This includes the
1011     // size of the CallFrame, but only if this is not a leaf function. (I.e.
1012     // this is 0 if and only if this function is a leaf.)
1013     unsigned m_parameterSlots;
1014     // The number of var args passed to the next var arg node.
1015     unsigned m_numPassedVarArgs;
1016
1017     HashMap<ConstantBufferKey, unsigned> m_constantBufferCache;
1018     
1019     struct InlineStackEntry {
1020         ByteCodeParser* m_byteCodeParser;
1021         
1022         CodeBlock* m_codeBlock;
1023         CodeBlock* m_profiledBlock;
1024         InlineCallFrame* m_inlineCallFrame;
1025         
1026         ScriptExecutable* executable() { return m_codeBlock->ownerExecutable(); }
1027         
1028         QueryableExitProfile m_exitProfile;
1029         
1030         // Remapping of identifier and constant numbers from the code block being
1031         // inlined (inline callee) to the code block that we're inlining into
1032         // (the machine code block, which is the transitive, though not necessarily
1033         // direct, caller).
1034         Vector<unsigned> m_identifierRemap;
1035         Vector<unsigned> m_constantRemap;
1036         Vector<unsigned> m_constantBufferRemap;
1037         Vector<unsigned> m_switchRemap;
1038         
1039         // Blocks introduced by this code block, which need successor linking.
1040         // May include up to one basic block that includes the continuation after
1041         // the callsite in the caller. These must be appended in the order that they
1042         // are created, but their bytecodeBegin values need not be in order as they
1043         // are ignored.
1044         Vector<UnlinkedBlock> m_unlinkedBlocks;
1045         
1046         // Potential block linking targets. Must be sorted by bytecodeBegin, and
1047         // cannot have two blocks that have the same bytecodeBegin. For this very
1048         // reason, this is not equivalent to m_unlinkedBlocks.
1049         Vector<BasicBlock*> m_blockLinkingTargets;
1050         
1051         // If the callsite's basic block was split into two, then this will be
1052         // the head of the callsite block. It needs its successors linked to the
1053         // m_unlinkedBlocks, but not the other way around: there's no way for
1054         // any blocks in m_unlinkedBlocks to jump back into this block.
1055         BasicBlock* m_callsiteBlockHead;
1056         
1057         // Does the callsite block head need linking? This is typically true
1058         // but will be false for the machine code block's inline stack entry
1059         // (since that one is not inlined) and for cases where an inline callee
1060         // did the linking for us.
1061         bool m_callsiteBlockHeadNeedsLinking;
1062         
1063         VirtualRegister m_returnValue;
1064         
1065         // Speculations about variable types collected from the profiled code block,
1066         // which are based on OSR exit profiles that past DFG compilations of this
1067         // code block had gathered.
1068         LazyOperandValueProfileParser m_lazyOperands;
1069         
1070         // Did we see any returns? We need to handle the (uncommon but necessary)
1071         // case where a procedure that does not return was inlined.
1072         bool m_didReturn;
1073         
1074         // Did we have any early returns?
1075         bool m_didEarlyReturn;
1076         
1077         // Pointers to the argument position trackers for this slice of code.
1078         Vector<ArgumentPosition*> m_argumentPositions;
1079         
1080         InlineStackEntry* m_caller;
1081         
1082         InlineStackEntry(
1083             ByteCodeParser*,
1084             CodeBlock*,
1085             CodeBlock* profiledBlock,
1086             BasicBlock* callsiteBlockHead,
1087             JSFunction* callee, // Null if this is a closure call.
1088             VirtualRegister returnValueVR,
1089             VirtualRegister inlineCallFrameStart,
1090             int argumentCountIncludingThis,
1091             CodeSpecializationKind);
1092         
1093         ~InlineStackEntry()
1094         {
1095             m_byteCodeParser->m_inlineStackTop = m_caller;
1096         }
1097         
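        // Map an operand in the inlined code block to the machine frame:
        // constants go through m_constantRemap, while locals and arguments
        // are shifted by the inline call frame's stack offset. The callee is
        // handled separately in get().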
1098         int remapOperand(int operand) const
1099         {
1100             if (!m_inlineCallFrame)
1101                 return operand;
1102             
1103             if (operand >= FirstConstantRegisterIndex) {
1104                 int result = m_constantRemap[operand - FirstConstantRegisterIndex];
1105                 ASSERT(result >= FirstConstantRegisterIndex);
1106                 return result;
1107             }
1108
1109             ASSERT(operand != JSStack::Callee);
1110
1111             return operand + m_inlineCallFrame->stackOffset;
1112         }
1113     };
1114     
1115     InlineStackEntry* m_inlineStackTop;
1116
1117     // Have we built operand maps? We initialize them lazily, and only when doing
1118     // inlining.
1119     bool m_haveBuiltOperandMaps;
1120     // Mapping between identifier names and numbers.
1121     BorrowedIdentifierMap m_identifierMap;
1122     // Mapping between values and constant numbers.
1123     JSValueMap m_jsValueMap;
1124     // Index of the empty value, or UINT_MAX if there is no mapping. This is a horrible
1125     // work-around for the fact that JSValueMap can't handle "empty" values.
1126     unsigned m_emptyJSValueIndex;
1127     
1128     Instruction* m_currentInstruction;
1129 };
1130
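// Every opcode case in parseBlock() ends with one of these. NEXT_OPCODE
// advances the bytecode index and continues parsing the current block;
// LAST_OPCODE advances the index and returns from parseBlock(), ending the
// block (shouldContinueParsing tells the caller whether to keep going).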
1131 #define NEXT_OPCODE(name) \
1132     m_currentIndex += OPCODE_LENGTH(name); \
1133     continue
1134
1135 #define LAST_OPCODE(name) \
1136     m_currentIndex += OPCODE_LENGTH(name); \
1137     return shouldContinueParsing
1138
1139
1140 void ByteCodeParser::handleCall(Instruction* currentInstruction, NodeType op, CodeSpecializationKind kind)
1141 {
1142     ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
1143     
1144     Node* callTarget = get(currentInstruction[2].u.operand);
1145     
1146     CallLinkStatus callLinkStatus;
1147
1148     if (m_graph.isConstant(callTarget))
1149         callLinkStatus = CallLinkStatus(m_graph.valueOfJSConstant(callTarget)).setIsProved(true);
1150     else {
1151         callLinkStatus = CallLinkStatus::computeFor(m_inlineStackTop->m_profiledBlock, m_currentIndex);
1152         callLinkStatus.setHasBadFunctionExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadFunction));
1153         callLinkStatus.setHasBadCacheExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
1154         callLinkStatus.setHasBadExecutableExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadExecutable));
1155     }
1156     
1157 #if DFG_ENABLE(DEBUG_VERBOSE)
1158     dataLog("For call at bc#", m_currentIndex, ": ", callLinkStatus, "\n");
1159 #endif
1160     
1161     if (!callLinkStatus.canOptimize()) {
1162         // Oddly, this conflates calls that haven't executed with calls that behaved sufficiently polymorphically
1163         // that we cannot optimize them.
1164         
1165         addCall(currentInstruction, op);
1166         return;
1167     }
1168     
1169     int argumentCountIncludingThis = currentInstruction[3].u.operand;
1170     int registerOffset = -currentInstruction[4].u.operand;
1171
1172     int resultOperand = currentInstruction[1].u.operand;
1173     unsigned nextOffset = m_currentIndex + OPCODE_LENGTH(op_call);
1174     SpeculatedType prediction = getPrediction();
1175
1176     if (InternalFunction* function = callLinkStatus.internalFunction()) {
1177         if (handleConstantInternalFunction(resultOperand, function, registerOffset, argumentCountIncludingThis, prediction, kind)) {
1178             // This phantoming has to be *after* the code for the intrinsic, to signify that
1179             // the inputs must be kept alive whatever exits the intrinsic may do.
1180             addToGraph(Phantom, callTarget);
1181             emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
1182             return;
1183         }
1184         
1185         // Can only handle this using the generic call handler.
1186         addCall(currentInstruction, op);
1187         return;
1188     }
1189         
1190     Intrinsic intrinsic = callLinkStatus.intrinsicFor(kind);
1191     if (intrinsic != NoIntrinsic) {
1192         emitFunctionChecks(callLinkStatus, callTarget, registerOffset, kind);
1193             
1194         if (handleIntrinsic(resultOperand, intrinsic, registerOffset, argumentCountIncludingThis, prediction)) {
1195             // This phantoming has to be *after* the code for the intrinsic, to signify that
1196             // the inputs must be kept alive whatever exits the intrinsic may do.
1197             addToGraph(Phantom, callTarget);
1198             emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
1199             if (m_graph.compilation())
1200                 m_graph.compilation()->noticeInlinedCall();
1201             return;
1202         }
1203     } else if (handleInlining(callTarget, resultOperand, callLinkStatus, registerOffset, argumentCountIncludingThis, nextOffset, kind)) {
1204         if (m_graph.compilation())
1205             m_graph.compilation()->noticeInlinedCall();
1206         return;
1207     }
1208     
1209     addCall(currentInstruction, op);
1210 }
1211
1212 void ByteCodeParser::emitFunctionChecks(const CallLinkStatus& callLinkStatus, Node* callTarget, int registerOffset, CodeSpecializationKind kind)
1213 {
1214     Node* thisArgument;
1215     if (kind == CodeForCall)
1216         thisArgument = get(registerOffset + argumentToOperand(0));
1217     else
1218         thisArgument = 0;
1219
1220     if (callLinkStatus.isProved()) {
1221         addToGraph(Phantom, callTarget, thisArgument);
1222         return;
1223     }
1224     
1225     ASSERT(callLinkStatus.canOptimize());
1226     
1227     if (JSFunction* function = callLinkStatus.function())
1228         addToGraph(CheckFunction, OpInfo(function), callTarget, thisArgument);
1229     else {
1230         ASSERT(callLinkStatus.structure());
1231         ASSERT(callLinkStatus.executable());
1232         
1233         addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(callLinkStatus.structure())), callTarget);
1234         addToGraph(CheckExecutable, OpInfo(callLinkStatus.executable()), callTarget, thisArgument);
1235     }
1236 }
1237
1238 void ByteCodeParser::emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind kind)
1239 {
1240     for (int i = kind == CodeForCall ? 0 : 1; i < argumentCountIncludingThis; ++i)
1241         addToGraph(Phantom, get(registerOffset + argumentToOperand(i)));
1242 }
1243
1244 bool ByteCodeParser::handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus& callLinkStatus, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind kind)
1245 {
1246     // First, the really simple checks: do we have an actual JS function?
1247     if (!callLinkStatus.executable())
1248         return false;
1249     if (callLinkStatus.executable()->isHostFunction())
1250         return false;
1251     
1252     FunctionExecutable* executable = jsCast<FunctionExecutable*>(callLinkStatus.executable());
1253     
1254     // Does the number of arguments we're passing match the arity of the target? We currently
1255     // inline only if the number of arguments passed is greater than or equal to the number
1256     // of arguments expected.
1257     if (static_cast<int>(executable->parameterCount()) + 1 > argumentCountIncludingThis)
1258         return false;
1259     
1260     // Have we exceeded inline stack depth, or are we trying to inline a recursive call?
1261     // If either of these are detected, then don't inline.
1262     unsigned depth = 0;
1263     for (InlineStackEntry* entry = m_inlineStackTop; entry; entry = entry->m_caller) {
1264         ++depth;
1265         if (depth >= Options::maximumInliningDepth())
1266             return false; // Depth exceeded.
1267         
1268         if (entry->executable() == executable)
1269             return false; // Recursion detected.
1270     }
1271     
1272     // Do we have a code block, and does the code block's size match the heuristics/requirements for
1273     // being an inline candidate? We might not have a code block if code was thrown away or if we
1274     // simply hadn't actually made this call yet. We could still theoretically attempt to inline it
1275     // if we had a static proof of what was being called; this might happen for example if you call a
1276     // global function, where watchpointing gives us static information. Overall, it's a rare case
1277     // because we expect that any hot callees would have already been compiled.
1278     CodeBlock* codeBlock = executable->baselineCodeBlockFor(kind);
1279     if (!codeBlock)
1280         return false;
1281     if (!canInlineFunctionFor(codeBlock, kind, callLinkStatus.isClosureCall()))
1282         return false;
1283     
1284 #if DFG_ENABLE(DEBUG_VERBOSE)
1285     dataLogF("Inlining executable %p.\n", executable);
1286 #endif
1287     
1288     // Now we know without a doubt that we are committed to inlining. So begin the process
1289     // by checking the callee (if necessary) and making sure that arguments and the callee
1290     // are flushed.
1291     emitFunctionChecks(callLinkStatus, callTargetNode, registerOffset, kind);
1292     
1293     // FIXME: Don't flush constants!
1294     
1295     int inlineCallFrameStart = m_inlineStackTop->remapOperand(registerOffset) + JSStack::CallFrameHeaderSize;
1296     
1297     // Make sure that the area used by the call frame is reserved.
1298     for (int arg = operandToLocal(inlineCallFrameStart) + JSStack::CallFrameHeaderSize + codeBlock->m_numVars; arg-- > operandToLocal(inlineCallFrameStart);)
1299         m_preservedVars.set(arg);
1300     
1301     // Make sure that we have enough locals.
1302     unsigned newNumLocals = operandToLocal(inlineCallFrameStart) + JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters;
1303     if (newNumLocals > m_numLocals) {
1304         m_numLocals = newNumLocals;
1305         for (size_t i = 0; i < m_graph.numBlocks(); ++i)
1306             m_graph.block(i)->ensureLocals(newNumLocals);
1307     }
1308     
1309     size_t argumentPositionStart = m_graph.m_argumentPositions.size();
1310
1311     InlineStackEntry inlineStackEntry(
1312         this, codeBlock, codeBlock, m_graph.lastBlock(), callLinkStatus.function(),
1313         (VirtualRegister)m_inlineStackTop->remapOperand(resultOperand),
1314         (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind);
1315     
1316     // This is where the actual inlining really happens.
1317     unsigned oldIndex = m_currentIndex;
1318     m_currentIndex = 0;
1319
1320     addToGraph(InlineStart, OpInfo(argumentPositionStart));
1321     if (callLinkStatus.isClosureCall()) {
1322         addToGraph(SetCallee, callTargetNode);
1323         addToGraph(SetMyScope, addToGraph(GetScope, callTargetNode));
1324     }
1325     
1326     parseCodeBlock();
1327     
1328     m_currentIndex = oldIndex;
1329     
1330     // If the inlined code created some new basic blocks, then we have linking to do.
1331     if (inlineStackEntry.m_callsiteBlockHead != m_graph.lastBlock()) {
1332         
1333         ASSERT(!inlineStackEntry.m_unlinkedBlocks.isEmpty());
1334         if (inlineStackEntry.m_callsiteBlockHeadNeedsLinking)
1335             linkBlock(inlineStackEntry.m_callsiteBlockHead, inlineStackEntry.m_blockLinkingTargets);
1336         else
1337             ASSERT(inlineStackEntry.m_callsiteBlockHead->isLinked);
1338         
1339         // It's possible that the callsite block head is not owned by the caller.
1340         if (!inlineStackEntry.m_caller->m_unlinkedBlocks.isEmpty()) {
1341             // It's definitely owned by the caller, because the caller created new blocks.
1342             // Assert that this all adds up.
1343             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_block == inlineStackEntry.m_callsiteBlockHead);
1344             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking);
1345             inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking = false;
1346         } else {
1347             // It's definitely not owned by the caller. Tell the caller that he does not
1348             // need to link his callsite block head, because we did it for him.
1349             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking);
1350             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHead == inlineStackEntry.m_callsiteBlockHead);
1351             inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking = false;
1352         }
1353         
1354         linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
1355     } else
1356         ASSERT(inlineStackEntry.m_unlinkedBlocks.isEmpty());
1357     
1358     BasicBlock* lastBlock = m_graph.lastBlock();
1359     // If there was a return, but no early returns, then we're done. We allow parsing of
1360     // the caller to continue in whatever basic block we're in right now.
1361     if (!inlineStackEntry.m_didEarlyReturn && inlineStackEntry.m_didReturn) {
1362         ASSERT(lastBlock->isEmpty() || !lastBlock->last()->isTerminal());
1363         
1364         // If we created new blocks then the last block needs linking, but in the
1365         // caller. It doesn't need to be linked to, but it needs outgoing links.
1366         if (!inlineStackEntry.m_unlinkedBlocks.isEmpty()) {
1367 #if DFG_ENABLE(DEBUG_VERBOSE)
1368             dataLogF("Reascribing bytecode index of block %p from bc#%u to bc#%u (inline return case).\n", lastBlock, lastBlock->bytecodeBegin, m_currentIndex);
1369 #endif
1370             // For debugging purposes, set the bytecodeBegin. Note that this doesn't matter
1371             // for release builds because this block will never serve as a potential target
1372             // in the linker's binary search.
1373             lastBlock->bytecodeBegin = m_currentIndex;
1374             m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.lastBlock()));
1375         }
1376         
1377         m_currentBlock = m_graph.lastBlock();
1378         
1379 #if DFG_ENABLE(DEBUG_VERBOSE)
1380         dataLogF("Done inlining executable %p, continuing code generation at epilogue.\n", executable);
1381 #endif
1382         return true;
1383     }
1384     
1385     // If we get to this point then all blocks must end in some sort of terminal.
1386     ASSERT(lastBlock->last()->isTerminal());
1387     
1388
1389     // Need to create a new basic block for the continuation at the caller.
1390     RefPtr<BasicBlock> block = adoptRef(new BasicBlock(nextOffset, m_numArguments, m_numLocals));
1391
1392 #if DFG_ENABLE(DEBUG_VERBOSE)
1393     dataLogF("Creating inline epilogue basic block %p, #%zu for %p bc#%u at inline depth %u.\n", block.get(), m_graph.numBlocks(), m_inlineStackTop->executable(), m_currentIndex, CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()));
1394 #endif
1395
1396     // Link the early returns to the basic block we're about to create.
1397     for (size_t i = 0; i < inlineStackEntry.m_unlinkedBlocks.size(); ++i) {
1398         if (!inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking)
1399             continue;
1400         BasicBlock* blockToLink = inlineStackEntry.m_unlinkedBlocks[i].m_block;
1401         ASSERT(!blockToLink->isLinked);
1402         Node* node = blockToLink->last();
1403         ASSERT(node->op() == Jump);
1404         ASSERT(node->takenBlock() == 0);
1405         node->setTakenBlock(block.get());
1406         inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking = false;
1407 #if !ASSERT_DISABLED
1408         blockToLink->isLinked = true;
1409 #endif
1410     }
1411     
1412     m_currentBlock = block.get();
1413     ASSERT(m_inlineStackTop->m_caller->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_caller->m_blockLinkingTargets.last()->bytecodeBegin < nextOffset);
1414     m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(block.get()));
1415     m_inlineStackTop->m_caller->m_blockLinkingTargets.append(block.get());
1416     m_graph.appendBlock(block);
1417     prepareToParseBlock();
1418     
1419     // At this point we return and continue to generate code for the caller, but
1420     // in the new basic block.
1421 #if DFG_ENABLE(DEBUG_VERBOSE)
1422     dataLogF("Done inlining executable %p, continuing code generation in new block.\n", executable);
1423 #endif
1424     return true;
1425 }
1426
1427 bool ByteCodeParser::handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis)
1428 {
1429     if (argumentCountIncludingThis == 1) { // Math.min()
1430         set(resultOperand, constantNaN());
1431         return true;
1432     }
1433      
1434     if (argumentCountIncludingThis == 2) { // Math.min(x)
1435         Node* result = get(registerOffset + argumentToOperand(1));
1436         addToGraph(Phantom, Edge(result, NumberUse));
1437         set(resultOperand, result);
1438         return true;
1439     }
1440     
1441     if (argumentCountIncludingThis == 3) { // Math.min(x, y)
1442         set(resultOperand, addToGraph(op, get(registerOffset + argumentToOperand(1)), get(registerOffset + argumentToOperand(2))));
1443         return true;
1444     }
1445     
1446     // Don't handle >=3 arguments for now.
1447     return false;
1448 }
1449
1450 // FIXME: We dead-code-eliminate unused Math intrinsics, but that's invalid because
1451 // they need to perform the ToNumber conversion, which can have side-effects.
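     // For example (illustrative):
     //     Math.max({ valueOf: function() { effects++; return 1; } });
     // eliminating the unused Math.max call would also eliminate the observable
     // valueOf() side effect, so plain dead-code elimination is not sound here.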
1452 bool ByteCodeParser::handleIntrinsic(int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction)
1453 {
1454     switch (intrinsic) {
1455     case AbsIntrinsic: {
1456         if (argumentCountIncludingThis == 1) { // Math.abs()
1457             set(resultOperand, constantNaN());
1458             return true;
1459         }
1460
1461         if (!MacroAssembler::supportsFloatingPointAbs())
1462             return false;
1463
1464         Node* node = addToGraph(ArithAbs, get(registerOffset + argumentToOperand(1)));
1465         if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
1466             node->mergeFlags(NodeMayOverflow);
1467         set(resultOperand, node);
1468         return true;
1469     }
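         // Illustrative note (editorial): Math.abs(-2147483648) is 2147483648, which does
         // not fit in int32; that is the overflow the exit-profile check above accounts for.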
1470
1471     case MinIntrinsic:
1472         return handleMinMax(resultOperand, ArithMin, registerOffset, argumentCountIncludingThis);
1473         
1474     case MaxIntrinsic:
1475         return handleMinMax(resultOperand, ArithMax, registerOffset, argumentCountIncludingThis);
1476         
1477     case SqrtIntrinsic: {
1478         if (argumentCountIncludingThis == 1) { // Math.sqrt()
1479             set(resultOperand, constantNaN());
1480             return true;
1481         }
1482         
1483         if (!MacroAssembler::supportsFloatingPointSqrt())
1484             return false;
1485
1486         set(resultOperand, addToGraph(ArithSqrt, get(registerOffset + argumentToOperand(1))));
1487         return true;
1488     }
1489         
1490     case ArrayPushIntrinsic: {
1491         if (argumentCountIncludingThis != 2)
1492             return false;
1493         
1494         ArrayMode arrayMode = getArrayMode(m_currentInstruction[6].u.arrayProfile);
1495         if (!arrayMode.isJSArray())
1496             return false;
1497         switch (arrayMode.type()) {
1498         case Array::Undecided:
1499         case Array::Int32:
1500         case Array::Double:
1501         case Array::Contiguous:
1502         case Array::ArrayStorage: {
1503             Node* arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1504             set(resultOperand, arrayPush);
1505             
1506             return true;
1507         }
1508             
1509         default:
1510             return false;
1511         }
1512     }
1513         
1514     case ArrayPopIntrinsic: {
1515         if (argumentCountIncludingThis != 1)
1516             return false;
1517         
1518         ArrayMode arrayMode = getArrayMode(m_currentInstruction[6].u.arrayProfile);
1519         if (!arrayMode.isJSArray())
1520             return false;
1521         switch (arrayMode.type()) {
1522         case Array::Int32:
1523         case Array::Double:
1524         case Array::Contiguous:
1525         case Array::ArrayStorage: {
1526             Node* arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)));
1527             set(resultOperand, arrayPop);
1528             return true;
1529         }
1530             
1531         default:
1532             return false;
1533         }
1534     }
1535
1536     case CharCodeAtIntrinsic: {
1537         if (argumentCountIncludingThis != 2)
1538             return false;
1539
1540         int thisOperand = registerOffset + argumentToOperand(0);
1541         int indexOperand = registerOffset + argumentToOperand(1);
1542         Node* charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
1543
1544         set(resultOperand, charCode);
1545         return true;
1546     }
1547
1548     case CharAtIntrinsic: {
1549         if (argumentCountIncludingThis != 2)
1550             return false;
1551
1552         int thisOperand = registerOffset + argumentToOperand(0);
1553         int indexOperand = registerOffset + argumentToOperand(1);
1554         Node* charCode = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
1555
1556         set(resultOperand, charCode);
1557         return true;
1558     }
1559     case FromCharCodeIntrinsic: {
1560         if (argumentCountIncludingThis != 2)
1561             return false;
1562
1563         int indexOperand = registerOffset + argumentToOperand(1);
1564         Node* charCode = addToGraph(StringFromCharCode, getToInt32(indexOperand));
1565
1566         set(resultOperand, charCode);
1567
1568         return true;
1569     }
1570
1571     case RegExpExecIntrinsic: {
1572         if (argumentCountIncludingThis != 2)
1573             return false;
1574         
1575         Node* regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1576         set(resultOperand, regExpExec);
1577         
1578         return true;
1579     }
1580         
1581     case RegExpTestIntrinsic: {
1582         if (argumentCountIncludingThis != 2)
1583             return false;
1584         
1585         Node* regExpExec = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1586         set(resultOperand, regExpExec);
1587         
1588         return true;
1589     }
1590
1591     case IMulIntrinsic: {
1592         if (argumentCountIncludingThis != 3)
1593             return false;
1594         int leftOperand = registerOffset + argumentToOperand(1);
1595         int rightOperand = registerOffset + argumentToOperand(2);
1596         Node* left = getToInt32(leftOperand);
1597         Node* right = getToInt32(rightOperand);
1598         set(resultOperand, addToGraph(ArithIMul, left, right));
1599         return true;
1600     }
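         // Illustrative note (editorial): Math.imul truncates both operands to int32 first,
         // e.g. Math.imul(0xffffffff, 5) === -5, which is why both children are built with
         // getToInt32() above.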
1601         
1602     default:
1603         return false;
1604     }
1605 }
1606
1607 bool ByteCodeParser::handleTypedArrayConstructor(
1608     int resultOperand, InternalFunction* function, int registerOffset,
1609     int argumentCountIncludingThis, TypedArrayType type)
1610 {
1611     if (!isTypedView(type))
1612         return false;
1613     
1614     if (function->classInfo() != constructorClassInfoForType(type))
1615         return false;
1616     
1617     if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
1618         return false;
1619     
1620     // We only have an intrinsic for the case where you say:
1621     //
1622     // new FooArray(blah);
1623     //
1624     // Of course, 'blah' could be any of the following:
1625     //
1626     // - Integer, indicating that you want to allocate an array of that length.
1627     //   This is the thing we're hoping for, and what we can actually do meaningful
1628     //   optimizations for.
1629     //
1630     // - Array buffer, indicating that you want to create a view onto that _entire_
1631     //   buffer.
1632     //
1633     // - Non-buffer object, indicating that you want to create a copy of that
1634     //   object by pretending that it quacks like an array.
1635     //
1636     // - Anything else, indicating that you want to have an exception thrown at
1637     //   you.
1638     //
1639     // The intrinsic, NewTypedArray, will behave as if it could do any of these
1640     // things up until we do Fixup. Thereafter, if child1 (i.e. 'blah') is
1641     // predicted Int32, then we lock it in as a normal typed array allocation.
1642     // Otherwise, NewTypedArray turns into a totally opaque function call that
1643     // may clobber the world, since it may access properties on what could
1644     // be an object.
1645     //
1646     // Note that although the generic form of NewTypedArray sounds sort of awful,
1647     // it is actually quite likely to be more efficient than a fully generic
1648     // Construct. So, we might want to think about making NewTypedArray variadic,
1649     // or else making Construct not super slow.
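         //
         // Illustrative example (editorial): new Int32Array(1000) is the case we hope to
         // lock in as a real typed array allocation once the argument is predicted Int32,
         // while new Int32Array(someArrayLikeObject) stays on the opaque, generic path.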
1650     
1651     if (argumentCountIncludingThis != 2)
1652         return false;
1653     
1654     set(resultOperand,
1655         addToGraph(NewTypedArray, OpInfo(type), get(registerOffset + argumentToOperand(1))));
1656     return true;
1657 }
1658
1659 bool ByteCodeParser::handleConstantInternalFunction(
1660     int resultOperand, InternalFunction* function, int registerOffset,
1661     int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind kind)
1662 {
1663     // If we ever find that we have a lot of internal functions that we specialize for,
1664     // then we should probably have some sort of hashtable dispatch, or maybe even
1665     // dispatch straight through the MethodTable of the InternalFunction. But for now,
1666     // it seems that this case is hit infrequently enough, and the number of functions
1667     // we know about is small enough, that having just a linear cascade of if statements
1668     // is good enough.
1669     
1670     UNUSED_PARAM(prediction); // Remove this once we do more things.
1671     
1672     if (function->classInfo() == ArrayConstructor::info()) {
1673         if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
1674             return false;
1675         
1676         if (argumentCountIncludingThis == 2) {
1677             set(resultOperand,
1678                 addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(registerOffset + argumentToOperand(1))));
1679             return true;
1680         }
1681         
1682         for (int i = 1; i < argumentCountIncludingThis; ++i)
1683             addVarArgChild(get(registerOffset + argumentToOperand(i)));
1684         set(resultOperand,
1685             addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
1686         return true;
1687     }
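         // Illustrative note (editorial): new Array(100) takes the NewArrayWithSize path
         // above, while new Array(1, 2, 3) builds a variadic NewArray from its arguments.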
1688     
1689     if (function->classInfo() == StringConstructor::info()) {
1690         Node* result;
1691         
1692         if (argumentCountIncludingThis <= 1)
1693             result = cellConstant(m_vm->smallStrings.emptyString());
1694         else
1695             result = addToGraph(ToString, get(registerOffset + argumentToOperand(1)));
1696         
1697         if (kind == CodeForConstruct)
1698             result = addToGraph(NewStringObject, OpInfo(function->globalObject()->stringObjectStructure()), result);
1699         
1700         set(resultOperand, result);
1701         return true;
1702     }
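         // Illustrative note (editorial): String(x) becomes a ToString of the argument,
         // new String(x) additionally wraps that result in a NewStringObject, and a
         // zero-argument call starts from the empty-string constant.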
1703     
1704     for (unsigned typeIndex = 0; typeIndex < NUMBER_OF_TYPED_ARRAY_TYPES; ++typeIndex) {
1705         bool result = handleTypedArrayConstructor(
1706             resultOperand, function, registerOffset, argumentCountIncludingThis,
1707             indexToTypedArrayType(typeIndex));
1708         if (result)
1709             return true;
1710     }
1711     
1712     return false;
1713 }
1714
1715 Node* ByteCodeParser::handleGetByOffset(SpeculatedType prediction, Node* base, unsigned identifierNumber, PropertyOffset offset)
1716 {
1717     Node* propertyStorage;
1718     if (isInlineOffset(offset))
1719         propertyStorage = base;
1720     else
1721         propertyStorage = addToGraph(GetButterfly, base);
1722     Node* getByOffset = addToGraph(GetByOffset, OpInfo(m_graph.m_storageAccessData.size()), OpInfo(prediction), propertyStorage, base);
1723
1724     StorageAccessData storageAccessData;
1725     storageAccessData.offset = offset;
1726     storageAccessData.identifierNumber = identifierNumber;
1727     m_graph.m_storageAccessData.append(storageAccessData);
1728
1729     return getByOffset;
1730 }
1731
1732 void ByteCodeParser::handleGetByOffset(
1733     int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
1734     PropertyOffset offset)
1735 {
1736     set(destinationOperand, handleGetByOffset(prediction, base, identifierNumber, offset));
1737 }
1738
1739 Node* ByteCodeParser::handlePutByOffset(Node* base, unsigned identifier, PropertyOffset offset, Node* value)
1740 {
1741     Node* propertyStorage;
1742     if (isInlineOffset(offset))
1743         propertyStorage = base;
1744     else
1745         propertyStorage = addToGraph(GetButterfly, base);
1746     Node* result = addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, base, value);
1747     
1748     StorageAccessData storageAccessData;
1749     storageAccessData.offset = offset;
1750     storageAccessData.identifierNumber = identifier;
1751     m_graph.m_storageAccessData.append(storageAccessData);
1752
1753     return result;
1754 }
1755
1756 void ByteCodeParser::handleGetById(
1757     int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
1758     const GetByIdStatus& getByIdStatus)
1759 {
1760     if (!getByIdStatus.isSimple()
1761         || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
1762         || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache)) {
1763         set(destinationOperand,
1764             addToGraph(
1765                 getByIdStatus.makesCalls() ? GetByIdFlush : GetById,
1766                 OpInfo(identifierNumber), OpInfo(prediction), base));
1767         return;
1768     }
1769     
1770     ASSERT(getByIdStatus.structureSet().size());
1771                 
1772     // The implementation of GetByOffset does not know to terminate speculative
1773     // execution if it doesn't have a prediction, so we do it manually.
1774     if (prediction == SpecNone)
1775         addToGraph(ForceOSRExit);
1776     else if (m_graph.compilation())
1777         m_graph.compilation()->noticeInlinedGetById();
1778     
1779     Node* originalBaseForBaselineJIT = base;
1780                 
1781     addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(getByIdStatus.structureSet())), base);
1782     
1783     if (getByIdStatus.chain()) {
1784         m_graph.chains().addLazily(getByIdStatus.chain());
1785         Structure* currentStructure = getByIdStatus.structureSet().singletonStructure();
1786         JSObject* currentObject = 0;
1787         for (unsigned i = 0; i < getByIdStatus.chain()->size(); ++i) {
1788             currentObject = asObject(currentStructure->prototypeForLookup(m_inlineStackTop->m_codeBlock));
1789             currentStructure = getByIdStatus.chain()->at(i);
1790             base = cellConstantWithStructureCheck(currentObject, currentStructure);
1791         }
1792     }
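         // Illustrative note (editorial): for an access like o.x where x lives on one of
         // o's prototypes, the loop above plants a structure check on each prototype in
         // the chain and leaves 'base' pointing at the object that actually holds the
         // property, so the offset load below reads from the right place.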
1793     
1794     // Unless we want bugs like https://bugs.webkit.org/show_bug.cgi?id=88783, we need to
1795     // ensure that the base of the original get_by_id is kept alive until we're done with
1796     // all of the speculations. We only insert the Phantom if there had been a CheckStructure
1797     // on something other than the base following the CheckStructure on base, or if the
1798     // access was compiled to a WeakJSConstant specific value, in which case we might not
1799     // have any explicit use of the base at all.
1800     if (getByIdStatus.specificValue() || originalBaseForBaselineJIT != base)
1801         addToGraph(Phantom, originalBaseForBaselineJIT);
1802     
1803     if (getByIdStatus.specificValue()) {
1804         ASSERT(getByIdStatus.specificValue().isCell());
1805         
1806         set(destinationOperand, cellConstant(getByIdStatus.specificValue().asCell()));
1807         return;
1808     }
1809     
1810     handleGetByOffset(
1811         destinationOperand, prediction, base, identifierNumber, getByIdStatus.offset());
1812 }
1813
1814 void ByteCodeParser::prepareToParseBlock()
1815 {
1816     for (unsigned i = 0; i < m_constants.size(); ++i)
1817         m_constants[i] = ConstantRecord();
1818     m_cellConstantNodes.clear();
1819 }
1820
1821 Node* ByteCodeParser::getScope(bool skipTop, unsigned skipCount)
1822 {
1823     Node* localBase;
1824     if (inlineCallFrame() && !inlineCallFrame()->isClosureCall()) {
1825         ASSERT(inlineCallFrame()->callee);
1826         localBase = cellConstant(inlineCallFrame()->callee->scope());
1827     } else
1828         localBase = addToGraph(GetMyScope);
1829     if (skipTop) {
1830         ASSERT(!inlineCallFrame());
1831         localBase = addToGraph(SkipTopScope, localBase);
1832     }
1833     for (unsigned n = skipCount; n--;)
1834         localBase = addToGraph(SkipScope, localBase);
1835     return localBase;
1836 }
1837
1838 bool ByteCodeParser::parseBlock(unsigned limit)
1839 {
1840     bool shouldContinueParsing = true;
1841
1842     Interpreter* interpreter = m_vm->interpreter;
1843     Instruction* instructionsBegin = m_inlineStackTop->m_codeBlock->instructions().begin();
1844     unsigned blockBegin = m_currentIndex;
1845     
1846     // If we are the first basic block, introduce markers for arguments. This allows
1847     // us to track if a use of an argument may use the actual argument passed, as
1848     // opposed to using a value we set explicitly.
1849     if (m_currentBlock == m_graph.block(0) && !inlineCallFrame()) {
1850         m_graph.m_arguments.resize(m_numArguments);
1851         for (unsigned argument = 0; argument < m_numArguments; ++argument) {
1852             VariableAccessData* variable = newVariableAccessData(
1853                 argumentToOperand(argument), m_codeBlock->isCaptured(argumentToOperand(argument)));
1854             variable->mergeStructureCheckHoistingFailed(
1855                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
1856             variable->mergeCheckArrayHoistingFailed(
1857                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
1858             
1859             Node* setArgument = addToGraph(SetArgument, OpInfo(variable));
1860             m_graph.m_arguments[argument] = setArgument;
1861             m_currentBlock->variablesAtTail.setArgumentFirstTime(argument, setArgument);
1862         }
1863     }
1864
1865     while (true) {
1866         // Don't extend over jump destinations.
1867         if (m_currentIndex == limit) {
1868             // Ordinarily we want to plant a jump. But refuse to do this if the block is
1869             // empty. This is a special case for inlining, which might otherwise create
1870             // some empty blocks in some cases. When parseBlock() returns with an empty
1871             // block, it will get repurposed instead of creating a new one. Note that this
1872             // logic relies on every bytecode resulting in one or more nodes, which would
1873             // be true anyway except for op_loop_hint, which emits a Phantom to force this
1874             // to be true.
1875             if (!m_currentBlock->isEmpty())
1876                 addToGraph(Jump, OpInfo(m_currentIndex));
1877             else {
1878 #if DFG_ENABLE(DEBUG_VERBOSE)
1879                 dataLogF("Refusing to plant jump at limit %u because block %p is empty.\n", limit, m_currentBlock);
1880 #endif
1881             }
1882             return shouldContinueParsing;
1883         }
1884         
1885         // Switch on the current bytecode opcode.
1886         Instruction* currentInstruction = instructionsBegin + m_currentIndex;
1887         m_currentInstruction = currentInstruction; // Some methods want to use this, and we'd rather not thread it through calls.
1888         OpcodeID opcodeID = interpreter->getOpcodeID(currentInstruction->u.opcode);
1889         
1890         if (m_graph.compilation()) {
1891             addToGraph(CountExecution, OpInfo(m_graph.compilation()->executionCounterFor(
1892                 Profiler::OriginStack(*m_vm->m_perBytecodeProfiler, m_codeBlock, currentCodeOrigin()))));
1893         }
1894         
1895         switch (opcodeID) {
1896
1897         // === Function entry opcodes ===
1898
1899         case op_enter:
1900             // Initialize all locals to undefined.
1901             for (int i = 0; i < m_inlineStackTop->m_codeBlock->m_numVars; ++i)
1902                 set(localToOperand(i), constantUndefined(), SetOnEntry);
1903             NEXT_OPCODE(op_enter);
1904
1905         case op_to_this: {
1906             Node* op1 = getThis();
1907             if (op1->op() != ToThis) {
1908                 Structure* cachedStructure = currentInstruction[2].u.structure.get();
1909                 if (!cachedStructure
1910                     || cachedStructure->classInfo()->methodTable.toThis != JSObject::info()->methodTable.toThis
1911                     || m_inlineStackTop->m_profiledBlock->couldTakeSlowCase(m_currentIndex)
1912                     || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)) {
1913                     setThis(addToGraph(ToThis, op1));
1914                 } else {
1915                     addToGraph(
1916                         CheckStructure,
1917                         OpInfo(m_graph.addStructureSet(cachedStructure)),
1918                         op1);
1919                 }
1920             }
1921             NEXT_OPCODE(op_to_this);
1922         }
1923
1924         case op_create_this: {
1925             int calleeOperand = currentInstruction[2].u.operand;
1926             Node* callee = get(calleeOperand);
1927             bool alreadyEmitted = false;
1928             if (callee->op() == WeakJSConstant) {
1929                 JSCell* cell = callee->weakConstant();
1930                 ASSERT(cell->inherits(JSFunction::info()));
1931                 
1932                 JSFunction* function = jsCast<JSFunction*>(cell);
1933                 ObjectAllocationProfile* allocationProfile = function->tryGetAllocationProfile();
1934                 if (allocationProfile) {
1935                     addToGraph(AllocationProfileWatchpoint, OpInfo(function));
1936                     // The callee is still live up to this point.
1937                     addToGraph(Phantom, callee);
1938                     set(currentInstruction[1].u.operand,
1939                         addToGraph(NewObject, OpInfo(allocationProfile->structure())));
1940                     alreadyEmitted = true;
1941                 }
1942             }
1943             if (!alreadyEmitted)
1944                 set(currentInstruction[1].u.operand,
1945                     addToGraph(CreateThis, OpInfo(currentInstruction[3].u.operand), callee));
1946             NEXT_OPCODE(op_create_this);
1947         }
1948
1949         case op_new_object: {
1950             set(currentInstruction[1].u.operand,
1951                 addToGraph(NewObject,
1952                     OpInfo(currentInstruction[3].u.objectAllocationProfile->structure())));
1953             NEXT_OPCODE(op_new_object);
1954         }
1955             
1956         case op_new_array: {
1957             int startOperand = currentInstruction[2].u.operand;
1958             int numOperands = currentInstruction[3].u.operand;
1959             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
1960             for (int operandIdx = startOperand; operandIdx > startOperand - numOperands; --operandIdx)
1961                 addVarArgChild(get(operandIdx));
1962             set(currentInstruction[1].u.operand, addToGraph(Node::VarArg, NewArray, OpInfo(profile->selectIndexingType()), OpInfo(0)));
1963             NEXT_OPCODE(op_new_array);
1964         }
1965             
1966         case op_new_array_with_size: {
1967             int lengthOperand = currentInstruction[2].u.operand;
1968             ArrayAllocationProfile* profile = currentInstruction[3].u.arrayAllocationProfile;
1969             set(currentInstruction[1].u.operand, addToGraph(NewArrayWithSize, OpInfo(profile->selectIndexingType()), get(lengthOperand)));
1970             NEXT_OPCODE(op_new_array_with_size);
1971         }
1972             
1973         case op_new_array_buffer: {
1974             int startConstant = currentInstruction[2].u.operand;
1975             int numConstants = currentInstruction[3].u.operand;
1976             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
1977             NewArrayBufferData data;
1978             data.startConstant = m_inlineStackTop->m_constantBufferRemap[startConstant];
1979             data.numConstants = numConstants;
1980             data.indexingType = profile->selectIndexingType();
1981
1982             // If this statement has never executed, we'll have the wrong indexing type in the profile.
1983             for (int i = 0; i < numConstants; ++i) {
1984                 data.indexingType =
1985                     leastUpperBoundOfIndexingTypeAndValue(
1986                         data.indexingType,
1987                         m_codeBlock->constantBuffer(data.startConstant)[i]);
1988             }
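                 // Illustrative note (editorial): if the constant buffer holds [1, 2.5] but
                 // the profile has only ever seen int32 values, the loop above widens
                 // data.indexingType so the new array can actually store the double 2.5.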
1989             
1990             m_graph.m_newArrayBufferData.append(data);
1991             set(currentInstruction[1].u.operand, addToGraph(NewArrayBuffer, OpInfo(&m_graph.m_newArrayBufferData.last())));
1992             NEXT_OPCODE(op_new_array_buffer);
1993         }
1994             
1995         case op_new_regexp: {
1996             set(currentInstruction[1].u.operand, addToGraph(NewRegexp, OpInfo(currentInstruction[2].u.operand)));
1997             NEXT_OPCODE(op_new_regexp);
1998         }
1999             
2000         case op_get_callee: {
2001             JSCell* cachedFunction = currentInstruction[2].u.jsCell.get();
2002             if (!cachedFunction 
2003                 || m_inlineStackTop->m_profiledBlock->couldTakeSlowCase(m_currentIndex)
2004                 || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadFunction)) {
2005                 set(currentInstruction[1].u.operand, get(JSStack::Callee));
2006             } else {
2007                 ASSERT(cachedFunction->inherits(JSFunction::info()));
2008                 Node* actualCallee = get(JSStack::Callee);
2009                 addToGraph(CheckFunction, OpInfo(cachedFunction), actualCallee);
2010                 set(currentInstruction[1].u.operand, addToGraph(WeakJSConstant, OpInfo(cachedFunction)));
2011             }
2012             NEXT_OPCODE(op_get_callee);
2013         }
2014
2015         // === Bitwise operations ===
2016
2017         case op_bitand: {
2018             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2019             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2020             set(currentInstruction[1].u.operand, addToGraph(BitAnd, op1, op2));
2021             NEXT_OPCODE(op_bitand);
2022         }
2023
2024         case op_bitor: {
2025             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2026             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2027             set(currentInstruction[1].u.operand, addToGraph(BitOr, op1, op2));
2028             NEXT_OPCODE(op_bitor);
2029         }
2030
2031         case op_bitxor: {
2032             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2033             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2034             set(currentInstruction[1].u.operand, addToGraph(BitXor, op1, op2));
2035             NEXT_OPCODE(op_bitxor);
2036         }
2037
2038         case op_rshift: {
2039             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2040             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2041             Node* result;
2042             // Optimize out shifts by zero.
2043             if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2044                 result = op1;
2045             else
2046                 result = addToGraph(BitRShift, op1, op2);
2047             set(currentInstruction[1].u.operand, result);
2048             NEXT_OPCODE(op_rshift);
2049         }
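             // Illustrative note (editorial): shift counts are taken mod 32 in JS, so both
             // x >> 0 and x >> 32 reduce to the already-int32-converted left operand, which
             // is exactly what the constant-folding branch above produces.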
2050
2051         case op_lshift: {
2052             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2053             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2054             Node* result;
2055             // Optimize out shifts by zero.
2056             if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2057                 result = op1;
2058             else
2059                 result = addToGraph(BitLShift, op1, op2);
2060             set(currentInstruction[1].u.operand, result);
2061             NEXT_OPCODE(op_lshift);
2062         }
2063
2064         case op_urshift: {
2065             Node* op1 = getToInt32(currentInstruction[2].u.operand);
2066             Node* op2 = getToInt32(currentInstruction[3].u.operand);
2067             Node* result;
2068             // The result of a zero-extending right shift is treated as an unsigned value.
2069             // This means that if the top bit is set, the result is not in the int32 range,
2070             // and as such must be stored as a double. If the shift amount is a constant,
2071             // we may be able to optimize.
2072             if (isInt32Constant(op2)) {
2073                 // If we know we are shifting by a non-zero amount, then since the operation
2074                 // zero fills we know the top bit of the result must be zero, and as such the
2075                 // result must be within the int32 range. Conversely, if this is a shift by
2076                 // zero, then the result may be changed by the conversion to unsigned, but it
2077                 // is not necessary to perform the shift!
2078                 if (valueOfInt32Constant(op2) & 0x1f)
2079                     result = addToGraph(BitURShift, op1, op2);
2080                 else
2081                     result = makeSafe(addToGraph(UInt32ToNumber, op1));
2082             } else {
2083                 // Cannot optimize at this stage; shift & potentially rebox as a double.
2084                 result = addToGraph(BitURShift, op1, op2);
2085                 result = makeSafe(addToGraph(UInt32ToNumber, result));
2086             }
2087             set(currentInstruction[1].u.operand, result);
2088             NEXT_OPCODE(op_urshift);
2089         }
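             // Illustrative note (editorial): the zero-shift case really can leave the
             // int32 range, e.g. (-1) >>> 0 === 4294967295, which is why it still goes
             // through UInt32ToNumber rather than being folded away like the signed shifts.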
2090
2091         // === Increment/Decrement opcodes ===
2092
2093         case op_inc: {
2094             int srcDst = currentInstruction[1].u.operand;
2095             Node* op = get(srcDst);
2096             set(srcDst, makeSafe(addToGraph(ArithAdd, op, one())));
2097             NEXT_OPCODE(op_inc);
2098         }
2099
2100         case op_dec: {
2101             int srcDst = currentInstruction[1].u.operand;
2102             Node* op = get(srcDst);
2103             set(srcDst, makeSafe(addToGraph(ArithSub, op, one())));
2104             NEXT_OPCODE(op_dec);
2105         }
2106
2107         // === Arithmetic operations ===
2108
2109         case op_add: {
2110             Node* op1 = get(currentInstruction[2].u.operand);
2111             Node* op2 = get(currentInstruction[3].u.operand);
2112             if (op1->hasNumberResult() && op2->hasNumberResult())
2113                 set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithAdd, op1, op2)));
2114             else
2115                 set(currentInstruction[1].u.operand, makeSafe(addToGraph(ValueAdd, op1, op2)));
2116             NEXT_OPCODE(op_add);
2117         }
2118
2119         case op_sub: {
2120             Node* op1 = get(currentInstruction[2].u.operand);
2121             Node* op2 = get(currentInstruction[3].u.operand);
2122             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithSub, op1, op2)));
2123             NEXT_OPCODE(op_sub);
2124         }
2125
2126         case op_negate: {
2127             Node* op1 = get(currentInstruction[2].u.operand);
2128             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithNegate, op1)));
2129             NEXT_OPCODE(op_negate);
2130         }
2131
2132         case op_mul: {
2133             // Multiply requires that the inputs are not truncated, unfortunately.
2134             Node* op1 = get(currentInstruction[2].u.operand);
2135             Node* op2 = get(currentInstruction[3].u.operand);
2136             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMul, op1, op2)));
2137             NEXT_OPCODE(op_mul);
2138         }
2139
2140         case op_mod: {
2141             Node* op1 = get(currentInstruction[2].u.operand);
2142             Node* op2 = get(currentInstruction[3].u.operand);
2143             set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMod, op1, op2)));
2144             NEXT_OPCODE(op_mod);
2145         }
2146
2147         case op_div: {
2148             Node* op1 = get(currentInstruction[2].u.operand);
2149             Node* op2 = get(currentInstruction[3].u.operand);
2150             set(currentInstruction[1].u.operand, makeDivSafe(addToGraph(ArithDiv, op1, op2)));
2151             NEXT_OPCODE(op_div);
2152         }
2153
2154         // === Misc operations ===
2155
2156 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2157         case op_debug:
2158             addToGraph(Breakpoint);
2159             NEXT_OPCODE(op_debug);
2160 #endif
2161         case op_mov: {
2162             Node* op = get(currentInstruction[2].u.operand);
2163             set(currentInstruction[1].u.operand, op);
2164             NEXT_OPCODE(op_mov);
2165         }
2166
2167         case op_check_has_instance:
2168             addToGraph(CheckHasInstance, get(currentInstruction[3].u.operand));
2169             NEXT_OPCODE(op_check_has_instance);
2170
2171         case op_instanceof: {
2172             Node* value = get(currentInstruction[2].u.operand);
2173             Node* prototype = get(currentInstruction[3].u.operand);
2174             set(currentInstruction[1].u.operand, addToGraph(InstanceOf, value, prototype));
2175             NEXT_OPCODE(op_instanceof);
2176         }
2177             
2178         case op_is_undefined: {
2179             Node* value = get(currentInstruction[2].u.operand);
2180             set(currentInstruction[1].u.operand, addToGraph(IsUndefined, value));
2181             NEXT_OPCODE(op_is_undefined);
2182         }
2183
2184         case op_is_boolean: {
2185             Node* value = get(currentInstruction[2].u.operand);
2186             set(currentInstruction[1].u.operand, addToGraph(IsBoolean, value));
2187             NEXT_OPCODE(op_is_boolean);
2188         }
2189
2190         case op_is_number: {
2191             Node* value = get(currentInstruction[2].u.operand);
2192             set(currentInstruction[1].u.operand, addToGraph(IsNumber, value));
2193             NEXT_OPCODE(op_is_number);
2194         }
2195
2196         case op_is_string: {
2197             Node* value = get(currentInstruction[2].u.operand);
2198             set(currentInstruction[1].u.operand, addToGraph(IsString, value));
2199             NEXT_OPCODE(op_is_string);
2200         }
2201
2202         case op_is_object: {
2203             Node* value = get(currentInstruction[2].u.operand);
2204             set(currentInstruction[1].u.operand, addToGraph(IsObject, value));
2205             NEXT_OPCODE(op_is_object);
2206         }
2207
2208         case op_is_function: {
2209             Node* value = get(currentInstruction[2].u.operand);
2210             set(currentInstruction[1].u.operand, addToGraph(IsFunction, value));
2211             NEXT_OPCODE(op_is_function);
2212         }
2213
2214         case op_not: {
2215             Node* value = get(currentInstruction[2].u.operand);
2216             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, value));
2217             NEXT_OPCODE(op_not);
2218         }
2219             
2220         case op_to_primitive: {
2221             Node* value = get(currentInstruction[2].u.operand);
2222             set(currentInstruction[1].u.operand, addToGraph(ToPrimitive, value));
2223             NEXT_OPCODE(op_to_primitive);
2224         }
2225             
2226         case op_strcat: {
2227             int startOperand = currentInstruction[2].u.operand;
2228             int numOperands = currentInstruction[3].u.operand;
2229 #if CPU(X86)
2230             // X86 doesn't have enough registers to compile MakeRope with three arguments.
2231             // Rather than try to be clever, we just make MakeRope dumber on this processor.
2232             const unsigned maxRopeArguments = 2;
2233 #else
2234             const unsigned maxRopeArguments = 3;
2235 #endif
2236             auto toStringNodes = std::make_unique<Node*[]>(numOperands);
2237             for (int i = 0; i < numOperands; i++)
2238                 toStringNodes[i] = addToGraph(ToString, get(startOperand - i));
2239
2240             for (int i = 0; i < numOperands; i++)
2241                 addToGraph(Phantom, toStringNodes[i]);
2242
2243             Node* operands[AdjacencyList::Size];
2244             unsigned indexInOperands = 0;
2245             for (unsigned i = 0; i < AdjacencyList::Size; ++i)
2246                 operands[i] = 0;
2247             for (int operandIdx = 0; operandIdx < numOperands; ++operandIdx) {
2248                 if (indexInOperands == maxRopeArguments) {
2249                     operands[0] = addToGraph(MakeRope, operands[0], operands[1], operands[2]);
2250                     for (unsigned i = 1; i < AdjacencyList::Size; ++i)
2251                         operands[i] = 0;
2252                     indexInOperands = 1;
2253                 }
2254                 
2255                 ASSERT(indexInOperands < AdjacencyList::Size);
2256                 ASSERT(indexInOperands < maxRopeArguments);
2257                 operands[indexInOperands++] = toStringNodes[operandIdx];
2258             }
2259             set(currentInstruction[1].u.operand,
2260                 addToGraph(MakeRope, operands[0], operands[1], operands[2]));
2261             NEXT_OPCODE(op_strcat);
2262         }
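             // Illustrative note (editorial): with five operands and maxRopeArguments == 3,
             // the loop above folds the first three strings into one rope and the final
             // node is MakeRope(MakeRope(s0, s1, s2), s3, s4).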
2263
2264         case op_less: {
2265             Node* op1 = get(currentInstruction[2].u.operand);
2266             Node* op2 = get(currentInstruction[3].u.operand);
2267             if (canFold(op1) && canFold(op2)) {
2268                 JSValue a = valueOfJSConstant(op1);
2269                 JSValue b = valueOfJSConstant(op2);
2270                 if (a.isNumber() && b.isNumber()) {
2271                     set(currentInstruction[1].u.operand,
2272                         getJSConstantForValue(jsBoolean(a.asNumber() < b.asNumber())));
2273                     NEXT_OPCODE(op_less);
2274                 }
2275             }
2276             set(currentInstruction[1].u.operand, addToGraph(CompareLess, op1, op2));
2277             NEXT_OPCODE(op_less);
2278         }
2279
2280         case op_lesseq: {
2281             Node* op1 = get(currentInstruction[2].u.operand);
2282             Node* op2 = get(currentInstruction[3].u.operand);
2283             if (canFold(op1) && canFold(op2)) {
2284                 JSValue a = valueOfJSConstant(op1);
2285                 JSValue b = valueOfJSConstant(op2);
2286                 if (a.isNumber() && b.isNumber()) {
2287                     set(currentInstruction[1].u.operand,
2288                         getJSConstantForValue(jsBoolean(a.asNumber() <= b.asNumber())));
2289                     NEXT_OPCODE(op_lesseq);
2290                 }
2291             }
2292             set(currentInstruction[1].u.operand, addToGraph(CompareLessEq, op1, op2));
2293             NEXT_OPCODE(op_lesseq);
2294         }
2295
2296         case op_greater: {
2297             Node* op1 = get(currentInstruction[2].u.operand);
2298             Node* op2 = get(currentInstruction[3].u.operand);
2299             if (canFold(op1) && canFold(op2)) {
2300                 JSValue a = valueOfJSConstant(op1);
2301                 JSValue b = valueOfJSConstant(op2);
2302                 if (a.isNumber() && b.isNumber()) {
2303                     set(currentInstruction[1].u.operand,
2304                         getJSConstantForValue(jsBoolean(a.asNumber() > b.asNumber())));
2305                     NEXT_OPCODE(op_greater);
2306                 }
2307             }
2308             set(currentInstruction[1].u.operand, addToGraph(CompareGreater, op1, op2));
2309             NEXT_OPCODE(op_greater);
2310         }
2311
2312         case op_greatereq: {
2313             Node* op1 = get(currentInstruction[2].u.operand);
2314             Node* op2 = get(currentInstruction[3].u.operand);
2315             if (canFold(op1) && canFold(op2)) {
2316                 JSValue a = valueOfJSConstant(op1);
2317                 JSValue b = valueOfJSConstant(op2);
2318                 if (a.isNumber() && b.isNumber()) {
2319                     set(currentInstruction[1].u.operand,
2320                         getJSConstantForValue(jsBoolean(a.asNumber() >= b.asNumber())));
2321                     NEXT_OPCODE(op_greatereq);
2322                 }
2323             }
2324             set(currentInstruction[1].u.operand, addToGraph(CompareGreaterEq, op1, op2));
2325             NEXT_OPCODE(op_greatereq);
2326         }
2327
2328         case op_eq: {
2329             Node* op1 = get(currentInstruction[2].u.operand);
2330             Node* op2 = get(currentInstruction[3].u.operand);
2331             if (canFold(op1) && canFold(op2)) {
2332                 JSValue a = valueOfJSConstant(op1);
2333                 JSValue b = valueOfJSConstant(op2);
2334                 set(currentInstruction[1].u.operand,
2335                     getJSConstantForValue(jsBoolean(JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
2336                 NEXT_OPCODE(op_eq);
2337             }
2338             set(currentInstruction[1].u.operand, addToGraph(CompareEq, op1, op2));
2339             NEXT_OPCODE(op_eq);
2340         }
2341
2342         case op_eq_null: {
2343             Node* value = get(currentInstruction[2].u.operand);
2344             set(currentInstruction[1].u.operand, addToGraph(CompareEqConstant, value, constantNull()));
2345             NEXT_OPCODE(op_eq_null);
2346         }
2347
2348         case op_stricteq: {
2349             Node* op1 = get(currentInstruction[2].u.operand);
2350             Node* op2 = get(currentInstruction[3].u.operand);
2351             if (canFold(op1) && canFold(op2)) {
2352                 JSValue a = valueOfJSConstant(op1);
2353                 JSValue b = valueOfJSConstant(op2);
2354                 set(currentInstruction[1].u.operand,
2355                     getJSConstantForValue(jsBoolean(JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
2356                 NEXT_OPCODE(op_stricteq);
2357             }
2358             if (isConstantForCompareStrictEq(op1))
2359                 set(currentInstruction[1].u.operand, addToGraph(CompareStrictEqConstant, op2, op1));
2360             else if (isConstantForCompareStrictEq(op2))
2361                 set(currentInstruction[1].u.operand, addToGraph(CompareStrictEqConstant, op1, op2));
2362             else
2363                 set(currentInstruction[1].u.operand, addToGraph(CompareStrictEq, op1, op2));
2364             NEXT_OPCODE(op_stricteq);
2365         }
2366
2367         case op_neq: {
2368             Node* op1 = get(currentInstruction[2].u.operand);
2369             Node* op2 = get(currentInstruction[3].u.operand);
2370             if (canFold(op1) && canFold(op2)) {
2371                 JSValue a = valueOfJSConstant(op1);
2372                 JSValue b = valueOfJSConstant(op2);
2373                 set(currentInstruction[1].u.operand,
2374                     getJSConstantForValue(jsBoolean(!JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
2375                 NEXT_OPCODE(op_neq);
2376             }
2377             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEq, op1, op2)));
2378             NEXT_OPCODE(op_neq);
2379         }
2380
2381         case op_neq_null: {
2382             Node* value = get(currentInstruction[2].u.operand);
2383             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEqConstant, value, constantNull())));
2384             NEXT_OPCODE(op_neq_null);
2385         }
2386
2387         case op_nstricteq: {
2388             Node* op1 = get(currentInstruction[2].u.operand);
2389             Node* op2 = get(currentInstruction[3].u.operand);
2390             if (canFold(op1) && canFold(op2)) {
2391                 JSValue a = valueOfJSConstant(op1);
2392                 JSValue b = valueOfJSConstant(op2);
2393                 set(currentInstruction[1].u.operand,
2394                     getJSConstantForValue(jsBoolean(!JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
2395                 NEXT_OPCODE(op_nstricteq);
2396             }
2397             Node* invertedResult;
2398             if (isConstantForCompareStrictEq(op1))
2399                 invertedResult = addToGraph(CompareStrictEqConstant, op2, op1);
2400             else if (isConstantForCompareStrictEq(op2))
2401                 invertedResult = addToGraph(CompareStrictEqConstant, op1, op2);
2402             else
2403                 invertedResult = addToGraph(CompareStrictEq, op1, op2);
2404             set(currentInstruction[1].u.operand, addToGraph(LogicalNot, invertedResult));
2405             NEXT_OPCODE(op_nstricteq);
2406         }
2407
2408         // === Property access operations ===
2409
2410         case op_get_by_val: {
2411             SpeculatedType prediction = getPrediction();
2412             
2413             Node* base = get(currentInstruction[2].u.operand);
2414             ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Read);
2415             Node* property = get(currentInstruction[3].u.operand);
2416             Node* getByVal = addToGraph(GetByVal, OpInfo(arrayMode.asWord()), OpInfo(prediction), base, property);
2417             set(currentInstruction[1].u.operand, getByVal);
2418
2419             NEXT_OPCODE(op_get_by_val);
2420         }
2421
2422         case op_put_by_val: {
2423             Node* base = get(currentInstruction[1].u.operand);
2424
2425             ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Write);
2426             
2427             Node* property = get(currentInstruction[2].u.operand);
2428             Node* value = get(currentInstruction[3].u.operand);
2429             
2430             addVarArgChild(base);
2431             addVarArgChild(property);
2432             addVarArgChild(value);
2433             addVarArgChild(0); // Leave room for property storage.
2434             addToGraph(Node::VarArg, PutByVal, OpInfo(arrayMode.asWord()), OpInfo(0));
2435
2436             NEXT_OPCODE(op_put_by_val);
2437         }
2438             
2439         case op_get_by_id:
2440         case op_get_by_id_out_of_line:
2441         case op_get_array_length: {
2442             SpeculatedType prediction = getPrediction();
2443             
2444             Node* base = get(currentInstruction[2].u.operand);
2445             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
2446             
2447             StringImpl* uid = m_graph.identifiers()[identifierNumber];
2448             GetByIdStatus getByIdStatus = GetByIdStatus::computeFor(
2449                 m_inlineStackTop->m_profiledBlock, m_currentIndex, uid);
2450             
2451             handleGetById(
2452                 currentInstruction[1].u.operand, prediction, base, identifierNumber, getByIdStatus);
2453
2454             NEXT_OPCODE(op_get_by_id);
2455         }
2456         case op_put_by_id:
2457         case op_put_by_id_out_of_line:
2458         case op_put_by_id_transition_direct:
2459         case op_put_by_id_transition_normal:
2460         case op_put_by_id_transition_direct_out_of_line:
2461         case op_put_by_id_transition_normal_out_of_line: {
2462             Node* value = get(currentInstruction[3].u.operand);
2463             Node* base = get(currentInstruction[1].u.operand);
2464             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
2465             bool direct = currentInstruction[8].u.operand;
2466
2467             PutByIdStatus putByIdStatus = PutByIdStatus::computeFor(
2468                 m_inlineStackTop->m_profiledBlock,
2469                 m_currentIndex,
2470                 m_graph.identifiers()[identifierNumber]);
2471             bool canCountAsInlined = true;
2472             if (!putByIdStatus.isSet()) {
2473                 addToGraph(ForceOSRExit);
2474                 canCountAsInlined = false;
2475             }
2476             
2477             bool hasExitSite =
2478                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
2479                 || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache);
2480             
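                 // Three lowerings, cheapest first: a simple replace is a CheckStructure plus
                 // a PutByOffset; a simple transition additionally (re)allocates property
                 // storage and emits a PutStructure; anything else, or any prior
                 // structure-related OSR exit at this index, falls back to a generic
                 // PutById / PutByIdDirect.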
2481             if (!hasExitSite && putByIdStatus.isSimpleReplace()) {
2482                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2483                 handlePutByOffset(base, identifierNumber, putByIdStatus.offset(), value);
2484             } else if (
2485                 !hasExitSite
2486                 && putByIdStatus.isSimpleTransition()
2487                 && (!putByIdStatus.structureChain()
2488                     || putByIdStatus.structureChain()->isStillValid())) {
2489                 
2490                 m_graph.chains().addLazily(putByIdStatus.structureChain());
2491                 
2492                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2493                 if (!direct) {
2494                     if (!putByIdStatus.oldStructure()->storedPrototype().isNull()) {
2495                         cellConstantWithStructureCheck(
2496                             putByIdStatus.oldStructure()->storedPrototype().asCell());
2497                     }
2498                     
2499                     for (unsigned i = 0; i < putByIdStatus.structureChain()->size(); ++i) {
2500                         JSValue prototype = putByIdStatus.structureChain()->at(i)->storedPrototype();
2501                         if (prototype.isNull())
2502                             continue;
2503                         cellConstantWithStructureCheck(prototype.asCell());
2504                     }
2505                 }
2506                 ASSERT(putByIdStatus.oldStructure()->transitionWatchpointSetHasBeenInvalidated());
2507                 
2508                 Node* propertyStorage;
2509                 StructureTransitionData* transitionData =
2510                     m_graph.addStructureTransitionData(
2511                         StructureTransitionData(
2512                             putByIdStatus.oldStructure(),
2513                             putByIdStatus.newStructure()));
2514
2515                 if (putByIdStatus.oldStructure()->outOfLineCapacity()
2516                     != putByIdStatus.newStructure()->outOfLineCapacity()) {
2517                     
2518                     // If we're growing the property storage then it must be because we're
2519                     // storing into the out-of-line storage.
2520                     ASSERT(!isInlineOffset(putByIdStatus.offset()));
2521                     
2522                     if (!putByIdStatus.oldStructure()->outOfLineCapacity()) {
2523                         propertyStorage = addToGraph(
2524                             AllocatePropertyStorage, OpInfo(transitionData), base);
2525                     } else {
2526                         propertyStorage = addToGraph(
2527                             ReallocatePropertyStorage, OpInfo(transitionData),
2528                             base, addToGraph(GetButterfly, base));
2529                     }
2530                 } else {
2531                     if (isInlineOffset(putByIdStatus.offset()))
2532                         propertyStorage = base;
2533                     else
2534                         propertyStorage = addToGraph(GetButterfly, base);
2535                 }
2536                 
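                     // Perform the transition itself: switch the object to its new structure,
                     // then store the value at the offset recorded in StorageAccessData below.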
2537                 addToGraph(PutStructure, OpInfo(transitionData), base);
2538                 
2539                 addToGraph(
2540                     PutByOffset,
2541                     OpInfo(m_graph.m_storageAccessData.size()),
2542                     propertyStorage,
2543                     base,
2544                     value);
2545                 
2546                 StorageAccessData storageAccessData;
2547                 storageAccessData.offset = putByIdStatus.offset();
2548                 storageAccessData.identifierNumber = identifierNumber;
2549                 m_graph.m_storageAccessData.append(storageAccessData);
2550             } else {
2551                 if (direct)
2552                     addToGraph(PutByIdDirect, OpInfo(identifierNumber), base, value);
2553                 else
2554                     addToGraph(PutById, OpInfo(identifierNumber), base, value);
2555                 canCountAsInlined = false;
2556             }
2557             
2558             if (canCountAsInlined && m_graph.compilation())
2559                 m_graph.compilation()->noticeInlinedPutById();
2560
2561             NEXT_OPCODE(op_put_by_id);
2562         }
2563
2564         case op_init_global_const_nop: {
2565             NEXT_OPCODE(op_init_global_const_nop);
2566         }
2567
2568         case op_init_global_const: {
2569             Node* value = get(currentInstruction[2].u.operand);
2570             addToGraph(
2571                 PutGlobalVar,
2572                 OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2573                 value);
2574             NEXT_OPCODE(op_init_global_const);
2575         }
2576
2577         // === Block terminators. ===
2578
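             // Jump targets are encoded as relative bytecode offsets; they become absolute
             // bytecode indices here by adding them to m_currentIndex.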
2579         case op_jmp: {
2580             unsigned relativeOffset = currentInstruction[1].u.operand;
2581             addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2582             LAST_OPCODE(op_jmp);
2583         }
2584
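             // Conditional jumps whose condition is a foldable constant are resolved at parse
             // time: a statically taken branch becomes an unconditional Jump, and a statically
             // not-taken branch leaves only a Phantom placeholder and falls through
             // (illustratively, a guard on a compile-time-constant flag such as
             // `if (someCompileTimeConstantFlag) ...` never needs a real Branch node).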
2585         case op_jtrue: {
2586             unsigned relativeOffset = currentInstruction[2].u.operand;
2587             Node* condition = get(currentInstruction[1].u.operand);
2588             if (canFold(condition)) {
2589                 TriState state = valueOfJSConstant(condition).pureToBoolean();
2590                 if (state == TrueTriState) {
2591                     addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2592                     LAST_OPCODE(op_jtrue);
2593                 } else if (state == FalseTriState) {
2594                     // Emit a placeholder for this bytecode operation but otherwise
2595                     // just fall through.
2596                     addToGraph(Phantom);
2597                     NEXT_OPCODE(op_jtrue);
2598                 }
2599             }
2600             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jtrue)), condition);
2601             LAST_OPCODE(op_jtrue);
2602         }
2603
2604         case op_jfalse: {
2605             unsigned relativeOffset = currentInstruction[2].u.operand;
2606             Node* condition = get(currentInstruction[1].u.operand);
2607             if (canFold(condition)) {
2608                 TriState state = valueOfJSConstant(condition).pureToBoolean();
2609                 if (state == FalseTriState) {
2610                     addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2611                     LAST_OPCODE(op_jfalse);
2612                 } else if (state == TrueTriState) {
2613                     // Emit a placeholder for this bytecode operation but otherwise
2614                     // just fall through.
2615                     addToGraph(Phantom);
2616                     NEXT_OPCODE(op_jfalse);
2617                 }
2618             }
2619             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jfalse)), OpInfo(m_currentIndex + relativeOffset), condition);
2620             LAST_OPCODE(op_jfalse);
2621         }
2622
2623         case op_jeq_null: {
2624             unsigned relativeOffset = currentInstruction[2].u.operand;
2625             Node* value = get(currentInstruction[1].u.operand);
2626             Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2627             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jeq_null)), condition);
2628             LAST_OPCODE(op_jeq_null);
2629         }
2630
2631         case op_jneq_null: {
2632             unsigned relativeOffset = currentInstruction[2].u.operand;
2633             Node* value = get(currentInstruction[1].u.operand);
2634             Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2635             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_null)), OpInfo(m_currentIndex + relativeOffset), condition);
2636             LAST_OPCODE(op_jneq_null);
2637         }
2638
2639         case op_jless: {
2640             unsigned relativeOffset = currentInstruction[3].u.operand;
2641             Node* op1 = get(currentInstruction[1].u.operand);
2642             Node* op2 = get(currentInstruction[2].u.operand);
2643             if (canFold(op1) && canFold(op2)) {
2644                 JSValue aValue = valueOfJSConstant(op1);
2645                 JSValue bValue = valueOfJSConstant(op2);
2646                 if (aValue.isNumber() && bValue.isNumber()) {
2647                     double a = aValue.asNumber();
2648                     double b = bValue.asNumber();
2649                     if (a < b) {
2650                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2651                         LAST_OPCODE(op_jless);
2652                     } else {
2653                         // Emit a placeholder for this bytecode operation but otherwise
2654                         // just fall through.
2655                         addToGraph(Phantom);
2656                         NEXT_OPCODE(op_jless);
2657                     }
2658                 }
2659             }
2660             Node* condition = addToGraph(CompareLess, op1, op2);
2661             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jless)), condition);
2662             LAST_OPCODE(op_jless);
2663         }
2664
2665         case op_jlesseq: {
2666             unsigned relativeOffset = currentInstruction[3].u.operand;
2667             Node* op1 = get(currentInstruction[1].u.operand);
2668             Node* op2 = get(currentInstruction[2].u.operand);
2669             if (canFold(op1) && canFold(op2)) {
2670                 JSValue aValue = valueOfJSConstant(op1);
2671                 JSValue bValue = valueOfJSConstant(op2);
2672                 if (aValue.isNumber() && bValue.isNumber()) {
2673                     double a = aValue.asNumber();
2674                     double b = bValue.asNumber();
2675                     if (a <= b) {
2676                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2677                         LAST_OPCODE(op_jlesseq);
2678                     } else {
2679                         // Emit a placeholder for this bytecode operation but otherwise
2680                         // just fall through.
2681                         addToGraph(Phantom);
2682                         NEXT_OPCODE(op_jlesseq);
2683                     }
2684                 }
2685             }
2686             Node* condition = addToGraph(CompareLessEq, op1, op2);
2687             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jlesseq)), condition);
2688             LAST_OPCODE(op_jlesseq);
2689         }
2690
2691         case op_jgreater: {
2692             unsigned relativeOffset = currentInstruction[3].u.operand;
2693             Node* op1 = get(currentInstruction[1].u.operand);
2694             Node* op2 = get(currentInstruction[2].u.operand);
2695             if (canFold(op1) && canFold(op2)) {
2696                 JSValue aValue = valueOfJSConstant(op1);
2697                 JSValue bValue = valueOfJSConstant(op2);
2698                 if (aValue.isNumber() && bValue.isNumber()) {
2699                     double a = aValue.asNumber();
2700                     double b = bValue.asNumber();
2701                     if (a > b) {
2702                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2703                         LAST_OPCODE(op_jgreater);
2704                     } else {
2705                         // Emit a placeholder for this bytecode operation but otherwise
2706                         // just fall through.
2707                         addToGraph(Phantom);
2708                         NEXT_OPCODE(op_jgreater);
2709                     }
2710                 }
2711             }
2712             Node* condition = addToGraph(CompareGreater, op1, op2);
2713             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreater)), condition);
2714             LAST_OPCODE(op_jgreater);
2715         }
2716
2717         case op_jgreatereq: {
2718             unsigned relativeOffset = currentInstruction[3].u.operand;
2719             Node* op1 = get(currentInstruction[1].u.operand);
2720             Node* op2 = get(currentInstruction[2].u.operand);
2721             if (canFold(op1) && canFold(op2)) {
2722                 JSValue aValue = valueOfJSConstant(op1);
2723                 JSValue bValue = valueOfJSConstant(op2);
2724                 if (aValue.isNumber() && bValue.isNumber()) {
2725                     double a = aValue.asNumber();
2726                     double b = bValue.asNumber();
2727                     if (a >= b) {
2728                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2729                         LAST_OPCODE(op_jgreatereq);
2730                     } else {
2731                         // Emit a placeholder for this bytecode operation but otherwise
2732                         // just fall through.
2733                         addToGraph(Phantom);
2734                         NEXT_OPCODE(op_jgreatereq);
2735                     }
2736                 }
2737             }
2738             Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2739             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreatereq)), condition);
2740             LAST_OPCODE(op_jgreatereq);
2741         }
2742
2743         case op_jnless: {
2744             unsigned relativeOffset = currentInstruction[3].u.operand;
2745             Node* op1 = get(currentInstruction[1].u.operand);
2746             Node* op2 = get(currentInstruction[2].u.operand);
2747             if (canFold(op1) && canFold(op2)) {
2748                 JSValue aValue = valueOfJSConstant(op1);
2749                 JSValue bValue = valueOfJSConstant(op2);
2750                 if (aValue.isNumber() && bValue.isNumber()) {
2751                     double a = aValue.asNumber();
2752                     double b = bValue.asNumber();
2753                     if (a < b) {
2754                         // Emit a placeholder for this bytecode operation but otherwise
2755                         // just fall through.
2756                         addToGraph(Phantom);
2757                         NEXT_OPCODE(op_jnless);
2758                     } else {
2759                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2760                         LAST_OPCODE(op_jnless);
2761                     }
2762                 }
2763             }
2764             Node* condition = addToGraph(CompareLess, op1, op2);
2765             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnless)), OpInfo(m_currentIndex + relativeOffset), condition);
2766             LAST_OPCODE(op_jnless);
2767         }
2768
2769         case op_jnlesseq: {
2770             unsigned relativeOffset = currentInstruction[3].u.operand;
2771             Node* op1 = get(currentInstruction[1].u.operand);
2772             Node* op2 = get(currentInstruction[2].u.operand);
2773             if (canFold(op1) && canFold(op2)) {
2774                 JSValue aValue = valueOfJSConstant(op1);
2775                 JSValue bValue = valueOfJSConstant(op2);
2776                 if (aValue.isNumber() && bValue.isNumber()) {
2777                     double a = aValue.asNumber();
2778                     double b = bValue.asNumber();
2779                     if (a <= b) {
2780                         // Emit a placeholder for this bytecode operation but otherwise
2781                         // just fall through.
2782                         addToGraph(Phantom);
2783                         NEXT_OPCODE(op_jnlesseq);
2784                     } else {
2785                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2786                         LAST_OPCODE(op_jnlesseq);
2787                     }
2788                 }
2789             }
2790             Node* condition = addToGraph(CompareLessEq, op1, op2);
2791             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnlesseq)), OpInfo(m_currentIndex + relativeOffset), condition);
2792             LAST_OPCODE(op_jnlesseq);
2793         }
2794
2795         case op_jngreater: {
2796             unsigned relativeOffset = currentInstruction[3].u.operand;
2797             Node* op1 = get(currentInstruction[1].u.operand);
2798             Node* op2 = get(currentInstruction[2].u.operand);
2799             if (canFold(op1) && canFold(op2)) {
2800                 JSValue aValue = valueOfJSConstant(op1);
2801                 JSValue bValue = valueOfJSConstant(op2);
2802                 if (aValue.isNumber() && bValue.isNumber()) {
2803                     double a = aValue.asNumber();
2804                     double b = bValue.asNumber();
2805                     if (a > b) {
2806                         // Emit a placeholder for this bytecode operation but otherwise
2807                         // just fall through.
2808                         addToGraph(Phantom);
2809                         NEXT_OPCODE(op_jngreater);
2810                     } else {
2811                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2812                         LAST_OPCODE(op_jngreater);
2813                     }
2814                 }
2815             }
2816             Node* condition = addToGraph(CompareGreater, op1, op2);
2817             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreater)), OpInfo(m_currentIndex + relativeOffset), condition);
2818             LAST_OPCODE(op_jngreater);
2819         }
2820
2821         case op_jngreatereq: {
2822             unsigned relativeOffset = currentInstruction[3].u.operand;
2823             Node* op1 = get(currentInstruction[1].u.operand);
2824             Node* op2 = get(currentInstruction[2].u.operand);
2825             if (canFold(op1) && canFold(op2)) {
2826                 JSValue aValue = valueOfJSConstant(op1);
2827                 JSValue bValue = valueOfJSConstant(op2);
2828                 if (aValue.isNumber() && bValue.isNumber()) {
2829                     double a = aValue.asNumber();
2830                     double b = bValue.asNumber();
2831                     if (a >= b) {
2832                         // Emit a placeholder for this bytecode operation but otherwise
2833                         // just fall through.
2834                         addToGraph(Phantom);
2835                         NEXT_OPCODE(op_jngreatereq);
2836                     } else {
2837                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2838                         LAST_OPCODE(op_jngreatereq);
2839                     }
2840                 }
2841             }
2842             Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2843             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreatereq)), OpInfo(m_currentIndex + relativeOffset), condition);
2844             LAST_OPCODE(op_jngreatereq);
2845         }
2846             
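             // op_switch_imm copies the (remapped) SimpleJumpTable into SwitchData, skipping
             // holes (zero branch offsets) and cases that land on the fall-through target.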
2847         case op_switch_imm: {
2848             SwitchData data;
2849             data.kind = SwitchImm;
2850             data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
2851             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2852             SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
2853             for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
2854                 if (!table.branchOffsets[i])
2855                     continue;
2856                 unsigned target = m_currentIndex + table.branchOffsets[i];
2857                 if (target == data.fallThroughBytecodeIndex())
2858                     continue;
2859                 data.cases.append(SwitchCase::withBytecodeIndex(jsNumber(static_cast<int32_t>(table.min + i)), target));
2860             }
2861             m_graph.m_switchData.append(data);
2862             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(currentInstruction[3].u.operand));
2863             LAST_OPCODE(op_switch_imm);
2864         }
2865             
2866         case op_switch_char: {
2867             SwitchData data;
2868             data.kind = SwitchChar;
2869             data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
2870             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2871             SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
2872             for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
2873                 if (!table.branchOffsets[i])
2874                     continue;
2875                 unsigned target = m_currentIndex + table.branchOffsets[i];
2876                 if (target == data.fallThroughBytecodeIndex())
2877                     continue;
2878                 data.cases.append(
2879                     SwitchCase::withBytecodeIndex(LazyJSValue::singleCharacterString(table.min + i), target));
2880             }
2881             m_graph.m_switchData.append(data);
2882             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(currentInstruction[3].u.operand));
2883             LAST_OPCODE(op_switch_char);
2884         }
2885
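             // String switches walk the StringJumpTable's offset table, recording each case
             // as a lazily materialized string key and again skipping fall-through targets.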
2886         case op_switch_string: {
2887             SwitchData data;
2888             data.kind = SwitchString;
2889             data.switchTableIndex = currentInstruction[1].u.operand;
2890             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2891             StringJumpTable& table = m_codeBlock->stringSwitchJumpTable(data.switchTableIndex);
2892             StringJumpTable::StringOffsetTable::iterator iter;
2893             StringJumpTable::StringOffsetTable::iterator end = table.offsetTable.end();
2894             for (iter = table.offsetTable.begin(); iter != end; ++iter) {
2895                 unsigned target = m_currentIndex + iter->value.branchOffset;
2896                 if (target == data.fallThroughBytecodeIndex())
2897                     continue;
2898                 data.cases.append(
2899                     SwitchCase::withBytecodeIndex(LazyJSValue::knownStringImpl(iter->key.get()), target));
2900             }
2901             m_graph.m_switchData.append(data);
2902             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(currentInstruction[3].u.operand));
2903             LAST_OPCODE(op_switch_string);
2904         }
2905
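             // A return inside an inlined frame writes the result into the caller's
             // return-value register and handles the block linking needed for early returns;
             // only a non-inlined return emits an actual Return node.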
2906         case op_ret:
2907             flushArgumentsAndCapturedVariables();
2908             if (inlineCallFrame()) {
2909                 ASSERT(m_inlineStackTop->m_returnValue != InvalidVirtualRegister);
2910                 setDirect(m_inlineStackTop->m_returnValue, get(currentInstruction[1].u.operand));
2911                 m_inlineStackTop->m_didReturn = true;
2912                 if (m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
2913                     // If we're returning from the first block, then we're done parsing.
2914                     ASSERT(m_inlineStackTop->m_callsiteBlockHead == m_graph.lastBlock());
2915                     shouldContinueParsing = false;
2916                     LAST_OPCODE(op_ret);
2917                 } else {
2918                     // If inlining created blocks and we're doing a return, then we need
2919                     // some special linking.
2920                     ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_block == m_graph.lastBlock());
2921                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsNormalLinking = false;
2922                 }
2923                 if (m_currentIndex + OPCODE_LENGTH(op_ret) != m_inlineStackTop->m_codeBlock->instructions().size() || m_inlineStackTop->m_didEarlyReturn) {
2924                     ASSERT(m_currentIndex + OPCODE_LENGTH(op_ret) <= m_inlineStackTop->m_codeBlock->instructions().size());
2925                     addToGraph(Jump, OpInfo(0));
2926                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsEarlyReturnLinking = true;
2927                     m_inlineStackTop->m_didEarlyReturn = true;
2928                 }
2929                 LAST_OPCODE(op_ret);
2930             }
2931             addToGraph(Return, get(currentInstruction[1].u.operand));
2932             LAST_OPCODE(op_ret);
2933             
2934         case op_end:
2935             flushArgumentsAndCapturedVariables();
2936             ASSERT(!inlineCallFrame());
2937             addToGraph(Return, get(currentInstruction[1].u.operand));
2938             LAST_OPCODE(op_end);
2939
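             // Throws end the basic block: flush arguments and captured variables across the
             // entire inline stack, then mark everything after the throw as Unreachable.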
2940         case op_throw:
2941             addToGraph(Throw, get(currentInstruction[1].u.operand));
2942             flushAllArgumentsAndCapturedVariablesInInlineStack();
2943             addToGraph(Unreachable);
2944             LAST_OPCODE(op_throw);
2945             
2946         case op_throw_static_error:
2947             addToGraph(ThrowReferenceError);
2948             flushAllArgumentsAndCapturedVariablesInInlineStack();
2949             addToGraph(Unreachable);
2950             LAST_OPCODE(op_throw_static_error);
2951             
2952         case op_call:
2953             handleCall(currentInstruction, Call, CodeForCall);
2954             NEXT_OPCODE(op_call);
2955             
2956         case op_construct:
2957             handleCall(currentInstruction, Construct, CodeForConstruct);
2958             NEXT_OPCODE(op_construct);
2959             
2960         case op_call_varargs: {
2961             ASSERT(inlineCallFrame());
2962             ASSERT(currentInstruction[4].u.operand == m_inlineStackTop->m_codeBlock->argumentsRegister());
2963             ASSERT(!m_inlineStackTop->m_codeBlock->symbolTable()->slowArguments());
2964             // Ideally this would be funneled into handleCall() so that it could participate
2965             // in inlining. But that would not be profitable yet, since none of the current
2966             // uses of call_varargs are inlineable. So we set this call up manually, without
2967             // inline/intrinsic detection.
2968             
2969             SpeculatedType prediction = getPrediction();
2970             
2971             addToGraph(CheckArgumentsNotCreated);
2972             
2973             unsigned argCount = inlineCallFrame()->arguments.size();
2974             if (JSStack::CallFrameHeaderSize + argCount > m_parameterSlots)
2975                 m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
2976             
2977             addVarArgChild(get(currentInstruction[2].u.operand)); // callee
2978             addVarArgChild(get(currentInstruction[3].u.operand)); // this
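                 // The remaining arguments come straight from the inlined call frame; the loop
                 // starts at 1 because argument 0 is 'this', which was added explicitly above.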
2979             for (unsigned argument = 1; argument < argCount; ++argument)
2980                 addVarArgChild(get(argumentToOperand(argument)));
2981             
2982             set(currentInstruction[1].u.operand,
2983                 addToGraph(Node::VarArg, Call, OpInfo(0), OpInfo(prediction)));
2984             
2985             NEXT_OPCODE(op_call_varargs);
2986         }
2987             
2988         case op_jneq_ptr:
2989             // Statically speculate for now. It makes sense to keep speculate-only jneq_ptr
2990             // support simple for a while before generalizing it, since it is already
2991             // complicated enough.
2992             ASSERT(pointerIsFunction(currentInstruction[2].u.specialPointer));
2993             addToGraph(
2994                 CheckFunction,
2995                 OpInfo(actualPointerFor(m_inlineStackTop->m_codeBlock, currentInstruction[2].u.specialPointer)),
2996                 get(currentInstruction[1].u.operand));
2997             addToGraph(Jump, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_ptr)));
2998             LAST_OPCODE(op_jneq_ptr);
2999
3000         case op_resolve_scope: {
3001             int dst = currentInstruction[1].u.operand;
3002             ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);
3003             unsigned depth = currentInstruction[4].u.operand;
3004
3005             // get_from_scope and put_to_scope depend on this watchpoint forcing OSR exit, so they don't add their own watchpoints.
3006             if (needsVarInjectionChecks(resolveType))
3007                 addToGraph(VarInjectionWatchpoint);
3008
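                 // Global-style resolutions statically resolve to the global object itself;
                 // closure-style resolutions walk 'depth' frames up the scope chain.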
3009             switch (resolveType) {
3010             case GlobalProperty:
3011             case GlobalVar:
3012             case GlobalPropertyWithVarInjectionChecks:
3013             case GlobalVarWithVarInjectionChecks:
3014                 set(dst, cellConstant(m_inlineStackTop->m_codeBlock->globalObject()));
3015                 break;
3016             case ClosureVar:
3017             case ClosureVarWithVarInjectionChecks:
3018                 set(dst, getScope(m_inlineStackTop->m_codeBlock->needsActivation(), depth));
3019                 break;
3020             case Dynamic:
3021                 RELEASE_ASSERT_NOT_REACHED();
3022                 break;
3023             }
3024             NEXT_OPCODE(op_resolve_scope);
3025         }
3026
3027         case op_get_from_scope: {
3028             int dst = currentInstruction[1].u.operand;
3029             unsigned scope = currentInstruction[2].u.operand;
3030             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
3031             StringImpl* uid = m_graph.identifiers()[identifierNumber];
3032             ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
3033
3034             Structure* structure;
3035             uintptr_t operand;
3036             {
3037                 ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
3038                 structure = currentInstruction[5].u.structure.get();
3039                 operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
3040             }
3041
3042             SpeculatedType prediction = getPrediction();
3043             JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
3044
3045             switch (resolveType) {
3046             case GlobalProperty:
3047             case GlobalPropertyWithVarInjectionChecks: {
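                     // With a usable GetByIdStatus for the global object's structure, the load
                     // either folds to the known specific value or becomes a structure-checked
                     // GetByOffset; otherwise it stays a generic GetByIdFlush.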
3048                 GetByIdStatus status = GetByIdStatus::computeFor(*m_vm, structure, uid);
3049                 if (status.takesSlowPath()) {
3050                     set(dst, addToGraph(GetByIdFlush, OpInfo(identifierNumber), OpInfo(prediction), get(scope)));
3051                     break;
3052                 }
3053                 Node* base = cellConstantWithStructureCheck(globalObject, status.structureSet().singletonStructure());
3054                 if (JSValue specificValue = status.specificValue())
3055                     set(dst, cellConstant(specificValue.asCell()));
3056                 else
3057                     set(dst, handleGetByOffset(prediction, base, identifierNumber, operand));
3058                 break;
3059             }
3060             case GlobalVar:
3061             case GlobalVarWithVarInjectionChecks: {
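                     // If the variable's watchpoint set is still intact, register a
                     // GlobalVarWatchpoint and fold the load to the variable's current cell
                     // value; otherwise emit a plain GetGlobalVar.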
3062                 SymbolTableEntry entry = globalObject->symbolTable()->get(uid);
3063                 if (!entry.couldBeWatched() || !m_graph.watchpoints().isStillValid(entry.watchpointSet())) {
3064                     set(dst, addToGraph(GetGlobalVar, OpInfo(operand), OpInfo(prediction)));
3065                     break;
3066                 }
3067
3068                 addToGraph(GlobalVarWatchpoint, OpInfo(operand), OpInfo(identifierNumber));
3069                 JSValue specificValue = globalObject->registerAt(entry.getIndex()).get();
3070                 set(dst, cellConstant(specificValue.asCell()));
3071                 break;
3072             }
3073             case ClosureVar:
3074             case ClosureVarWithVarInjectionChecks:
3075                 set(dst, 
3076                     addToGraph(GetClosureVar, OpInfo(operand), OpInfo(prediction), 
3077                         addToGraph(GetClosureRegisters, get(scope))));
3078                 break;
3079             case Dynamic:
3080                 RELEASE_ASSERT_NOT_REACHED();
3081                 break;