/*
 * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGByteCodeParser.h"

#if ENABLE(DFG_JIT)

#include "ArrayConstructor.h"
#include "CallLinkStatus.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGArrayMode.h"
#include "DFGCapabilities.h"
#include "GetByIdStatus.h"
#include "Operations.h"
#include "PreciseJumpTargets.h"
#include "PutByIdStatus.h"
#include "StringConstructor.h"
#include <wtf/CommaPrinter.h>
#include <wtf/HashMap.h>
#include <wtf/MathExtras.h>

namespace JSC { namespace DFG {

class ConstantBufferKey {
public:
    ConstantBufferKey()
        : m_codeBlock(0)
        , m_index(0)
    {
    }
    
    ConstantBufferKey(WTF::HashTableDeletedValueType)
        : m_codeBlock(0)
        , m_index(1)
    {
    }
    
    ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
        : m_codeBlock(codeBlock)
        , m_index(index)
    {
    }
    
    bool operator==(const ConstantBufferKey& other) const
    {
        return m_codeBlock == other.m_codeBlock
            && m_index == other.m_index;
    }
    
    unsigned hash() const
    {
        return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
    }
    
    bool isHashTableDeletedValue() const
    {
        return !m_codeBlock && m_index;
    }
    
    CodeBlock* codeBlock() const { return m_codeBlock; }
    unsigned index() const { return m_index; }
    
private:
    CodeBlock* m_codeBlock;
    unsigned m_index;
};

struct ConstantBufferKeyHash {
    static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
    static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
    {
        return a == b;
    }
    
    static const bool safeToCompareToEmptyOrDeleted = true;
};

} } // namespace JSC::DFG

namespace WTF {

template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
    typedef JSC::DFG::ConstantBufferKeyHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };

} // namespace WTF
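
// With the DefaultHash and HashTraits specializations above, ConstantBufferKey
// works directly as a WTF::HashMap key: the empty value is the default-constructed
// key (null CodeBlock, index 0) and the deleted value is (null CodeBlock, index 1),
// which is why isHashTableDeletedValue() tests !m_codeBlock && m_index. It is used
// this way for m_constantBufferCache below, e.g. (names here are illustrative):
//
//     HashMap<ConstantBufferKey, unsigned> cache;
//     cache.add(ConstantBufferKey(codeBlock, bufferIndex), remappedIndex);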

namespace JSC { namespace DFG {

// === ByteCodeParser ===
//
// This class is used to compile the dataflow graph from a CodeBlock.
class ByteCodeParser {
public:
    ByteCodeParser(Graph& graph)
        : m_vm(&graph.m_vm)
        , m_codeBlock(graph.m_codeBlock)
        , m_profiledBlock(graph.m_profiledBlock)
        , m_graph(graph)
        , m_currentBlock(0)
        , m_currentIndex(0)
        , m_constantUndefined(UINT_MAX)
        , m_constantNull(UINT_MAX)
        , m_constantNaN(UINT_MAX)
        , m_constant1(UINT_MAX)
        , m_constants(m_codeBlock->numberOfConstantRegisters())
        , m_numArguments(m_codeBlock->numParameters())
        , m_numLocals(m_codeBlock->m_numCalleeRegisters)
        , m_preservedVars(m_codeBlock->m_numVars)
        , m_parameterSlots(0)
        , m_numPassedVarArgs(0)
        , m_inlineStackTop(0)
        , m_haveBuiltOperandMaps(false)
        , m_emptyJSValueIndex(UINT_MAX)
        , m_currentInstruction(0)
    {
        ASSERT(m_profiledBlock);
        
        for (int i = 0; i < m_codeBlock->m_numVars; ++i)
            m_preservedVars.set(i);
    }
    
    // Parse a full CodeBlock of bytecode.
    bool parse();
    
private:
    struct InlineStackEntry;

    // Just parse from m_currentIndex to the end of the current CodeBlock.
    void parseCodeBlock();

    // Helper for min and max.
    bool handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);
    
    // Handle calls. This resolves issues surrounding inlining and intrinsics.
    void handleCall(Instruction* currentInstruction, NodeType op, CodeSpecializationKind);
    void emitFunctionChecks(const CallLinkStatus&, Node* callTarget, int registerOffset, CodeSpecializationKind);
    void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind);
    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
    bool handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
    bool handleIntrinsic(int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
    bool handleConstantInternalFunction(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
    Node* handlePutByOffset(Node* base, unsigned identifier, PropertyOffset, Node* value);
    Node* handleGetByOffset(SpeculatedType, Node* base, unsigned identifierNumber, PropertyOffset);
    void handleGetByOffset(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        PropertyOffset);
    void handleGetById(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        const GetByIdStatus&);

    Node* getScope(bool skipTop, unsigned skipCount);
    
    // Prepare to parse a block.
    void prepareToParseBlock();
    // Parse a single basic block of bytecode instructions.
    bool parseBlock(unsigned limit);
    // Link block successors.
    void linkBlock(BasicBlock*, Vector<BasicBlock*>& possibleTargets);
    void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets);
    
    VariableAccessData* newVariableAccessData(int operand, bool isCaptured)
    {
        ASSERT(operand < FirstConstantRegisterIndex);
        
        m_graph.m_variableAccessData.append(VariableAccessData(static_cast<VirtualRegister>(operand), isCaptured));
        return &m_graph.m_variableAccessData.last();
    }
    
    // Get/Set the operands/result of a bytecode instruction.
    Node* getDirect(int operand)
    {
        // Is this a constant?
        if (operand >= FirstConstantRegisterIndex) {
            unsigned constant = operand - FirstConstantRegisterIndex;
            ASSERT(constant < m_constants.size());
            return getJSConstant(constant);
        }

        ASSERT(operand != JSStack::Callee);
        
        // Is this an argument?
        if (operandIsArgument(operand))
            return getArgument(operand);

        // Must be a local.
        return getLocal((unsigned)operand);
    }
    Node* get(int operand)
    {
        if (operand == JSStack::Callee) {
            if (inlineCallFrame() && inlineCallFrame()->callee)
                return cellConstant(inlineCallFrame()->callee.get());
            
            return getCallee();
        }
        
        return getDirect(m_inlineStackTop->remapOperand(operand));
    }
    enum SetMode { NormalSet, SetOnEntry };
    void setDirect(int operand, Node* value, SetMode setMode = NormalSet)
    {
        // Is this an argument?
        if (operandIsArgument(operand)) {
            setArgument(operand, value, setMode);
            return;
        }

        // Must be a local.
        setLocal((unsigned)operand, value, setMode);
    }
    void set(int operand, Node* value, SetMode setMode = NormalSet)
    {
        setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
    }
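
    // A note on the operand encoding assumed by get/set above: operands at or
    // above FirstConstantRegisterIndex name entries in the CodeBlock's constant
    // pool, JSStack::Callee names the callee slot, operands for which
    // operandIsArgument() holds name arguments, and everything else is a local.
    // So, for example, getDirect(FirstConstantRegisterIndex + 2) returns the
    // JSConstant node for constant-pool entry 2.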

    Node* injectLazyOperandSpeculation(Node* node)
    {
        ASSERT(node->op() == GetLocal);
        ASSERT(node->codeOrigin.bytecodeIndex == m_currentIndex);
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        LazyOperandValueProfileKey key(m_currentIndex, node->local());
        SpeculatedType prediction = m_inlineStackTop->m_lazyOperands.prediction(locker, key);
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLog("Lazy operand [@", node->index(), ", bc#", m_currentIndex, ", r", node->local(), "] prediction: ", SpeculationDump(prediction), "\n");
#endif
        node->variableAccessData()->predict(prediction);
        return node;
    }

    // Used in implementing get/set, above, where the operand is a local variable.
    Node* getLocal(unsigned operand)
    {
        Node* node = m_currentBlock->variablesAtTail.local(operand);
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        // This has two goals: 1) link together variable access datas, and 2)
        // try to avoid creating redundant GetLocals. (1) is required for
        // correctness - no other phase will ensure that block-local variable
        // access data unification is done correctly. (2) is purely opportunistic
        // and is meant as a compile-time optimization only.
        
        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
            
            if (!isCaptured) {
                switch (node->op()) {
                case GetLocal:
                    return node;
                case SetLocal:
                    return node->child1().node();
                default:
                    break;
                }
            }
        } else {
            m_preservedVars.set(operand);
            variable = newVariableAccessData(operand, isCaptured);
        }
        
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.local(operand) = node;
        return node;
    }
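
    // For example, two back-to-back get() calls on the same uncaptured local
    // within one block return the same GetLocal node, and a get() that follows
    // a set() of that local simply returns the value that was stored
    // (node->child1()) without emitting a new GetLocal at all.
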
    void setLocal(unsigned operand, Node* value, SetMode setMode = NormalSet)
    {
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        if (setMode == NormalSet) {
            ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
            if (isCaptured || argumentPosition)
                flushDirect(operand, argumentPosition);
        }

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.local(operand) = node;
    }

    // Used in implementing get/set, above, where the operand is an argument.
    Node* getArgument(unsigned operand)
    {
        unsigned argument = operandToArgument(operand);
        ASSERT(argument < m_numArguments);
        
        Node* node = m_currentBlock->variablesAtTail.argument(argument);
        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
            
            switch (node->op()) {
            case GetLocal:
                return node;
            case SetLocal:
                return node->child1().node();
            default:
                break;
            }
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }
    void setArgument(int operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned argument = operandToArgument(operand);
        ASSERT(argument < m_numArguments);
        
        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);

        // Always flush arguments, except for 'this'. If 'this' is created by us,
        // then make sure that it's never unboxed.
        if (argument) {
            if (setMode == NormalSet)
                flushDirect(operand);
        } else if (m_codeBlock->specializationKind() == CodeForConstruct)
            variableAccessData->mergeShouldNeverUnbox(true);
        
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.argument(argument) = node;
    }
    
    ArgumentPosition* findArgumentPositionForArgument(int argument)
    {
        InlineStackEntry* stack = m_inlineStackTop;
        while (stack->m_inlineCallFrame)
            stack = stack->m_caller;
        return stack->m_argumentPositions[argument];
    }
    
    ArgumentPosition* findArgumentPositionForLocal(int operand)
    {
        for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
            InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
            if (!inlineCallFrame)
                break;
            if (operand >= static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize))
                continue;
            if (operand == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
                continue;
            if (operand < static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize - inlineCallFrame->arguments.size()))
                continue;
            int argument = operandToArgument(operand - inlineCallFrame->stackOffset);
            return stack->m_argumentPositions[argument];
        }
        return 0;
    }
    
    ArgumentPosition* findArgumentPosition(int operand)
    {
        if (operandIsArgument(operand))
            return findArgumentPositionForArgument(operandToArgument(operand));
        return findArgumentPositionForLocal(operand);
    }
    
    void flush(int operand)
    {
        flushDirect(m_inlineStackTop->remapOperand(operand));
    }
    
    void flushDirect(int operand)
    {
        flushDirect(operand, findArgumentPosition(operand));
    }
    
    void flushDirect(int operand, ArgumentPosition* argumentPosition)
    {
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
        
        ASSERT(operand < FirstConstantRegisterIndex);
        
        if (!operandIsArgument(operand))
            m_preservedVars.set(operand);
        
        Node* node = m_currentBlock->variablesAtTail.operand(operand);
        
        VariableAccessData* variable;
        
        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
        } else
            variable = newVariableAccessData(operand, isCaptured);
        
        node = addToGraph(Flush, OpInfo(variable));
        m_currentBlock->variablesAtTail.operand(operand) = node;
        if (argumentPosition)
            argumentPosition->addVariable(variable);
    }
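
    // Note that a Flush node computes nothing; it merely references the variable
    // so that later phases treat its current value as live and keep it in its
    // stack slot. That is why stores to captured variables and to arguments
    // flush first: code outside this compilation unit (closures, the arguments
    // object, OSR exit) may observe those slots.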

    void flush(InlineStackEntry* inlineStackEntry)
    {
        int numArguments;
        if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame)
            numArguments = inlineCallFrame->arguments.size();
        else
            numArguments = inlineStackEntry->m_codeBlock->numParameters();
        for (unsigned argument = numArguments; argument-- > 1;)
            flushDirect(inlineStackEntry->remapOperand(argumentToOperand(argument)));
        for (int local = 0; local < inlineStackEntry->m_codeBlock->m_numVars; ++local) {
            if (!inlineStackEntry->m_codeBlock->isCaptured(local))
                continue;
            flushDirect(inlineStackEntry->remapOperand(local));
        }
    }

    void flushAllArgumentsAndCapturedVariablesInInlineStack()
    {
        for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
            flush(inlineStackEntry);
    }

    void flushArgumentsAndCapturedVariables()
    {
        flush(m_inlineStackTop);
    }

    // Get an operand, and perform a ToInt32/ToNumber conversion on it.
    Node* getToInt32(int operand)
    {
        return toInt32(get(operand));
    }

    // Perform an ES5 ToInt32 operation - returns a node of type NodeResultInt32.
    Node* toInt32(Node* node)
    {
        if (node->hasInt32Result())
            return node;

        if (node->op() == UInt32ToNumber)
            return node->child1().node();

        // Check for numeric constants boxed as JSValues.
        if (canFold(node)) {
            JSValue v = valueOfJSConstant(node);
            if (v.isInt32())
                return getJSConstant(node->constantNumber());
            if (v.isNumber())
                return getJSConstantForValue(JSValue(JSC::toInt32(v.asNumber())));
        }

        return addToGraph(ValueToInt32, node);
    }
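
    // Folding examples for the constant path above (ES5 ToInt32 semantics): a
    // boxed 4.5 becomes the int32 constant 4 and -1.5 becomes -1 (truncation
    // toward zero for in-range values), while 2^31 wraps modulo 2^32 to -2^31.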

    // NOTE: Only use this to construct constants that arise from non-speculative
    // constant folding. I.e. creating constants using this if we had constant
    // field inference would be a bad idea, since the bytecode parser's folding
    // doesn't handle liveness preservation.
    Node* getJSConstantForValue(JSValue constantValue)
    {
        unsigned constantIndex = m_codeBlock->addOrFindConstant(constantValue);
        if (constantIndex >= m_constants.size())
            m_constants.append(ConstantRecord());
        
        ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        
        return getJSConstant(constantIndex);
    }

    Node* getJSConstant(unsigned constant)
    {
        Node* node = m_constants[constant].asJSValue;
        if (node)
            return node;

        Node* result = addToGraph(JSConstant, OpInfo(constant));
        m_constants[constant].asJSValue = result;
        return result;
    }

    Node* getCallee()
    {
        return addToGraph(GetCallee);
    }

    // Helper functions to get/set the this value.
    Node* getThis()
    {
        return get(m_inlineStackTop->m_codeBlock->thisRegister());
    }
    void setThis(Node* value)
    {
        set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
    }

    // Convenience methods for checking nodes for constants.
    bool isJSConstant(Node* node)
    {
        return node->op() == JSConstant;
    }
    bool isInt32Constant(Node* node)
    {
        return isJSConstant(node) && valueOfJSConstant(node).isInt32();
    }
    // Convenience methods for getting constant values.
    JSValue valueOfJSConstant(Node* node)
    {
        ASSERT(isJSConstant(node));
        return m_codeBlock->getConstant(FirstConstantRegisterIndex + node->constantNumber());
    }
    int32_t valueOfInt32Constant(Node* node)
    {
        ASSERT(isInt32Constant(node));
        return valueOfJSConstant(node).asInt32();
    }
    
    // This method returns a JSConstant with the value 'undefined'.
    Node* constantUndefined()
    {
        // Has m_constantUndefined been set up yet?
        if (m_constantUndefined == UINT_MAX) {
            // Search the constant pool for undefined, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
                if (testMe.isUndefined())
                    return getJSConstant(m_constantUndefined);
            }

            // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(jsUndefined());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
        return getJSConstant(m_constantUndefined);
    }

    // This method returns a JSConstant with the value 'null'.
    Node* constantNull()
    {
        // Has m_constantNull been set up yet?
        if (m_constantNull == UINT_MAX) {
            // Search the constant pool for null, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
                if (testMe.isNull())
                    return getJSConstant(m_constantNull);
            }

            // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(jsNull());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
        return getJSConstant(m_constantNull);
    }

    // This method returns a DoubleConstant with the value 1.
    Node* one()
    {
        // Has m_constant1 been set up yet?
        if (m_constant1 == UINT_MAX) {
            // Search the constant pool for the value 1, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
                if (testMe.isInt32() && testMe.asInt32() == 1)
                    return getJSConstant(m_constant1);
            }

            // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(jsNumber(1));
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
        return getJSConstant(m_constant1);
    }
    
    // This method returns a DoubleConstant with the value NaN.
    Node* constantNaN()
    {
        JSValue nan = jsNaN();
        
        // Has m_constantNaN been set up yet?
        if (m_constantNaN == UINT_MAX) {
            // Search the constant pool for the value NaN, if we find it, we can just reuse this!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
                if (JSValue::encode(testMe) == JSValue::encode(nan))
                    return getJSConstant(m_constantNaN);
            }

            // Add the value nan to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            m_codeBlock->addConstant(nan);
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value nan.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
        ASSERT(std::isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
        return getJSConstant(m_constantNaN);
    }
    
    Node* cellConstant(JSCell* cell)
    {
        HashMap<JSCell*, Node*>::AddResult result = m_cellConstantNodes.add(cell, 0);
        if (result.isNewEntry)
            result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));
        
        return result.iterator->value;
    }
    
    InlineCallFrame* inlineCallFrame()
    {
        return m_inlineStackTop->m_inlineCallFrame;
    }

    CodeOrigin currentCodeOrigin()
    {
        return CodeOrigin(m_currentIndex, inlineCallFrame());
    }
    
    bool canFold(Node* node)
    {
        return node->isStronglyProvedConstantIn(inlineCallFrame());
    }

    // Our codegen for constant strict equality performs a bitwise comparison,
    // so we can only select values that have a consistent bitwise identity.
    bool isConstantForCompareStrictEq(Node* node)
    {
        if (!node->isConstant())
            return false;
        JSValue value = valueOfJSConstant(node);
        return value.isBoolean() || value.isUndefinedOrNull();
    }
    
    Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), child1, child2, child3);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info, Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, currentCodeOrigin(), info1, info2,
            Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    
    Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
    {
        Node* result = m_graph.addNode(
            SpecNone, Node::VarArg, op, currentCodeOrigin(), info1, info2,
            m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        
        m_numPassedVarArgs = 0;
        
        return result;
    }

    void addVarArgChild(Node* child)
    {
        m_graph.m_varArgChildren.append(Edge(child));
        m_numPassedVarArgs++;
    }
    
    Node* addCall(Instruction* currentInstruction, NodeType op)
    {
        SpeculatedType prediction = getPrediction();
        
        addVarArgChild(get(currentInstruction[2].u.operand));
        int argCount = currentInstruction[3].u.operand;
        if (JSStack::CallFrameHeaderSize + (unsigned)argCount > m_parameterSlots)
            m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;

        int registerOffset = currentInstruction[4].u.operand;
        int dummyThisArgument = op == Call ? 0 : 1;
        for (int i = 0 + dummyThisArgument; i < argCount; ++i)
            addVarArgChild(get(registerOffset + argumentToOperand(i)));

        Node* call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
        set(currentInstruction[1].u.operand, call);
        return call;
    }
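
    // The operand layout addCall assumes for op_call/op_construct is:
    // instruction[1] = result, instruction[2] = callee, instruction[3] = argument
    // count including 'this', instruction[4] = register offset of the callee's
    // frame. For a construct, the 'this' slot is a dummy, which is why the
    // argument loop above starts at 1 in that case.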
    
    Node* cellConstantWithStructureCheck(JSCell* object, Structure* structure)
    {
        Node* objectNode = cellConstant(object);
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
        return objectNode;
    }
    
    Node* cellConstantWithStructureCheck(JSCell* object)
    {
        return cellConstantWithStructureCheck(object, object->structure());
    }

    SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(locker, bytecodeIndex);
    }

    SpeculatedType getPrediction(unsigned bytecodeIndex)
    {
        SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);
        
        if (prediction == SpecNone) {
            // We have no information about what values this node generates. Give up
            // on executing this code, since we're likely to do more damage than good.
            addToGraph(ForceOSRExit);
        }
        
        return prediction;
    }
    
    SpeculatedType getPredictionWithoutOSRExit()
    {
        return getPredictionWithoutOSRExit(m_currentIndex);
    }
    
    SpeculatedType getPrediction()
    {
        return getPrediction(m_currentIndex);
    }
    
    ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
        return ArrayMode::fromObserved(locker, profile, action, false);
    }
    
    ArrayMode getArrayMode(ArrayProfile* profile)
    {
        return getArrayMode(profile, Array::Read);
    }
    
    ArrayMode getArrayModeConsideringSlowPath(ArrayProfile* profile, Array::Action action)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        
        profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
        
#if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
        if (m_inlineStackTop->m_profiledBlock->numberOfRareCaseProfiles())
            dataLogF("Slow case profile for bc#%u: %u\n", m_currentIndex, m_inlineStackTop->m_profiledBlock->rareCaseProfileForBytecodeOffset(m_currentIndex)->m_counter);
        dataLogF("Array profile for bc#%u: %u %s%s\n", m_currentIndex, profile->observedArrayModes(locker), profile->structureIsPolymorphic(locker) ? " (polymorphic)" : "", profile->mayInterceptIndexedAccesses(locker) ? " (may intercept)" : "");
#endif
        
        bool makeSafe =
            m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
            || profile->outOfBounds(locker);
        
        ArrayMode result = ArrayMode::fromObserved(locker, profile, action, makeSafe);
        
        return result;
    }
    
    Node* makeSafe(Node* node)
    {
        bool likelyToTakeSlowCase;
        if (!isX86() && node->op() == ArithMod)
            likelyToTakeSlowCase = false;
        else
            likelyToTakeSlowCase = m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex);
        
        if (!likelyToTakeSlowCase
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            return node;
        
        switch (node->op()) {
        case UInt32ToNumber:
        case ArithAdd:
        case ArithSub:
        case ArithNegate:
        case ValueAdd:
        case ArithMod: // for ArithMod "MayOverflow" means we tried to divide by zero, or we saw double.
            node->mergeFlags(NodeMayOverflow);
            break;
            
        case ArithMul:
            if (m_inlineStackTop->m_profiledBlock->likelyToTakeDeepestSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)) {
#if DFG_ENABLE(DEBUG_VERBOSE)
                dataLogF("Making ArithMul @%u take deepest slow case.\n", node->index());
#endif
                node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
            } else if (m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
                       || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero)) {
#if DFG_ENABLE(DEBUG_VERBOSE)
                dataLogF("Making ArithMul @%u take faster slow case.\n", node->index());
#endif
                node->mergeFlags(NodeMayNegZero);
            }
            break;
            
        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
        
        return node;
    }
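
    // For example, if the baseline JIT's profiling says an ArithAdd at this
    // bytecode index frequently took its slow case, or a previous DFG compile
    // exited here with an Overflow exit, the node gets NodeMayOverflow and
    // later phases will plan for overflow instead of speculating pure int32
    // arithmetic.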
    
    Node* makeDivSafe(Node* node)
    {
        ASSERT(node->op() == ArithDiv);
        
        // The main slow case counter for op_div in the old JIT counts only when
        // the operands are not numbers. We don't care about that since we already
        // have speculations in place that take care of that separately. We only
        // care about when the outcome of the division is not an integer, which
        // is what the special fast case counter tells us.
        
        if (!m_inlineStackTop->m_profiledBlock->couldTakeSpecialFastCase(m_currentIndex)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            return node;
        
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLogF("Making %s @%u safe at bc#%u because special fast-case counter is at %u and exit profiles say %d, %d\n", Graph::opName(node->op()), node->index(), m_currentIndex, m_inlineStackTop->m_profiledBlock->specialFastCaseProfileForBytecodeOffset(m_currentIndex)->m_counter, m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow), m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero));
#endif
        
        // FIXME: It might be possible to make this more granular. The DFG certainly can
        // distinguish between negative zero and overflow in its exit profiles.
        node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
        
        return node;
    }
    
    bool structureChainIsStillValid(bool direct, Structure* previousStructure, StructureChain* chain)
    {
        if (direct)
            return true;
        
        if (!previousStructure->storedPrototype().isNull() && previousStructure->storedPrototype().asCell()->structure() != chain->head()->get())
            return false;
        
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
            if (!(*it)->storedPrototype().isNull() && (*it)->storedPrototype().asCell()->structure() != it[1].get())
                return false;
        }
        
        return true;
    }
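
    // The chain walk above validates a cached prototype chain: for each
    // structure in the recorded chain, the prototype it stores must still have
    // the next structure in the chain. If any prototype has been reshaped since
    // the chain was recorded, the cached transition is rejected.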
    
    void buildOperandMapsIfNecessary();
    
    VM* m_vm;
    CodeBlock* m_codeBlock;
    CodeBlock* m_profiledBlock;
    Graph& m_graph;

    // The current block being generated.
    BasicBlock* m_currentBlock;
    // The bytecode index of the current instruction being generated.
    unsigned m_currentIndex;

    // We use these values during code generation, and to avoid the need for
    // special handling we make sure they are available as constants in the
    // CodeBlock's constant pool. These variables are initialized to
    // UINT_MAX, and lazily updated to hold an index into the CodeBlock's
    // constant pool, as necessary.
    unsigned m_constantUndefined;
    unsigned m_constantNull;
    unsigned m_constantNaN;
    unsigned m_constant1;
    HashMap<JSCell*, unsigned> m_cellConstants;
    HashMap<JSCell*, Node*> m_cellConstantNodes;

    // A constant in the constant pool may be represented by more than one
    // node in the graph, depending on the context in which it is being used.
    struct ConstantRecord {
        ConstantRecord()
            : asInt32(0)
            , asNumeric(0)
            , asJSValue(0)
        {
        }

        Node* asInt32;
        Node* asNumeric;
        Node* asJSValue;
    };

    // Track the index of the node whose result is the current value for every
    // register value in the bytecode - argument, local, and temporary.
    Vector<ConstantRecord, 16> m_constants;

    // The number of arguments passed to the function.
    unsigned m_numArguments;
    // The number of locals (vars + temporaries) used in the function.
    unsigned m_numLocals;
    // The set of registers we need to preserve across BasicBlock boundaries;
    // typically equal to the set of vars, but we expand this to cover all
    // temporaries that persist across blocks (due to ?:, &&, ||, etc.).
    BitVector m_preservedVars;
    // The number of slots (in units of sizeof(Register)) that we need to
    // preallocate for calls emanating from this frame. This includes the
    // size of the CallFrame, only if this is not a leaf function.  (I.e.
    // this is 0 if and only if this function is a leaf.)
    unsigned m_parameterSlots;
    // The number of var args passed to the next var arg node.
    unsigned m_numPassedVarArgs;

    HashMap<ConstantBufferKey, unsigned> m_constantBufferCache;
    
    struct InlineStackEntry {
        ByteCodeParser* m_byteCodeParser;
        
        CodeBlock* m_codeBlock;
        CodeBlock* m_profiledBlock;
        InlineCallFrame* m_inlineCallFrame;
        
        ScriptExecutable* executable() { return m_codeBlock->ownerExecutable(); }
        
        QueryableExitProfile m_exitProfile;
        
        // Remapping of identifier and constant numbers from the code block being
        // inlined (inline callee) to the code block that we're inlining into
        // (the machine code block, which is the transitive, though not necessarily
        // direct, caller).
        Vector<unsigned> m_identifierRemap;
        Vector<unsigned> m_constantRemap;
        Vector<unsigned> m_constantBufferRemap;
        Vector<unsigned> m_switchRemap;
        
        // Blocks introduced by this code block, which need successor linking.
        // May include up to one basic block that includes the continuation after
        // the callsite in the caller. These must be appended in the order that they
        // are created, but their bytecodeBegin values need not be in order as they
        // are ignored.
        Vector<UnlinkedBlock> m_unlinkedBlocks;
        
        // Potential block linking targets. Must be sorted by bytecodeBegin, and
        // cannot have two blocks that have the same bytecodeBegin. For this very
        // reason, this is not equivalent to m_unlinkedBlocks.
        Vector<BasicBlock*> m_blockLinkingTargets;
        
        // If the callsite's basic block was split into two, then this will be
        // the head of the callsite block. It needs its successors linked to the
        // m_unlinkedBlocks, but not the other way around: there's no way for
        // any blocks in m_unlinkedBlocks to jump back into this block.
        BasicBlock* m_callsiteBlockHead;
        
        // Does the callsite block head need linking? This is typically true
        // but will be false for the machine code block's inline stack entry
        // (since that one is not inlined) and for cases where an inline callee
        // did the linking for us.
        bool m_callsiteBlockHeadNeedsLinking;
        
        VirtualRegister m_returnValue;
        
        // Speculations about variable types collected from the profiled code block,
        // which are based on OSR exit profiles that past DFG compilations of this
        // code block had gathered.
        LazyOperandValueProfileParser m_lazyOperands;
        
        // Did we see any returns? We need to handle the (uncommon but necessary)
        // case where a procedure that does not return was inlined.
        bool m_didReturn;
        
        // Did we have any early returns?
        bool m_didEarlyReturn;
        
        // Pointers to the argument position trackers for this slice of code.
        Vector<ArgumentPosition*> m_argumentPositions;
        
        InlineStackEntry* m_caller;
        
        InlineStackEntry(
            ByteCodeParser*,
            CodeBlock*,
            CodeBlock* profiledBlock,
            BasicBlock* callsiteBlockHead,
            JSFunction* callee, // Null if this is a closure call.
            VirtualRegister returnValueVR,
            VirtualRegister inlineCallFrameStart,
            int argumentCountIncludingThis,
            CodeSpecializationKind);
        
        ~InlineStackEntry()
        {
            m_byteCodeParser->m_inlineStackTop = m_caller;
        }
        
        int remapOperand(int operand) const
        {
            if (!m_inlineCallFrame)
                return operand;
            
            if (operand >= FirstConstantRegisterIndex) {
                int result = m_constantRemap[operand - FirstConstantRegisterIndex];
                ASSERT(result >= FirstConstantRegisterIndex);
                return result;
            }

            ASSERT(operand != JSStack::Callee);

            return operand + m_inlineCallFrame->stackOffset;
        }
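
        // For example, with m_inlineCallFrame->stackOffset == 40, the inlinee's
        // local r3 remaps to the machine code block's r43, while constant
        // operands are redirected through m_constantRemap into the machine code
        // block's constant pool.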
    };
    
    InlineStackEntry* m_inlineStackTop;

    // Have we built operand maps? We initialize them lazily, and only when doing
    // inlining.
    bool m_haveBuiltOperandMaps;
    // Mapping between identifier names and numbers.
    BorrowedIdentifierMap m_identifierMap;
    // Mapping between values and constant numbers.
    JSValueMap m_jsValueMap;
    // Index of the empty value, or UINT_MAX if there is no mapping. This is a horrible
    // work-around for the fact that JSValueMap can't handle "empty" values.
    unsigned m_emptyJSValueIndex;
    
    Instruction* m_currentInstruction;
};

#define NEXT_OPCODE(name) \
    m_currentIndex += OPCODE_LENGTH(name); \
    continue

#define LAST_OPCODE(name) \
    m_currentIndex += OPCODE_LENGTH(name); \
    return shouldContinueParsing
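
// parseBlock() processes instructions in a loop: NEXT_OPCODE advances
// m_currentIndex past the current instruction and continues with the next one,
// while LAST_OPCODE does the same advance but ends the block, returning whether
// parsing of the enclosing code block should continue.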


void ByteCodeParser::handleCall(Instruction* currentInstruction, NodeType op, CodeSpecializationKind kind)
{
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    
    Node* callTarget = get(currentInstruction[2].u.operand);
    
    CallLinkStatus callLinkStatus;

    if (m_graph.isConstant(callTarget))
        callLinkStatus = CallLinkStatus(m_graph.valueOfJSConstant(callTarget)).setIsProved(true);
    else {
        callLinkStatus = CallLinkStatus::computeFor(m_inlineStackTop->m_profiledBlock, m_currentIndex);
        callLinkStatus.setHasBadFunctionExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadFunction));
        callLinkStatus.setHasBadCacheExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
        callLinkStatus.setHasBadExecutableExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadExecutable));
    }
    
#if DFG_ENABLE(DEBUG_VERBOSE)
    dataLog("For call at bc#", m_currentIndex, ": ", callLinkStatus, "\n");
#endif
    
    if (!callLinkStatus.canOptimize()) {
        // Oddly, this conflates calls that haven't executed with calls that behaved sufficiently polymorphically
        // that we cannot optimize them.
        
        addCall(currentInstruction, op);
        return;
    }
    
    int argumentCountIncludingThis = currentInstruction[3].u.operand;
    int registerOffset = currentInstruction[4].u.operand;

    int resultOperand = currentInstruction[1].u.operand;
    unsigned nextOffset = m_currentIndex + OPCODE_LENGTH(op_call);
    SpeculatedType prediction = getPrediction();

    if (InternalFunction* function = callLinkStatus.internalFunction()) {
        if (handleConstantInternalFunction(resultOperand, function, registerOffset, argumentCountIncludingThis, prediction, kind)) {
            // This phantoming has to be *after* the code for the intrinsic, to signify that
            // the inputs must be kept alive whatever exits the intrinsic may do.
            addToGraph(Phantom, callTarget);
            emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
            return;
        }
        
        // Can only handle this using the generic call handler.
        addCall(currentInstruction, op);
        return;
    }
        
    Intrinsic intrinsic = callLinkStatus.intrinsicFor(kind);
    if (intrinsic != NoIntrinsic) {
        emitFunctionChecks(callLinkStatus, callTarget, registerOffset, kind);
            
        if (handleIntrinsic(resultOperand, intrinsic, registerOffset, argumentCountIncludingThis, prediction)) {
            // This phantoming has to be *after* the code for the intrinsic, to signify that
            // the inputs must be kept alive whatever exits the intrinsic may do.
            addToGraph(Phantom, callTarget);
            emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
            if (m_graph.compilation())
                m_graph.compilation()->noticeInlinedCall();
            return;
        }
    } else if (handleInlining(callTarget, resultOperand, callLinkStatus, registerOffset, argumentCountIncludingThis, nextOffset, kind)) {
        if (m_graph.compilation())
            m_graph.compilation()->noticeInlinedCall();
        return;
    }
    
    addCall(currentInstruction, op);
}
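
// To summarize handleCall's strategy: it first tries to fold a call to a known
// InternalFunction (e.g. the Array or String constructor), then to emit a known
// intrinsic inline, then to inline the callee as a JS function; only when all
// of these fail does it plant a generic call node via addCall().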

void ByteCodeParser::emitFunctionChecks(const CallLinkStatus& callLinkStatus, Node* callTarget, int registerOffset, CodeSpecializationKind kind)
{
    Node* thisArgument;
    if (kind == CodeForCall)
        thisArgument = get(registerOffset + argumentToOperand(0));
    else
        thisArgument = 0;

    if (callLinkStatus.isProved()) {
        addToGraph(Phantom, callTarget, thisArgument);
        return;
    }
    
    ASSERT(callLinkStatus.canOptimize());
    
    if (JSFunction* function = callLinkStatus.function())
        addToGraph(CheckFunction, OpInfo(function), callTarget, thisArgument);
    else {
        ASSERT(callLinkStatus.structure());
        ASSERT(callLinkStatus.executable());
        
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(callLinkStatus.structure())), callTarget);
        addToGraph(CheckExecutable, OpInfo(callLinkStatus.executable()), callTarget, thisArgument);
    }
}

void ByteCodeParser::emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind kind)
{
    for (int i = kind == CodeForCall ? 0 : 1; i < argumentCountIncludingThis; ++i)
        addToGraph(Phantom, get(registerOffset + argumentToOperand(i)));
}

bool ByteCodeParser::handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus& callLinkStatus, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind kind)
{
    // First, the really simple checks: do we have an actual JS function?
    if (!callLinkStatus.executable())
        return false;
    if (callLinkStatus.executable()->isHostFunction())
        return false;
    
    FunctionExecutable* executable = jsCast<FunctionExecutable*>(callLinkStatus.executable());
    
    // Does the number of arguments we're passing match the arity of the target? We currently
    // inline only if the number of arguments passed is greater than or equal to the number
    // of arguments expected.
    if (static_cast<int>(executable->parameterCount()) + 1 > argumentCountIncludingThis)
        return false;
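    
    // For example, a callee declared with two parameters has parameterCount() == 2,
    // so it is only inlined when argumentCountIncludingThis >= 3 (the two arguments
    // plus 'this'); calls that would require arity fixup are not inlined.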
1229     
1230     // Have we exceeded inline stack depth, or are we trying to inline a recursive call?
1231     // If either of these are detected, then don't inline.
1232     unsigned depth = 0;
1233     for (InlineStackEntry* entry = m_inlineStackTop; entry; entry = entry->m_caller) {
1234         ++depth;
1235         if (depth >= Options::maximumInliningDepth())
1236             return false; // Depth exceeded.
1237         
1238         if (entry->executable() == executable)
1239             return false; // Recursion detected.
1240     }
1241     
1242     // Do we have a code block, and does the code block's size match the heuristics/requirements for
1243     // being an inline candidate? We might not have a code block if code was thrown away or if we
1244     // simply hadn't actually made this call yet. We could still theoretically attempt to inline it
1245     // if we had a static proof of what was being called; this might happen for example if you call a
1246     // global function, where watchpointing gives us static information. Overall, it's a rare case
1247     // because we expect that any hot callees would have already been compiled.
1248     CodeBlock* codeBlock = executable->baselineCodeBlockFor(kind);
1249     if (!codeBlock)
1250         return false;
1251     if (!canInlineFunctionFor(codeBlock, kind, callLinkStatus.isClosureCall()))
1252         return false;
1253     
1254 #if DFG_ENABLE(DEBUG_VERBOSE)
1255     dataLogF("Inlining executable %p.\n", executable);
1256 #endif
1257     
1258     // Now we know without a doubt that we are committed to inlining. So begin the process
1259     // by checking the callee (if necessary) and making sure that arguments and the callee
1260     // are flushed.
1261     emitFunctionChecks(callLinkStatus, callTargetNode, registerOffset, kind);
1262     
1263     // FIXME: Don't flush constants!
1264     
1265     int inlineCallFrameStart = m_inlineStackTop->remapOperand(registerOffset) - JSStack::CallFrameHeaderSize;
1266     
1267     // Make sure that the area used by the call frame is reserved.
1268     for (int arg = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numVars; arg-- > inlineCallFrameStart;)
1269         m_preservedVars.set(arg);
    
    // Make sure that we have enough locals.
    unsigned newNumLocals = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters;
    if (newNumLocals > m_numLocals) {
        m_numLocals = newNumLocals;
        for (size_t i = 0; i < m_graph.numBlocks(); ++i)
            m_graph.block(i)->ensureLocals(newNumLocals);
    }
    
    size_t argumentPositionStart = m_graph.m_argumentPositions.size();

    InlineStackEntry inlineStackEntry(
        this, codeBlock, codeBlock, m_graph.lastBlock(), callLinkStatus.function(),
        (VirtualRegister)m_inlineStackTop->remapOperand(resultOperand),
        (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind);
    
    // This is where the actual inlining really happens.
    unsigned oldIndex = m_currentIndex;
    m_currentIndex = 0;

    addToGraph(InlineStart, OpInfo(argumentPositionStart));
    if (callLinkStatus.isClosureCall()) {
        addToGraph(SetCallee, callTargetNode);
        addToGraph(SetMyScope, addToGraph(GetScope, callTargetNode));
    }
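    // For a closure call the callee is not a compile-time constant, so the callee and
    // its scope have to be recorded in the inline frame explicitly before the body is
    // parsed.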
    
    parseCodeBlock();
    
    m_currentIndex = oldIndex;
    
    // If the inlined code created some new basic blocks, then we have linking to do.
    if (inlineStackEntry.m_callsiteBlockHead != m_graph.lastBlock()) {
        
        ASSERT(!inlineStackEntry.m_unlinkedBlocks.isEmpty());
        if (inlineStackEntry.m_callsiteBlockHeadNeedsLinking)
            linkBlock(inlineStackEntry.m_callsiteBlockHead, inlineStackEntry.m_blockLinkingTargets);
        else
            ASSERT(inlineStackEntry.m_callsiteBlockHead->isLinked);
        
        // It's possible that the callsite block head is not owned by the caller.
        if (!inlineStackEntry.m_caller->m_unlinkedBlocks.isEmpty()) {
            // It's definitely owned by the caller, because the caller created new blocks.
            // Assert that this all adds up.
            ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_block == inlineStackEntry.m_callsiteBlockHead);
            ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking);
            inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking = false;
        } else {
            // It's definitely not owned by the caller. Tell the caller that it does not
            // need to link its callsite block head, because we did it on its behalf.
            ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking);
            ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHead == inlineStackEntry.m_callsiteBlockHead);
            inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking = false;
        }
        
        linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
    } else
        ASSERT(inlineStackEntry.m_unlinkedBlocks.isEmpty());
    
    BasicBlock* lastBlock = m_graph.lastBlock();
    // If there was a return, but no early returns, then we're done. We allow parsing of
    // the caller to continue in whatever basic block we're in right now.
    if (!inlineStackEntry.m_didEarlyReturn && inlineStackEntry.m_didReturn) {
        ASSERT(lastBlock->isEmpty() || !lastBlock->last()->isTerminal());
        
        // If we created new blocks then the last block needs linking, but in the
        // caller. It doesn't need to be linked to, but it needs outgoing links.
        if (!inlineStackEntry.m_unlinkedBlocks.isEmpty()) {
#if DFG_ENABLE(DEBUG_VERBOSE)
            dataLogF("Reascribing bytecode index of block %p from bc#%u to bc#%u (inline return case).\n", lastBlock, lastBlock->bytecodeBegin, m_currentIndex);
#endif
            // For debugging purposes, set the bytecodeBegin. Note that this doesn't matter
            // for release builds because this block will never serve as a potential target
            // in the linker's binary search.
            lastBlock->bytecodeBegin = m_currentIndex;
            m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.lastBlock()));
        }
        
        m_currentBlock = m_graph.lastBlock();
        
#if DFG_ENABLE(DEBUG_VERBOSE)
        dataLogF("Done inlining executable %p, continuing code generation at epilogue.\n", executable);
#endif
        return true;
    }
    
    // If we get to this point then all blocks must end in some sort of terminal.
    ASSERT(lastBlock->last()->isTerminal());
    
    // Need to create a new basic block for the continuation at the caller.
    RefPtr<BasicBlock> block = adoptRef(new BasicBlock(nextOffset, m_numArguments, m_numLocals));

#if DFG_ENABLE(DEBUG_VERBOSE)
    dataLogF("Creating inline epilogue basic block %p, #%zu for %p bc#%u at inline depth %u.\n", block.get(), m_graph.numBlocks(), m_inlineStackTop->executable(), m_currentIndex, CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()));
#endif

    // Link the early returns to the basic block we're about to create.
    for (size_t i = 0; i < inlineStackEntry.m_unlinkedBlocks.size(); ++i) {
        if (!inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking)
            continue;
        BasicBlock* blockToLink = inlineStackEntry.m_unlinkedBlocks[i].m_block;
        ASSERT(!blockToLink->isLinked);
        Node* node = blockToLink->last();
        ASSERT(node->op() == Jump);
        ASSERT(node->takenBlock() == 0);
        node->setTakenBlock(block.get());
        inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking = false;
#if !ASSERT_DISABLED
        blockToLink->isLinked = true;
#endif
    }
    
    m_currentBlock = block.get();
    ASSERT(m_inlineStackTop->m_caller->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_caller->m_blockLinkingTargets.last()->bytecodeBegin < nextOffset);
    m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(block.get()));
    m_inlineStackTop->m_caller->m_blockLinkingTargets.append(block.get());
    m_graph.appendBlock(block);
    prepareToParseBlock();
    
    // At this point we return and continue to generate code for the caller, but
    // in the new basic block.
#if DFG_ENABLE(DEBUG_VERBOSE)
    dataLogF("Done inlining executable %p, continuing code generation in new block.\n", executable);
#endif
    return true;
}

bool ByteCodeParser::handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis)
{
    if (argumentCountIncludingThis == 1) { // Math.min() / Math.max() with no arguments.
        // Per the spec, Math.min() with no arguments evaluates to +Infinity and
        // Math.max() to -Infinity (not NaN): each argument can only pull the result
        // toward itself.
        double result = op == ArithMin ? std::numeric_limits<double>::infinity() : -std::numeric_limits<double>::infinity();
        set(resultOperand, getJSConstantForValue(jsNumber(result)));
        return true;
    }
    
    if (argumentCountIncludingThis == 2) { // Math.min(x)
        Node* result = get(registerOffset + argumentToOperand(1));
        addToGraph(Phantom, Edge(result, NumberUse));
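        // The Phantom with a NumberUse edge speculates that x is already a number (OSR
        // exiting if it is not), so returning it unconverted matches the required
        // Math.min(x) === ToNumber(x) semantics along this fast path.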
        set(resultOperand, result);
        return true;
    }
    
    if (argumentCountIncludingThis == 3) { // Math.min(x, y)
        set(resultOperand, addToGraph(op, get(registerOffset + argumentToOperand(1)), get(registerOffset + argumentToOperand(2))));
        return true;
    }
    
    // Don't handle >=3 arguments for now.
    return false;
}

// FIXME: We dead-code-eliminate unused Math intrinsics, but that's invalid because
// they need to perform the ToNumber conversion, which can have side-effects.
bool ByteCodeParser::handleIntrinsic(int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction)
{
    switch (intrinsic) {
    case AbsIntrinsic: {
        if (argumentCountIncludingThis == 1) { // Math.abs()
            set(resultOperand, constantNaN());
            return true;
        }

        if (!MacroAssembler::supportsFloatingPointAbs())
            return false;

        Node* node = addToGraph(ArithAbs, get(registerOffset + argumentToOperand(1)));
        if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
            node->mergeFlags(NodeMayOverflow);
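        // The only int32 whose absolute value overflows is INT32_MIN (2^31 does not fit
        // in an int32), so a previously-seen Overflow exit here means this node must not
        // assume an overflow-free int32 result.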
        set(resultOperand, node);
        return true;
    }

    case MinIntrinsic:
        return handleMinMax(resultOperand, ArithMin, registerOffset, argumentCountIncludingThis);
        
    case MaxIntrinsic:
        return handleMinMax(resultOperand, ArithMax, registerOffset, argumentCountIncludingThis);
        
    case SqrtIntrinsic: {
        if (argumentCountIncludingThis == 1) { // Math.sqrt()
            set(resultOperand, constantNaN());
            return true;
        }
        
        if (!MacroAssembler::supportsFloatingPointSqrt())
            return false;

        set(resultOperand, addToGraph(ArithSqrt, get(registerOffset + argumentToOperand(1))));
        return true;
    }
        
    case ArrayPushIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;
        
        ArrayMode arrayMode = getArrayMode(m_currentInstruction[6].u.arrayProfile);
        if (!arrayMode.isJSArray())
            return false;
        switch (arrayMode.type()) {
        case Array::Undecided:
        case Array::Int32:
        case Array::Double:
        case Array::Contiguous:
        case Array::ArrayStorage: {
            Node* arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
            set(resultOperand, arrayPush);
            
            return true;
        }
            
        default:
            return false;
        }
    }
        
    case ArrayPopIntrinsic: {
        if (argumentCountIncludingThis != 1)
            return false;
        
        ArrayMode arrayMode = getArrayMode(m_currentInstruction[6].u.arrayProfile);
        if (!arrayMode.isJSArray())
            return false;
        switch (arrayMode.type()) {
        case Array::Int32:
        case Array::Double:
        case Array::Contiguous:
        case Array::ArrayStorage: {
            Node* arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)));
            set(resultOperand, arrayPop);
            return true;
        }
            
        default:
            return false;
        }
    }

    case CharCodeAtIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;

        int thisOperand = registerOffset + argumentToOperand(0);
        int indexOperand = registerOffset + argumentToOperand(1);
        Node* charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));

        set(resultOperand, charCode);
        return true;
    }

    case CharAtIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;

        int thisOperand = registerOffset + argumentToOperand(0);
        int indexOperand = registerOffset + argumentToOperand(1);
        Node* charCode = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));

        set(resultOperand, charCode);
        return true;
    }
    case FromCharCodeIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;

        int indexOperand = registerOffset + argumentToOperand(1);
        Node* charCode = addToGraph(StringFromCharCode, getToInt32(indexOperand));

        set(resultOperand, charCode);

        return true;
    }

    case RegExpExecIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;
        
        Node* regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
        set(resultOperand, regExpExec);
        
        return true;
    }
        
    case RegExpTestIntrinsic: {
        if (argumentCountIncludingThis != 2)
            return false;
        
        Node* regExpExec = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
        set(resultOperand, regExpExec);
        
        return true;
    }

    case IMulIntrinsic: {
        if (argumentCountIncludingThis != 3)
            return false;
        int leftOperand = registerOffset + argumentToOperand(1);
        int rightOperand = registerOffset + argumentToOperand(2);
        Node* left = getToInt32(leftOperand);
        Node* right = getToInt32(rightOperand);
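        // Math.imul truncates both operands to int32 and multiplies modulo 2^32; e.g.
        // Math.imul(0xffffffff, 5) === -5, because ToInt32(0xffffffff) === -1.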
        set(resultOperand, addToGraph(ArithIMul, left, right));
        return true;
    }
        
    default:
        return false;
    }
}

bool ByteCodeParser::handleConstantInternalFunction(
    int resultOperand, InternalFunction* function, int registerOffset,
    int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind kind)
{
    // If we ever find that we have a lot of internal functions that we specialize for,
    // then we should probably have some sort of hashtable dispatch, or maybe even
    // dispatch straight through the MethodTable of the InternalFunction. But for now,
    // it seems that this case is hit infrequently enough, and the number of functions
    // we know about is small enough, that having just a linear cascade of if statements
    // is good enough.
    
    UNUSED_PARAM(prediction); // Remove this once we do more things.
    
    if (function->classInfo() == &ArrayConstructor::s_info) {
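        // Array(len) with a single (numeric) argument is the array-of-length-len form,
        // while Array(x, y, ...) builds an array of its arguments; hence the special
        // case for argumentCountIncludingThis == 2 below.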
        if (argumentCountIncludingThis == 2) {
            set(resultOperand,
                addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(registerOffset + argumentToOperand(1))));
            return true;
        }
        
        for (int i = 1; i < argumentCountIncludingThis; ++i)
            addVarArgChild(get(registerOffset + argumentToOperand(i)));
        set(resultOperand,
            addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
        return true;
    } else if (function->classInfo() == &StringConstructor::s_info) {
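        // String() yields the empty string and String(x) yields ToString(x); for a
        // 'new String(...)' construct the primitive result is additionally boxed in a
        // StringObject below.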
        Node* result;
        
        if (argumentCountIncludingThis <= 1)
            result = cellConstant(m_vm->smallStrings.emptyString());
        else
            result = addToGraph(ToString, get(registerOffset + argumentToOperand(1)));
        
        if (kind == CodeForConstruct)
            result = addToGraph(NewStringObject, OpInfo(function->globalObject()->stringObjectStructure()), result);
        
        set(resultOperand, result);
        return true;
    }
    
    return false;
}

Node* ByteCodeParser::handleGetByOffset(SpeculatedType prediction, Node* base, unsigned identifierNumber, PropertyOffset offset)
{
    Node* propertyStorage;
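    // Inline offsets live directly in the object cell, so the object itself serves as
    // the property storage; out-of-line offsets live in the separately allocated
    // butterfly, which has to be loaded first.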
    if (isInlineOffset(offset))
        propertyStorage = base;
    else
        propertyStorage = addToGraph(GetButterfly, base);
    Node* getByOffset = addToGraph(GetByOffset, OpInfo(m_graph.m_storageAccessData.size()), OpInfo(prediction), propertyStorage, base);

    StorageAccessData storageAccessData;
    storageAccessData.offset = offset;
    storageAccessData.identifierNumber = identifierNumber;
    m_graph.m_storageAccessData.append(storageAccessData);

    return getByOffset;
}

void ByteCodeParser::handleGetByOffset(
    int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
    PropertyOffset offset)
{
    set(destinationOperand, handleGetByOffset(prediction, base, identifierNumber, offset));
}

Node* ByteCodeParser::handlePutByOffset(Node* base, unsigned identifier, PropertyOffset offset, Node* value)
{
    Node* propertyStorage;
    if (isInlineOffset(offset))
        propertyStorage = base;
    else
        propertyStorage = addToGraph(GetButterfly, base);
    Node* result = addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, base, value);
    
    StorageAccessData storageAccessData;
    storageAccessData.offset = offset;
    storageAccessData.identifierNumber = identifier;
    m_graph.m_storageAccessData.append(storageAccessData);

    return result;
}

void ByteCodeParser::handleGetById(
    int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
    const GetByIdStatus& getByIdStatus)
{
    if (!getByIdStatus.isSimple()
        || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
        || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache)) {
        set(destinationOperand,
            addToGraph(
                getByIdStatus.makesCalls() ? GetByIdFlush : GetById,
                OpInfo(identifierNumber), OpInfo(prediction), base));
        return;
    }
    
    ASSERT(getByIdStatus.structureSet().size());
    
    // The implementation of GetByOffset does not know to terminate speculative
    // execution if it doesn't have a prediction, so we do it manually.
    if (prediction == SpecNone)
        addToGraph(ForceOSRExit);
    else if (m_graph.compilation())
        m_graph.compilation()->noticeInlinedGetById();
    
    Node* originalBaseForBaselineJIT = base;
    
    addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(getByIdStatus.structureSet())), base);
    
    if (getByIdStatus.chain()) {
        m_graph.chains().addLazily(getByIdStatus.chain());
        Structure* currentStructure = getByIdStatus.structureSet().singletonStructure();
        JSObject* currentObject = 0;
        for (unsigned i = 0; i < getByIdStatus.chain()->size(); ++i) {
            currentObject = asObject(currentStructure->prototypeForLookup(m_inlineStackTop->m_codeBlock));
            currentStructure = getByIdStatus.chain()->at(i);
            base = cellConstantWithStructureCheck(currentObject, currentStructure);
        }
    }
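    // For example, when o.foo is actually found on o's prototype, the loop above emits
    // a structure-checked constant for each prototype link, and the load further down
    // then reads the property off the final prototype in the chain.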
    
    // Unless we want bugs like https://bugs.webkit.org/show_bug.cgi?id=88783, we need to
    // ensure that the base of the original get_by_id is kept alive until we're done with
    // all of the speculations. We only insert the Phantom if there had been a CheckStructure
    // on something other than the base following the CheckStructure on base, or if the
    // access was compiled to a WeakJSConstant specific value, in which case we might not
    // have any explicit use of the base at all.
    if (getByIdStatus.specificValue() || originalBaseForBaselineJIT != base)
        addToGraph(Phantom, originalBaseForBaselineJIT);
    
    if (getByIdStatus.specificValue()) {
        ASSERT(getByIdStatus.specificValue().isCell());
        
        set(destinationOperand, cellConstant(getByIdStatus.specificValue().asCell()));
        return;
    }
    
    handleGetByOffset(
        destinationOperand, prediction, base, identifierNumber, getByIdStatus.offset());
}

void ByteCodeParser::prepareToParseBlock()
{
    for (unsigned i = 0; i < m_constants.size(); ++i)
        m_constants[i] = ConstantRecord();
    m_cellConstantNodes.clear();
}

Node* ByteCodeParser::getScope(bool skipTop, unsigned skipCount)
{
    Node* localBase;
    if (inlineCallFrame() && !inlineCallFrame()->isClosureCall()) {
        ASSERT(inlineCallFrame()->callee);
        localBase = cellConstant(inlineCallFrame()->callee->scope());
    } else
        localBase = addToGraph(GetMyScope);
    if (skipTop) {
        ASSERT(!inlineCallFrame());
        localBase = addToGraph(SkipTopScope, localBase);
    }
    for (unsigned n = skipCount; n--;)
        localBase = addToGraph(SkipScope, localBase);
    return localBase;
}

bool ByteCodeParser::parseBlock(unsigned limit)
{
    bool shouldContinueParsing = true;

    Interpreter* interpreter = m_vm->interpreter;
    Instruction* instructionsBegin = m_inlineStackTop->m_codeBlock->instructions().begin();
    unsigned blockBegin = m_currentIndex;
    
    // If we are the first basic block, introduce markers for arguments. This allows
    // us to track if a use of an argument may use the actual argument passed, as
    // opposed to using a value we set explicitly.
    if (m_currentBlock == m_graph.block(0) && !inlineCallFrame()) {
        m_graph.m_arguments.resize(m_numArguments);
        for (unsigned argument = 0; argument < m_numArguments; ++argument) {
            VariableAccessData* variable = newVariableAccessData(
                argumentToOperand(argument), m_codeBlock->isCaptured(argumentToOperand(argument)));
            variable->mergeStructureCheckHoistingFailed(
                m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
            variable->mergeCheckArrayHoistingFailed(
                m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
            
            Node* setArgument = addToGraph(SetArgument, OpInfo(variable));
            m_graph.m_arguments[argument] = setArgument;
            m_currentBlock->variablesAtTail.setArgumentFirstTime(argument, setArgument);
        }
    }

    while (true) {
        // Don't extend over jump destinations.
        if (m_currentIndex == limit) {
            // Ordinarily we want to plant a jump. But refuse to do this if the block is
            // empty. This is a special case for inlining, which might otherwise create
            // some empty blocks in some cases. When parseBlock() returns with an empty
            // block, it will get repurposed instead of creating a new one. Note that this
            // logic relies on every bytecode resulting in one or more nodes, which would
            // be true anyway except for op_loop_hint, which emits a Phantom to force this
            // to be true.
            if (!m_currentBlock->isEmpty())
                addToGraph(Jump, OpInfo(m_currentIndex));
            else {
#if DFG_ENABLE(DEBUG_VERBOSE)
                dataLogF("Refusing to plant jump at limit %u because block %p is empty.\n", limit, m_currentBlock);
#endif
            }
            return shouldContinueParsing;
        }
        
        // Switch on the current bytecode opcode.
        Instruction* currentInstruction = instructionsBegin + m_currentIndex;
        m_currentInstruction = currentInstruction; // Some methods want to use this, and we'd rather not thread it through calls.
        OpcodeID opcodeID = interpreter->getOpcodeID(currentInstruction->u.opcode);
        
        if (m_graph.compilation()) {
            addToGraph(CountExecution, OpInfo(m_graph.compilation()->executionCounterFor(
                Profiler::OriginStack(*m_vm->m_perBytecodeProfiler, m_codeBlock, currentCodeOrigin()))));
        }
        
        switch (opcodeID) {

        // === Function entry opcodes ===

        case op_enter:
            // Initialize all locals to undefined.
            for (int i = 0; i < m_inlineStackTop->m_codeBlock->m_numVars; ++i)
                set(i, constantUndefined(), SetOnEntry);
            NEXT_OPCODE(op_enter);

        case op_to_this: {
            Node* op1 = getThis();
            if (op1->op() != ToThis) {
                ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
                ValueProfile* profile =
                    m_inlineStackTop->m_profiledBlock->valueProfileForBytecodeOffset(m_currentIndex);
                profile->computeUpdatedPrediction(locker);
#if DFG_ENABLE(DEBUG_VERBOSE)
                dataLogF("[bc#%u]: profile %p: ", m_currentIndex, profile);
                profile->dump(WTF::dataFile());
                dataLogF("\n");
#endif
                if (profile->m_singletonValueIsTop
                    || !profile->m_singletonValue
                    || !profile->m_singletonValue.isCell()
                    || profile->m_singletonValue.asCell()->classInfo() != &Structure::s_info)
                    setThis(addToGraph(ToThis, op1));
                else {
                    addToGraph(
                        CheckStructure,
                        OpInfo(m_graph.addStructureSet(jsCast<Structure*>(profile->m_singletonValue.asCell()))),
                        op1);
                }
            }
            NEXT_OPCODE(op_to_this);
        }

        case op_create_this: {
            int calleeOperand = currentInstruction[2].u.operand;
            Node* callee = get(calleeOperand);
            bool alreadyEmitted = false;
            if (callee->op() == WeakJSConstant) {
                JSCell* cell = callee->weakConstant();
                ASSERT(cell->inherits(&JSFunction::s_info));
                
                JSFunction* function = jsCast<JSFunction*>(cell);
                ObjectAllocationProfile* allocationProfile = function->tryGetAllocationProfile();
                if (allocationProfile) {
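                    // The watchpoint below invalidates this code if the function's
                    // allocation profile is cleared (e.g. its .prototype is replaced),
                    // which is what allows baking the profiled structure into a plain
                    // NewObject here.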
                    addToGraph(AllocationProfileWatchpoint, OpInfo(function));
                    // The callee is still live up to this point.
                    addToGraph(Phantom, callee);
                    set(currentInstruction[1].u.operand,
                        addToGraph(NewObject, OpInfo(allocationProfile->structure())));
                    alreadyEmitted = true;
                }
            }
            if (!alreadyEmitted)
                set(currentInstruction[1].u.operand,
                    addToGraph(CreateThis, OpInfo(currentInstruction[3].u.operand), callee));
            NEXT_OPCODE(op_create_this);
        }

        case op_new_object: {
            set(currentInstruction[1].u.operand,
                addToGraph(NewObject,
                    OpInfo(currentInstruction[3].u.objectAllocationProfile->structure())));
            NEXT_OPCODE(op_new_object);
        }
            
        case op_new_array: {
            int startOperand = currentInstruction[2].u.operand;
            int numOperands = currentInstruction[3].u.operand;
            ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
            for (int operandIdx = startOperand; operandIdx < startOperand + numOperands; ++operandIdx)
                addVarArgChild(get(operandIdx));
            set(currentInstruction[1].u.operand, addToGraph(Node::VarArg, NewArray, OpInfo(profile->selectIndexingType()), OpInfo(0)));
            NEXT_OPCODE(op_new_array);
        }
            
        case op_new_array_with_size: {
            int lengthOperand = currentInstruction[2].u.operand;
            ArrayAllocationProfile* profile = currentInstruction[3].u.arrayAllocationProfile;
            set(currentInstruction[1].u.operand, addToGraph(NewArrayWithSize, OpInfo(profile->selectIndexingType()), get(lengthOperand)));
            NEXT_OPCODE(op_new_array_with_size);
        }
            
        case op_new_array_buffer: {
            int startConstant = currentInstruction[2].u.operand;
            int numConstants = currentInstruction[3].u.operand;
            ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
            NewArrayBufferData data;
            data.startConstant = m_inlineStackTop->m_constantBufferRemap[startConstant];
            data.numConstants = numConstants;
            data.indexingType = profile->selectIndexingType();

            // If this statement has never executed, we'll have the wrong indexing type in the profile.
            for (int i = 0; i < numConstants; ++i) {
                data.indexingType =
                    leastUpperBoundOfIndexingTypeAndValue(
                        data.indexingType,
                        m_codeBlock->constantBuffer(data.startConstant)[i]);
            }
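            // For example, a constant buffer containing doubles (say [0.5, 1.5]) must
            // widen a stale Int32 profile at least to double storage before the indexing
            // type is baked into the graph.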
            
            m_graph.m_newArrayBufferData.append(data);
            set(currentInstruction[1].u.operand, addToGraph(NewArrayBuffer, OpInfo(&m_graph.m_newArrayBufferData.last())));
            NEXT_OPCODE(op_new_array_buffer);
        }
            
        case op_new_regexp: {
            set(currentInstruction[1].u.operand, addToGraph(NewRegexp, OpInfo(currentInstruction[2].u.operand)));
            NEXT_OPCODE(op_new_regexp);
        }
            
        case op_get_callee: {
            ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
            ValueProfile* profile = currentInstruction[2].u.profile;
            profile->computeUpdatedPrediction(locker);
            if (profile->m_singletonValueIsTop
                || !profile->m_singletonValue
                || !profile->m_singletonValue.isCell())
                set(currentInstruction[1].u.operand, get(JSStack::Callee));
            else {
                ASSERT(profile->m_singletonValue.asCell()->inherits(&JSFunction::s_info));
                Node* actualCallee = get(JSStack::Callee);
                addToGraph(CheckFunction, OpInfo(profile->m_singletonValue.asCell()), actualCallee);
                set(currentInstruction[1].u.operand, addToGraph(WeakJSConstant, OpInfo(profile->m_singletonValue.asCell())));
            }
            NEXT_OPCODE(op_get_callee);
        }

        // === Bitwise operations ===

        case op_bitand: {
            Node* op1 = getToInt32(currentInstruction[2].u.operand);
            Node* op2 = getToInt32(currentInstruction[3].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(BitAnd, op1, op2));
            NEXT_OPCODE(op_bitand);
        }

        case op_bitor: {
            Node* op1 = getToInt32(currentInstruction[2].u.operand);
            Node* op2 = getToInt32(currentInstruction[3].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(BitOr, op1, op2));
            NEXT_OPCODE(op_bitor);
        }

        case op_bitxor: {
            Node* op1 = getToInt32(currentInstruction[2].u.operand);
            Node* op2 = getToInt32(currentInstruction[3].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(BitXor, op1, op2));
            NEXT_OPCODE(op_bitxor);
        }

        case op_rshift: {
            Node* op1 = getToInt32(currentInstruction[2].u.operand);
            Node* op2 = getToInt32(currentInstruction[3].u.operand);
            Node* result;
            // Optimize out shifts by zero.
            if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
                result = op1;
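                // This is sound because op1 was already forced to int32 by getToInt32(),
                // so a shift count of zero (mod 32) leaves it unchanged.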
            else
                result = addToGraph(BitRShift, op1, op2);
            set(currentInstruction[1].u.operand, result);
            NEXT_OPCODE(op_rshift);
        }

        case op_lshift: {
            Node* op1 = getToInt32(currentInstruction[2].u.operand);
            Node* op2 = getToInt32(currentInstruction[3].u.operand);
            Node* result;
            // Optimize out shifts by zero.
            if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
                result = op1;
            else
                result = addToGraph(BitLShift, op1, op2);
            set(currentInstruction[1].u.operand, result);
            NEXT_OPCODE(op_lshift);
        }

        case op_urshift: {
            Node* op1 = getToInt32(currentInstruction[2].u.operand);
            Node* op2 = getToInt32(currentInstruction[3].u.operand);
            Node* result;
            // The result of a zero-extending right shift is treated as an unsigned value.
            // This means that if the top bit is set, the result is not in the int32 range,
            // and as such must be stored as a double. If the shift amount is a constant,
            // we may be able to optimize.
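            // For example, (-1 >>> 0) evaluates to 4294967295, which does not fit in an
            // int32, while (-1 >>> 1) evaluates to 2147483647, which does.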
            if (isInt32Constant(op2)) {
                // If we know we are shifting by a non-zero amount, then since the operation
                // zero fills we know the top bit of the result must be zero, and as such the
                // result must be within the int32 range. Conversely, if this is a shift by
                // zero, then the result may be changed by the conversion to unsigned, but it
                // is not necessary to perform the shift!
                if (valueOfInt32Constant(op2) & 0x1f)
                    result = addToGraph(BitURShift, op1, op2);
                else
                    result = makeSafe(addToGraph(UInt32ToNumber, op1));
            } else {
                // Cannot optimize at this stage; shift & potentially rebox as a double.
                result = addToGraph(BitURShift, op1, op2);
                result = makeSafe(addToGraph(UInt32ToNumber, result));
            }
            set(currentInstruction[1].u.operand, result);
            NEXT_OPCODE(op_urshift);
        }

        // === Increment/Decrement opcodes ===

        case op_inc: {
            unsigned srcDst = currentInstruction[1].u.operand;
            Node* op = get(srcDst);
            set(srcDst, makeSafe(addToGraph(ArithAdd, op, one())));
            NEXT_OPCODE(op_inc);
        }

        case op_dec: {
            unsigned srcDst = currentInstruction[1].u.operand;
            Node* op = get(srcDst);
            set(srcDst, makeSafe(addToGraph(ArithSub, op, one())));
            NEXT_OPCODE(op_dec);
        }

        // === Arithmetic operations ===

        case op_add: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            if (op1->hasNumberResult() && op2->hasNumberResult())
                set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithAdd, op1, op2)));
            else
                set(currentInstruction[1].u.operand, makeSafe(addToGraph(ValueAdd, op1, op2)));
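            // ArithAdd is purely numeric; ValueAdd must also cover the generic JS '+'
            // semantics, e.g. string concatenation and ToPrimitive on objects.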
            NEXT_OPCODE(op_add);
        }

        case op_sub: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithSub, op1, op2)));
            NEXT_OPCODE(op_sub);
        }

        case op_negate: {
            Node* op1 = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithNegate, op1)));
            NEXT_OPCODE(op_negate);
        }

        case op_mul: {
            // Multiply requires that the inputs are not truncated, unfortunately.
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMul, op1, op2)));
            NEXT_OPCODE(op_mul);
        }

        case op_mod: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMod, op1, op2)));
            NEXT_OPCODE(op_mod);
        }

        case op_div: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            set(currentInstruction[1].u.operand, makeDivSafe(addToGraph(ArithDiv, op1, op2)));
            NEXT_OPCODE(op_div);
        }

        // === Misc operations ===

#if ENABLE(DEBUG_WITH_BREAKPOINT)
        case op_debug:
            addToGraph(Breakpoint);
            NEXT_OPCODE(op_debug);
#endif
        case op_mov: {
            Node* op = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, op);
            NEXT_OPCODE(op_mov);
        }

        case op_check_has_instance:
            addToGraph(CheckHasInstance, get(currentInstruction[3].u.operand));
            NEXT_OPCODE(op_check_has_instance);

        case op_instanceof: {
            Node* value = get(currentInstruction[2].u.operand);
            Node* prototype = get(currentInstruction[3].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(InstanceOf, value, prototype));
            NEXT_OPCODE(op_instanceof);
        }
            
        case op_is_undefined: {
            Node* value = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(IsUndefined, value));
            NEXT_OPCODE(op_is_undefined);
        }

        case op_is_boolean: {
            Node* value = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(IsBoolean, value));
            NEXT_OPCODE(op_is_boolean);
        }

        case op_is_number: {
            Node* value = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(IsNumber, value));
            NEXT_OPCODE(op_is_number);
        }

        case op_is_string: {
            Node* value = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(IsString, value));
            NEXT_OPCODE(op_is_string);
        }

        case op_is_object: {
            Node* value = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(IsObject, value));
            NEXT_OPCODE(op_is_object);
        }

        case op_is_function: {
            Node* value = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(IsFunction, value));
            NEXT_OPCODE(op_is_function);
        }

        case op_not: {
            Node* value = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(LogicalNot, value));
            NEXT_OPCODE(op_not);
        }
            
        case op_to_primitive: {
            Node* value = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(ToPrimitive, value));
            NEXT_OPCODE(op_to_primitive);
        }
            
        case op_strcat: {
            int startOperand = currentInstruction[2].u.operand;
            int numOperands = currentInstruction[3].u.operand;
#if CPU(X86)
            // X86 doesn't have enough registers to compile MakeRope with three arguments.
            // Rather than try to be clever, we just make MakeRope dumber on this processor.
            const unsigned maxRopeArguments = 2;
#else
            const unsigned maxRopeArguments = 3;
#endif
            OwnArrayPtr<Node*> toStringNodes = adoptArrayPtr(new Node*[numOperands]);
            for (int i = 0; i < numOperands; i++)
                toStringNodes[i] = addToGraph(ToString, get(startOperand + i));

            for (int i = 0; i < numOperands; i++)
                addToGraph(Phantom, toStringNodes[i]);

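            // Fold the operands into ropes of at most maxRopeArguments children,
            // left-associatively: whenever the operand buffer fills up, it is collapsed
            // into a single MakeRope node that then becomes child 0 of the next rope.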
            Node* operands[AdjacencyList::Size];
            unsigned indexInOperands = 0;
            for (unsigned i = 0; i < AdjacencyList::Size; ++i)
                operands[i] = 0;
            for (int operandIdx = 0; operandIdx < numOperands; ++operandIdx) {
                if (indexInOperands == maxRopeArguments) {
                    operands[0] = addToGraph(MakeRope, operands[0], operands[1], operands[2]);
                    for (unsigned i = 1; i < AdjacencyList::Size; ++i)
                        operands[i] = 0;
                    indexInOperands = 1;
                }
                
                ASSERT(indexInOperands < AdjacencyList::Size);
                ASSERT(indexInOperands < maxRopeArguments);
                operands[indexInOperands++] = toStringNodes[operandIdx];
            }
            set(currentInstruction[1].u.operand,
                addToGraph(MakeRope, operands[0], operands[1], operands[2]));
            NEXT_OPCODE(op_strcat);
        }

        case op_less: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            if (canFold(op1) && canFold(op2)) {
                JSValue a = valueOfJSConstant(op1);
                JSValue b = valueOfJSConstant(op2);
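                // Only fold when both sides are numeric constants; relational comparisons
                // involving other constant kinds (e.g. strings) fall through to the
                // CompareLess node below.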
                if (a.isNumber() && b.isNumber()) {
                    set(currentInstruction[1].u.operand,
                        getJSConstantForValue(jsBoolean(a.asNumber() < b.asNumber())));
                    NEXT_OPCODE(op_less);
                }
            }
            set(currentInstruction[1].u.operand, addToGraph(CompareLess, op1, op2));
            NEXT_OPCODE(op_less);
        }

        case op_lesseq: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            if (canFold(op1) && canFold(op2)) {
                JSValue a = valueOfJSConstant(op1);
                JSValue b = valueOfJSConstant(op2);
                if (a.isNumber() && b.isNumber()) {
                    set(currentInstruction[1].u.operand,
                        getJSConstantForValue(jsBoolean(a.asNumber() <= b.asNumber())));
                    NEXT_OPCODE(op_lesseq);
                }
            }
            set(currentInstruction[1].u.operand, addToGraph(CompareLessEq, op1, op2));
            NEXT_OPCODE(op_lesseq);
        }

        case op_greater: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            if (canFold(op1) && canFold(op2)) {
                JSValue a = valueOfJSConstant(op1);
                JSValue b = valueOfJSConstant(op2);
                if (a.isNumber() && b.isNumber()) {
                    set(currentInstruction[1].u.operand,
                        getJSConstantForValue(jsBoolean(a.asNumber() > b.asNumber())));
                    NEXT_OPCODE(op_greater);
                }
            }
            set(currentInstruction[1].u.operand, addToGraph(CompareGreater, op1, op2));
            NEXT_OPCODE(op_greater);
        }

        case op_greatereq: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            if (canFold(op1) && canFold(op2)) {
                JSValue a = valueOfJSConstant(op1);
                JSValue b = valueOfJSConstant(op2);
                if (a.isNumber() && b.isNumber()) {
                    set(currentInstruction[1].u.operand,
                        getJSConstantForValue(jsBoolean(a.asNumber() >= b.asNumber())));
                    NEXT_OPCODE(op_greatereq);
                }
            }
            set(currentInstruction[1].u.operand, addToGraph(CompareGreaterEq, op1, op2));
            NEXT_OPCODE(op_greatereq);
        }

        case op_eq: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            if (canFold(op1) && canFold(op2)) {
                JSValue a = valueOfJSConstant(op1);
                JSValue b = valueOfJSConstant(op2);
                set(currentInstruction[1].u.operand,
                    getJSConstantForValue(jsBoolean(JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
                NEXT_OPCODE(op_eq);
            }
            set(currentInstruction[1].u.operand, addToGraph(CompareEq, op1, op2));
            NEXT_OPCODE(op_eq);
        }

        case op_eq_null: {
            Node* value = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(CompareEqConstant, value, constantNull()));
            NEXT_OPCODE(op_eq_null);
        }

        case op_stricteq: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            if (canFold(op1) && canFold(op2)) {
                JSValue a = valueOfJSConstant(op1);
                JSValue b = valueOfJSConstant(op2);
                set(currentInstruction[1].u.operand,
                    getJSConstantForValue(jsBoolean(JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
                NEXT_OPCODE(op_stricteq);
            }
            if (isConstantForCompareStrictEq(op1))
                set(currentInstruction[1].u.operand, addToGraph(CompareStrictEqConstant, op2, op1));
            else if (isConstantForCompareStrictEq(op2))
                set(currentInstruction[1].u.operand, addToGraph(CompareStrictEqConstant, op1, op2));
            else
                set(currentInstruction[1].u.operand, addToGraph(CompareStrictEq, op1, op2));
            NEXT_OPCODE(op_stricteq);
        }

        case op_neq: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            if (canFold(op1) && canFold(op2)) {
                JSValue a = valueOfJSConstant(op1);
                JSValue b = valueOfJSConstant(op2);
                set(currentInstruction[1].u.operand,
                    getJSConstantForValue(jsBoolean(!JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
                NEXT_OPCODE(op_neq);
            }
            set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEq, op1, op2)));
            NEXT_OPCODE(op_neq);
        }

        case op_neq_null: {
            Node* value = get(currentInstruction[2].u.operand);
            set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEqConstant, value, constantNull())));
            NEXT_OPCODE(op_neq_null);
        }

        case op_nstricteq: {
            Node* op1 = get(currentInstruction[2].u.operand);
            Node* op2 = get(currentInstruction[3].u.operand);
            if (canFold(op1) && canFold(op2)) {
                JSValue a = valueOfJSConstant(op1);
                JSValue b = valueOfJSConstant(op2);
                set(currentInstruction[1].u.operand,
                    getJSConstantForValue(jsBoolean(!JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
                NEXT_OPCODE(op_nstricteq);
            }
            Node* invertedResult;
            if (isConstantForCompareStrictEq(op1))
                invertedResult = addToGraph(CompareStrictEqConstant, op2, op1);
            else if (isConstantForCompareStrictEq(op2))
                invertedResult = addToGraph(CompareStrictEqConstant, op1, op2);
            else
                invertedResult = addToGraph(CompareStrictEq, op1, op2);
            set(currentInstruction[1].u.operand, addToGraph(LogicalNot, invertedResult));
            NEXT_OPCODE(op_nstricteq);
        }

        // === Property access operations ===

        case op_get_by_val: {
            SpeculatedType prediction = getPrediction();
            
            Node* base = get(currentInstruction[2].u.operand);
            ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Read);
            Node* property = get(currentInstruction[3].u.operand);
            Node* getByVal = addToGraph(GetByVal, OpInfo(arrayMode.asWord()), OpInfo(prediction), base, property);
            set(currentInstruction[1].u.operand, getByVal);

            NEXT_OPCODE(op_get_by_val);
        }

        case op_put_by_val: {
            Node* base = get(currentInstruction[1].u.operand);

            ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Write);
            
            Node* property = get(currentInstruction[2].u.operand);
            Node* value = get(currentInstruction[3].u.operand);
            
            addVarArgChild(base);
            addVarArgChild(property);
            addVarArgChild(value);
            addVarArgChild(0); // Leave room for property storage.
            addToGraph(Node::VarArg, PutByVal, OpInfo(arrayMode.asWord()), OpInfo(0));

            NEXT_OPCODE(op_put_by_val);
        }
            
        case op_get_by_id:
        case op_get_by_id_out_of_line:
        case op_get_array_length: {
            SpeculatedType prediction = getPrediction();
            
            Node* base = get(currentInstruction[2].u.operand);
            unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
            
            StringImpl* uid = m_graph.identifiers()[identifierNumber];
            GetByIdStatus getByIdStatus = GetByIdStatus::computeFor(
                m_inlineStackTop->m_profiledBlock, m_currentIndex, uid);
            
            handleGetById(
                currentInstruction[1].u.operand, prediction, base, identifierNumber, getByIdStatus);

            NEXT_OPCODE(op_get_by_id);
        }
        case op_put_by_id:
        case op_put_by_id_out_of_line:
        case op_put_by_id_transition_direct:
        case op_put_by_id_transition_normal:
        case op_put_by_id_transition_direct_out_of_line:
        case op_put_by_id_transition_normal_out_of_line: {
            Node* value = get(currentInstruction[3].u.operand);
            Node* base = get(currentInstruction[1].u.operand);
            unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
            bool direct = currentInstruction[8].u.operand;

            PutByIdStatus putByIdStatus = PutByIdStatus::computeFor(
                m_inlineStackTop->m_profiledBlock,
                m_currentIndex,
                m_graph.identifiers()[identifierNumber]);
            bool canCountAsInlined = true;
            if (!putByIdStatus.isSet()) {
                addToGraph(ForceOSRExit);
                canCountAsInlined = false;
            }
            
            bool hasExitSite =
                m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache);
            
            if (!hasExitSite && putByIdStatus.isSimpleReplace()) {
                addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
                handlePutByOffset(base, identifierNumber, putByIdStatus.offset(), value);
            } else if (
                !hasExitSite
                && putByIdStatus.isSimpleTransition()
                && (!putByIdStatus.structureChain()
                    || putByIdStatus.structureChain()->isStillValid())) {
                
                m_graph.chains().addLazily(putByIdStatus.structureChain());
                
                addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
                if (!direct) {
                    if (!putByIdStatus.oldStructure()->storedPrototype().isNull()) {
                        cellConstantWithStructureCheck(
                            putByIdStatus.oldStructure()->storedPrototype().asCell());
                    }
                    
                    for (unsigned i = 0; i < putByIdStatus.structureChain()->size(); ++i) {
                        JSValue prototype = putByIdStatus.structureChain()->at(i)->storedPrototype();
                        if (prototype.isNull())
                            continue;
                        cellConstantWithStructureCheck(prototype.asCell());
                    }
                }
                ASSERT(putByIdStatus.oldStructure()->transitionWatchpointSetHasBeenInvalidated());
                
                Node* propertyStorage;
                StructureTransitionData* transitionData =
                    m_graph.addStructureTransitionData(
                        StructureTransitionData(
                            putByIdStatus.oldStructure(),
                            putByIdStatus.newStructure()));

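                // A transition that changes the out-of-line capacity has to create or
                // grow the butterfly: the first out-of-line property allocates the
                // storage, and later growth reallocates it, copying the existing
                // properties over.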
2430                 if (putByIdStatus.oldStructure()->outOfLineCapacity()
2431                     != putByIdStatus.newStructure()->outOfLineCapacity()) {
2432                     
2433                     // If we're growing the property storage then it must be because we're
2434                     // storing into the out-of-line storage.
2435                     ASSERT(!isInlineOffset(putByIdStatus.offset()));
2436                     
2437                     if (!putByIdStatus.oldStructure()->outOfLineCapacity()) {
2438                         propertyStorage = addToGraph(
2439                             AllocatePropertyStorage, OpInfo(transitionData), base);
2440                     } else {
2441                         propertyStorage = addToGraph(
2442                             ReallocatePropertyStorage, OpInfo(transitionData),
2443                             base, addToGraph(GetButterfly, base));
2444                     }
2445                 } else {
2446                     if (isInlineOffset(putByIdStatus.offset()))
2447                         propertyStorage = base;
2448                     else
2449                         propertyStorage = addToGraph(GetButterfly, base);
2450                 }
2451                 
2452                 addToGraph(PutStructure, OpInfo(transitionData), base);
2453                 
2454                 addToGraph(
2455                     PutByOffset,
2456                     OpInfo(m_graph.m_storageAccessData.size()),
2457                     propertyStorage,
2458                     base,
2459                     value);
2460                 
2461                 StorageAccessData storageAccessData;
2462                 storageAccessData.offset = putByIdStatus.offset();
2463                 storageAccessData.identifierNumber = identifierNumber;
2464                 m_graph.m_storageAccessData.append(storageAccessData);
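                // The PutByOffset above was created with an OpInfo equal to the size of
                // m_storageAccessData at that moment; appending the record here is what
                // makes that index point at this offset/identifier pair.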
2465             } else {
2466                 if (direct)
2467                     addToGraph(PutByIdDirect, OpInfo(identifierNumber), base, value);
2468                 else
2469                     addToGraph(PutById, OpInfo(identifierNumber), base, value);
2470                 canCountAsInlined = false;
2471             }
2472             
2473             if (canCountAsInlined && m_graph.compilation())
2474                 m_graph.compilation()->noticeInlinedPutById();
2475
2476             NEXT_OPCODE(op_put_by_id);
2477         }
2478
2479         case op_init_global_const_nop: {
2480             NEXT_OPCODE(op_init_global_const_nop);
2481         }
2482
2483         case op_init_global_const: {
2484             Node* value = get(currentInstruction[2].u.operand);
2485             addToGraph(
2486                 PutGlobalVar,
2487                 OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2488                 value);
2489             NEXT_OPCODE(op_init_global_const);
2490         }
2491
2492         // === Block terminators. ===
2493
2494         case op_jmp: {
2495             unsigned relativeOffset = currentInstruction[1].u.operand;
2496             addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2497             LAST_OPCODE(op_jmp);
2498         }
2499
2500         case op_jtrue: {
2501             unsigned relativeOffset = currentInstruction[2].u.operand;
2502             Node* condition = get(currentInstruction[1].u.operand);
2503             if (canFold(condition)) {
2504                 TriState state = valueOfJSConstant(condition).pureToBoolean();
2505                 if (state == TrueTriState) {
2506                     addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2507                     LAST_OPCODE(op_jtrue);
2508                 } else if (state == FalseTriState) {
2509                     // Emit a placeholder for this bytecode operation but otherwise
2510                     // just fall through.
2511                     addToGraph(Phantom);
2512                     NEXT_OPCODE(op_jtrue);
2513                 }
2514             }
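            // For Branch nodes the first OpInfo is the "taken" target (condition true)
            // and the second is the "not taken" target; op_jfalse below simply swaps
            // the two operands, as do the inverted comparisons (op_jnless and friends).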
2515             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jtrue)), condition);
2516             LAST_OPCODE(op_jtrue);
2517         }
2518
2519         case op_jfalse: {
2520             unsigned relativeOffset = currentInstruction[2].u.operand;
2521             Node* condition = get(currentInstruction[1].u.operand);
2522             if (canFold(condition)) {
2523                 TriState state = valueOfJSConstant(condition).pureToBoolean();
2524                 if (state == FalseTriState) {
2525                     addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2526                     LAST_OPCODE(op_jfalse);
2527                 } else if (state == TrueTriState) {
2528                     // Emit a placeholder for this bytecode operation but otherwise
2529                     // just fall through.
2530                     addToGraph(Phantom);
2531                     NEXT_OPCODE(op_jfalse);
2532                 }
2533             }
2534             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jfalse)), OpInfo(m_currentIndex + relativeOffset), condition);
2535             LAST_OPCODE(op_jfalse);
2536         }
2537
2538         case op_jeq_null: {
2539             unsigned relativeOffset = currentInstruction[2].u.operand;
2540             Node* value = get(currentInstruction[1].u.operand);
2541             Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2542             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jeq_null)), condition);
2543             LAST_OPCODE(op_jeq_null);
2544         }
2545
2546         case op_jneq_null: {
2547             unsigned relativeOffset = currentInstruction[2].u.operand;
2548             Node* value = get(currentInstruction[1].u.operand);
2549             Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2550             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_null)), OpInfo(m_currentIndex + relativeOffset), condition);
2551             LAST_OPCODE(op_jneq_null);
2552         }
2553
2554         case op_jless: {
2555             unsigned relativeOffset = currentInstruction[3].u.operand;
2556             Node* op1 = get(currentInstruction[1].u.operand);
2557             Node* op2 = get(currentInstruction[2].u.operand);
2558             if (canFold(op1) && canFold(op2)) {
2559                 JSValue aValue = valueOfJSConstant(op1);
2560                 JSValue bValue = valueOfJSConstant(op2);
2561                 if (aValue.isNumber() && bValue.isNumber()) {
2562                     double a = aValue.asNumber();
2563                     double b = bValue.asNumber();
2564                     if (a < b) {
2565                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2566                         LAST_OPCODE(op_jless);
2567                     } else {
2568                         // Emit a placeholder for this bytecode operation but otherwise
2569                         // just fall through.
2570                         addToGraph(Phantom);
2571                         NEXT_OPCODE(op_jless);
2572                     }
2573                 }
2574             }
2575             Node* condition = addToGraph(CompareLess, op1, op2);
2576             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jless)), condition);
2577             LAST_OPCODE(op_jless);
2578         }
2579
2580         case op_jlesseq: {
2581             unsigned relativeOffset = currentInstruction[3].u.operand;
2582             Node* op1 = get(currentInstruction[1].u.operand);
2583             Node* op2 = get(currentInstruction[2].u.operand);
2584             if (canFold(op1) && canFold(op2)) {
2585                 JSValue aValue = valueOfJSConstant(op1);
2586                 JSValue bValue = valueOfJSConstant(op2);
2587                 if (aValue.isNumber() && bValue.isNumber()) {
2588                     double a = aValue.asNumber();
2589                     double b = bValue.asNumber();
2590                     if (a <= b) {
2591                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2592                         LAST_OPCODE(op_jlesseq);
2593                     } else {
2594                         // Emit a placeholder for this bytecode operation but otherwise
2595                         // just fall through.
2596                         addToGraph(Phantom);
2597                         NEXT_OPCODE(op_jlesseq);
2598                     }
2599                 }
2600             }
2601             Node* condition = addToGraph(CompareLessEq, op1, op2);
2602             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jlesseq)), condition);
2603             LAST_OPCODE(op_jlesseq);
2604         }
2605
2606         case op_jgreater: {
2607             unsigned relativeOffset = currentInstruction[3].u.operand;
2608             Node* op1 = get(currentInstruction[1].u.operand);
2609             Node* op2 = get(currentInstruction[2].u.operand);
2610             if (canFold(op1) && canFold(op2)) {
2611                 JSValue aValue = valueOfJSConstant(op1);
2612                 JSValue bValue = valueOfJSConstant(op2);
2613                 if (aValue.isNumber() && bValue.isNumber()) {
2614                     double a = aValue.asNumber();
2615                     double b = bValue.asNumber();
2616                     if (a > b) {
2617                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2618                         LAST_OPCODE(op_jgreater);
2619                     } else {
2620                         // Emit a placeholder for this bytecode operation but otherwise
2621                         // just fall through.
2622                         addToGraph(Phantom);
2623                         NEXT_OPCODE(op_jgreater);
2624                     }
2625                 }
2626             }
2627             Node* condition = addToGraph(CompareGreater, op1, op2);
2628             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreater)), condition);
2629             LAST_OPCODE(op_jgreater);
2630         }
2631
2632         case op_jgreatereq: {
2633             unsigned relativeOffset = currentInstruction[3].u.operand;
2634             Node* op1 = get(currentInstruction[1].u.operand);
2635             Node* op2 = get(currentInstruction[2].u.operand);
2636             if (canFold(op1) && canFold(op2)) {
2637                 JSValue aValue = valueOfJSConstant(op1);
2638                 JSValue bValue = valueOfJSConstant(op2);
2639                 if (aValue.isNumber() && bValue.isNumber()) {
2640                     double a = aValue.asNumber();
2641                     double b = bValue.asNumber();
2642                     if (a >= b) {
2643                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2644                         LAST_OPCODE(op_jgreatereq);
2645                     } else {
2646                         // Emit a placeholder for this bytecode operation but otherwise
2647                         // just fall through.
2648                         addToGraph(Phantom);
2649                         NEXT_OPCODE(op_jgreatereq);
2650                     }
2651                 }
2652             }
2653             Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2654             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreatereq)), condition);
2655             LAST_OPCODE(op_jgreatereq);
2656         }
2657
2658         case op_jnless: {
2659             unsigned relativeOffset = currentInstruction[3].u.operand;
2660             Node* op1 = get(currentInstruction[1].u.operand);
2661             Node* op2 = get(currentInstruction[2].u.operand);
2662             if (canFold(op1) && canFold(op2)) {
2663                 JSValue aValue = valueOfJSConstant(op1);
2664                 JSValue bValue = valueOfJSConstant(op2);
2665                 if (aValue.isNumber() && bValue.isNumber()) {
2666                     double a = aValue.asNumber();
2667                     double b = bValue.asNumber();
2668                     if (a < b) {
2669                         // Emit a placeholder for this bytecode operation but otherwise
2670                         // just fall through.
2671                         addToGraph(Phantom);
2672                         NEXT_OPCODE(op_jnless);
2673                     } else {
2674                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2675                         LAST_OPCODE(op_jnless);
2676                     }
2677                 }
2678             }
2679             Node* condition = addToGraph(CompareLess, op1, op2);
2680             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnless)), OpInfo(m_currentIndex + relativeOffset), condition);
2681             LAST_OPCODE(op_jnless);
2682         }
2683
2684         case op_jnlesseq: {
2685             unsigned relativeOffset = currentInstruction[3].u.operand;
2686             Node* op1 = get(currentInstruction[1].u.operand);
2687             Node* op2 = get(currentInstruction[2].u.operand);
2688             if (canFold(op1) && canFold(op2)) {
2689                 JSValue aValue = valueOfJSConstant(op1);
2690                 JSValue bValue = valueOfJSConstant(op2);
2691                 if (aValue.isNumber() && bValue.isNumber()) {
2692                     double a = aValue.asNumber();
2693                     double b = bValue.asNumber();
2694                     if (a <= b) {
2695                         // Emit a placeholder for this bytecode operation but otherwise
2696                         // just fall through.
2697                         addToGraph(Phantom);
2698                         NEXT_OPCODE(op_jnlesseq);
2699                     } else {
2700                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2701                         LAST_OPCODE(op_jnlesseq);
2702                     }
2703                 }
2704             }
2705             Node* condition = addToGraph(CompareLessEq, op1, op2);
2706             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnlesseq)), OpInfo(m_currentIndex + relativeOffset), condition);
2707             LAST_OPCODE(op_jnlesseq);
2708         }
2709
2710         case op_jngreater: {
2711             unsigned relativeOffset = currentInstruction[3].u.operand;
2712             Node* op1 = get(currentInstruction[1].u.operand);
2713             Node* op2 = get(currentInstruction[2].u.operand);
2714             if (canFold(op1) && canFold(op2)) {
2715                 JSValue aValue = valueOfJSConstant(op1);
2716                 JSValue bValue = valueOfJSConstant(op2);
2717                 if (aValue.isNumber() && bValue.isNumber()) {
2718                     double a = aValue.asNumber();
2719                     double b = bValue.asNumber();
2720                     if (a > b) {
2721                         // Emit a placeholder for this bytecode operation but otherwise
2722                         // just fall through.
2723                         addToGraph(Phantom);
2724                         NEXT_OPCODE(op_jngreater);
2725                     } else {
2726                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2727                         LAST_OPCODE(op_jngreater);
2728                     }
2729                 }
2730             }
2731             Node* condition = addToGraph(CompareGreater, op1, op2);
2732             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreater)), OpInfo(m_currentIndex + relativeOffset), condition);
2733             LAST_OPCODE(op_jngreater);
2734         }
2735
2736         case op_jngreatereq: {
2737             unsigned relativeOffset = currentInstruction[3].u.operand;
2738             Node* op1 = get(currentInstruction[1].u.operand);
2739             Node* op2 = get(currentInstruction[2].u.operand);
2740             if (canFold(op1) && canFold(op2)) {
2741                 JSValue aValue = valueOfJSConstant(op1);
2742                 JSValue bValue = valueOfJSConstant(op2);
2743                 if (aValue.isNumber() && bValue.isNumber()) {
2744                     double a = aValue.asNumber();
2745                     double b = bValue.asNumber();
2746                     if (a >= b) {
2747                         // Emit a placeholder for this bytecode operation but otherwise
2748                         // just fall through.
2749                         addToGraph(Phantom);
2750                         NEXT_OPCODE(op_jngreatereq);
2751                     } else {
2752                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2753                         LAST_OPCODE(op_jngreatereq);
2754                     }
2755                 }
2756             }
2757             Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2758             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreatereq)), OpInfo(m_currentIndex + relativeOffset), condition);
2759             LAST_OPCODE(op_jngreatereq);
2760         }
2761             
2762         case op_switch_imm: {
2763             SwitchData data;
2764             data.kind = SwitchImm;
2765             data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
2766             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2767             SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
2768             for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
2769                 if (!table.branchOffsets[i])
2770                     continue;
2771                 unsigned target = m_currentIndex + table.branchOffsets[i];
2772                 if (target == data.fallThroughBytecodeIndex())
2773                     continue;
2774                 data.cases.append(SwitchCase::withBytecodeIndex(jsNumber(table.min + i), target));
2775             }
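            // Holes in the jump table (branchOffset == 0) and cases that land on the
            // fall-through target were skipped above, so the Switch node only carries
            // cases that dispatch somewhere distinct.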
2776             m_graph.m_switchData.append(data);
2777             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(currentInstruction[3].u.operand));
2778             LAST_OPCODE(op_switch_imm);
2779         }
2780             
2781         case op_switch_char: {
2782             SwitchData data;
2783             data.kind = SwitchChar;
2784             data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
2785             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2786             SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
2787             for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
2788                 if (!table.branchOffsets[i])
2789                     continue;
2790                 unsigned target = m_currentIndex + table.branchOffsets[i];
2791                 if (target == data.fallThroughBytecodeIndex())
2792                     continue;
2793                 data.cases.append(
2794                     SwitchCase::withBytecodeIndex(LazyJSValue::singleCharacterString(table.min + i), target));
2795             }
2796             m_graph.m_switchData.append(data);
2797             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(currentInstruction[3].u.operand));
2798             LAST_OPCODE(op_switch_char);
2799         }
2800
2801         case op_switch_string: {
2802             SwitchData data;
2803             data.kind = SwitchString;
2804             data.switchTableIndex = currentInstruction[1].u.operand;
2805             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2806             StringJumpTable& table = m_codeBlock->stringSwitchJumpTable(data.switchTableIndex);
2807             StringJumpTable::StringOffsetTable::iterator iter;
2808             StringJumpTable::StringOffsetTable::iterator end = table.offsetTable.end();
2809             for (iter = table.offsetTable.begin(); iter != end; ++iter) {
2810                 unsigned target = m_currentIndex + iter->value.branchOffset;
2811                 if (target == data.fallThroughBytecodeIndex())
2812                     continue;
2813                 data.cases.append(
2814                     SwitchCase::withBytecodeIndex(LazyJSValue::knownStringImpl(iter->key.get()), target));
2815             }
2816             m_graph.m_switchData.append(data);
2817             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(currentInstruction[3].u.operand));
2818             LAST_OPCODE(op_switch_string);
2819         }
2820
2821         case op_ret:
2822             flushArgumentsAndCapturedVariables();
2823             if (inlineCallFrame()) {
2824                 ASSERT(m_inlineStackTop->m_returnValue != InvalidVirtualRegister);
2825                 setDirect(m_inlineStackTop->m_returnValue, get(currentInstruction[1].u.operand));
2826                 m_inlineStackTop->m_didReturn = true;
2827                 if (m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
2828                     // If we're returning from the first block, then we're done parsing.
2829                     ASSERT(m_inlineStackTop->m_callsiteBlockHead == m_graph.lastBlock());
2830                     shouldContinueParsing = false;
2831                     LAST_OPCODE(op_ret);
2832                 } else {
2833                     // If inlining created blocks, and we're doing a return, then we need some
2834                     // special linking.
2835                     ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_block == m_graph.lastBlock());
2836                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsNormalLinking = false;
2837                 }
2838                 if (m_currentIndex + OPCODE_LENGTH(op_ret) != m_inlineStackTop->m_codeBlock->instructions().size() || m_inlineStackTop->m_didEarlyReturn) {
2839                     ASSERT(m_currentIndex + OPCODE_LENGTH(op_ret) <= m_inlineStackTop->m_codeBlock->instructions().size());
2840                     addToGraph(Jump, OpInfo(0));
2841                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsEarlyReturnLinking = true;
2842                     m_inlineStackTop->m_didEarlyReturn = true;
2843                 }
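                // The Jump above is planted with a dummy target (OpInfo(0)); flagging the
                // block for early-return linking lets the inlining machinery later point
                // it at the continuation block in the caller.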
2844                 LAST_OPCODE(op_ret);
2845             }
2846             addToGraph(Return, get(currentInstruction[1].u.operand));
2847             LAST_OPCODE(op_ret);
2848             
2849         case op_end:
2850             flushArgumentsAndCapturedVariables();
2851             ASSERT(!inlineCallFrame());
2852             addToGraph(Return, get(currentInstruction[1].u.operand));
2853             LAST_OPCODE(op_end);
2854
2855         case op_throw:
2856             flushAllArgumentsAndCapturedVariablesInInlineStack();
2857             addToGraph(Throw, get(currentInstruction[1].u.operand));
2858             LAST_OPCODE(op_throw);
2859             
2860         case op_throw_static_error:
2861             flushAllArgumentsAndCapturedVariablesInInlineStack();
2862             addToGraph(ThrowReferenceError);
2863             LAST_OPCODE(op_throw_static_error);
2864             
2865         case op_call:
2866             handleCall(currentInstruction, Call, CodeForCall);
2867             NEXT_OPCODE(op_call);
2868             
2869         case op_construct:
2870             handleCall(currentInstruction, Construct, CodeForConstruct);
2871             NEXT_OPCODE(op_construct);
2872             
2873         case op_call_varargs: {
2874             ASSERT(inlineCallFrame());
2875             ASSERT(currentInstruction[4].u.operand == m_inlineStackTop->m_codeBlock->argumentsRegister());
2876             ASSERT(!m_inlineStackTop->m_codeBlock->symbolTable()->slowArguments());
2877             // It would be cool to funnel this into handleCall() so that it can handle
2878             // inlining. But currently that won't be profitable anyway, since none of the
2879             // uses of call_varargs will be inlineable. So we set this up manually and
2880             // without inline/intrinsic detection.
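            // As the assertions above enforce, this opcode is only seen here for
            // arguments-forwarding inside an inlined frame (typically something like
            // f.apply(thisArg, arguments)); the callee, the this value, and each known
            // argument become var-arg children of a single Call node below.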
2881             
2882             SpeculatedType prediction = getPrediction();
2883             
2884             addToGraph(CheckArgumentsNotCreated);
2885             
2886             unsigned argCount = inlineCallFrame()->arguments.size();
2887             if (JSStack::CallFrameHeaderSize + argCount > m_parameterSlots)
2888                 m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
2889             
2890             addVarArgChild(get(currentInstruction[2].u.operand)); // callee
2891             addVarArgChild(get(currentInstruction[3].u.operand)); // this
2892             for (unsigned argument = 1; argument < argCount; ++argument)
2893                 addVarArgChild(get(argumentToOperand(argument)));
2894             
2895             set(currentInstruction[1].u.operand,
2896                 addToGraph(Node::VarArg, Call, OpInfo(0), OpInfo(prediction)));
2897             
2898             NEXT_OPCODE(op_call_varargs);
2899         }
2900             
2901         case op_jneq_ptr:
2902             // Statically speculate for now. It makes sense to let speculate-only jneq_ptr
2903             // support simmer for a while before making it more general, since it's
2904             // already gnarly enough as it is.
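            // Rather than a real branch, we plant CheckFunction against the known
            // special pointer and fall through unconditionally; if the value ever
            // differs at runtime, the check triggers an OSR exit instead of taking the
            // not-equal path.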
2905             ASSERT(pointerIsFunction(currentInstruction[2].u.specialPointer));
2906             addToGraph(
2907                 CheckFunction,
2908                 OpInfo(actualPointerFor(m_inlineStackTop->m_codeBlock, currentInstruction[2].u.specialPointer)),
2909                 get(currentInstruction[1].u.operand));
2910             addToGraph(Jump, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_ptr)));
2911             LAST_OPCODE(op_jneq_ptr);
2912
2913         case op_resolve_scope: {
2914             unsigned dst = currentInstruction[1].u.operand;
2915             ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);
2916             unsigned depth = currentInstruction[4].u.operand;
2917
2918             // get_from_scope and put_to_scope depend on this watchpoint forcing OSR exit, so they don't add their own watchpoints.
2919             if (needsVarInjectionChecks(resolveType))
2920                 addToGraph(VarInjectionWatchpoint);
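            // Var injection means something like a non-strict eval introducing a new
            // variable into a scope we assumed was static; the watchpoint above forces
            // an OSR exit if that ever happens.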
2921
2922             switch (resolveType) {
2923             case GlobalProperty:
2924             case GlobalVar:
2925             case GlobalPropertyWithVarInjectionChecks:
2926             case GlobalVarWithVarInjectionChecks:
2927                 set(dst, cellConstant(m_inlineStackTop->m_codeBlock->globalObject()));
2928                 break;
2929             case ClosureVar:
2930             case ClosureVarWithVarInjectionChecks:
2931                 set(dst, getScope(m_inlineStackTop->m_codeBlock->needsActivation(), depth));
2932                 break;
2933             case Dynamic:
2934                 RELEASE_ASSERT_NOT_REACHED();
2935                 break;
2936             }
2937             NEXT_OPCODE(op_resolve_scope);
2938         }
2939
2940         case op_get_from_scope: {
2941             unsigned dst = currentInstruction[1].u.operand;
2942             unsigned scope = currentInstruction[2].u.operand;
2943             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
2944             StringImpl* uid = m_graph.identifiers()[identifierNumber];
2945             ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
2946
2947             Structure* structure;
2948             uintptr_t operand;
2949             {
2950                 ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
2951                 structure = currentInstruction[5].u.structure.get();
2952                 operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
2953             }
2954
2955             SpeculatedType prediction = getPrediction();
2956             JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
2957
2958             switch (resolveType) {
2959             case GlobalProperty:
2960             case GlobalPropertyWithVarInjectionChecks: {
2961                 GetByIdStatus status = GetByIdStatus::computeFor(*m_vm, structure, uid);
2962                 if (status.takesSlowPath()) {
2963                     set(dst, addToGraph(GetByIdFlush, OpInfo(identifierNumber), OpInfo(prediction), get(scope)));
2964                     break;
2965                 }
2966                 Node* base = cellConstantWithStructureCheck(globalObject, status.structureSet().singletonStructure());
2967                 if (JSValue specificValue = status.specificValue())
2968                     set(dst, cellConstant(specificValue.asCell()));
2969                 else
2970                     set(dst, handleGetByOffset(prediction, base, identifierNumber, operand));
2971                 break;
2972             }
2973             case GlobalVar:
2974             case GlobalVarWithVarInjectionChecks: {
2975                 SymbolTableEntry entry = globalObject->symbolTable()->get(uid);
2976                 if (!entry.couldBeWatched() || !m_graph.watchpoints().isStillValid(entry.watchpointSet())) {
2977                     set(dst, addToGraph(GetGlobalVar, OpInfo(operand), OpInfo(prediction)));
2978                     break;
2979                 }
2980
2981                 addToGraph(GlobalVarWatchpoint, OpInfo(operand), OpInfo(identifierNumber));
2982                 JSValue specificValue = globalObject->registerAt(entry.getIndex()).get();
2983                 set(dst, cellConstant(specificValue.asCell()));
2984                 break;
2985             }
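            // Note the fold above: when the entry is watchable and its watchpoint set is
            // still intact, the global's current value is baked in as a constant guarded
            // by GlobalVarWatchpoint, so no load is emitted; a later store to the
            // variable invalidates this code.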
2986             case ClosureVar:
2987             case ClosureVarWithVarInjectionChecks:
2988                 set(dst, 
2989                     addToGraph(GetClosureVar, OpInfo(operand), OpInfo(prediction), 
2990                         addToGraph(GetClosureRegisters, get(scope))));
2991                 break;
2992             case Dynamic:
2993                 RELEASE_ASSERT_NOT_REACHED();
2994                 break;
2995             }
2996             NEXT_OPCODE(op_get_from_scope);
2997         }
2998
2999         case op_put_to_scope: {
3000             unsigned scope = currentInstruction[1].u.operand;
3001             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3002             unsigned value = currentInstruction[3].u.operand;
3003             ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
3004             StringImpl* uid = m_graph.identifiers()[identifierNumber];
3005
3006             Structure* structure;
3007             uintptr_t operand;
3008             {
3009                 ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
3010                 structure = currentInstruction[5].u.structure.get();
3011                 operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
3012             }
3013
3014             JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
3015
3016             switch (resolveType) {
3017             case GlobalProperty:
3018             case GlobalPropertyWithVarInjectionChecks: {
3019                 PutByIdStatus status = PutByIdStatus::computeFor(*m_vm, globalObject, structure, uid, false);
3020                 if (!status.isSimpleReplace()) {
3021                     addToGraph(PutById, OpInfo(identifierNumber), get(scope), get(value));
3022                     break;
3023                 }
3024                 Node* base = cellConstantWithStructureCheck(globalObject, status.oldStructure());
3025                 handlePutByOffset(base, identifierNumber, static_cast<PropertyOffset>(operand), get(value));
3026                 break;
3027             }
3028             case GlobalVar:
3029             case GlobalVarWithVarInjectionChecks: {
3030                 SymbolTableEntry entry = globalObject->symbolTable()->get(uid);
3031                 ASSERT(!entry.couldBeWatched() || !m_graph.watchpoints().isStillValid(entry.watchpointSet()));
3032                 addToGraph(PutGlobalVar, OpInfo(operand), get(value));
3033                 break;
3034             }
3035             case ClosureVar:
3036             case ClosureVarWithVarInjectionChecks: {
3037                 Node* scopeNode = get(scope);
3038                 Node* scopeRegisters = addToGraph(GetClosureRegisters, scopeNode);
3039                 addToGraph(PutClosureVar, OpInfo(operand), scopeNode, scopeRegisters, get(value));
3040                 break;
3041             }
3042             case Dynamic:
3043                 RELEASE_ASSERT_NOT_REACHED();
3044                 break;
3045             }
3046             NEXT_OPCODE(op_put_to_scope);
3047         }
3048
3049         case op_loop_hint: {
3050             // Baseline->DFG OSR jumps between loop hints. The DFG assumes that Baseline->DFG
3051             // OSR can only happen at basic block boundaries. Assert that these two statements
3052             // are compatible.
3053             RELEASE_ASSERT(m_currentIndex == blockBegin);
3054             
3055             // We never do OSR into an inlined code block. It cannot happen, since OSR
3056             // looks up the code block that is the replacement for the baseline JIT code
3057             // block. Hence, machine code block = true code block = not an inline code block.
3058             if (!m_inlineStackTop->m_caller)
3059                 m_currentBlock->isOSRTarget = true;
3060
3061             if (m_vm->watchdog.isEnabled())
3062                 addToGraph(CheckWatchdogTimer);
3063             else {
3064                 // Emit a phantom node to ensure that there is a placeholder
3065                 // node for this bytecode op.
3066                 addToGraph(Phantom);
3067             }
3068             
3069             NEXT_OPCODE(op_loop_hint);
3070         }
3071             
3072         case op_init_lazy_reg: {
3073             set(currentInstruction[1].u.operand, getJSConstantForValue(JSValue()));
3074             NEXT_OPCODE(op_init_lazy_reg);
3075         }
3076             
3077         case op_create_activation: {
3078             set(currentInstruction[1].u.operand, addToGraph(CreateActivation, get(currentInstruction[1].u.operand)));
3079             NEXT_OPCODE(op_create_activation);
3080         }
3081             
3082         case op_create_arguments: {
3083             m_graph.m_hasArguments = true;
3084             Node* createArguments = addToGraph(CreateArguments, get(currentInstruction[1].u.operand));
3085             set(currentInstruction[1].u.operand, createArguments);
3086             set(unmodifiedArgumentsRegister(currentInstruction[1].u.operand), createArguments);
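            // The arguments object is stored both in its visible register and in the
            // unmodified shadow register, so op_tear_off_arguments can still find it
            // even if the program overwrites the visible "arguments" local.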
3087             NEXT_OPCODE(op_create_arguments);
3088         }
3089             
3090         case op_tear_off_activation: {
3091             addToGraph(TearOffActivation, get(currentInstruction[1].u.operand));
3092             NEXT_OPCODE(op_tear_off_activation);
3093         }
3094
3095         case op_tear_off_arguments: {
3096             m_graph.m_hasArguments = true;
3097             addToGraph(TearOffArguments, get(unmodifiedArgumentsRegister(currentInstruction[1].u.operand)), get(currentInstruction[2].u.operand));
3098             NEXT_OPCODE(op_tear_off_arguments);
3099         }
3100             
3101         case op_get_arguments_length: {
3102             m_graph.m_hasArguments = true;
3103             set(currentInstruction[1].u.operand, addToGraph(GetMyArgumentsLengthSafe));
3104             NEXT_OPCODE(op_get_arguments_length);
3105         }
3106             
3107         case op_get_argument_by_val: {
3108             m_graph.m_hasArguments = true;
3109             set(currentInstruction[1].u.operand,
3110                 addToGraph(
3111                     GetMyArgumentByValSafe, OpInfo(0), OpInfo(getPrediction()),
3112                     get(currentInstruction[3].u.operand)));
3113             NEXT_OPCODE(op_get_argument_by_val);
3114         }
3115             
3116         case op_new_func: {
3117             if (!currentInstruction[3].u.operand) {
3118                 set(currentInstruction[1].u.operand,
3119                     addToGraph(NewFunctionNoCheck, OpInfo(currentInstruction[2].u.operand)));
3120             } else {
3121                 set(currentInstruction[1].u.operand,
3122                     addToGraph(
3123                         NewFunction,
3124                         OpInfo(currentInstruction[2].u.operand),
3125                         get(currentInstruction[1].u.operand)));
3126             }
3127             NEXT_OPCODE(op_new_func);
3128         }
3129             
3130         case op_new_func_exp: {
3131             set(currentInstruction[1].u.operand,
3132                 addToGraph(NewFunctionExpression, OpInfo(currentInstruction[2].u.operand)));
3133             NEXT_OPCODE(op_new_func_exp);
3134         }
3135
3136         case op_typeof: {
3137             set(currentInstruction[1].u.operand,
3138                 addToGraph(TypeOf, get(currentInstruction[2].u.operand)));
3139             NEXT_OPCODE(op_typeof);
3140         }
3141
3142         case op_to_number: {
3143             set(currentInstruction[1].u.operand,
3144                 addToGraph(Identity, Edge(get(currentInstruction[2].u.operand), NumberUse)));
3145             NEXT_OPCODE(op_to_number);
3146         }
3147             
3148         case op_in: {
3149             set(currentInstruction[1].u.operand,
3150                 addToGraph(In, get(currentInstruction[2].u.operand), get(currentInstruction[3].u.operand)));
3151             NEXT_OPCODE(op_in);
3152         }
3153
3154         default:
3155             // Parse failed! This should not happen because the capabilities checker
3156             // should have caught it.
3157             RELEASE_ASSERT_NOT_REACHED();
3158             return false;
3159         }
3160     }
3161 }
3162
3163 void ByteCodeParser::linkBlock(BasicBlock* block, Vector<BasicBlock*>& possibleTargets)
3164 {
3165     ASSERT(!block->isLinked);
3166     ASSERT(!block->isEmpty());
3167     Node* node = block->last();
3168     ASSERT(node->isTerminal());
3169     
3170     switch (node->op()) {
3171     case Jump:
3172         node->setTakenBlock(blockForBytecodeOffset(possibleTargets, node->takenBytecodeOffsetDuringParsing()));
3173 #if DFG_ENABLE(DEBUG_VERBOSE)
3174         dataLogF("Linked basic block %p to %p, #%u.\n", block, node->takenBlock(), node->takenBlock()->index);
3175 #endif
3176         break;
3177         
3178     case Branch:
3179         node->setTakenBlock(blockForBytecodeOffset(possibleTargets, node->takenBytecodeOffsetDuringParsing()));
3180         node->setNotTakenBlock(blockForBytecodeOffset(possibleTargets, node->notTakenBytecodeOffsetDuringParsing()));
3181 #if DFG_ENABLE(DEBUG_VERBOSE)
3182         dataLogF("Linked basic block %p to %p, #%u and %p, #%u.\n", block, node->takenBlock(), node->takenBlock()->index, node->notTakenBlock(), node->notTakenBlock()->index);
3183 #endif
3184         break;
3185         
3186     case Switch:
3187         for (unsigned i = node->switchData()->cases.size(); i--;)
3188             node->switchData()->cases[i].target = blockForBytecodeOffset(possibleTargets, node->switchData()->cases[i].targetBytecodeIndex());
3189         node->switchData()->fallThrough = blockForBytecodeOffset(possibleTargets, node->switchData()->fallThroughBytecodeIndex());
3190         break;
3191         
3192     default:
3193 #if DFG_ENABLE(DEBUG_VERBOSE)
3194         dataLogF("Marking basic block %p as linked.\n", block);
3195 #endif
3196         break;
3197     }
3198     
3199 #if !ASSERT_DISABLED
3200     block->isLinked = true;
3201 #endif
3202 }
3203
3204 void ByteCodeParser::linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets)
3205 {
3206     for (size_t i = 0; i < unlinkedBlocks.size(); ++i) {
3207         if (unlinkedBlocks[i].m_needsNormalLinking) {
3208             linkBlock(unlinkedBlocks[i].m_block, possibleTargets);
3209             unlinkedBlocks[i].m_needsNormalLinking = false;
3210         }
3211     }
3212 }
3213
3214 void ByteCodeParser::buildOperandMapsIfNecessary()
3215 {
3216     if (m_haveBuiltOperandMaps)
3217         return;
3218     
3219     for (size_t i = 0; i < m_codeBlock->numberOfIdentifiers(); ++i)
3220         m_identifierMap.add(m_codeBlock->identifier(i).impl(), i);
3221     for (size_t i = 0; i < m_codeBlock->numberOfConstantRegisters(); ++i) {
3222         JSValue value = m_codeBlock->getConstant(i + FirstConstantRegisterIndex);
3223         if (!value)
3224             m_emptyJSValueIndex = i + FirstConstantRegisterIndex;
3225         else
3226             m_jsValueMap.add(JSValue::encode(value), i + FirstConstantRegisterIndex);
3227     }
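    // JSValue() is the empty value; it cannot serve as a key in m_jsValueMap (it would
    // collide with the hash table's empty bucket), so it is tracked separately via
    // m_emptyJSValueIndex.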
3228     
3229     m_haveBuiltOperandMaps = true;
3230 }
3231
3232 ByteCodeParser::InlineStackEntry::InlineStackEntry(
3233     ByteCodeParser* byteCodeParser,
3234     CodeBlock* codeBlock,
3235     CodeBlock* profiledBlock,
3236     BasicBlock* callsiteBlockHead,
3237     JSFunction* callee, // Null if this is a closure call.
3238     VirtualRegister returnValueVR,
3239     VirtualRegister inlineCallFrameStart,
3240     int argumentCountIncludingThis,
3241     CodeSpecializationKind kind)
3242     : m_byteCodeParser(byteCodeParser)
3243     , m_codeBlock(codeBlock)
3244     , m_profiledBlock(profiledBlock)
3245     , m_callsiteBlockHead(callsiteBlockHead)
3246     , m_returnValue(returnValueVR)
3247     , m_didReturn(false)
3248     , m_didEarlyReturn(false)
3249     , m_caller(byteCodeParser->m_inlineStackTop)
3250 {
3251     {
3252         ConcurrentJITLocker locker(m_profiledBlock->m_lock);
3253         m_lazyOperands.initialize(locker, m_profiledBlock->lazyOperandValueProfiles());
3254         m_exitProfile.initialize(locker, profiledBlock->exitProfile());
3255     }
3256     
3257     m_argumentPositions.resize(argumentCountIncludingThis);
3258     for (int i = 0; i < argumentCountIncludingThis; ++i) {
3259         byteCodeParser->m_graph.m_argumentPositions.append(ArgumentPosition());
3260         ArgumentPosition* argumentPosition = &byteCodeParser->m_graph.m_argumentPositions.last();
3261         m_argumentPositions[i] = argumentPosition;
3262     }
3263     
3264     // Track the code-block-global exit sites.
3265     if (m_exitProfile.hasExitSite(ArgumentsEscaped)) {
3266         byteCodeParser->m_graph.m_executablesWhoseArgumentsEscaped.add(
3267             codeBlock->ownerExecutable());
3268     }
3269         
3270     if (m_caller) {
3271         // Inline case.
3272         ASSERT(codeBlock != byteCodeParser->m_codeBlock);
3273         ASSERT(inlineCallFrameStart != InvalidVirtualRegister);
3274         ASSERT(callsiteBlockHead);
3275         
3276         InlineCallFrame inlineCallFrame;
3277         inlineCallFrame.executable.set(*byteCodeParser->m_vm, byteCodeParser->m_codeBlock->ownerExecutable(), codeBlock->ownerExecutable());
3278         inlineCallFrame.stackOffset = inlineCallFrameStart + JSStack::CallFrameHeaderSize;
3279         if (callee)
3280             inlineCallFrame.callee.set(*byteCodeParser->m_vm, byteCodeParser->m_codeBlock->ownerExecutable(), callee);
3281         inlineCallFrame.caller = byteCodeParser->currentCodeOrigin();
3282         inlineCallFrame.arguments.resize(argumentCountIncludingThis); // Set the number of arguments including this, but don't configure the value recoveries yet.
3283         inlineCallFrame.isCall = isCall(kind);
3284         
3285         if (inlineCallFrame.caller.inlineCallFrame)
3286             inlineCallFrame.capturedVars = inlineCallFrame.caller.inlineCallFrame->capturedVars;
3287         else {
3288             for (int i = byteCodeParser->m_codeBlock->m_numVars; i--;) {
3289                 if (byteCodeParser->m_codeBlock->isCaptured(i))
3290                     inlineCallFrame.capturedVars.set(i);
3291             }
3292         }
3293
3294         for (int i = argumentCountIncludingThis; i--;) {
3295             if (codeBlock->isCaptured(argumentToOperand(i)))
3296                 inlineCallFrame.capturedVars.set(argumentToOperand(i) + inlineCallFrame.stackOffset);
3297         }
3298         for (size_t i = codeBlock->m_numVars; i--;) {
3299             if (codeBlock->isCaptured(i))
3300                 inlineCallFrame.capturedVars.set(i + inlineCallFrame.stackOffset);
3301         }
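        // Captured arguments and locals are recorded at their machine-frame offsets
        // (hence the stackOffset adjustment), so the bitvector remains meaningful
        // across all the inlined frames stacked inside this machine code block.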
3302
3303 #if DFG_ENABLE(DEBUG_VERBOSE)
3304         dataLogF("Current captured variables: ");
3305         inlineCallFrame.capturedVars.dump(WTF::dataFile());
3306         dataLogF("\n");
3307 #endif
3308         
3309         byteCodeParser->m_codeBlock->inlineCallFrames().append(inlineCallFrame);
3310         m_inlineCallFrame = &byteCodeParser->m_codeBlock->inlineCallFrames().last();
3311         
3312         byteCodeParser->buildOperandMapsIfNecessary();
3313         
3314         m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
3315         m_constantRemap.resize(codeBlock->numberOfConstantRegisters());
3316         m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
3317         m_switchRemap.resize(codeBlock->numberOfSwitchJumpTables());
3318
3319         for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i) {
3320             StringImpl* rep = codeBlock->identifier(i).impl();
3321             BorrowedIdentifierMap::AddResult result = byteCodeParser->m_identifierMap.add(rep, byteCodeParser->m_graph.identifiers().numberOfIdentifiers());
3322             if (result.isNewEntry)
3323                 byteCodeParser->m_graph.identifiers().addLazily(rep);
3324             m_identifierRemap[i] = result.iterator->value;
3325         }
3326         for (size_t i = 0; i < codeBlock->numberOfConstantRegisters(); ++i) {
3327             JSValue value = codeBlock->getConstant(i + FirstConstantRegisterIndex);
3328             if (!value) {
3329                 if (byteCodeParser->m_emptyJSValueIndex == UINT_MAX) {
3330                     byteCodeParser->m_emptyJSValueIndex = byteCodeParser->m_codeBlock->numberOfConstantRegisters() + FirstConstantRegisterIndex;
3331                     byteCodeParser->m_codeBlock->addConstant(JSValue());
3332                     byteCodeParser->m_constants.append(ConstantRecord());
3333                 }
3334                 m_constantRemap[i] = byteCodeParser->m_emptyJSValueIndex;
3335                 continue;
3336             }
3337             JSValueMap::AddResult result = byteCodeParser->m_jsValueMap.add(JSValue::encode(value), byteCodeParser->m_codeBlock->numberOfConstantRegisters() + FirstConstantRegisterIndex);
3338             if (result.isNewEntry) {
3339                 byteCodeParser->m_codeBlock->addConstant(value);
3340                 byteCodeParser->m_constants.append(ConstantRecord());
3341             }
3342             m_constantRemap[i] = result.iterator->value;
3343         }
3344         for (unsigned i = 0; i < codeBlock->numberOfConstantBuffers(); ++i) {
3345             // If we inline the same code block multiple times, we don't want to needlessly
3346             // duplicate its constant buffers.
3347             HashMap<ConstantBufferKey, unsigned>::iterator iter =
3348                 byteCodeParser->m_constantBufferCache.find(ConstantBufferKey(codeBlock, i));
3349             if (iter != byteCodeParser->m_constantBufferCache.end()) {
3350                 m_constantBufferRemap[i] = iter->value;
3351                 continue;
3352             }
3353             Vector<JSValue>& buffer = codeBlock->constantBufferAsVector(i);
3354             unsigned newIndex = byteCodeParser->m_codeBlock->addConstantBuffer(buffer);
3355             m_constantBufferRemap[i] = newIndex;
3356             byteCodeParser->m_constantBufferCache.add(ConstantBufferKey(codeBlock, i), newIndex);
3357         }
3358         for (unsigned i = 0; i < codeBlock->numberOfSwitchJumpTables(); ++i) {
3359             m_switchRemap[i] = byteCodeParser->m_codeBlock->numberOfSwitchJumpTables();
3360             byteCodeParser->m_codeBlock->addSwitchJumpTable() = codeBlock->switchJumpTable(i);
3361         }
3362         m_callsiteBlockHeadNeedsLinking = true;
3363     } else {
3364         // Machine code block case.
3365         ASSERT(codeBlock == byteCodeParser->m_codeBlock);
3366         ASSERT(!callee);
3367         ASSERT(returnValueVR == InvalidVirtualRegister);
3368         ASSERT(inlineCallFrameStart == InvalidVirtualRegister);
3369         ASSERT(!callsiteBlockHead);
3370
3371         m_inlineCallFrame = 0;
3372
3373         m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
3374         m_constantRemap.resize(codeBlock->numberOfConstantRegisters());
3375         m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
3376         m_switchRemap.resize(codeBlock->numberOfSwitchJumpTables());
3377         for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i)
3378             m_identifierRemap[i] = i;
3379         for (size_t i = 0; i < codeBlock->numberOfConstantRegisters(); ++i)
3380             m_constantRemap[i] = i + FirstConstantRegisterIndex;
3381         for (size_t i = 0; i < codeBlock->numberOfConstantBuffers(); ++i)
3382             m_constantBufferRemap[i] = i;
3383         for (size_t i = 0; i < codeBlock->numberOfSwitchJumpTables(); ++i)
3384             m_switchRemap[i] = i;
3385         m_callsiteBlockHeadNeedsLinking = false;
3386     }
3387     
3388     for (size_t i = 0; i < m_constantRemap.size(); ++i)
3389         ASSERT(m_constantRemap[i] >= static_cast<unsigned>(FirstConstantRegisterIndex));
3390     
3391     byteCodeParser->m_inlineStackTop = this;
3392 }
3393
3394 void ByteCodeParser::parseCodeBlock()
3395 {
3396     CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
3397     
3398     if (m_graph.compilation()) {
3399         m_graph.compilation()->addProfiledBytecodes(
3400             *m_vm->m_perBytecodeProfiler, m_inlineStackTop->m_profiledBlock);
3401     }
3402     
3403     bool shouldDumpBytecode = Options::dumpBytecodeAtDFGTime();
3404 #if DFG_ENABLE(DEBUG_VERBOSE)
3405     shouldDumpBytecode |= true;
3406 #endif
3407     if (shouldDumpBytecode) {
3408         dataLog("Parsing ", *codeBlock);
3409         if (inlineCallFrame()) {
3410             dataLog(
3411                 " for inlining at ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT),
3412                 " ", inlineCallFrame()->caller);
3413         }
3414         dataLog(
3415             ": captureCount = ", codeBlock->symbolTable() ? codeBlock->symbolTable()->captureCount() : 0,
3416             ", needsFullScopeChain = ", codeBlock->needsFullScopeChain(),
3417             ", needsActivation = ", codeBlock->ownerExecutable()->needsActivation(),
3418             ", isStrictMode = ", codeBlock->ownerExecutable()->isStrictMode(), "\n");
3419         codeBlock->baselineVersion()->dumpBytecode();
3420     }
3421     
3422     Vector<unsigned, 32> jumpTargets;
3423     computePreciseJumpTargets(codeBlock, jumpTargets);
3424     if (Options::dumpBytecodeAtDFGTime()) {
3425         dataLog("Jump targets: ");
3426         CommaPrinter comma;
3427         for (unsigned i = 0; i < jumpTargets.size(); ++i)
3428             dataLog(comma, jumpTargets[i]);
3429         dataLog("\n");
3430     }
3431     
3432     for (unsigned jumpTargetIndex = 0; jumpTargetIndex <= jumpTargets.size(); ++jumpTargetIndex) {
3433         // The maximum bytecode offset to go into the current basic block is either the next jump target, or the end of the instructions.
3434         unsigned limit = jumpTargetIndex < jumpTargets.size() ? jumpTargets[jumpTargetIndex] : codeBlock->instructions().size();
3435 #if DFG_ENABLE(DEBUG_VERBOSE)
3436         dataLog(
3437             "Parsing bytecode with limit ", pointerDump(inlineCallFrame()),
3438             " bc#", limit, " at inline depth ",
3439             CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()), ".\n");
3440 #endif
3441         ASSERT(m_currentIndex < limit);
3442
3443         // Loop until we reach the current limit (i.e. next jump target).
3444         do {
3445             if (!m_currentBlock) {
3446                 // Check if we can use the last block.
3447                 if (m_graph.numBlocks() && m_graph.lastBlock()->isEmpty()) {
3448                     // This must be a block belonging to us.
3449                     ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_block == m_graph.lastBlock());
3450                     // Either the block is linkable or it isn't. If it's linkable then it's the last
3451                     // block in the blockLinkingTargets list. If it's not then the last block will
3452                     // have a lower bytecode index than the one we're about to give to this block.
3453                     if (m_inlineStackTop->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_blockLinkingTargets.last()->bytecodeBegin != m_currentIndex) {
3454                         // Make the block linkable.
3455                         ASSERT(m_inlineStackTop->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_blockLinkingTargets.last()->bytecodeBegin < m_currentIndex);
3456                         m_inlineStackTop->m_blockLinkingTargets.append(m_graph.lastBlock());
3457                     }
3458                     // Change its bytecode begin and continue.
3459                     m_currentBlock = m_graph.lastBlock();
3460 #if DFG_ENABLE(DEBUG_VERBOSE)
3461                     dataLogF("Reascribing bytecode index of block %p from bc#%u to bc#%u (peephole case).\n", m_currentBlock, m_currentBlock->bytecodeBegin, m_currentIndex);
3462 #endif
3463                     m_currentBlock->bytecodeBegin = m_currentIndex;
3464                 } else {
3465                     RefPtr<BasicBlock> block = adoptRef(new BasicBlock(m_currentIndex, m_numArguments, m_numLocals));
3466 #if DFG_ENABLE(DEBUG_VERBOSE)
3467                     dataLogF("Creating basic block %p, #%zu for %p bc#%u at inline depth %u.\n", block.get(), m_graph.numBlocks(), m_inlineStackTop->executable(), m_currentIndex, CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()));
3468 #endif
3469                     m_currentBlock = block.get();
3470                     // This assertion checks two things:
3471                     // 1) If the bytecodeBegin is greater than currentIndex, then something has gone
3472                     //    horribly wrong. So, we're probably generating incorrect code.
3473                     // 2) If the bytecodeBegin is equal to the currentIndex, then we failed to do
3474                     //    a peephole coalescing of this block in the if statement above. So, we're
3475                     //    generating suboptimal code and leaving more work for the CFG simplifier.
3476                     ASSERT(m_inlineStackTop->m_unlinkedBlocks.isEmpty() || m_inlineStackTop->m_unlinkedBlocks.last().m_block->bytecodeBegin < m_currentIndex);
3477                     m_inlineStackTop->m_unlinkedBlocks.append(UnlinkedBlock(block.get()));
3478                     m_inlineStackTop->m_blockLinkingTargets.append(block.get());
3479                     // The first block is definitely an OSR target.
3480                     if (!m_graph.numBlocks())
3481                         block->isOSRTarget = true;
3482                     m_graph.appendBlock(block);
3483                     prepareToParseBlock();
3484                 }
3485             }
3486
3487             bool shouldContinueParsing = parseBlock(limit);
3488
3489             // We should not have gone beyond the limit.
3490             ASSERT(m_currentIndex <= limit);
3491             
3492             // We should have planted a terminal, or we just gave up because
3493             // we realized that the jump target information is imprecise, or we
3494             // are at the end of an inline function, or we realized that we
3495             // should stop parsing because there was a return in the first
3496             // basic block.
3497             ASSERT(m_currentBlock->isEmpty() || m_currentBlock->last()->isTerminal() || (m_currentIndex == codeBlock->instructions().size() && inlineCallFrame()) || !shouldContinueParsing);
3498
3499             if (!shouldContinueParsing)
3500                 return;
3501             
3502             m_currentBlock = 0;
3503         } while (m_currentIndex < limit);
3504     }
3505
3506     // Should have reached the end of the instructions.
3507     ASSERT(m_currentIndex == codeBlock->instructions().size());
3508 }
3509
3510 bool ByteCodeParser::parse()
3511 {
3512     // Set during construction.
3513     ASSERT(!m_currentIndex);
3514     
3515 #if DFG_ENABLE(ALL_VARIABLES_CAPTURED)
3516     // We should be pretending that the code has an activation.
3517     ASSERT(m_graph.needsActivation());
3518 #endif
3519     
3520     InlineStackEntry inlineStackEntry(
3521         this, m_codeBlock, m_profiledBlock, 0, 0, InvalidVirtualRegister, InvalidVirtualRegister,
3522         m_codeBlock->numParameters(), CodeForCall);
3523     
3524     parseCodeBlock();
3525
3526     linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
3527     m_graph.determineReachability();
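    // With the root's blocks linked, determineReachability walks the CFG and flags the
    // reachable blocks so that subsequent phases can skip unreachable ones.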
3528     
3529     ASSERT(m_preservedVars.size());
3530     size_t numberOfLocals = 0;