/*
 * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(DFG_JIT)

#include "DFGByteCodeParser.h"

#include "ArrayConstructor.h"
#include "CallLinkStatus.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
#include "DFGArrayMode.h"
#include "DFGCapabilities.h"
#include "DFGJITCode.h"
#include "GetByIdStatus.h"
#include "JSActivation.h"
#include "JSCInlines.h"
#include "PreciseJumpTargets.h"
#include "PutByIdStatus.h"
#include "StackAlignment.h"
#include "StringConstructor.h"
#include <wtf/CommaPrinter.h>
#include <wtf/HashMap.h>
#include <wtf/MathExtras.h>
#include <wtf/StdLibExtras.h>

namespace JSC { namespace DFG {

class ConstantBufferKey {
public:
    ConstantBufferKey()
        : m_codeBlock(0)
        , m_index(0)
    {
    }

    ConstantBufferKey(WTF::HashTableDeletedValueType)
        : m_codeBlock(0)
        , m_index(1)
    {
    }

    ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
        : m_codeBlock(codeBlock)
        , m_index(index)
    {
    }

    bool operator==(const ConstantBufferKey& other) const
    {
        return m_codeBlock == other.m_codeBlock
            && m_index == other.m_index;
    }

    unsigned hash() const
    {
        return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
    }

    bool isHashTableDeletedValue() const
    {
        return !m_codeBlock && m_index;
    }

    CodeBlock* codeBlock() const { return m_codeBlock; }
    unsigned index() const { return m_index; }

private:
    CodeBlock* m_codeBlock;
    unsigned m_index;
};

struct ConstantBufferKeyHash {
    static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
    static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
    {
        return a == b;
    }

    static const bool safeToCompareToEmptyOrDeleted = true;
};

} } // namespace JSC::DFG

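// These specializations let ConstantBufferKey be used directly as a HashMap
// key (see ByteCodeParser::m_constantBufferCache below).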
namespace WTF {

template<typename T> struct DefaultHash;
template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
    typedef JSC::DFG::ConstantBufferKeyHash Hash;
};

template<typename T> struct HashTraits;
template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };

} // namespace WTF

namespace JSC { namespace DFG {

// === ByteCodeParser ===
//
// This class is used to compile the dataflow graph from a CodeBlock.
class ByteCodeParser {
public:
    ByteCodeParser(Graph& graph)
        : m_vm(&graph.m_vm)
        , m_codeBlock(graph.m_codeBlock)
        , m_profiledBlock(graph.m_profiledBlock)
        , m_graph(graph)
        , m_currentBlock(0)
        , m_currentIndex(0)
        , m_constantUndefined(UINT_MAX)
        , m_constantNull(UINT_MAX)
        , m_constantNaN(UINT_MAX)
        , m_constant1(UINT_MAX)
        , m_constants(m_codeBlock->numberOfConstantRegisters())
        , m_numArguments(m_codeBlock->numParameters())
        , m_numLocals(m_codeBlock->m_numCalleeRegisters)
        , m_parameterSlots(0)
        , m_numPassedVarArgs(0)
        , m_inlineStackTop(0)
        , m_haveBuiltOperandMaps(false)
        , m_emptyJSValueIndex(UINT_MAX)
        , m_currentInstruction(0)
    {
        ASSERT(m_profiledBlock);
    }

    // Parse a full CodeBlock of bytecode.
    bool parse();

private:
    struct InlineStackEntry;

    // Just parse from m_currentIndex to the end of the current CodeBlock.
    void parseCodeBlock();

    // Helper for min and max.
    bool handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);

    // Handle calls. This resolves issues surrounding inlining and intrinsics.
    void handleCall(int result, NodeType op, CodeSpecializationKind, unsigned instructionSize, int callee, int argCount, int registerOffset);
    void handleCall(Instruction* pc, NodeType op, CodeSpecializationKind);
    void emitFunctionChecks(const CallLinkStatus&, Node* callTarget, int registerOffset, CodeSpecializationKind);
    void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind);
    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
    bool handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
    bool handleIntrinsic(int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
    bool handleTypedArrayConstructor(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, TypedArrayType);
    bool handleConstantInternalFunction(int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
    Node* handlePutByOffset(Node* base, unsigned identifier, PropertyOffset, Node* value);
    Node* handleGetByOffset(SpeculatedType, Node* base, unsigned identifierNumber, PropertyOffset);
    void handleGetByOffset(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        PropertyOffset);
    void handleGetById(
        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
        const GetByIdStatus&);
    Node* emitPrototypeChecks(const GetByIdVariant&);

    Node* getScope(bool skipTop, unsigned skipCount);

    // Prepare to parse a block.
    void prepareToParseBlock();
    // Parse a single basic block of bytecode instructions.
    bool parseBlock(unsigned limit);
    // Link block successors.
    void linkBlock(BasicBlock*, Vector<BasicBlock*>& possibleTargets);
    void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets);

    VariableAccessData* newVariableAccessData(VirtualRegister operand, bool isCaptured)
    {
        ASSERT(!operand.isConstant());

        m_graph.m_variableAccessData.append(VariableAccessData(operand, isCaptured));
        return &m_graph.m_variableAccessData.last();
    }

    // Get/Set the operands/result of a bytecode instruction.
    Node* getDirect(VirtualRegister operand)
    {
        // Is this a constant?
        if (operand.isConstant()) {
            unsigned constant = operand.toConstantIndex();
            ASSERT(constant < m_constants.size());
            return getJSConstant(constant);
        }

        // Is this an argument?
        if (operand.isArgument())
            return getArgument(operand);

        // Must be a local.
        return getLocal(operand);
    }

    Node* get(VirtualRegister operand)
    {
        if (inlineCallFrame()) {
            if (!inlineCallFrame()->isClosureCall) {
                JSFunction* callee = inlineCallFrame()->calleeConstant();
                if (operand.offset() == JSStack::Callee)
                    return cellConstant(callee);
                if (operand.offset() == JSStack::ScopeChain)
                    return cellConstant(callee->scope());
            }
        } else if (operand.offset() == JSStack::Callee)
            return addToGraph(GetCallee);
        else if (operand.offset() == JSStack::ScopeChain)
            return addToGraph(GetMyScope);

        return getDirect(m_inlineStackTop->remapOperand(operand));
    }

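    // NormalSet defers the actual SetLocal by queueing a DelayedSetLocal (see
    // below); ImmediateSet emits it right away.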
    enum SetMode { NormalSet, ImmediateSet };
    Node* setDirect(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        addToGraph(MovHint, OpInfo(operand.offset()), value);

        DelayedSetLocal delayed = DelayedSetLocal(operand, value);

        if (setMode == NormalSet) {
            m_setLocalQueue.append(delayed);
            return 0;
        }

        return delayed.execute(this, setMode);
    }

    Node* set(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        return setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
    }

    Node* injectLazyOperandSpeculation(Node* node)
    {
        ASSERT(node->op() == GetLocal);
        ASSERT(node->origin.semantic.bytecodeIndex == m_currentIndex);
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        LazyOperandValueProfileKey key(m_currentIndex, node->local());
        SpeculatedType prediction = m_inlineStackTop->m_lazyOperands.prediction(locker, key);
        node->variableAccessData()->predict(prediction);
        return node;
    }

    // Used in implementing get/set, above, where the operand is a local variable.
    Node* getLocal(VirtualRegister operand)
    {
        unsigned local = operand.toLocal();

        if (local < m_localWatchpoints.size()) {
            if (VariableWatchpointSet* set = m_localWatchpoints[local]) {
                if (JSValue value = set->inferredValue()) {
                    addToGraph(FunctionReentryWatchpoint, OpInfo(m_codeBlock->symbolTable()));
                    addToGraph(VariableWatchpoint, OpInfo(set));
                    // Note: this is very special from an OSR exit standpoint. We wouldn't be
                    // able to do this for most locals, but it works here because we're dealing
                    // with a flushed local. For most locals we would need to issue a GetLocal
                    // here and ensure that we have uses in DFG IR wherever there would have
                    // been uses in bytecode. Clearly this optimization does not do this. But
                    // that's fine, because we don't need to track liveness for captured
                    // locals, and this optimization only kicks in for captured locals.
                    return inferredConstant(value);
                }
            }
        }

        Node* node = m_currentBlock->variablesAtTail.local(local);
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());

        // This has two goals: 1) link together variable access datas, and 2)
        // try to avoid creating redundant GetLocals. (1) is required for
        // correctness - no other phase will ensure that block-local variable
        // access data unification is done correctly. (2) is purely opportunistic
        // and is meant as a compile-time optimization only.

        VariableAccessData* variable;

        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);

            if (!isCaptured) {
                switch (node->op()) {
                case GetLocal:
                    return node;
                case SetLocal:
                    return node->child1().node();
                default:
                    break;
                }
            }
        } else
            variable = newVariableAccessData(operand, isCaptured);

        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.local(local) = node;
        return node;
    }

    Node* setLocal(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned local = operand.toLocal();
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());

        if (setMode == NormalSet) {
            ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
            if (isCaptured || argumentPosition)
                flushDirect(operand, argumentPosition);
        }

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
            || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCacheWatchpoint));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.local(local) = node;
        return node;
    }

    // Used in implementing get/set, above, where the operand is an argument.
    Node* getArgument(VirtualRegister operand)
    {
        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);

        Node* node = m_currentBlock->variablesAtTail.argument(argument);
        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variable;

        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);

            switch (node->op()) {
            case GetLocal:
                return node;
            case SetLocal:
                return node->child1().node();
            default:
                break;
            }
        } else
            variable = newVariableAccessData(operand, isCaptured);

        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }
    Node* setArgument(VirtualRegister operand, Node* value, SetMode setMode = NormalSet)
    {
        unsigned argument = operand.toArgument();
        ASSERT(argument < m_numArguments);

        bool isCaptured = m_codeBlock->isCaptured(operand);

        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);

        // Always flush arguments, except for 'this'. If 'this' is created by us,
        // then make sure that it's never unboxed.
        if (argument) {
            if (setMode == NormalSet)
                flushDirect(operand);
        } else if (m_codeBlock->specializationKind() == CodeForConstruct)
            variableAccessData->mergeShouldNeverUnbox(true);

        variableAccessData->mergeStructureCheckHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
            || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCacheWatchpoint));
        variableAccessData->mergeCheckArrayHoistingFailed(
            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
        m_currentBlock->variablesAtTail.argument(argument) = node;
        return node;
    }

    ArgumentPosition* findArgumentPositionForArgument(int argument)
    {
        InlineStackEntry* stack = m_inlineStackTop;
        while (stack->m_inlineCallFrame)
            stack = stack->m_caller;
        return stack->m_argumentPositions[argument];
    }

    ArgumentPosition* findArgumentPositionForLocal(VirtualRegister operand)
    {
        for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
            InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
            if (!inlineCallFrame)
                break;
            if (operand.offset() < static_cast<int>(inlineCallFrame->stackOffset + JSStack::CallFrameHeaderSize))
                continue;
            if (operand.offset() == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
                continue;
            if (operand.offset() >= static_cast<int>(inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset() + inlineCallFrame->arguments.size()))
                continue;
            int argument = VirtualRegister(operand.offset() - inlineCallFrame->stackOffset).toArgument();
            return stack->m_argumentPositions[argument];
        }
        return 0;
    }

    ArgumentPosition* findArgumentPosition(VirtualRegister operand)
    {
        if (operand.isArgument())
            return findArgumentPositionForArgument(operand.toArgument());
        return findArgumentPositionForLocal(operand);
    }

    void addConstant(JSValue value)
    {
        unsigned constantIndex = m_codeBlock->addConstantLazily();
        initializeLazyWriteBarrierForConstant(
            m_graph.m_plan.writeBarriers,
            m_codeBlock->constants()[constantIndex],
            m_codeBlock,
            constantIndex,
            m_codeBlock->ownerExecutable(),
            value);
    }

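    // Flushing emits a Flush node for the operand, which keeps its value live
    // in its stack slot. This matters for captured variables and for values
    // that OSR exit or an inlined callee's frame may need to observe.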
    void flush(VirtualRegister operand)
    {
        flushDirect(m_inlineStackTop->remapOperand(operand));
    }

    void flushDirect(VirtualRegister operand)
    {
        flushDirect(operand, findArgumentPosition(operand));
    }

    void flushDirect(VirtualRegister operand, ArgumentPosition* argumentPosition)
    {
        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());

        ASSERT(!operand.isConstant());

        Node* node = m_currentBlock->variablesAtTail.operand(operand);

        VariableAccessData* variable;

        if (node) {
            variable = node->variableAccessData();
            variable->mergeIsCaptured(isCaptured);
        } else
            variable = newVariableAccessData(operand, isCaptured);

        node = addToGraph(Flush, OpInfo(variable));
        m_currentBlock->variablesAtTail.operand(operand) = node;
        if (argumentPosition)
            argumentPosition->addVariable(variable);
    }

    void flush(InlineStackEntry* inlineStackEntry)
    {
        int numArguments;
        if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame) {
            numArguments = inlineCallFrame->arguments.size();
            if (inlineCallFrame->isClosureCall) {
                flushDirect(inlineStackEntry->remapOperand(VirtualRegister(JSStack::Callee)));
                flushDirect(inlineStackEntry->remapOperand(VirtualRegister(JSStack::ScopeChain)));
            }
        } else
            numArguments = inlineStackEntry->m_codeBlock->numParameters();
        for (unsigned argument = numArguments; argument-- > 1;)
            flushDirect(inlineStackEntry->remapOperand(virtualRegisterForArgument(argument)));
        for (int local = 0; local < inlineStackEntry->m_codeBlock->m_numVars; ++local) {
            if (!inlineStackEntry->m_codeBlock->isCaptured(virtualRegisterForLocal(local)))
                continue;
            flushDirect(inlineStackEntry->remapOperand(virtualRegisterForLocal(local)));
        }
    }

    void flushAllArgumentsAndCapturedVariablesInInlineStack()
    {
        for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
            flush(inlineStackEntry);
    }

    void flushArgumentsAndCapturedVariables()
    {
        flush(m_inlineStackTop);
    }

    // NOTE: Only use this to construct constants that arise from non-speculative
    // constant folding. That is, if we had constant field inference, creating
    // constants this way would be a bad idea, since the bytecode parser's folding
    // doesn't handle liveness preservation.
    Node* getJSConstantForValue(JSValue constantValue, NodeFlags flags = NodeIsStaticConstant)
    {
        unsigned constantIndex;
        if (!m_codeBlock->findConstant(constantValue, constantIndex)) {
            addConstant(constantValue);
            m_constants.append(ConstantRecord());
        }

        ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());

        return getJSConstant(constantIndex, flags);
    }

    Node* getJSConstant(unsigned constant, NodeFlags flags = NodeIsStaticConstant)
    {
        Node* node = m_constants[constant].asJSValue;
        if (node)
            return node;

        Node* result = addToGraph(JSConstant, OpInfo(constant));
        result->mergeFlags(flags);
        m_constants[constant].asJSValue = result;
        return result;
    }

    // Helper functions to get/set the this value.
    Node* getThis()
    {
        return get(m_inlineStackTop->m_codeBlock->thisRegister());
    }

    void setThis(Node* value)
    {
        set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
    }

    // Convenience methods for checking nodes for constants.
    bool isJSConstant(Node* node)
    {
        return node->op() == JSConstant;
    }
    bool isInt32Constant(Node* node)
    {
        return isJSConstant(node) && valueOfJSConstant(node).isInt32();
    }
    // Convenience methods for getting constant values.
    JSValue valueOfJSConstant(Node* node)
    {
        ASSERT(isJSConstant(node));
        return m_codeBlock->getConstant(FirstConstantRegisterIndex + node->constantNumber());
    }
    int32_t valueOfInt32Constant(Node* node)
    {
        ASSERT(isInt32Constant(node));
        return valueOfJSConstant(node).asInt32();
    }

    // This method returns a JSConstant with the value 'undefined'.
    Node* constantUndefined()
    {
        // Has m_constantUndefined been set up yet?
        if (m_constantUndefined == UINT_MAX) {
            // Search the constant pool for undefined; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
                if (testMe.isUndefined())
                    return getJSConstant(m_constantUndefined);
            }

            // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsUndefined());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
        return getJSConstant(m_constantUndefined);
    }

    // This method returns a JSConstant with the value 'null'.
    Node* constantNull()
    {
        // Has m_constantNull been set up yet?
        if (m_constantNull == UINT_MAX) {
            // Search the constant pool for null; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
                if (testMe.isNull())
                    return getJSConstant(m_constantNull);
            }

            // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsNull());
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
        return getJSConstant(m_constantNull);
    }

    // This method returns a JSConstant with the integer value 1.
    Node* one()
    {
        // Has m_constant1 been set up yet?
        if (m_constant1 == UINT_MAX) {
            // Search the constant pool for the value 1; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
                if (testMe.isInt32() && testMe.asInt32() == 1)
                    return getJSConstant(m_constant1);
            }

            // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(jsNumber(1));
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
        return getJSConstant(m_constant1);
    }

    // This method returns a JSConstant with the value NaN.
    Node* constantNaN()
    {
        JSValue nan = jsNaN();

        // Has m_constantNaN been set up yet?
        if (m_constantNaN == UINT_MAX) {
            // Search the constant pool for the value NaN; if we find it, we can just reuse it!
            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
            for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
                if (JSValue::encode(testMe) == JSValue::encode(nan))
                    return getJSConstant(m_constantNaN);
            }

            // Add the value NaN to the CodeBlock's constants, and add a corresponding slot in m_constants.
            ASSERT(m_constants.size() == numberOfConstants);
            addConstant(nan);
            m_constants.append(ConstantRecord());
            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
        }

        // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value NaN.
        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
        ASSERT(std::isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
        return getJSConstant(m_constantNaN);
    }

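    // Returns the node for the given cell, creating a WeakJSConstant on first
    // use and reusing it thereafter (one node per cell per graph).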
    Node* cellConstant(JSCell* cell)
    {
        HashMap<JSCell*, Node*>::AddResult result = m_cellConstantNodes.add(cell, nullptr);
        if (result.isNewEntry)
            result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));

        return result.iterator->value;
    }

    Node* inferredConstant(JSValue value)
    {
        if (value.isCell())
            return cellConstant(value.asCell());
        return getJSConstantForValue(value, 0);
    }

    InlineCallFrame* inlineCallFrame()
    {
        return m_inlineStackTop->m_inlineCallFrame;
    }

    CodeOrigin currentCodeOrigin()
    {
        return CodeOrigin(m_currentIndex, inlineCallFrame());
    }

    bool canFold(Node* node)
    {
        if (Options::validateFTLOSRExitLiveness()) {
            // The static folding that the bytecode parser does results in the DFG
            // being able to do some DCE that the bytecode liveness analysis would
            // miss. Hence, we disable the static folding if we're validating FTL OSR
            // exit liveness. This may be brutish, but this validator is powerful
            // enough that it's worth it.
            return false;
        }

        return node->isStronglyProvedConstantIn(inlineCallFrame());
    }

    // Our codegen for constant strict equality performs a bitwise comparison,
    // so we can only select values that have a consistent bitwise identity.
    bool isConstantForCompareStrictEq(Node* node)
    {
        if (!node->isConstant())
            return false;
        JSValue value = valueOfJSConstant(node);
        return value.isBoolean() || value.isUndefinedOrNull();
    }

    Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, NodeOrigin(currentCodeOrigin()), Edge(child1), Edge(child2),
            Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
    {
        Node* result = m_graph.addNode(
            SpecNone, op, NodeOrigin(currentCodeOrigin()), child1, child2, child3);
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, NodeOrigin(currentCodeOrigin()), info, Edge(child1), Edge(child2),
            Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }
    Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
    {
        Node* result = m_graph.addNode(
            SpecNone, op, NodeOrigin(currentCodeOrigin()), info1, info2,
            Edge(child1), Edge(child2), Edge(child3));
        ASSERT(op != Phi);
        m_currentBlock->append(result);
        return result;
    }

    Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
    {
        Node* result = m_graph.addNode(
            SpecNone, Node::VarArg, op, NodeOrigin(currentCodeOrigin()), info1, info2,
            m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
        ASSERT(op != Phi);
        m_currentBlock->append(result);

        m_numPassedVarArgs = 0;

        return result;
    }

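    // Var-arg nodes do not store their children inline: addVarArgChild appends
    // an edge to m_graph.m_varArgChildren, and the Node::VarArg overload of
    // addToGraph above records the first-child index and the child count.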
    void addVarArgChild(Node* child)
    {
        m_graph.m_varArgChildren.append(Edge(child));
        m_numPassedVarArgs++;
    }

    Node* addCall(int result, NodeType op, int callee, int argCount, int registerOffset)
    {
        SpeculatedType prediction = getPrediction();

        addVarArgChild(get(VirtualRegister(callee)));
        size_t parameterSlots = JSStack::CallFrameHeaderSize - JSStack::CallerFrameAndPCSize + argCount;
        if (parameterSlots > m_parameterSlots)
            m_parameterSlots = parameterSlots;

        int dummyThisArgument = op == Call ? 0 : 1;
        for (int i = 0 + dummyThisArgument; i < argCount; ++i)
            addVarArgChild(get(virtualRegisterForArgument(i, registerOffset)));

        Node* call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
        set(VirtualRegister(result), call);
        return call;
    }

    Node* cellConstantWithStructureCheck(JSCell* object, Structure* structure)
    {
        Node* objectNode = cellConstant(object);
        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
        return objectNode;
    }

    Node* cellConstantWithStructureCheck(JSCell* object)
    {
        return cellConstantWithStructureCheck(object, object->structure());
    }

    SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(locker, bytecodeIndex);
    }

    SpeculatedType getPrediction(unsigned bytecodeIndex)
    {
        SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);

        if (prediction == SpecNone) {
            // We have no information about what values this node generates. Give up
            // on executing this code, since we're likely to do more damage than good.
            addToGraph(ForceOSRExit);
        }

        return prediction;
    }

    SpeculatedType getPredictionWithoutOSRExit()
    {
        return getPredictionWithoutOSRExit(m_currentIndex);
    }

    SpeculatedType getPrediction()
    {
        return getPrediction(m_currentIndex);
    }

    ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
        profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);
        return ArrayMode::fromObserved(locker, profile, action, false);
    }

    ArrayMode getArrayMode(ArrayProfile* profile)
    {
        return getArrayMode(profile, Array::Read);
    }

    ArrayMode getArrayModeConsideringSlowPath(ArrayProfile* profile, Array::Action action)
    {
        ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);

        profile->computeUpdatedPrediction(locker, m_inlineStackTop->m_profiledBlock);

        bool makeSafe =
            m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
            || profile->outOfBounds(locker);

        ArrayMode result = ArrayMode::fromObserved(locker, profile, action, makeSafe);

        return result;
    }

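    // makeSafe annotates an arithmetic node with NodeMayOverflow / NodeMayNegZero
    // when baseline slow-case counts or prior OSR exits suggest that the fast
    // integer path is not trustworthy, so that later phases speculate accordingly.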
    Node* makeSafe(Node* node)
    {
        bool likelyToTakeSlowCase;
        if (!isX86() && node->op() == ArithMod)
            likelyToTakeSlowCase = false;
        else
            likelyToTakeSlowCase = m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex);

        if (!likelyToTakeSlowCase
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            return node;

        switch (node->op()) {
        case UInt32ToNumber:
        case ArithAdd:
        case ArithSub:
        case ValueAdd:
        case ArithMod: // for ArithMod "MayOverflow" means we tried to divide by zero, or we saw double.
            node->mergeFlags(NodeMayOverflow);
            break;

        case ArithNegate:
            // Currently we can't tell the difference between a negation overflowing
            // (i.e. -(1 << 31)) or generating negative zero (i.e. -0). If it took slow
            // path then we assume that it did both of those things.
            node->mergeFlags(NodeMayOverflow);
            node->mergeFlags(NodeMayNegZero);
            break;

        case ArithMul:
            if (m_inlineStackTop->m_profiledBlock->likelyToTakeDeepestSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
                node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
            else if (m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
                node->mergeFlags(NodeMayNegZero);
            break;

        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }

        return node;
    }

    Node* makeDivSafe(Node* node)
    {
        ASSERT(node->op() == ArithDiv);

        // The main slow case counter for op_div in the old JIT counts only when
        // the operands are not numbers. We don't care about that since we already
        // have speculations in place that take care of that separately. We only
        // care about when the outcome of the division is not an integer, which
        // is what the special fast case counter tells us.

        if (!m_inlineStackTop->m_profiledBlock->couldTakeSpecialFastCase(m_currentIndex)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
            return node;

        // FIXME: It might be possible to make this more granular. The DFG certainly can
        // distinguish between negative zero and overflow in its exit profiles.
        node->mergeFlags(NodeMayOverflow | NodeMayNegZero);

        return node;
    }

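    // A non-direct put that transitions a structure is only sound if the
    // prototype chain it was recorded against is still intact; this walks the
    // chain and compares each prototype's current structure to the recorded one.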
    bool structureChainIsStillValid(bool direct, Structure* previousStructure, StructureChain* chain)
    {
        if (direct)
            return true;

        if (!previousStructure->storedPrototype().isNull() && previousStructure->storedPrototype().asCell()->structure() != chain->head()->get())
            return false;

        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
            if (!(*it)->storedPrototype().isNull() && (*it)->storedPrototype().asCell()->structure() != it[1].get())
                return false;
        }

        return true;
    }

    void buildOperandMapsIfNecessary();

    VM* m_vm;
    CodeBlock* m_codeBlock;
    CodeBlock* m_profiledBlock;
    Graph& m_graph;

    // The current block being generated.
    BasicBlock* m_currentBlock;
    // The bytecode index of the current instruction being generated.
    unsigned m_currentIndex;

    // We use these values during code generation, and to avoid the need for
    // special handling we make sure they are available as constants in the
    // CodeBlock's constant pool. These variables are initialized to
    // UINT_MAX, and lazily updated to hold an index into the CodeBlock's
    // constant pool, as necessary.
    unsigned m_constantUndefined;
    unsigned m_constantNull;
    unsigned m_constantNaN;
    unsigned m_constant1;
    HashMap<JSCell*, unsigned> m_cellConstants;
    HashMap<JSCell*, Node*> m_cellConstantNodes;

    // A constant in the constant pool may be represented by more than one
    // node in the graph, depending on the context in which it is being used.
    struct ConstantRecord {
        ConstantRecord()
            : asInt32(0)
            , asNumeric(0)
            , asJSValue(0)
        {
        }

        Node* asInt32;
        Node* asNumeric;
        Node* asJSValue;
    };

    // Track the index of the node whose result is the current value for every
    // register value in the bytecode - argument, local, and temporary.
    Vector<ConstantRecord, 16> m_constants;

    // The number of arguments passed to the function.
    unsigned m_numArguments;
    // The number of locals (vars + temporaries) used in the function.
    unsigned m_numLocals;
    // The number of slots (in units of sizeof(Register)) that we need to
    // preallocate for arguments to outgoing calls from this frame. This
    // number includes the CallFrame slots that we initialize for the callee
    // (but not the callee-initialized CallerFrame and ReturnPC slots).
    // This number is 0 if and only if this function is a leaf.
    unsigned m_parameterSlots;
    // The number of var args passed to the next var arg node.
    unsigned m_numPassedVarArgs;

    HashMap<ConstantBufferKey, unsigned> m_constantBufferCache;

    Vector<VariableWatchpointSet*, 16> m_localWatchpoints;

    struct InlineStackEntry {
        ByteCodeParser* m_byteCodeParser;

        CodeBlock* m_codeBlock;
        CodeBlock* m_profiledBlock;
        InlineCallFrame* m_inlineCallFrame;

        ScriptExecutable* executable() { return m_codeBlock->ownerExecutable(); }

        QueryableExitProfile m_exitProfile;

        // Remapping of identifier and constant numbers from the code block being
        // inlined (inline callee) to the code block that we're inlining into
        // (the machine code block, which is the transitive, though not necessarily
        // direct, caller).
        Vector<unsigned> m_identifierRemap;
        Vector<unsigned> m_constantRemap;
        Vector<unsigned> m_constantBufferRemap;
        Vector<unsigned> m_switchRemap;

        // Blocks introduced by this code block, which need successor linking.
        // May include up to one basic block containing the continuation after
        // the callsite in the caller. These must be appended in the order that they
        // are created, but their bytecodeBegin values need not be in order as they
        // are ignored.
        Vector<UnlinkedBlock> m_unlinkedBlocks;

        // Potential block linking targets. Must be sorted by bytecodeBegin, and
        // cannot have two blocks that have the same bytecodeBegin. For this very
        // reason, this is not equivalent to m_unlinkedBlocks.
        Vector<BasicBlock*> m_blockLinkingTargets;

        // If the callsite's basic block was split into two, then this will be
        // the head of the callsite block. It needs its successors linked to the
        // m_unlinkedBlocks, but not the other way around: there's no way for
        // any blocks in m_unlinkedBlocks to jump back into this block.
        BasicBlock* m_callsiteBlockHead;

        // Does the callsite block head need linking? This is typically true
        // but will be false for the machine code block's inline stack entry
        // (since that one is not inlined) and for cases where an inline callee
        // did the linking for us.
        bool m_callsiteBlockHeadNeedsLinking;

        VirtualRegister m_returnValue;

        // Speculations about variable types collected from the profiled code block,
        // which are based on OSR exit profiles that past DFG compilations of this
        // code block had gathered.
        LazyOperandValueProfileParser m_lazyOperands;

        StubInfoMap m_stubInfos;

        // Did we see any returns? We need to handle the (uncommon but necessary)
        // case where a procedure that does not return was inlined.
        bool m_didReturn;

        // Did we have any early returns?
        bool m_didEarlyReturn;

        // Pointers to the argument position trackers for this slice of code.
        Vector<ArgumentPosition*> m_argumentPositions;

        InlineStackEntry* m_caller;

        InlineStackEntry(
            ByteCodeParser*,
            CodeBlock*,
            CodeBlock* profiledBlock,
            BasicBlock* callsiteBlockHead,
            JSFunction* callee, // Null if this is a closure call.
            VirtualRegister returnValueVR,
            VirtualRegister inlineCallFrameStart,
            int argumentCountIncludingThis,
            CodeSpecializationKind);

        ~InlineStackEntry()
        {
            m_byteCodeParser->m_inlineStackTop = m_caller;
        }

        VirtualRegister remapOperand(VirtualRegister operand) const
        {
            if (!m_inlineCallFrame)
                return operand;

            if (operand.isConstant()) {
                VirtualRegister result = VirtualRegister(m_constantRemap[operand.toConstantIndex()]);
                ASSERT(result.isConstant());
                return result;
            }

            return VirtualRegister(operand.offset() + m_inlineCallFrame->stackOffset);
        }
    };

    InlineStackEntry* m_inlineStackTop;

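    // A SetLocal that setDirect() queued rather than emitting immediately (the
    // NormalSet case). The parser drains m_setLocalQueue before handling the
    // next bytecode operation; the drain site is not part of this excerpt.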
    struct DelayedSetLocal {
        VirtualRegister m_operand;
        Node* m_value;

        DelayedSetLocal() { }
        DelayedSetLocal(VirtualRegister operand, Node* value)
            : m_operand(operand)
            , m_value(value)
        {
        }

        Node* execute(ByteCodeParser* parser, SetMode setMode = NormalSet)
        {
            if (m_operand.isArgument())
                return parser->setArgument(m_operand, m_value, setMode);
            return parser->setLocal(m_operand, m_value, setMode);
        }
    };

    Vector<DelayedSetLocal, 2> m_setLocalQueue;

    // Have we built operand maps? We initialize them lazily, and only when doing
    // inlining.
    bool m_haveBuiltOperandMaps;
    // Mapping between identifier names and numbers.
    BorrowedIdentifierMap m_identifierMap;
    // Mapping between values and constant numbers.
    JSValueMap m_jsValueMap;
    // Index of the empty value, or UINT_MAX if there is no mapping. This is a horrible
    // work-around for the fact that JSValueMap can't handle "empty" values.
    unsigned m_emptyJSValueIndex;

    CodeBlock* m_dfgCodeBlock;
    CallLinkStatus::ContextMap m_callContextMap;
    StubInfoMap m_dfgStubInfos;

    Instruction* m_currentInstruction;
};

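// Both macros advance m_currentIndex past the current opcode. NEXT_OPCODE
// continues parseBlock's dispatch loop; LAST_OPCODE returns from parseBlock,
// ending the current basic block.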
#define NEXT_OPCODE(name) \
    m_currentIndex += OPCODE_LENGTH(name); \
    continue

#define LAST_OPCODE(name) \
    m_currentIndex += OPCODE_LENGTH(name); \
    return shouldContinueParsing

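// op_call and op_construct share an operand layout: pc[1] is the result
// operand, pc[2] the callee, pc[3] the argument count including 'this', and
// pc[4] the register offset, which is stored non-negated in the instruction
// stream and negated here.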
void ByteCodeParser::handleCall(Instruction* pc, NodeType op, CodeSpecializationKind kind)
{
    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
    handleCall(
        pc[1].u.operand, op, kind, OPCODE_LENGTH(op_call),
        pc[2].u.operand, pc[3].u.operand, -pc[4].u.operand);
}

void ByteCodeParser::handleCall(
    int result, NodeType op, CodeSpecializationKind kind, unsigned instructionSize,
    int callee, int argumentCountIncludingThis, int registerOffset)
{
    ASSERT(registerOffset <= 0);

    Node* callTarget = get(VirtualRegister(callee));

    CallLinkStatus callLinkStatus;

    if (m_graph.isConstant(callTarget)) {
        callLinkStatus = CallLinkStatus(
            m_graph.valueOfJSConstant(callTarget)).setIsProved(true);
    } else {
        callLinkStatus = CallLinkStatus::computeFor(
            m_inlineStackTop->m_profiledBlock, currentCodeOrigin(), m_callContextMap);
    }

    if (!callLinkStatus.canOptimize()) {
        // Oddly, this conflates calls that haven't executed with calls that behaved sufficiently polymorphically
        // that we cannot optimize them.

        addCall(result, op, callee, argumentCountIncludingThis, registerOffset);
        return;
    }

    unsigned nextOffset = m_currentIndex + instructionSize;
    SpeculatedType prediction = getPrediction();

    if (InternalFunction* function = callLinkStatus.internalFunction()) {
        if (handleConstantInternalFunction(result, function, registerOffset, argumentCountIncludingThis, prediction, kind)) {
            // This phantoming has to be *after* the code for the intrinsic, to signify that
            // the inputs must be kept alive whatever exits the intrinsic may do.
            addToGraph(Phantom, callTarget);
            emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
            return;
        }

        // Can only handle this using the generic call handler.
        addCall(result, op, callee, argumentCountIncludingThis, registerOffset);
        return;
    }

    Intrinsic intrinsic = callLinkStatus.intrinsicFor(kind);
    if (intrinsic != NoIntrinsic) {
        emitFunctionChecks(callLinkStatus, callTarget, registerOffset, kind);

        if (handleIntrinsic(result, intrinsic, registerOffset, argumentCountIncludingThis, prediction)) {
            // This phantoming has to be *after* the code for the intrinsic, to signify that
            // the inputs must be kept alive whatever exits the intrinsic may do.
            addToGraph(Phantom, callTarget);
            emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
            if (m_graph.compilation())
                m_graph.compilation()->noticeInlinedCall();
            return;
        }
    } else if (handleInlining(callTarget, result, callLinkStatus, registerOffset, argumentCountIncludingThis, nextOffset, kind)) {
        if (m_graph.compilation())
            m_graph.compilation()->noticeInlinedCall();
        return;
    }

    addCall(result, op, callee, argumentCountIncludingThis, registerOffset);
}

void ByteCodeParser::emitFunctionChecks(const CallLinkStatus& callLinkStatus, Node* callTarget, int registerOffset, CodeSpecializationKind kind)
{
    Node* thisArgument;
    if (kind == CodeForCall)
        thisArgument = get(virtualRegisterForArgument(0, registerOffset));
    else
        thisArgument = 0;

    if (callLinkStatus.isProved()) {
        addToGraph(Phantom, callTarget, thisArgument);
        return;
    }

    ASSERT(callLinkStatus.canOptimize());

    if (JSFunction* function = callLinkStatus.function())
        addToGraph(CheckFunction, OpInfo(function), callTarget, thisArgument);
    else {
        ASSERT(callLinkStatus.structure());
        ASSERT(callLinkStatus.executable());

        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(callLinkStatus.structure())), callTarget);
        addToGraph(CheckExecutable, OpInfo(callLinkStatus.executable()), callTarget, thisArgument);
    }
}

void ByteCodeParser::emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind kind)
{
    for (int i = kind == CodeForCall ? 0 : 1; i < argumentCountIncludingThis; ++i)
        addToGraph(Phantom, get(virtualRegisterForArgument(i, registerOffset)));
}

bool ByteCodeParser::handleInlining(Node* callTargetNode, int resultOperand, const CallLinkStatus& callLinkStatus, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind kind)
{
    static const bool verbose = false;

    if (verbose)
        dataLog("Considering inlining ", callLinkStatus, " into ", currentCodeOrigin(), "\n");

    // First, the really simple checks: do we have an actual JS function?
    if (!callLinkStatus.executable()) {
        if (verbose)
            dataLog("    Failing because there is no executable.\n");
        return false;
    }
    if (callLinkStatus.executable()->isHostFunction()) {
        if (verbose)
            dataLog("    Failing because it's a host function.\n");
        return false;
    }

    FunctionExecutable* executable = jsCast<FunctionExecutable*>(callLinkStatus.executable());

    // Does the number of arguments we're passing match the arity of the target? We currently
    // inline only if the number of arguments passed is greater than or equal to the number
    // of arguments expected.
    if (static_cast<int>(executable->parameterCount()) + 1 > argumentCountIncludingThis) {
        if (verbose)
            dataLog("    Failing because of arity mismatch.\n");
        return false;
    }
1292     
1293     // Do we have a code block, and does the code block's size match the heuristics/requirements for
1294     // being an inline candidate? We might not have a code block if code was thrown away or if we
1295     // simply hadn't actually made this call yet. We could still theoretically attempt to inline it
1296     // if we had a static proof of what was being called; this might happen for example if you call a
1297     // global function, where watchpointing gives us static information. Overall, it's a rare case
1298     // because we expect that any hot callees would have already been compiled.
1299     CodeBlock* codeBlock = executable->baselineCodeBlockFor(kind);
1300     if (!codeBlock) {
1301         if (verbose)
1302             dataLog("    Failing because no code block available.\n");
1303         return false;
1304     }
1305     CapabilityLevel capabilityLevel = inlineFunctionForCapabilityLevel(
1306         codeBlock, kind, callLinkStatus.isClosureCall());
1307     if (!canInline(capabilityLevel)) {
1308         if (verbose)
1309             dataLog("    Failing because the function is not inlineable.\n");
1310         return false;
1311     }
1312     
1313     // FIXME: this should be better at predicting how much bloat we will introduce by inlining
1314     // this function.
1315     // https://bugs.webkit.org/show_bug.cgi?id=127627
1316     
1317     // Have we exceeded inline stack depth, or are we trying to inline a recursive call to
1318     // too many levels? If either of these is detected, then don't inline. We adjust our
1319     // heuristics if we are dealing with a function that cannot otherwise be compiled.
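    // For example, a self-recursive callee such as "function fib(n) { return n < 2
    // ? n : fib(n - 1) + fib(n - 2); }" matches entry->executable() == executable
    // on every enclosing frame, so the recursion counter below bounds how many
    // times we inline it into itself.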
1320     
1321     unsigned depth = 0;
1322     unsigned recursion = 0;
1323     
1324     for (InlineStackEntry* entry = m_inlineStackTop; entry; entry = entry->m_caller) {
1325         ++depth;
1326         if (depth >= Options::maximumInliningDepth()) {
1327             if (verbose)
1328                 dataLog("    Failing because depth exceeded.\n");
1329             return false;
1330         }
1331         
1332         if (entry->executable() == executable) {
1333             ++recursion;
1334             if (recursion >= Options::maximumInliningRecursion()) {
1335                 if (verbose)
1336                     dataLog("    Failing because recursion detected.\n");
1337                 return false;
1338             }
1339         }
1340     }
1341     
1342     if (verbose)
1343         dataLog("    Committing to inlining.\n");
1344     
1345     // Now we know without a doubt that we are committed to inlining. So begin the process
1346     // by checking the callee (if necessary) and making sure that arguments and the callee
1347     // are flushed.
1348     emitFunctionChecks(callLinkStatus, callTargetNode, registerOffset, kind);
1349     
1350     // FIXME: Don't flush constants!
1351     
1352     int inlineCallFrameStart = m_inlineStackTop->remapOperand(VirtualRegister(registerOffset)).offset() + JSStack::CallFrameHeaderSize;
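    // Rough layout sketch: the inlined frame's header sits one CallFrameHeaderSize
    // above the remapped register offset of the call, and below we reserve locals
    // covering that header plus the callee's m_numCalleeRegisters.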
1353     
1354     // Make sure that we have enough locals.
1355     unsigned newNumLocals = VirtualRegister(inlineCallFrameStart).toLocal() + 1 + JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters;
1356     if (newNumLocals > m_numLocals) {
1357         m_numLocals = newNumLocals;
1358         for (size_t i = 0; i < m_graph.numBlocks(); ++i)
1359             m_graph.block(i)->ensureLocals(newNumLocals);
1360     }
1361     
1362     size_t argumentPositionStart = m_graph.m_argumentPositions.size();
1363
1364     InlineStackEntry inlineStackEntry(
1365         this, codeBlock, codeBlock, m_graph.lastBlock(), callLinkStatus.function(),
1366         m_inlineStackTop->remapOperand(VirtualRegister(resultOperand)),
1367         (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind);
1368     
1369     // This is where the actual inlining really happens.
1370     unsigned oldIndex = m_currentIndex;
1371     m_currentIndex = 0;
1372
1373     InlineVariableData inlineVariableData;
1374     inlineVariableData.inlineCallFrame = m_inlineStackTop->m_inlineCallFrame;
1375     inlineVariableData.argumentPositionStart = argumentPositionStart;
1376     inlineVariableData.calleeVariable = 0;
1377     
1378     RELEASE_ASSERT(
1379         m_inlineStackTop->m_inlineCallFrame->isClosureCall
1380         == callLinkStatus.isClosureCall());
1381     if (callLinkStatus.isClosureCall()) {
1382         VariableAccessData* calleeVariable =
1383             set(VirtualRegister(JSStack::Callee), callTargetNode, ImmediateSet)->variableAccessData();
1384         VariableAccessData* scopeVariable =
1385             set(VirtualRegister(JSStack::ScopeChain), addToGraph(GetScope, callTargetNode), ImmediateSet)->variableAccessData();
1386         
1387         calleeVariable->mergeShouldNeverUnbox(true);
1388         scopeVariable->mergeShouldNeverUnbox(true);
1389         
1390         inlineVariableData.calleeVariable = calleeVariable;
1391     }
1392     
1393     m_graph.m_inlineVariableData.append(inlineVariableData);
1394     
1395     parseCodeBlock();
1396     
1397     m_currentIndex = oldIndex;
1398     
1399     // If the inlined code created some new basic blocks, then we have linking to do.
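    // Rough invariant: the callsite block head is the block that contained the
    // call, and exactly one InlineStackEntry owns responsibility for linking it,
    // which is what the ownership checks below sort out.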
1400     if (inlineStackEntry.m_callsiteBlockHead != m_graph.lastBlock()) {
1401         
1402         ASSERT(!inlineStackEntry.m_unlinkedBlocks.isEmpty());
1403         if (inlineStackEntry.m_callsiteBlockHeadNeedsLinking)
1404             linkBlock(inlineStackEntry.m_callsiteBlockHead, inlineStackEntry.m_blockLinkingTargets);
1405         else
1406             ASSERT(inlineStackEntry.m_callsiteBlockHead->isLinked);
1407         
1408         // It's possible that the callsite block head is not owned by the caller.
1409         if (!inlineStackEntry.m_caller->m_unlinkedBlocks.isEmpty()) {
1410             // It's definitely owned by the caller, because the caller created new blocks.
1411             // Assert that this all adds up.
1412             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_block == inlineStackEntry.m_callsiteBlockHead);
1413             ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking);
1414             inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking = false;
1415         } else {
1416             // It's definitely not owned by the caller. Tell the caller that it does not
1417             // need to link its callsite block head, because we did it already.
1418             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking);
1419             ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHead == inlineStackEntry.m_callsiteBlockHead);
1420             inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking = false;
1421         }
1422         
1423         linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
1424     } else
1425         ASSERT(inlineStackEntry.m_unlinkedBlocks.isEmpty());
1426     
1427     BasicBlock* lastBlock = m_graph.lastBlock();
1428     // If there was a return, but no early returns, then we're done. We allow parsing of
1429     // the caller to continue in whatever basic block we're in right now.
1430     if (!inlineStackEntry.m_didEarlyReturn && inlineStackEntry.m_didReturn) {
1431         ASSERT(lastBlock->isEmpty() || !lastBlock->last()->isTerminal());
1432         
1433         // If we created new blocks then the last block needs linking, but in the
1434         // caller. It doesn't need to be linked to, but it needs outgoing links.
1435         if (!inlineStackEntry.m_unlinkedBlocks.isEmpty()) {
1436             // For debugging purposes, set the bytecodeBegin. Note that this doesn't matter
1437             // for release builds because this block will never serve as a potential target
1438             // in the linker's binary search.
1439             lastBlock->bytecodeBegin = m_currentIndex;
1440             m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.lastBlock()));
1441         }
1442         
1443         m_currentBlock = m_graph.lastBlock();
1444         return true;
1445     }
1446     
1447     // If we get to this point then all blocks must end in some sort of terminal.
1448     ASSERT(lastBlock->last()->isTerminal());
1449     
1450
1451     // Need to create a new basic block for the continuation at the caller.
1452     RefPtr<BasicBlock> block = adoptRef(new BasicBlock(nextOffset, m_numArguments, m_numLocals));
1453
1454     // Link the early returns to the basic block we're about to create.
1455     for (size_t i = 0; i < inlineStackEntry.m_unlinkedBlocks.size(); ++i) {
1456         if (!inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking)
1457             continue;
1458         BasicBlock* blockToLink = inlineStackEntry.m_unlinkedBlocks[i].m_block;
1459         ASSERT(!blockToLink->isLinked);
1460         Node* node = blockToLink->last();
1461         ASSERT(node->op() == Jump);
1462         ASSERT(node->takenBlock() == 0);
1463         node->setTakenBlock(block.get());
1464         inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking = false;
1465 #if !ASSERT_DISABLED
1466         blockToLink->isLinked = true;
1467 #endif
1468     }
1469     
1470     m_currentBlock = block.get();
1471     ASSERT(m_inlineStackTop->m_caller->m_blockLinkingTargets.isEmpty() || m_inlineStackTop->m_caller->m_blockLinkingTargets.last()->bytecodeBegin < nextOffset);
1472     m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(block.get()));
1473     m_inlineStackTop->m_caller->m_blockLinkingTargets.append(block.get());
1474     m_graph.appendBlock(block);
1475     prepareToParseBlock();
1476     
1477     // At this point we return and continue to generate code for the caller, but
1478     // in the new basic block.
1479     return true;
1480 }
1481
1482 bool ByteCodeParser::handleMinMax(int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis)
1483 {
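    // A rough JS-level mapping of the cases below (using Math.min; Math.max is
    // symmetric):
    //   Math.min()     -> NaN
    //   Math.min(x)    -> x itself, plus a Phantom with a NumberUse edge so the
    //                     number speculation is still checked
    //   Math.min(x, y) -> a single ArithMin node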
1484     if (argumentCountIncludingThis == 1) { // Math.min()
1485         set(VirtualRegister(resultOperand), constantNaN());
1486         return true;
1487     }
1488      
1489     if (argumentCountIncludingThis == 2) { // Math.min(x)
1490         Node* result = get(virtualRegisterForArgument(1, registerOffset));
1491         addToGraph(Phantom, Edge(result, NumberUse));
1492         set(VirtualRegister(resultOperand), result);
1493         return true;
1494     }
1495     
1496     if (argumentCountIncludingThis == 3) { // Math.min(x, y)
1497         set(VirtualRegister(resultOperand), addToGraph(op, get(virtualRegisterForArgument(1, registerOffset)), get(virtualRegisterForArgument(2, registerOffset))));
1498         return true;
1499     }
1500     
1501     // Don't handle >=3 arguments for now.
1502     return false;
1503 }
1504
1505 bool ByteCodeParser::handleIntrinsic(int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction)
1506 {
1507     switch (intrinsic) {
1508     case AbsIntrinsic: {
1509         if (argumentCountIncludingThis == 1) { // Math.abs()
1510             set(VirtualRegister(resultOperand), constantNaN());
1511             return true;
1512         }
1513
1514         if (!MacroAssembler::supportsFloatingPointAbs())
1515             return false;
1516
1517         Node* node = addToGraph(ArithAbs, get(virtualRegisterForArgument(1, registerOffset)));
1518         if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
1519             node->mergeFlags(NodeMayOverflow);
1520         set(VirtualRegister(resultOperand), node);
1521         return true;
1522     }
1523
1524     case MinIntrinsic:
1525         return handleMinMax(resultOperand, ArithMin, registerOffset, argumentCountIncludingThis);
1526         
1527     case MaxIntrinsic:
1528         return handleMinMax(resultOperand, ArithMax, registerOffset, argumentCountIncludingThis);
1529         
1530     case SqrtIntrinsic:
1531     case CosIntrinsic:
1532     case SinIntrinsic: {
1533         if (argumentCountIncludingThis == 1) {
1534             set(VirtualRegister(resultOperand), constantNaN());
1535             return true;
1536         }
1537         
1538         switch (intrinsic) {
1539         case SqrtIntrinsic:
1540             if (!MacroAssembler::supportsFloatingPointSqrt())
1541                 return false;
1542             
1543             set(VirtualRegister(resultOperand), addToGraph(ArithSqrt, get(virtualRegisterForArgument(1, registerOffset))));
1544             return true;
1545             
1546         case CosIntrinsic:
1547             set(VirtualRegister(resultOperand), addToGraph(ArithCos, get(virtualRegisterForArgument(1, registerOffset))));
1548             return true;
1549             
1550         case SinIntrinsic:
1551             set(VirtualRegister(resultOperand), addToGraph(ArithSin, get(virtualRegisterForArgument(1, registerOffset))));
1552             return true;
1553             
1554         default:
1555             RELEASE_ASSERT_NOT_REACHED();
1556             return false;
1557         }
1558     }
1559         
1560     case ArrayPushIntrinsic: {
1561         if (argumentCountIncludingThis != 2)
1562             return false;
1563         
1564         ArrayMode arrayMode = getArrayMode(m_currentInstruction[6].u.arrayProfile);
1565         if (!arrayMode.isJSArray())
1566             return false;
1567         switch (arrayMode.type()) {
1568         case Array::Undecided:
1569         case Array::Int32:
1570         case Array::Double:
1571         case Array::Contiguous:
1572         case Array::ArrayStorage: {
1573             Node* arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
1574             set(VirtualRegister(resultOperand), arrayPush);
1575             
1576             return true;
1577         }
1578             
1579         default:
1580             return false;
1581         }
1582     }
1583         
1584     case ArrayPopIntrinsic: {
1585         if (argumentCountIncludingThis != 1)
1586             return false;
1587         
1588         ArrayMode arrayMode = getArrayMode(m_currentInstruction[6].u.arrayProfile);
1589         if (!arrayMode.isJSArray())
1590             return false;
1591         switch (arrayMode.type()) {
1592         case Array::Int32:
1593         case Array::Double:
1594         case Array::Contiguous:
1595         case Array::ArrayStorage: {
1596             Node* arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)));
1597             set(VirtualRegister(resultOperand), arrayPop);
1598             return true;
1599         }
1600             
1601         default:
1602             return false;
1603         }
1604     }
1605
1606     case CharCodeAtIntrinsic: {
1607         if (argumentCountIncludingThis != 2)
1608             return false;
1609
1610         VirtualRegister thisOperand = virtualRegisterForArgument(0, registerOffset);
1611         VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
1612         Node* charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), get(indexOperand));
1613
1614         set(VirtualRegister(resultOperand), charCode);
1615         return true;
1616     }
1617
1618     case CharAtIntrinsic: {
1619         if (argumentCountIncludingThis != 2)
1620             return false;
1621
1622         VirtualRegister thisOperand = virtualRegisterForArgument(0, registerOffset);
1623         VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
1624         Node* charAt = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), get(indexOperand));
1625
1626         set(VirtualRegister(resultOperand), charAt);
1627         return true;
1628     }
1629     case FromCharCodeIntrinsic: {
1630         if (argumentCountIncludingThis != 2)
1631             return false;
1632
1633         VirtualRegister indexOperand = virtualRegisterForArgument(1, registerOffset);
1634         Node* charCode = addToGraph(StringFromCharCode, get(indexOperand));
1635
1636         set(VirtualRegister(resultOperand), charCode);
1637
1638         return true;
1639     }
1640
1641     case RegExpExecIntrinsic: {
1642         if (argumentCountIncludingThis != 2)
1643             return false;
1644         
1645         Node* regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
1646         set(VirtualRegister(resultOperand), regExpExec);
1647         
1648         return true;
1649     }
1650         
1651     case RegExpTestIntrinsic: {
1652         if (argumentCountIncludingThis != 2)
1653             return false;
1654         
1655         Node* regExpTest = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(virtualRegisterForArgument(0, registerOffset)), get(virtualRegisterForArgument(1, registerOffset)));
1656         set(VirtualRegister(resultOperand), regExpTest);
1657         
1658         return true;
1659     }
1660
1661     case IMulIntrinsic: {
1662         if (argumentCountIncludingThis != 3)
1663             return false;
1664         VirtualRegister leftOperand = virtualRegisterForArgument(1, registerOffset);
1665         VirtualRegister rightOperand = virtualRegisterForArgument(2, registerOffset);
1666         Node* left = get(leftOperand);
1667         Node* right = get(rightOperand);
1668         set(VirtualRegister(resultOperand), addToGraph(ArithIMul, left, right));
1669         return true;
1670     }
1671         
1672     case DFGTrue: {
1673         set(VirtualRegister(resultOperand), getJSConstantForValue(jsBoolean(true), 0));
1674         return true;
1675     }
1676         
1677     default:
1678         return false;
1679     }
1680 }
1681
1682 bool ByteCodeParser::handleTypedArrayConstructor(
1683     int resultOperand, InternalFunction* function, int registerOffset,
1684     int argumentCountIncludingThis, TypedArrayType type)
1685 {
1686     if (!isTypedView(type))
1687         return false;
1688     
1689     if (function->classInfo() != constructorClassInfoForType(type))
1690         return false;
1691     
1692     if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
1693         return false;
1694     
1695     // We only have an intrinsic for the case where you say:
1696     //
1697     // new FooArray(blah);
1698     //
1699     // Of course, 'blah' could be any of the following:
1700     //
1701     // - Integer, indicating that you want to allocate an array of that length.
1702     //   This is the thing we're hoping for, and what we can actually do meaningful
1703     //   optimizations for.
1704     //
1705     // - Array buffer, indicating that you want to create a view onto that _entire_
1706     //   buffer.
1707     //
1708     // - Non-buffer object, indicating that you want to create a copy of that
1709     //   object by pretending that it quacks like an array.
1710     //
1711     // - Anything else, indicating that you want to have an exception thrown at
1712     //   you.
1713     //
1714     // The intrinsic, NewTypedArray, will behave as if it could do any of these
1715     // things up until we do Fixup. Thereafter, if child1 (i.e. 'blah') is
1716     // predicted Int32, then we lock it in as a normal typed array allocation.
1717     // Otherwise, NewTypedArray turns into a totally opaque function call that
1718     // may clobber the world - by virtue of it accessing properties on what could
1719     // be an object.
1720     //
1721     // Note that although the generic form of NewTypedArray sounds sort of awful,
1722     // it is actually quite likely to be more efficient than a fully generic
1723     // Construct. So, we might want to think about making NewTypedArray variadic,
1724     // or else making Construct not super slow.
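    // Illustratively: "new Int32Array(100)" is the case we can profitably lock in,
    // while "new Int32Array(buffer)" or "new Int32Array(someArrayLike)" keep the
    // generic, potentially world-clobbering form after Fixup.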
1725     
1726     if (argumentCountIncludingThis != 2)
1727         return false;
1728     
1729     set(VirtualRegister(resultOperand),
1730         addToGraph(NewTypedArray, OpInfo(type), get(virtualRegisterForArgument(1, registerOffset))));
1731     return true;
1732 }
1733
1734 bool ByteCodeParser::handleConstantInternalFunction(
1735     int resultOperand, InternalFunction* function, int registerOffset,
1736     int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind kind)
1737 {
1738     // If we ever find that we have a lot of internal functions that we specialize for,
1739     // then we should probably have some sort of hashtable dispatch, or maybe even
1740     // dispatch straight through the MethodTable of the InternalFunction. But for now,
1741     // it seems that this case is hit infrequently enough, and the number of functions
1742     // we know about is small enough, that having just a linear cascade of if statements
1743     // is good enough.
1744     
1745     UNUSED_PARAM(prediction); // Remove this once we do more things.
1746     
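    // Illustrative examples of the specializations below: "new Array(n)" becomes
    // NewArrayWithSize, "new Array(a, b, c)" becomes a varargs NewArray, and
    // "String(x)" becomes ToString(x) (wrapped in NewStringObject for construct).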
1747     if (function->classInfo() == ArrayConstructor::info()) {
1748         if (function->globalObject() != m_inlineStackTop->m_codeBlock->globalObject())
1749             return false;
1750         
1751         if (argumentCountIncludingThis == 2) {
1752             set(VirtualRegister(resultOperand),
1753                 addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(virtualRegisterForArgument(1, registerOffset))));
1754             return true;
1755         }
1756         
1757         for (int i = 1; i < argumentCountIncludingThis; ++i)
1758             addVarArgChild(get(virtualRegisterForArgument(i, registerOffset)));
1759         set(VirtualRegister(resultOperand),
1760             addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
1761         return true;
1762     }
1763     
1764     if (function->classInfo() == StringConstructor::info()) {
1765         Node* result;
1766         
1767         if (argumentCountIncludingThis <= 1)
1768             result = cellConstant(m_vm->smallStrings.emptyString());
1769         else
1770             result = addToGraph(ToString, get(virtualRegisterForArgument(1, registerOffset)));
1771         
1772         if (kind == CodeForConstruct)
1773             result = addToGraph(NewStringObject, OpInfo(function->globalObject()->stringObjectStructure()), result);
1774         
1775         set(VirtualRegister(resultOperand), result);
1776         return true;
1777     }
1778     
1779     for (unsigned typeIndex = 0; typeIndex < NUMBER_OF_TYPED_ARRAY_TYPES; ++typeIndex) {
1780         bool result = handleTypedArrayConstructor(
1781             resultOperand, function, registerOffset, argumentCountIncludingThis,
1782             indexToTypedArrayType(typeIndex));
1783         if (result)
1784             return true;
1785     }
1786     
1787     return false;
1788 }
1789
1790 Node* ByteCodeParser::handleGetByOffset(SpeculatedType prediction, Node* base, unsigned identifierNumber, PropertyOffset offset)
1791 {
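    // Inline offsets live directly in the object cell, so the base doubles as the
    // property storage; out-of-line offsets live in the butterfly, which we must
    // load first with GetButterfly.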
1792     Node* propertyStorage;
1793     if (isInlineOffset(offset))
1794         propertyStorage = base;
1795     else
1796         propertyStorage = addToGraph(GetButterfly, base);
1797     Node* getByOffset = addToGraph(GetByOffset, OpInfo(m_graph.m_storageAccessData.size()), OpInfo(prediction), propertyStorage, base);
1798
1799     StorageAccessData storageAccessData;
1800     storageAccessData.offset = offset;
1801     storageAccessData.identifierNumber = identifierNumber;
1802     m_graph.m_storageAccessData.append(storageAccessData);
1803
1804     return getByOffset;
1805 }
1806
1807 void ByteCodeParser::handleGetByOffset(
1808     int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
1809     PropertyOffset offset)
1810 {
1811     set(VirtualRegister(destinationOperand), handleGetByOffset(prediction, base, identifierNumber, offset));
1812 }
1813
1814 Node* ByteCodeParser::handlePutByOffset(Node* base, unsigned identifier, PropertyOffset offset, Node* value)
1815 {
1816     Node* propertyStorage;
1817     if (isInlineOffset(offset))
1818         propertyStorage = base;
1819     else
1820         propertyStorage = addToGraph(GetButterfly, base);
1821     Node* result = addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, base, value);
1822     
1823     StorageAccessData storageAccessData;
1824     storageAccessData.offset = offset;
1825     storageAccessData.identifierNumber = identifier;
1826     m_graph.m_storageAccessData.append(storageAccessData);
1827
1828     return result;
1829 }
1830
1831 Node* ByteCodeParser::emitPrototypeChecks(const GetByIdVariant& variant)
1832 {
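    // Walk the prototype chain recorded by the variant, structure-checking each
    // prototype along the way; the last prototype checked becomes the base that
    // the caller loads from.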
1833     Node* base = 0;
1834     m_graph.chains().addLazily(variant.chain());
1835     Structure* currentStructure = variant.structureSet().singletonStructure();
1836     JSObject* currentObject = 0;
1837     for (unsigned i = 0; i < variant.chain()->size(); ++i) {
1838         currentObject = asObject(currentStructure->prototypeForLookup(m_inlineStackTop->m_codeBlock));
1839         currentStructure = variant.chain()->at(i);
1840         base = cellConstantWithStructureCheck(currentObject, currentStructure);
1841     }
1842     RELEASE_ASSERT(base);
1843     return base;
1844 }
1845
1846 void ByteCodeParser::handleGetById(
1847     int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
1848     const GetByIdStatus& getByIdStatus)
1849 {
1850     if (!getByIdStatus.isSimple()) {
1851         set(VirtualRegister(destinationOperand),
1852             addToGraph(
1853                 getByIdStatus.makesCalls() ? GetByIdFlush : GetById,
1854                 OpInfo(identifierNumber), OpInfo(prediction), base));
1855         return;
1856     }
1857     
1858     if (getByIdStatus.numVariants() > 1) {
1859         if (!isFTL(m_graph.m_plan.mode)) {
1860             set(VirtualRegister(destinationOperand),
1861                 addToGraph(GetById, OpInfo(identifierNumber), OpInfo(prediction), base));
1862             return;
1863         }
1864         
1865         // 1) Emit prototype structure checks for all chains. This may be suboptimal if
1866         //    some rarely executed case in the chain requires a lot of checks and those
1867         //    checks are not watchpointable.
1868         for (unsigned variantIndex = getByIdStatus.numVariants(); variantIndex--;) {
1869             if (getByIdStatus[variantIndex].chain())
1870                 emitPrototypeChecks(getByIdStatus[variantIndex]);
1871         }
1872         
1873         // 2) Emit a MultiGetByOffset
1874         MultiGetByOffsetData* data = m_graph.m_multiGetByOffsetData.add();
1875         data->variants = getByIdStatus.variants();
1876         data->identifierNumber = identifierNumber;
1877         set(VirtualRegister(destinationOperand),
1878             addToGraph(MultiGetByOffset, OpInfo(data), OpInfo(prediction), base));
1879         return;
1880     }
1881     
1882     ASSERT(getByIdStatus.numVariants() == 1);
1883     GetByIdVariant variant = getByIdStatus[0];
1884                 
1885     if (m_graph.compilation())
1886         m_graph.compilation()->noticeInlinedGetById();
1887     
1888     Node* originalBaseForBaselineJIT = base;
1889                 
1890     addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(variant.structureSet())), base);
1891     
1892     if (variant.chain())
1893         base = emitPrototypeChecks(variant);
1894     
1895     // Unless we want bugs like https://bugs.webkit.org/show_bug.cgi?id=88783, we need to
1896     // ensure that the base of the original get_by_id is kept alive until we're done with
1897     // all of the speculations. We only insert the Phantom if there had been a CheckStructure
1898     // on something other than the base following the CheckStructure on base, or if the
1899     // access was compiled to a WeakJSConstant specific value, in which case we might not
1900     // have any explicit use of the base at all.
1901     if (variant.specificValue() || originalBaseForBaselineJIT != base)
1902         addToGraph(Phantom, originalBaseForBaselineJIT);
1903     
1904     if (variant.specificValue()) {
1905         ASSERT(variant.specificValue().isCell());
1906         
1907         set(VirtualRegister(destinationOperand), cellConstant(variant.specificValue().asCell()));
1908         return;
1909     }
1910     
1911     handleGetByOffset(
1912         destinationOperand, prediction, base, identifierNumber, variant.offset());
1913 }
1914
1915 void ByteCodeParser::prepareToParseBlock()
1916 {
1917     for (unsigned i = 0; i < m_constants.size(); ++i)
1918         m_constants[i] = ConstantRecord();
1919     m_cellConstantNodes.clear();
1920 }
1921
1922 Node* ByteCodeParser::getScope(bool skipTop, unsigned skipCount)
1923 {
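    // Resolve a scope that is a static number of hops away: optionally skip the
    // function's own top scope (only valid when not inlined), then follow
    // skipCount parent links.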
1924     Node* localBase = get(VirtualRegister(JSStack::ScopeChain));
1925     if (skipTop) {
1926         ASSERT(!inlineCallFrame());
1927         localBase = addToGraph(SkipTopScope, localBase);
1928     }
1929     for (unsigned n = skipCount; n--;)
1930         localBase = addToGraph(SkipScope, localBase);
1931     return localBase;
1932 }
1933
1934 bool ByteCodeParser::parseBlock(unsigned limit)
1935 {
1936     bool shouldContinueParsing = true;
1937
1938     Interpreter* interpreter = m_vm->interpreter;
1939     Instruction* instructionsBegin = m_inlineStackTop->m_codeBlock->instructions().begin();
1940     unsigned blockBegin = m_currentIndex;
1941     
1942     // If we are the first basic block, introduce markers for arguments. This allows
1943     // us to track if a use of an argument may use the actual argument passed, as
1944     // opposed to using a value we set explicitly.
1945     if (m_currentBlock == m_graph.block(0) && !inlineCallFrame()) {
1946         m_graph.m_arguments.resize(m_numArguments);
1947         for (unsigned argument = 0; argument < m_numArguments; ++argument) {
1948             VariableAccessData* variable = newVariableAccessData(
1949                 virtualRegisterForArgument(argument), m_codeBlock->isCaptured(virtualRegisterForArgument(argument)));
1950             variable->mergeStructureCheckHoistingFailed(
1951                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
1952                 || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCacheWatchpoint));
1953             variable->mergeCheckArrayHoistingFailed(
1954                 m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadIndexingType));
1955             
1956             Node* setArgument = addToGraph(SetArgument, OpInfo(variable));
1957             m_graph.m_arguments[argument] = setArgument;
1958             m_currentBlock->variablesAtTail.setArgumentFirstTime(argument, setArgument);
1959         }
1960     }
1961
1962     while (true) {
1963         for (unsigned i = 0; i < m_setLocalQueue.size(); ++i)
1964             m_setLocalQueue[i].execute(this);
1965         m_setLocalQueue.resize(0);
1966         
1967         // Don't extend over jump destinations.
1968         if (m_currentIndex == limit) {
1969             // Ordinarily we want to plant a jump. But refuse to do this if the block is
1970             // empty. This is a special case for inlining, which might otherwise create
1971             // some empty blocks in some cases. When parseBlock() returns with an empty
1972             // block, it will get repurposed instead of creating a new one. Note that this
1973             // logic relies on every bytecode resulting in one or more nodes, which would
1974             // be true anyway except for op_loop_hint, which emits a Phantom to force this
1975             // to be true.
1976             if (!m_currentBlock->isEmpty())
1977                 addToGraph(Jump, OpInfo(m_currentIndex));
1978             return shouldContinueParsing;
1979         }
1980         
1981         // Switch on the current bytecode opcode.
1982         Instruction* currentInstruction = instructionsBegin + m_currentIndex;
1983         m_currentInstruction = currentInstruction; // Some methods want to use this, and we'd rather not thread it through calls.
1984         OpcodeID opcodeID = interpreter->getOpcodeID(currentInstruction->u.opcode);
1985         
1986         if (m_graph.compilation()) {
1987             addToGraph(CountExecution, OpInfo(m_graph.compilation()->executionCounterFor(
1988                 Profiler::OriginStack(*m_vm->m_perBytecodeProfiler, m_codeBlock, currentCodeOrigin()))));
1989         }
1990         
1991         switch (opcodeID) {
1992
1993         // === Function entry opcodes ===
1994
1995         case op_enter:
1996             // Initialize all locals to undefined.
1997             for (int i = 0; i < m_inlineStackTop->m_codeBlock->m_numVars; ++i)
1998                 set(virtualRegisterForLocal(i), constantUndefined(), ImmediateSet);
1999             if (m_inlineStackTop->m_codeBlock->specializationKind() == CodeForConstruct)
2000                 set(virtualRegisterForArgument(0), constantUndefined(), ImmediateSet);
2001             NEXT_OPCODE(op_enter);
2002             
2003         case op_touch_entry:
2004             if (m_inlineStackTop->m_codeBlock->symbolTable()->m_functionEnteredOnce.isStillValid())
2005                 addToGraph(ForceOSRExit);
2006             NEXT_OPCODE(op_touch_entry);
2007             
2008         case op_to_this: {
2009             Node* op1 = getThis();
2010             if (op1->op() != ToThis) {
2011                 Structure* cachedStructure = currentInstruction[2].u.structure.get();
2012                 if (!cachedStructure
2013                     || cachedStructure->classInfo()->methodTable.toThis != JSObject::info()->methodTable.toThis
2014                     || m_inlineStackTop->m_profiledBlock->couldTakeSlowCase(m_currentIndex)
2015                     || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
2016                     || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCacheWatchpoint)
2017                     || (op1->op() == GetLocal && op1->variableAccessData()->structureCheckHoistingFailed())) {
2018                     setThis(addToGraph(ToThis, op1));
2019                 } else {
2020                     addToGraph(
2021                         CheckStructure,
2022                         OpInfo(m_graph.addStructureSet(cachedStructure)),
2023                         op1);
2024                 }
2025             }
2026             NEXT_OPCODE(op_to_this);
2027         }
2028
2029         case op_create_this: {
2030             int calleeOperand = currentInstruction[2].u.operand;
2031             Node* callee = get(VirtualRegister(calleeOperand));
2032             bool alreadyEmitted = false;
2033             if (callee->op() == WeakJSConstant) {
2034                 JSCell* cell = callee->weakConstant();
2035                 ASSERT(cell->inherits(JSFunction::info()));
2036                 
2037                 JSFunction* function = jsCast<JSFunction*>(cell);
2038                 if (Structure* structure = function->allocationStructure()) {
2039                     addToGraph(AllocationProfileWatchpoint, OpInfo(function));
2040                     // The callee is still live up to this point.
2041                     addToGraph(Phantom, callee);
2042                     set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewObject, OpInfo(structure)));
2043                     alreadyEmitted = true;
2044                 }
2045             }
2046             if (!alreadyEmitted) {
2047                 set(VirtualRegister(currentInstruction[1].u.operand),
2048                     addToGraph(CreateThis, OpInfo(currentInstruction[3].u.operand), callee));
2049             }
2050             NEXT_OPCODE(op_create_this);
2051         }
2052
2053         case op_new_object: {
2054             set(VirtualRegister(currentInstruction[1].u.operand),
2055                 addToGraph(NewObject,
2056                     OpInfo(currentInstruction[3].u.objectAllocationProfile->structure())));
2057             NEXT_OPCODE(op_new_object);
2058         }
2059             
2060         case op_new_array: {
2061             int startOperand = currentInstruction[2].u.operand;
2062             int numOperands = currentInstruction[3].u.operand;
2063             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
2064             for (int operandIdx = startOperand; operandIdx > startOperand - numOperands; --operandIdx)
2065                 addVarArgChild(get(VirtualRegister(operandIdx)));
2066             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(Node::VarArg, NewArray, OpInfo(profile->selectIndexingType()), OpInfo(0)));
2067             NEXT_OPCODE(op_new_array);
2068         }
2069             
2070         case op_new_array_with_size: {
2071             int lengthOperand = currentInstruction[2].u.operand;
2072             ArrayAllocationProfile* profile = currentInstruction[3].u.arrayAllocationProfile;
2073             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewArrayWithSize, OpInfo(profile->selectIndexingType()), get(VirtualRegister(lengthOperand))));
2074             NEXT_OPCODE(op_new_array_with_size);
2075         }
2076             
2077         case op_new_array_buffer: {
2078             int startConstant = currentInstruction[2].u.operand;
2079             int numConstants = currentInstruction[3].u.operand;
2080             ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
2081             NewArrayBufferData data;
2082             data.startConstant = m_inlineStackTop->m_constantBufferRemap[startConstant];
2083             data.numConstants = numConstants;
2084             data.indexingType = profile->selectIndexingType();
2085
2086             // If this statement has never executed, we'll have the wrong indexing type in the profile.
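            // Illustrative example: a constant buffer of [1, 2.5] needs at least
            // ArrayWithDouble even if the profile still claims ArrayWithInt32, so we
            // fold in the least upper bound over every constant below.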
2087             for (int i = 0; i < numConstants; ++i) {
2088                 data.indexingType =
2089                     leastUpperBoundOfIndexingTypeAndValue(
2090                         data.indexingType,
2091                         m_codeBlock->constantBuffer(data.startConstant)[i]);
2092             }
2093             
2094             m_graph.m_newArrayBufferData.append(data);
2095             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewArrayBuffer, OpInfo(&m_graph.m_newArrayBufferData.last())));
2096             NEXT_OPCODE(op_new_array_buffer);
2097         }
2098             
2099         case op_new_regexp: {
2100             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(NewRegexp, OpInfo(currentInstruction[2].u.operand)));
2101             NEXT_OPCODE(op_new_regexp);
2102         }
2103             
2104         case op_get_callee: {
2105             JSCell* cachedFunction = currentInstruction[2].u.jsCell.get();
2106             if (!cachedFunction 
2107                 || m_inlineStackTop->m_profiledBlock->couldTakeSlowCase(m_currentIndex)
2108                 || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadFunction)) {
2109                 set(VirtualRegister(currentInstruction[1].u.operand), get(VirtualRegister(JSStack::Callee)));
2110             } else {
2111                 ASSERT(cachedFunction->inherits(JSFunction::info()));
2112                 Node* actualCallee = get(VirtualRegister(JSStack::Callee));
2113                 addToGraph(CheckFunction, OpInfo(cachedFunction), actualCallee);
2114                 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(WeakJSConstant, OpInfo(cachedFunction)));
2115             }
2116             NEXT_OPCODE(op_get_callee);
2117         }
2118
2119         // === Bitwise operations ===
2120
2121         case op_bitand: {
2122             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2123             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2124             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitAnd, op1, op2));
2125             NEXT_OPCODE(op_bitand);
2126         }
2127
2128         case op_bitor: {
2129             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2130             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2131             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitOr, op1, op2));
2132             NEXT_OPCODE(op_bitor);
2133         }
2134
2135         case op_bitxor: {
2136             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2137             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2138             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(BitXor, op1, op2));
2139             NEXT_OPCODE(op_bitxor);
2140         }
2141
2142         case op_rshift: {
2143             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2144             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2145             set(VirtualRegister(currentInstruction[1].u.operand),
2146                 addToGraph(BitRShift, op1, op2));
2147             NEXT_OPCODE(op_rshift);
2148         }
2149
2150         case op_lshift: {
2151             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2152             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2153             set(VirtualRegister(currentInstruction[1].u.operand),
2154                 addToGraph(BitLShift, op1, op2));
2155             NEXT_OPCODE(op_lshift);
2156         }
2157
2158         case op_urshift: {
2159             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2160             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2161             set(VirtualRegister(currentInstruction[1].u.operand),
2162                 addToGraph(BitURShift, op1, op2));
2163             NEXT_OPCODE(op_urshift);
2164         }
2165             
2166         case op_unsigned: {
2167             set(VirtualRegister(currentInstruction[1].u.operand),
2168                 makeSafe(addToGraph(UInt32ToNumber, get(VirtualRegister(currentInstruction[2].u.operand)))));
2169             NEXT_OPCODE(op_unsigned);
2170         }
2171
2172         // === Increment/Decrement opcodes ===
2173
2174         case op_inc: {
2175             int srcDst = currentInstruction[1].u.operand;
2176             VirtualRegister srcDstVirtualRegister = VirtualRegister(srcDst);
2177             Node* op = get(srcDstVirtualRegister);
2178             set(srcDstVirtualRegister, makeSafe(addToGraph(ArithAdd, op, one())));
2179             NEXT_OPCODE(op_inc);
2180         }
2181
2182         case op_dec: {
2183             int srcDst = currentInstruction[1].u.operand;
2184             VirtualRegister srcDstVirtualRegister = VirtualRegister(srcDst);
2185             Node* op = get(srcDstVirtualRegister);
2186             set(srcDstVirtualRegister, makeSafe(addToGraph(ArithSub, op, one())));
2187             NEXT_OPCODE(op_dec);
2188         }
2189
2190         // === Arithmetic operations ===
2191
2192         case op_add: {
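            // Addition is the one arithmetic op that is also string concatenation:
            // only when both inputs are statically known to produce numbers can we
            // use ArithAdd directly; otherwise ValueAdd keeps the generic semantics.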
2193             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2194             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2195             if (op1->hasNumberResult() && op2->hasNumberResult())
2196                 set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithAdd, op1, op2)));
2197             else
2198                 set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ValueAdd, op1, op2)));
2199             NEXT_OPCODE(op_add);
2200         }
2201
2202         case op_sub: {
2203             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2204             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2205             set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithSub, op1, op2)));
2206             NEXT_OPCODE(op_sub);
2207         }
2208
2209         case op_negate: {
2210             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2211             set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithNegate, op1)));
2212             NEXT_OPCODE(op_negate);
2213         }
2214
2215         case op_mul: {
2216             // Multiply requires that the inputs are not truncated, unfortunately.
2217             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2218             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2219             set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithMul, op1, op2)));
2220             NEXT_OPCODE(op_mul);
2221         }
2222
2223         case op_mod: {
2224             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2225             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2226             set(VirtualRegister(currentInstruction[1].u.operand), makeSafe(addToGraph(ArithMod, op1, op2)));
2227             NEXT_OPCODE(op_mod);
2228         }
2229
2230         case op_div: {
2231             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2232             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2233             set(VirtualRegister(currentInstruction[1].u.operand), makeDivSafe(addToGraph(ArithDiv, op1, op2)));
2234             NEXT_OPCODE(op_div);
2235         }
2236
2237         // === Misc operations ===
2238
2239         case op_debug:
2240             addToGraph(Breakpoint);
2241             NEXT_OPCODE(op_debug);
2242
2243         case op_profile_will_call: {
2244             addToGraph(ProfileWillCall);
2245             NEXT_OPCODE(op_profile_will_call);
2246         }
2247
2248         case op_profile_did_call: {
2249             addToGraph(ProfileDidCall);
2250             NEXT_OPCODE(op_profile_did_call);
2251         }
2252
2253         case op_mov: {
2254             Node* op = get(VirtualRegister(currentInstruction[2].u.operand));
2255             set(VirtualRegister(currentInstruction[1].u.operand), op);
2256             NEXT_OPCODE(op_mov);
2257         }
2258             
2259         case op_captured_mov: {
2260             Node* op = get(VirtualRegister(currentInstruction[2].u.operand));
2261             if (VariableWatchpointSet* set = currentInstruction[3].u.watchpointSet) {
2262                 if (set->state() != IsInvalidated)
2263                     addToGraph(NotifyWrite, OpInfo(set), op);
2264             }
2265             set(VirtualRegister(currentInstruction[1].u.operand), op);
2266             NEXT_OPCODE(op_captured_mov);
2267         }
2268
2269         case op_check_has_instance:
2270             addToGraph(CheckHasInstance, get(VirtualRegister(currentInstruction[3].u.operand)));
2271             NEXT_OPCODE(op_check_has_instance);
2272
2273         case op_instanceof: {
2274             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2275             Node* prototype = get(VirtualRegister(currentInstruction[3].u.operand));
2276             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(InstanceOf, value, prototype));
2277             NEXT_OPCODE(op_instanceof);
2278         }
2279             
2280         case op_is_undefined: {
2281             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2282             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsUndefined, value));
2283             NEXT_OPCODE(op_is_undefined);
2284         }
2285
2286         case op_is_boolean: {
2287             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2288             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsBoolean, value));
2289             NEXT_OPCODE(op_is_boolean);
2290         }
2291
2292         case op_is_number: {
2293             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2294             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsNumber, value));
2295             NEXT_OPCODE(op_is_number);
2296         }
2297
2298         case op_is_string: {
2299             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2300             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsString, value));
2301             NEXT_OPCODE(op_is_string);
2302         }
2303
2304         case op_is_object: {
2305             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2306             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsObject, value));
2307             NEXT_OPCODE(op_is_object);
2308         }
2309
2310         case op_is_function: {
2311             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2312             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(IsFunction, value));
2313             NEXT_OPCODE(op_is_function);
2314         }
2315
2316         case op_not: {
2317             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2318             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, value));
2319             NEXT_OPCODE(op_not);
2320         }
2321             
2322         case op_to_primitive: {
2323             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2324             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(ToPrimitive, value));
2325             NEXT_OPCODE(op_to_primitive);
2326         }
2327             
2328         case op_strcat: {
2329             int startOperand = currentInstruction[2].u.operand;
2330             int numOperands = currentInstruction[3].u.operand;
2331 #if CPU(X86)
2332             // X86 doesn't have enough registers to compile MakeRope with three arguments.
2333             // Rather than try to be clever, we just make MakeRope dumber on this processor.
2334             const unsigned maxRopeArguments = 2;
2335 #else
2336             const unsigned maxRopeArguments = 3;
2337 #endif
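            // Rope-folding sketch: with maxRopeArguments == 3, five strings fold
            // left-to-right as MakeRope(MakeRope(s0, s1, s2), s3, s4); the loop
            // below reuses operands[0] as the accumulator when it runs out of slots.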
2338             auto toStringNodes = std::make_unique<Node*[]>(numOperands);
2339             for (int i = 0; i < numOperands; i++)
2340                 toStringNodes[i] = addToGraph(ToString, get(VirtualRegister(startOperand - i)));
2341
2342             for (int i = 0; i < numOperands; i++)
2343                 addToGraph(Phantom, toStringNodes[i]);
2344
2345             Node* operands[AdjacencyList::Size];
2346             unsigned indexInOperands = 0;
2347             for (unsigned i = 0; i < AdjacencyList::Size; ++i)
2348                 operands[i] = 0;
2349             for (int operandIdx = 0; operandIdx < numOperands; ++operandIdx) {
2350                 if (indexInOperands == maxRopeArguments) {
2351                     operands[0] = addToGraph(MakeRope, operands[0], operands[1], operands[2]);
2352                     for (unsigned i = 1; i < AdjacencyList::Size; ++i)
2353                         operands[i] = 0;
2354                     indexInOperands = 1;
2355                 }
2356                 
2357                 ASSERT(indexInOperands < AdjacencyList::Size);
2358                 ASSERT(indexInOperands < maxRopeArguments);
2359                 operands[indexInOperands++] = toStringNodes[operandIdx];
2360             }
2361             set(VirtualRegister(currentInstruction[1].u.operand),
2362                 addToGraph(MakeRope, operands[0], operands[1], operands[2]));
2363             NEXT_OPCODE(op_strcat);
2364         }
2365
2366         case op_less: {
2367             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2368             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2369             if (canFold(op1) && canFold(op2)) {
2370                 JSValue a = valueOfJSConstant(op1);
2371                 JSValue b = valueOfJSConstant(op2);
2372                 if (a.isNumber() && b.isNumber()) {
2373                     set(VirtualRegister(currentInstruction[1].u.operand),
2374                         getJSConstantForValue(jsBoolean(a.asNumber() < b.asNumber())));
2375                     NEXT_OPCODE(op_less);
2376                 }
2377             }
2378             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareLess, op1, op2));
2379             NEXT_OPCODE(op_less);
2380         }
2381
2382         case op_lesseq: {
2383             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2384             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2385             if (canFold(op1) && canFold(op2)) {
2386                 JSValue a = valueOfJSConstant(op1);
2387                 JSValue b = valueOfJSConstant(op2);
2388                 if (a.isNumber() && b.isNumber()) {
2389                     set(VirtualRegister(currentInstruction[1].u.operand),
2390                         getJSConstantForValue(jsBoolean(a.asNumber() <= b.asNumber())));
2391                     NEXT_OPCODE(op_lesseq);
2392                 }
2393             }
2394             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareLessEq, op1, op2));
2395             NEXT_OPCODE(op_lesseq);
2396         }
2397
2398         case op_greater: {
2399             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2400             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2401             if (canFold(op1) && canFold(op2)) {
2402                 JSValue a = valueOfJSConstant(op1);
2403                 JSValue b = valueOfJSConstant(op2);
2404                 if (a.isNumber() && b.isNumber()) {
2405                     set(VirtualRegister(currentInstruction[1].u.operand),
2406                         getJSConstantForValue(jsBoolean(a.asNumber() > b.asNumber())));
2407                     NEXT_OPCODE(op_greater);
2408                 }
2409             }
2410             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareGreater, op1, op2));
2411             NEXT_OPCODE(op_greater);
2412         }
2413
2414         case op_greatereq: {
2415             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2416             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2417             if (canFold(op1) && canFold(op2)) {
2418                 JSValue a = valueOfJSConstant(op1);
2419                 JSValue b = valueOfJSConstant(op2);
2420                 if (a.isNumber() && b.isNumber()) {
2421                     set(VirtualRegister(currentInstruction[1].u.operand),
2422                         getJSConstantForValue(jsBoolean(a.asNumber() >= b.asNumber())));
2423                     NEXT_OPCODE(op_greatereq);
2424                 }
2425             }
2426             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareGreaterEq, op1, op2));
2427             NEXT_OPCODE(op_greatereq);
2428         }
2429
2430         case op_eq: {
2431             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2432             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2433             if (canFold(op1) && canFold(op2)) {
2434                 JSValue a = valueOfJSConstant(op1);
2435                 JSValue b = valueOfJSConstant(op2);
2436                 set(VirtualRegister(currentInstruction[1].u.operand),
2437                     getJSConstantForValue(jsBoolean(JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
2438                 NEXT_OPCODE(op_eq);
2439             }
2440             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareEq, op1, op2));
2441             NEXT_OPCODE(op_eq);
2442         }
2443
2444         case op_eq_null: {
2445             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2446             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareEqConstant, value, constantNull()));
2447             NEXT_OPCODE(op_eq_null);
2448         }
2449
2450         case op_stricteq: {
2451             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2452             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2453             if (canFold(op1) && canFold(op2)) {
2454                 JSValue a = valueOfJSConstant(op1);
2455                 JSValue b = valueOfJSConstant(op2);
2456                 set(VirtualRegister(currentInstruction[1].u.operand),
2457                     getJSConstantForValue(jsBoolean(JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
2458                 NEXT_OPCODE(op_stricteq);
2459             }
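                 // If either operand is a constant eligible for the specialized comparison,
                 // emit CompareStrictEqConstant with the constant normalized into the
                 // second child.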
2460             if (isConstantForCompareStrictEq(op1))
2461                 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareStrictEqConstant, op2, op1));
2462             else if (isConstantForCompareStrictEq(op2))
2463                 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareStrictEqConstant, op1, op2));
2464             else
2465                 set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CompareStrictEq, op1, op2));
2466             NEXT_OPCODE(op_stricteq);
2467         }
2468
2469         case op_neq: {
2470             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2471             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2472             if (canFold(op1) && canFold(op2)) {
2473                 JSValue a = valueOfJSConstant(op1);
2474                 JSValue b = valueOfJSConstant(op2);
2475                 set(VirtualRegister(currentInstruction[1].u.operand),
2476                     getJSConstantForValue(jsBoolean(!JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
2477                 NEXT_OPCODE(op_neq);
2478             }
2479             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, addToGraph(CompareEq, op1, op2)));
2480             NEXT_OPCODE(op_neq);
2481         }
2482
2483         case op_neq_null: {
2484             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2485             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, addToGraph(CompareEqConstant, value, constantNull())));
2486             NEXT_OPCODE(op_neq_null);
2487         }
2488
2489         case op_nstricteq: {
2490             Node* op1 = get(VirtualRegister(currentInstruction[2].u.operand));
2491             Node* op2 = get(VirtualRegister(currentInstruction[3].u.operand));
2492             if (canFold(op1) && canFold(op2)) {
2493                 JSValue a = valueOfJSConstant(op1);
2494                 JSValue b = valueOfJSConstant(op2);
2495                 set(VirtualRegister(currentInstruction[1].u.operand),
2496                     getJSConstantForValue(jsBoolean(!JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
2497                 NEXT_OPCODE(op_nstricteq);
2498             }
2499             Node* invertedResult;
2500             if (isConstantForCompareStrictEq(op1))
2501                 invertedResult = addToGraph(CompareStrictEqConstant, op2, op1);
2502             else if (isConstantForCompareStrictEq(op2))
2503                 invertedResult = addToGraph(CompareStrictEqConstant, op1, op2);
2504             else
2505                 invertedResult = addToGraph(CompareStrictEq, op1, op2);
2506             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(LogicalNot, invertedResult));
2507             NEXT_OPCODE(op_nstricteq);
2508         }
2509
2510         // === Property access operations ===
2511
2512         case op_get_by_val: {
2513             SpeculatedType prediction = getPrediction();
2514             
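                 // The baseline JIT's array profile tells us what array shapes this access
                 // has seen; bake that speculation into the GetByVal node's ArrayMode.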
2515             Node* base = get(VirtualRegister(currentInstruction[2].u.operand));
2516             ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Read);
2517             Node* property = get(VirtualRegister(currentInstruction[3].u.operand));
2518             Node* getByVal = addToGraph(GetByVal, OpInfo(arrayMode.asWord()), OpInfo(prediction), base, property);
2519             set(VirtualRegister(currentInstruction[1].u.operand), getByVal);
2520
2521             NEXT_OPCODE(op_get_by_val);
2522         }
2523
2524         case op_put_by_val_direct:
2525         case op_put_by_val: {
2526             Node* base = get(VirtualRegister(currentInstruction[1].u.operand));
2527
2528             ArrayMode arrayMode = getArrayModeConsideringSlowPath(currentInstruction[4].u.arrayProfile, Array::Write);
2529             
2530             Node* property = get(VirtualRegister(currentInstruction[2].u.operand));
2531             Node* value = get(VirtualRegister(currentInstruction[3].u.operand));
2532             
2533             addVarArgChild(base);
2534             addVarArgChild(property);
2535             addVarArgChild(value);
2536             addVarArgChild(0); // Leave room for property storage.
2537             addVarArgChild(0); // Leave room for length.
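                 // The two placeholder children are reserved so that later phases can
                 // supply property storage and length edges if the array mode needs them.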
2538             addToGraph(Node::VarArg, opcodeID == op_put_by_val_direct ? PutByValDirect : PutByVal, OpInfo(arrayMode.asWord()), OpInfo(0));
2539
2540             NEXT_OPCODE(op_put_by_val);
2541         }
2542             
2543         case op_get_by_id:
2544         case op_get_by_id_out_of_line:
2545         case op_get_array_length: {
2546             SpeculatedType prediction = getPrediction();
2547             
2548             Node* base = get(VirtualRegister(currentInstruction[2].u.operand));
2549             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
2550             
2551             StringImpl* uid = m_graph.identifiers()[identifierNumber];
2552             GetByIdStatus getByIdStatus = GetByIdStatus::computeFor(
2553                 m_inlineStackTop->m_profiledBlock, m_dfgCodeBlock,
2554                 m_inlineStackTop->m_stubInfos, m_dfgStubInfos,
2555                 currentCodeOrigin(), uid);
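                 // GetByIdStatus summarizes what the baseline stubs have seen for this
                 // property; handleGetById uses it to decide whether the access can be
                 // inlined instead of emitting a generic GetById.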
2556             
2557             handleGetById(
2558                 currentInstruction[1].u.operand, prediction, base, identifierNumber, getByIdStatus);
2559
2560             NEXT_OPCODE(op_get_by_id);
2561         }
2562         case op_put_by_id:
2563         case op_put_by_id_out_of_line:
2564         case op_put_by_id_transition_direct:
2565         case op_put_by_id_transition_normal:
2566         case op_put_by_id_transition_direct_out_of_line:
2567         case op_put_by_id_transition_normal_out_of_line: {
2568             Node* value = get(VirtualRegister(currentInstruction[3].u.operand));
2569             Node* base = get(VirtualRegister(currentInstruction[1].u.operand));
2570             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
2571             bool direct = currentInstruction[8].u.operand;
2572
2573             PutByIdStatus putByIdStatus = PutByIdStatus::computeFor(
2574                 m_inlineStackTop->m_profiledBlock, m_dfgCodeBlock,
2575                 m_inlineStackTop->m_stubInfos, m_dfgStubInfos,
2576                 currentCodeOrigin(), m_graph.identifiers()[identifierNumber]);
2577             bool canCountAsInlined = true;
2578             if (!putByIdStatus.isSet()) {
2579                 addToGraph(ForceOSRExit);
2580                 canCountAsInlined = false;
2581             }
2582             
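             // Three strategies, from cheapest to most generic: a simple replace stores
             // through the existing structure; a simple transition additionally
             // (re)allocates property storage and changes the structure; anything else
             // falls back to a generic PutById / PutByIdDirect.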
2583             if (putByIdStatus.isSimpleReplace()) {
2584                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2585                 handlePutByOffset(base, identifierNumber, putByIdStatus.offset(), value);
2586             } else if (
2587                 putByIdStatus.isSimpleTransition()
2588                 && (!putByIdStatus.structureChain()
2589                     || putByIdStatus.structureChain()->isStillValid())) {
2590                 
2591                 m_graph.chains().addLazily(putByIdStatus.structureChain());
2592                 
2593                 addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2594                 if (!direct) {
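                     // For a normal (non-direct) put we must prove that no prototype can
                     // intercept the store, so pin down the structure of every object on
                     // the prototype chain with constant-plus-structure checks.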
2595                     if (!putByIdStatus.oldStructure()->storedPrototype().isNull()) {
2596                         cellConstantWithStructureCheck(
2597                             putByIdStatus.oldStructure()->storedPrototype().asCell());
2598                     }
2599                     
2600                     for (unsigned i = 0; i < putByIdStatus.structureChain()->size(); ++i) {
2601                         JSValue prototype = putByIdStatus.structureChain()->at(i)->storedPrototype();
2602                         if (prototype.isNull())
2603                             continue;
2604                         cellConstantWithStructureCheck(prototype.asCell());
2605                     }
2606                 }
2607                 ASSERT(putByIdStatus.oldStructure()->transitionWatchpointSetHasBeenInvalidated());
2608                 
2609                 Node* propertyStorage;
2610                 StructureTransitionData* transitionData =
2611                     m_graph.addStructureTransitionData(
2612                         StructureTransitionData(
2613                             putByIdStatus.oldStructure(),
2614                             putByIdStatus.newStructure()));
2615
2616                 if (putByIdStatus.oldStructure()->outOfLineCapacity()
2617                     != putByIdStatus.newStructure()->outOfLineCapacity()) {
2618                     
2619                     // If we're growing the property storage then it must be because we're
2620                     // storing into the out-of-line storage.
2621                     ASSERT(!isInlineOffset(putByIdStatus.offset()));
2622                     
2623                     if (!putByIdStatus.oldStructure()->outOfLineCapacity()) {
2624                         propertyStorage = addToGraph(
2625                             AllocatePropertyStorage, OpInfo(transitionData), base);
2626                     } else {
2627                         propertyStorage = addToGraph(
2628                             ReallocatePropertyStorage, OpInfo(transitionData),
2629                             base, addToGraph(GetButterfly, base));
2630                     }
2631                 } else {
2632                     if (isInlineOffset(putByIdStatus.offset()))
2633                         propertyStorage = base;
2634                     else
2635                         propertyStorage = addToGraph(GetButterfly, base);
2636                 }
2637                 
2638                 addToGraph(PutStructure, OpInfo(transitionData), base);
2639                 
2640                 addToGraph(
2641                     PutByOffset,
2642                     OpInfo(m_graph.m_storageAccessData.size()),
2643                     propertyStorage,
2644                     base,
2645                     value);
2646                 
2647                 StorageAccessData storageAccessData;
2648                 storageAccessData.offset = putByIdStatus.offset();
2649                 storageAccessData.identifierNumber = identifierNumber;
2650                 m_graph.m_storageAccessData.append(storageAccessData);
2651             } else {
2652                 if (direct)
2653                     addToGraph(PutByIdDirect, OpInfo(identifierNumber), base, value);
2654                 else
2655                     addToGraph(PutById, OpInfo(identifierNumber), base, value);
2656                 canCountAsInlined = false;
2657             }
2658             
2659             if (canCountAsInlined && m_graph.compilation())
2660                 m_graph.compilation()->noticeInlinedPutById();
2661
2662             NEXT_OPCODE(op_put_by_id);
2663         }
2664
2665         case op_init_global_const_nop: {
2666             NEXT_OPCODE(op_init_global_const_nop);
2667         }
2668
2669         case op_init_global_const: {
2670             Node* value = get(VirtualRegister(currentInstruction[2].u.operand));
2671             addToGraph(
2672                 PutGlobalVar,
2673                 OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2674                 value);
2675             NEXT_OPCODE(op_init_global_const);
2676         }
2677
2678         // === Block terminators. ===
2679
2680         case op_jmp: {
2681             unsigned relativeOffset = currentInstruction[1].u.operand;
2682             addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2683             LAST_OPCODE(op_jmp);
2684         }
2685
2686         case op_jtrue: {
2687             unsigned relativeOffset = currentInstruction[2].u.operand;
2688             Node* condition = get(VirtualRegister(currentInstruction[1].u.operand));
2689             if (canFold(condition)) {
2690                 TriState state = valueOfJSConstant(condition).pureToBoolean();
2691                 if (state == TrueTriState) {
2692                     addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2693                     LAST_OPCODE(op_jtrue);
2694                 } else if (state == FalseTriState) {
2695                     // Emit a placeholder for this bytecode operation but otherwise
2696                     // just fall through.
2697                     addToGraph(Phantom);
2698                     NEXT_OPCODE(op_jtrue);
2699                 }
2700             }
2701             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jtrue)), condition);
2702             LAST_OPCODE(op_jtrue);
2703         }
2704
2705         case op_jfalse: {
2706             unsigned relativeOffset = currentInstruction[2].u.operand;
2707             Node* condition = get(VirtualRegister(currentInstruction[1].u.operand));
2708             if (canFold(condition)) {
2709                 TriState state = valueOfJSConstant(condition).pureToBoolean();
2710                 if (state == FalseTriState) {
2711                     addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2712                     LAST_OPCODE(op_jfalse);
2713                 } else if (state == TrueTriState) {
2714                     // Emit a placeholder for this bytecode operation but otherwise
2715                     // just fall through.
2716                     addToGraph(Phantom);
2717                     NEXT_OPCODE(op_jfalse);
2718                 }
2719             }
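                 // Branch's OpInfos are (taken, notTaken): for jfalse the fall-through is
                 // the taken (condition-true) successor, so it comes first.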
2720             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jfalse)), OpInfo(m_currentIndex + relativeOffset), condition);
2721             LAST_OPCODE(op_jfalse);
2722         }
2723
2724         case op_jeq_null: {
2725             unsigned relativeOffset = currentInstruction[2].u.operand;
2726             Node* value = get(VirtualRegister(currentInstruction[1].u.operand));
2727             Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2728             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jeq_null)), condition);
2729             LAST_OPCODE(op_jeq_null);
2730         }
2731
2732         case op_jneq_null: {
2733             unsigned relativeOffset = currentInstruction[2].u.operand;
2734             Node* value = get(VirtualRegister(currentInstruction[1].u.operand));
2735             Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2736             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_null)), OpInfo(m_currentIndex + relativeOffset), condition);
2737             LAST_OPCODE(op_jneq_null);
2738         }
2739
2740         case op_jless: {
2741             unsigned relativeOffset = currentInstruction[3].u.operand;
2742             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2743             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2744             if (canFold(op1) && canFold(op2)) {
2745                 JSValue aValue = valueOfJSConstant(op1);
2746                 JSValue bValue = valueOfJSConstant(op2);
2747                 if (aValue.isNumber() && bValue.isNumber()) {
2748                     double a = aValue.asNumber();
2749                     double b = bValue.asNumber();
2750                     if (a < b) {
2751                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2752                         LAST_OPCODE(op_jless);
2753                     } else {
2754                         // Emit a placeholder for this bytecode operation but otherwise
2755                         // just fall through.
2756                         addToGraph(Phantom);
2757                         NEXT_OPCODE(op_jless);
2758                     }
2759                 }
2760             }
2761             Node* condition = addToGraph(CompareLess, op1, op2);
2762             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jless)), condition);
2763             LAST_OPCODE(op_jless);
2764         }
2765
2766         case op_jlesseq: {
2767             unsigned relativeOffset = currentInstruction[3].u.operand;
2768             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2769             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2770             if (canFold(op1) && canFold(op2)) {
2771                 JSValue aValue = valueOfJSConstant(op1);
2772                 JSValue bValue = valueOfJSConstant(op2);
2773                 if (aValue.isNumber() && bValue.isNumber()) {
2774                     double a = aValue.asNumber();
2775                     double b = bValue.asNumber();
2776                     if (a <= b) {
2777                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2778                         LAST_OPCODE(op_jlesseq);
2779                     } else {
2780                         // Emit a placeholder for this bytecode operation but otherwise
2781                         // just fall through.
2782                         addToGraph(Phantom);
2783                         NEXT_OPCODE(op_jlesseq);
2784                     }
2785                 }
2786             }
2787             Node* condition = addToGraph(CompareLessEq, op1, op2);
2788             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jlesseq)), condition);
2789             LAST_OPCODE(op_jlesseq);
2790         }
2791
2792         case op_jgreater: {
2793             unsigned relativeOffset = currentInstruction[3].u.operand;
2794             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2795             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2796             if (canFold(op1) && canFold(op2)) {
2797                 JSValue aValue = valueOfJSConstant(op1);
2798                 JSValue bValue = valueOfJSConstant(op2);
2799                 if (aValue.isNumber() && bValue.isNumber()) {
2800                     double a = aValue.asNumber();
2801                     double b = bValue.asNumber();
2802                     if (a > b) {
2803                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2804                         LAST_OPCODE(op_jgreater);
2805                     } else {
2806                         // Emit a placeholder for this bytecode operation but otherwise
2807                         // just fall through.
2808                         addToGraph(Phantom);
2809                         NEXT_OPCODE(op_jgreater);
2810                     }
2811                 }
2812             }
2813             Node* condition = addToGraph(CompareGreater, op1, op2);
2814             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreater)), condition);
2815             LAST_OPCODE(op_jgreater);
2816         }
2817
2818         case op_jgreatereq: {
2819             unsigned relativeOffset = currentInstruction[3].u.operand;
2820             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2821             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2822             if (canFold(op1) && canFold(op2)) {
2823                 JSValue aValue = valueOfJSConstant(op1);
2824                 JSValue bValue = valueOfJSConstant(op2);
2825                 if (aValue.isNumber() && bValue.isNumber()) {
2826                     double a = aValue.asNumber();
2827                     double b = bValue.asNumber();
2828                     if (a >= b) {
2829                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2830                         LAST_OPCODE(op_jgreatereq);
2831                     } else {
2832                         // Emit a placeholder for this bytecode operation but otherwise
2833                         // just fall through.
2834                         addToGraph(Phantom);
2835                         NEXT_OPCODE(op_jgreatereq);
2836                     }
2837                 }
2838             }
2839             Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2840             addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreatereq)), condition);
2841             LAST_OPCODE(op_jgreatereq);
2842         }
2843
2844         case op_jnless: {
2845             unsigned relativeOffset = currentInstruction[3].u.operand;
2846             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2847             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2848             if (canFold(op1) && canFold(op2)) {
2849                 JSValue aValue = valueOfJSConstant(op1);
2850                 JSValue bValue = valueOfJSConstant(op2);
2851                 if (aValue.isNumber() && bValue.isNumber()) {
2852                     double a = aValue.asNumber();
2853                     double b = bValue.asNumber();
2854                     if (a < b) {
2855                         // Emit a placeholder for this bytecode operation but otherwise
2856                         // just fall through.
2857                         addToGraph(Phantom);
2858                         NEXT_OPCODE(op_jnless);
2859                     } else {
2860                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2861                         LAST_OPCODE(op_jnless);
2862                     }
2863                 }
2864             }
2865             Node* condition = addToGraph(CompareLess, op1, op2);
2866             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnless)), OpInfo(m_currentIndex + relativeOffset), condition);
2867             LAST_OPCODE(op_jnless);
2868         }
2869
2870         case op_jnlesseq: {
2871             unsigned relativeOffset = currentInstruction[3].u.operand;
2872             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2873             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2874             if (canFold(op1) && canFold(op2)) {
2875                 JSValue aValue = valueOfJSConstant(op1);
2876                 JSValue bValue = valueOfJSConstant(op2);
2877                 if (aValue.isNumber() && bValue.isNumber()) {
2878                     double a = aValue.asNumber();
2879                     double b = bValue.asNumber();
2880                     if (a <= b) {
2881                         // Emit a placeholder for this bytecode operation but otherwise
2882                         // just fall through.
2883                         addToGraph(Phantom);
2884                         NEXT_OPCODE(op_jnlesseq);
2885                     } else {
2886                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2887                         LAST_OPCODE(op_jnlesseq);
2888                     }
2889                 }
2890             }
2891             Node* condition = addToGraph(CompareLessEq, op1, op2);
2892             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnlesseq)), OpInfo(m_currentIndex + relativeOffset), condition);
2893             LAST_OPCODE(op_jnlesseq);
2894         }
2895
2896         case op_jngreater: {
2897             unsigned relativeOffset = currentInstruction[3].u.operand;
2898             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2899             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2900             if (canFold(op1) && canFold(op2)) {
2901                 JSValue aValue = valueOfJSConstant(op1);
2902                 JSValue bValue = valueOfJSConstant(op2);
2903                 if (aValue.isNumber() && bValue.isNumber()) {
2904                     double a = aValue.asNumber();
2905                     double b = bValue.asNumber();
2906                     if (a > b) {
2907                         // Emit a placeholder for this bytecode operation but otherwise
2908                         // just fall through.
2909                         addToGraph(Phantom);
2910                         NEXT_OPCODE(op_jngreater);
2911                     } else {
2912                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2913                         LAST_OPCODE(op_jngreater);
2914                     }
2915                 }
2916             }
2917             Node* condition = addToGraph(CompareGreater, op1, op2);
2918             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreater)), OpInfo(m_currentIndex + relativeOffset), condition);
2919             LAST_OPCODE(op_jngreater);
2920         }
2921
2922         case op_jngreatereq: {
2923             unsigned relativeOffset = currentInstruction[3].u.operand;
2924             Node* op1 = get(VirtualRegister(currentInstruction[1].u.operand));
2925             Node* op2 = get(VirtualRegister(currentInstruction[2].u.operand));
2926             if (canFold(op1) && canFold(op2)) {
2927                 JSValue aValue = valueOfJSConstant(op1);
2928                 JSValue bValue = valueOfJSConstant(op2);
2929                 if (aValue.isNumber() && bValue.isNumber()) {
2930                     double a = aValue.asNumber();
2931                     double b = bValue.asNumber();
2932                     if (a >= b) {
2933                         // Emit a placeholder for this bytecode operation but otherwise
2934                         // just fall through.
2935                         addToGraph(Phantom);
2936                         NEXT_OPCODE(op_jngreatereq);
2937                     } else {
2938                         addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2939                         LAST_OPCODE(op_jngreatereq);
2940                     }
2941                 }
2942             }
2943             Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2944             addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreatereq)), OpInfo(m_currentIndex + relativeOffset), condition);
2945             LAST_OPCODE(op_jngreatereq);
2946         }
2947             
2948         case op_switch_imm: {
2949             SwitchData data;
2950             data.kind = SwitchImm;
2951             data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
2952             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2953             SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
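                 // A zero branch offset marks a hole in the jump table, and cases that
                 // share the fall-through target are omitted since Switch reaches both
                 // through its fallThrough edge; switch_char and switch_string below
                 // apply the same filtering.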
2954             for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
2955                 if (!table.branchOffsets[i])
2956                     continue;
2957                 unsigned target = m_currentIndex + table.branchOffsets[i];
2958                 if (target == data.fallThroughBytecodeIndex())
2959                     continue;
2960                 data.cases.append(SwitchCase::withBytecodeIndex(jsNumber(static_cast<int32_t>(table.min + i)), target));
2961             }
2962             m_graph.m_switchData.append(data);
2963             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(VirtualRegister(currentInstruction[3].u.operand)));
2964             LAST_OPCODE(op_switch_imm);
2965         }
2966             
2967         case op_switch_char: {
2968             SwitchData data;
2969             data.kind = SwitchChar;
2970             data.switchTableIndex = m_inlineStackTop->m_switchRemap[currentInstruction[1].u.operand];
2971             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2972             SimpleJumpTable& table = m_codeBlock->switchJumpTable(data.switchTableIndex);
2973             for (unsigned i = 0; i < table.branchOffsets.size(); ++i) {
2974                 if (!table.branchOffsets[i])
2975                     continue;
2976                 unsigned target = m_currentIndex + table.branchOffsets[i];
2977                 if (target == data.fallThroughBytecodeIndex())
2978                     continue;
2979                 data.cases.append(
2980                     SwitchCase::withBytecodeIndex(LazyJSValue::singleCharacterString(table.min + i), target));
2981             }
2982             m_graph.m_switchData.append(data);
2983             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(VirtualRegister(currentInstruction[3].u.operand)));
2984             LAST_OPCODE(op_switch_char);
2985         }
2986
2987         case op_switch_string: {
2988             SwitchData data;
2989             data.kind = SwitchString;
2990             data.switchTableIndex = currentInstruction[1].u.operand;
2991             data.setFallThroughBytecodeIndex(m_currentIndex + currentInstruction[2].u.operand);
2992             StringJumpTable& table = m_codeBlock->stringSwitchJumpTable(data.switchTableIndex);
2993             StringJumpTable::StringOffsetTable::iterator iter;
2994             StringJumpTable::StringOffsetTable::iterator end = table.offsetTable.end();
2995             for (iter = table.offsetTable.begin(); iter != end; ++iter) {
2996                 unsigned target = m_currentIndex + iter->value.branchOffset;
2997                 if (target == data.fallThroughBytecodeIndex())
2998                     continue;
2999                 data.cases.append(
3000                     SwitchCase::withBytecodeIndex(LazyJSValue::knownStringImpl(iter->key.get()), target));
3001             }
3002             m_graph.m_switchData.append(data);
3003             addToGraph(Switch, OpInfo(&m_graph.m_switchData.last()), get(VirtualRegister(currentInstruction[3].u.operand)));
3004             LAST_OPCODE(op_switch_string);
3005         }
3006
3007         case op_ret:
3008             flushArgumentsAndCapturedVariables();
3009             if (inlineCallFrame()) {
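                 // Returning from inlined code: rather than emitting Return, store the
                 // result into the caller's return-value register and arrange for this
                 // block to be linked to the continuation after the call site.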
3010                 ASSERT(m_inlineStackTop->m_returnValue.isValid());
3011                 setDirect(m_inlineStackTop->m_returnValue, get(VirtualRegister(currentInstruction[1].u.operand)), ImmediateSet);
3012                 m_inlineStackTop->m_didReturn = true;
3013                 if (m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
3014                     // If we're returning from the first block, then we're done parsing.
3015                     ASSERT(m_inlineStackTop->m_callsiteBlockHead == m_graph.lastBlock());
3016                     shouldContinueParsing = false;
3017                     LAST_OPCODE(op_ret);
3018                 } else {
3019                     // If inlining created blocks, and we're doing a return, then we need some
3020                     // special linking.
3021                     ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_block == m_graph.lastBlock());
3022                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsNormalLinking = false;
3023                 }
3024                 if (m_currentIndex + OPCODE_LENGTH(op_ret) != m_inlineStackTop->m_codeBlock->instructions().size() || m_inlineStackTop->m_didEarlyReturn) {
3025                     ASSERT(m_currentIndex + OPCODE_LENGTH(op_ret) <= m_inlineStackTop->m_codeBlock->instructions().size());
3026                     addToGraph(Jump, OpInfo(0));
3027                     m_inlineStackTop->m_unlinkedBlocks.last().m_needsEarlyReturnLinking = true;
3028                     m_inlineStackTop->m_didEarlyReturn = true;
3029                 }
3030                 LAST_OPCODE(op_ret);
3031             }
3032             addToGraph(Return, get(VirtualRegister(currentInstruction[1].u.operand)));
3033             LAST_OPCODE(op_ret);
3034             
3035         case op_end:
3036             flushArgumentsAndCapturedVariables();
3037             ASSERT(!inlineCallFrame());
3038             addToGraph(Return, get(VirtualRegister(currentInstruction[1].u.operand)));
3039             LAST_OPCODE(op_end);
3040
3041         case op_throw:
3042             addToGraph(Throw, get(VirtualRegister(currentInstruction[1].u.operand)));
3043             flushAllArgumentsAndCapturedVariablesInInlineStack();
3044             addToGraph(Unreachable);
3045             LAST_OPCODE(op_throw);
3046             
3047         case op_throw_static_error:
3048             addToGraph(ThrowReferenceError);
3049             flushAllArgumentsAndCapturedVariablesInInlineStack();
3050             addToGraph(Unreachable);
3051             LAST_OPCODE(op_throw_static_error);
3052             
3053         case op_call:
3054             handleCall(currentInstruction, Call, CodeForCall);
3055             NEXT_OPCODE(op_call);
3056             
3057         case op_construct:
3058             handleCall(currentInstruction, Construct, CodeForConstruct);
3059             NEXT_OPCODE(op_construct);
3060             
3061         case op_call_varargs: {
3062             int result = currentInstruction[1].u.operand;
3063             int callee = currentInstruction[2].u.operand;
3064             int thisReg = currentInstruction[3].u.operand;
3065             int arguments = currentInstruction[4].u.operand;
3066             int firstFreeReg = currentInstruction[5].u.operand;
3067             
3068             ASSERT(inlineCallFrame());
3069             ASSERT_UNUSED(arguments, arguments == m_inlineStackTop->m_codeBlock->argumentsRegister().offset());
3070             ASSERT(!m_inlineStackTop->m_codeBlock->symbolTable()->slowArguments());
3071
3072             addToGraph(CheckArgumentsNotCreated);
3073
3074             unsigned argCount = inlineCallFrame()->arguments.size();
3075             
3076             // Let's compute the register offset. We start with the last used register, and
3077             // then adjust for the things we want in the call frame.
3078             int registerOffset = firstFreeReg + 1;
3079             registerOffset -= argCount; // We will be passing some arguments.
3080             registerOffset -= JSStack::CallFrameHeaderSize; // We will pretend to have a call frame header.
3081             
3082             // Get the alignment right: registerOffset is negative, so round its
             // magnitude up to a multiple of stackAlignmentRegisters() and negate back.
3083             registerOffset = -WTF::roundUpToMultipleOf(
3084                 stackAlignmentRegisters(),
3085                 -registerOffset);
3086             
3087             // The bytecode wouldn't have set up the arguments. But we'll do it and make it
3088             // look like the bytecode had done it.
3089             int nextRegister = registerOffset + JSStack::CallFrameHeaderSize;
3090             set(VirtualRegister(nextRegister++), get(VirtualRegister(thisReg)), ImmediateSet);
3091             for (unsigned argument = 1; argument < argCount; ++argument)
3092                 set(VirtualRegister(nextRegister++), get(virtualRegisterForArgument(argument)), ImmediateSet);
3093             
3094             handleCall(
3095                 result, Call, CodeForCall, OPCODE_LENGTH(op_call_varargs),
3096                 callee, argCount, registerOffset);
3097             NEXT_OPCODE(op_call_varargs);
3098         }
3099             
3100         case op_jneq_ptr:
3101             // Statically speculate for now. It makes sense to let speculate-only jneq_ptr
3102             // support simmer for a while before making it more general, since it's
3103             // already gnarly enough as it is.
3104             ASSERT(pointerIsFunction(currentInstruction[2].u.specialPointer));
3105             addToGraph(
3106                 CheckFunction,
3107                 OpInfo(actualPointerFor(m_inlineStackTop->m_codeBlock, currentInstruction[2].u.specialPointer)),
3108                 get(VirtualRegister(currentInstruction[1].u.operand)));
3109             addToGraph(Jump, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_ptr)));
3110             LAST_OPCODE(op_jneq_ptr);
3111
3112         case op_resolve_scope: {
3113             int dst = currentInstruction[1].u.operand;
3114             ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);
3115             unsigned depth = currentInstruction[4].u.operand;
3116
3117             // get_from_scope and put_to_scope depend on this watchpoint forcing OSR exit, so they don't add their own watchpoints.
3118             if (needsVarInjectionChecks(resolveType))
3119                 addToGraph(VarInjectionWatchpoint);
3120
3121             switch (resolveType) {
3122             case GlobalProperty:
3123             case GlobalVar:
3124             case GlobalPropertyWithVarInjectionChecks:
3125             case GlobalVarWithVarInjectionChecks:
3126                 set(VirtualRegister(dst), cellConstant(m_inlineStackTop->m_codeBlock->globalObject()));
3127                 break;
3128             case ClosureVar:
3129             case ClosureVarWithVarInjectionChecks: {
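                 // If we know the activation and its function has only ever been entered
                 // once, fold the scope lookup to that activation, guarded by a watchpoint
                 // that fires if the function is ever re-entered.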
3130                 JSActivation* activation = currentInstruction[5].u.activation.get();
3131                 if (activation
3132                     && activation->symbolTable()->m_functionEnteredOnce.isStillValid()) {
3133                     addToGraph(FunctionReentryWatchpoint, OpInfo(activation->symbolTable()));
3134                     set(VirtualRegister(dst), cellConstant(activation));
3135                     break;
3136                 }
3137                 set(VirtualRegister(dst),
3138                     getScope(m_inlineStackTop->m_codeBlock->needsActivation(), depth));
3139                 break;
3140             }
3141             case Dynamic:
3142                 RELEASE_ASSERT_NOT_REACHED();
3143                 break;
3144             }
3145             NEXT_OPCODE(op_resolve_scope);
3146         }
3147
3148         case op_get_from_scope: {
3149             int dst = currentInstruction[1].u.operand;
3150             int scope = currentInstruction[2].u.operand;
3151             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
3152             StringImpl* uid = m_graph.identifiers()[identifierNumber];
3153             ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
3154
3155             Structure* structure = 0;
3156             WatchpointSet* watchpoints = 0;
3157             uintptr_t operand;
3158             {
3159                 ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
3160                 if (resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks)
3161                     watchpoints = currentInstruction[5].u.watchpointSet;
3162                 else
3163                     structure = currentInstruction[5].u.structure.get();
3164                 operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
3165             }
3166
3167             UNUSED_PARAM(watchpoints); // Not used yet; we read it here to document that in GlobalVar mode index 5 holds the watchpoint set.
3168
3169             SpeculatedType prediction = getPrediction();
3170             JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
3171
3172             switch (resolveType) {
3173             case GlobalProperty:
3174             case GlobalPropertyWithVarInjectionChecks: {
3175                 GetByIdStatus status = GetByIdStatus::computeFor(*m_vm, structure, uid);
3176                 if (status.state() != GetByIdStatus::Simple || status.numVariants() != 1) {
3177                     set(VirtualRegister(dst), addToGraph(GetByIdFlush, OpInfo(identifierNumber), OpInfo(prediction), get(VirtualRegister(scope))));
3178                     break;
3179                 }
3180                 Node* base = cellConstantWithStructureCheck(globalObject, status[0].structureSet().singletonStructure());
3181                 addToGraph(Phantom, get(VirtualRegister(scope)));
3182                 if (JSValue specificValue = status[0].specificValue())
3183                     set(VirtualRegister(dst), cellConstant(specificValue.asCell()));
3184                 else
3185                     set(VirtualRegister(dst), handleGetByOffset(prediction, base, identifierNumber, operand));
3186                 break;
3187             }
3188             case GlobalVar:
3189             case GlobalVarWithVarInjectionChecks: {
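                 // If the variable's watchpoint set carries an inferred value, register a
                 // watchpoint and fold the load to that constant; otherwise emit a plain
                 // GetGlobalVar.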
3190                 addToGraph(Phantom, get(VirtualRegister(scope)));
3191                 SymbolTableEntry entry = globalObject->symbolTable()->get(uid);
3192                 VariableWatchpointSet* watchpointSet = entry.watchpointSet();
3193                 JSValue specificValue =
3194                     watchpointSet ? watchpointSet->inferredValue() : JSValue();
3195                 if (!specificValue) {
3196                     set(VirtualRegister(dst), addToGraph(GetGlobalVar, OpInfo(operand), OpInfo(prediction)));
3197                     break;
3198                 }
3199                 
3200                 addToGraph(VariableWatchpoint, OpInfo(watchpointSet));
3201                 set(VirtualRegister(dst), inferredConstant(specificValue));
3202                 break;
3203             }
3204             case ClosureVar:
3205             case ClosureVarWithVarInjectionChecks: {
3206                 Node* scopeNode = get(VirtualRegister(scope));
3207                 if (JSActivation* activation = m_graph.tryGetActivation(scopeNode)) {
3208                     SymbolTable* symbolTable = activation->symbolTable();
3209                     ConcurrentJITLocker locker(symbolTable->m_lock);
3210                     SymbolTable::Map::iterator iter = symbolTable->find(locker, uid);
3211                     ASSERT(iter != symbolTable->end(locker));
3212                     VariableWatchpointSet* watchpointSet = iter->value.watchpointSet();
3213                     if (watchpointSet) {
3214                         if (JSValue value = watchpointSet->inferredValue()) {
3215                             addToGraph(Phantom, scopeNode);
3216                             addToGraph(VariableWatchpoint, OpInfo(watchpointSet));
3217                             set(VirtualRegister(dst), inferredConstant(value));
3218                             break;
3219                         }
3220                     }
3221                 }
3222                 set(VirtualRegister(dst),
3223                     addToGraph(GetClosureVar, OpInfo(operand), OpInfo(prediction), 
3224                         addToGraph(GetClosureRegisters, scopeNode)));
3225                 break;
3226             }
3227             case Dynamic:
3228                 RELEASE_ASSERT_NOT_REACHED();
3229                 break;
3230             }
3231             NEXT_OPCODE(op_get_from_scope);
3232         }
3233
3234         case op_put_to_scope: {
3235             unsigned scope = currentInstruction[1].u.operand;
3236             unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3237             unsigned value = currentInstruction[3].u.operand;
3238             ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
3239             StringImpl* uid = m_graph.identifiers()[identifierNumber];
3240
3241             Structure* structure = 0;
3242             VariableWatchpointSet* watchpoints = 0;
3243             uintptr_t operand;
3244             {
3245                 ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
3246                 if (resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks)
3247                     watchpoints = currentInstruction[5].u.watchpointSet;
3248                 else
3249                     structure = currentInstruction[5].u.structure.get();
3250                 operand = reinterpret_cast<uintptr_t>(currentInstruction[6].u.pointer);
3251             }
3252
3253             JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
3254
3255             switch (resolveType) {
3256             case GlobalProperty:
3257             case GlobalPropertyWithVarInjectionChecks: {
3258                 PutByIdStatus status = PutByIdStatus::computeFor(*m_vm, globalObject, structure, uid, false);
3259                 if (!status.isSimpleReplace()) {
3260                     addToGraph(PutById, OpInfo(identifierNumber), get(VirtualRegister(scope)), get(VirtualRegister(value)));
3261                     break;
3262                 }
3263                 Node* base = cellConstantWithStructureCheck(globalObject, status.oldStructure());
3264                 addToGraph(Phantom, get(VirtualRegister(scope)));
3265                 handlePutByOffset(base, identifierNumber, static_cast<PropertyOffset>(operand), get(VirtualRegister(value)));
3266                 // Keep scope alive until after put.
3267                 addToGraph(Phantom, get(VirtualRegister(scope)));
3268                 break;
3269             }
3270             case GlobalVar:
3271             case GlobalVarWithVarInjectionChecks: {
3272                 SymbolTableEntry entry = globalObject->symbolTable()->get(uid);
3273                 ASSERT(watchpoints == entry.watchpointSet());
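                     // Unless the set is already invalidated, the write must notify it so
                     // that any code compiled against the old inferred value is jettisoned.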
3274                 Node* valueNode = get(VirtualRegister(value));
3275                 addToGraph(PutGlobalVar, OpInfo(operand), valueNode);
3276                 if (watchpoints->state() != IsInvalidated)
3277                     addToGraph(NotifyWrite, OpInfo(watchpoints), valueNode);
3278                 // Keep scope alive until after put.
3279                 addToGraph(Phantom, get(VirtualRegister(scope)));
3280                 break;
3281             }
3282             case ClosureVar:
3283             case ClosureVarWithVarInjectionChecks: {
3284                 Node* scopeNode = get(VirtualRegister(scope));
3285                 Node* scopeRegisters = addToGraph(GetClosureRegisters, scopeNode);
3286                 addToGraph(PutClosureVar, OpInfo(operand), scopeNode, scopeRegisters, get(VirtualRegister(value)));
3287                 break;
3288             }
3289             case Dynamic:
3290                 RELEASE_ASSERT_NOT_REACHED();
3291                 break;
3292             }
3293             NEXT_OPCODE(op_put_to_scope);
3294         }
3295
3296         case op_loop_hint: {
3297             // Baseline->DFG OSR jumps between loop hints. The DFG assumes that Baseline->DFG
3298             // OSR can only happen at basic block boundaries. Assert that these two statements
3299             // are compatible.
3300             RELEASE_ASSERT(m_currentIndex == blockBegin);
3301             
3302             // We never do OSR into an inlined code block. That cannot happen, since OSR
3303             // looks up the code block that is the replacement for the baseline JIT code
3304             // block. Hence, machine code block = true code block = not an inline code block.
3305             if (!m_inlineStackTop->m_caller)
3306                 m_currentBlock->isOSRTarget = true;
3307
3308             addToGraph(LoopHint);
3309             
3310             if (m_vm->watchdog.isEnabled())
3311                 addToGraph(CheckWatchdogTimer);
3312             
3313             NEXT_OPCODE(op_loop_hint);
3314         }
3315             
3316         case op_init_lazy_reg: {
3317             set(VirtualRegister(currentInstruction[1].u.operand), getJSConstantForValue(JSValue()));
3318             ASSERT(operandIsLocal(currentInstruction[1].u.operand));
3319             m_graph.m_lazyVars.set(VirtualRegister(currentInstruction[1].u.operand).toLocal());
3320             NEXT_OPCODE(op_init_lazy_reg);
3321         }
3322             
3323         case op_create_activation: {
3324             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(CreateActivation, get(VirtualRegister(currentInstruction[1].u.operand))));
3325             NEXT_OPCODE(op_create_activation);
3326         }
3327             
3328         case op_create_arguments: {
3329             m_graph.m_hasArguments = true;
3330             Node* createArguments = addToGraph(CreateArguments, get(VirtualRegister(currentInstruction[1].u.operand)));
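                 // Store the result both in the named arguments register and in its
                 // unmodified shadow, which preserves the original object even if the
                 // 'arguments' variable is later reassigned.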
3331             set(VirtualRegister(currentInstruction[1].u.operand), createArguments);
3332             set(unmodifiedArgumentsRegister(VirtualRegister(currentInstruction[1].u.operand)), createArguments);
3333             NEXT_OPCODE(op_create_arguments);
3334         }
3335             
3336         case op_tear_off_activation: {
3337             addToGraph(TearOffActivation, get(VirtualRegister(currentInstruction[1].u.operand)));
3338             NEXT_OPCODE(op_tear_off_activation);
3339         }
3340
3341         case op_tear_off_arguments: {
3342             m_graph.m_hasArguments = true;
3343             addToGraph(TearOffArguments, get(unmodifiedArgumentsRegister(VirtualRegister(currentInstruction[1].u.operand))), get(VirtualRegister(currentInstruction[2].u.operand)));
3344             NEXT_OPCODE(op_tear_off_arguments);
3345         }
3346             
3347         case op_get_arguments_length: {
3348             m_graph.m_hasArguments = true;
3349             set(VirtualRegister(currentInstruction[1].u.operand), addToGraph(GetMyArgumentsLengthSafe));
3350             NEXT_OPCODE(op_get_arguments_length);
3351         }
3352             
3353         case op_get_argument_by_val: {
3354             m_graph.m_hasArguments = true;
3355             set(VirtualRegister(currentInstruction[1].u.operand),
3356                 addToGraph(
3357                     GetMyArgumentByValSafe, OpInfo(0), OpInfo(getPrediction()),
3358                     get(VirtualRegister(currentInstruction[3].u.operand))));
3359             NEXT_OPCODE(op_get_argument_by_val);
3360         }
3361             
3362         case op_new_func: {
3363             if (!currentInstruction[3].u.operand) {
3364                 set(VirtualRegister(currentInstruction[1].u.operand),
3365                     addToGraph(NewFunctionNoCheck, OpInfo(currentInstruction[2].u.operand)));
3366             } else {
3367                 set(VirtualRegister(currentInstruction[1].u.operand),
3368                     addToGraph(
3369                         NewFunction,
3370                         OpInfo(currentInstruction[2].u.operand),
3371                         get(VirtualRegister(currentInstruction[1].u.operand))));
3372             }
3373             NEXT_OPCODE(op_new_func);
3374         }
3375             
3376         case op_new_captured_func: {
3377             Node* function = addToGraph(
3378                 NewFunctionNoCheck, OpInfo(currentInstruction[2].u.operand));
3379             if (VariableWatchpointSet* set = currentInstruction[3].u.watchpointSet)
3380                 addToGraph(NotifyWrite, OpInfo(set), function);
3381             set(VirtualRegister(currentInstruction[1].u.operand), function);
3382             NEXT_OPCODE(op_new_captured_func);
3383         }
3384             
3385         case op_new_func_exp: {
3386             set(VirtualRegister(currentInstruction[1].u.operand),
3387                 addToGraph(NewFunctionExpression, OpInfo(currentInstruction[2].u.operand)));
3388             NEXT_OPCODE(op_new_func_exp);
3389         }
3390
3391         case op_typeof: {
3392             set(VirtualRegister(currentInstruction[1].u.operand),
3393                 addToGraph(TypeOf, get(VirtualRegister(currentInstruction[2].u.operand))));
3394             NEXT_OPCODE(op_typeof);
3395         }
3396
3397         case op_to_number: {
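                 // Speculate that the operand is already a number: Identity with a
                 // NumberUse edge only checks numberness (OSR exiting otherwise); it
                 // performs no conversion.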
3398             set(VirtualRegister(currentInstruction[1].u.operand),
3399                 addToGraph(Identity, Edge(get(VirtualRegister(currentInstruction[2].u.operand)), NumberUse)));
3400             NEXT_OPCODE(op_to_number);
3401         }
3402             
3403         case op_in: {
3404             set(VirtualRegister(currentInstruction[1].u.operand),
3405                 addToGraph(In, get(VirtualRegister(currentInstruction[2].u.operand)), get(VirtualRegister(currentInstruction[3].u.operand))));
3406             NEXT_OPCODE(op_in);
3407         }
3408
3409         default:
3410             // Parse failed! This should not happen because the capabilities checker
3411             // should have caught it.
3412             RELEASE_ASSERT_NOT_REACHED();
3413             return false;
3414         }
3415     }
3416 }
3417
3418 void ByteCodeParser::linkBlock(BasicBlock* block, Vector<BasicBlock*>& possibleTargets)
3419 {
3420     ASSERT(!block->isLinked);
3421     ASSERT(!block->isEmpty());
3422     Node* node = block->last();
3423     ASSERT(node->isTerminal());
3424     
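         // During parsing, terminals recorded raw bytecode offsets for their targets.
         // Now that every block exists, translate each offset into its BasicBlock.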
3425     switch (node->op()) {
3426     case Jump:
3427         node->setTakenBlock(blockForBytecodeOffset(possibleTargets, node->takenBytecodeOffsetDuringParsing()));
3428         break;
3429         
3430     case Branch:
3431         node->setTakenBlock(blockForBytecodeOffset(possibleTargets, node->takenBytecodeOffsetDuringParsing()));
3432         node->setNotTakenBlock(blockForBytecodeOffset(possibleTargets, node->notTakenBytecodeOffsetDuringParsing()));
3433         break;
3434         
3435     case Switch:
3436         for (unsigned i = node->switchData()->cases.size(); i--;)
3437             node->switchData()->cases[i].target = blockForBytecodeOffset(possibleTargets, node->switchData()->cases[i].targetBytecodeIndex());
3438         node->switchData()->fallThrough = blockForBytecodeOffset(possibleTargets, node->switchData()->fallThroughBytecodeIndex());
3439         break;
3440         
3441     default:
3442         break;
3443     }
3444     
3445 #if !ASSERT_DISABLED
3446     block->isLinked = true;
3447 #endif
3448 }
3449
3450 void ByteCodeParser::linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BasicBlock*>& possibleTargets)
3451 {
3452     for (size_t i = 0; i < unlinkedBlocks.size(); ++i) {
3453         if (unlinkedBlocks[i].m_needsNormalLinking) {
3454             linkBlock(unlinkedBlocks[i].m_block, possibleTargets);
3455             unlinkedBlocks[i].m_needsNormalLinking = false;
3456         }
3457     }
3458 }
3459
3460 void ByteCodeParser::buildOperandMapsIfNecessary()
3461 {
3462     if (m_haveBuiltOperandMaps)
3463         return;
3464     
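     // Build reverse maps from identifiers and constant values to their indices in the
     // machine code block; inlining uses these to remap an inlinee's operands.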
3465     for (size_t i = 0; i < m_codeBlock->numberOfIdentifiers(); ++i)
3466         m_identifierMap.add(m_codeBlock->identifier(i).impl(), i);
3467     for (size_t i = 0; i < m_codeBlock->numberOfConstantRegisters(); ++i) {
3468         JSValue value = m_codeBlock->getConstant(i + FirstConstantRegisterIndex);
3469         if (!value)
3470             m_emptyJSValueIndex = i + FirstConstantRegisterIndex;
3471         else
3472             m_jsValueMap.add(JSValue::encode(value), i + FirstConstantRegisterIndex);
3473     }
3474     
3475     m_haveBuiltOperandMaps = true;
3476 }
3477
3478 ByteCodeParser::InlineStackEntry::InlineStackEntry(
3479     ByteCodeParser* byteCodeParser,
3480     CodeBlock* codeBlock,
3481     CodeBlock* profiledBlock,
3482     BasicBlock* callsiteBlockHead,
3483     JSFunction* callee, // Null if this is a closure call.
3484     VirtualRegister returnValueVR,
3485     VirtualRegister inlineCallFrameStart,
3486     int argumentCountIncludingThis,
3487     CodeSpecializationKind kind)
3488     : m_byteCodeParser(byteCodeParser)
3489     , m_codeBlock(codeBlock)
3490     , m_profiledBlock(profiledBlock)
3491     , m_callsiteBlockHead(callsiteBlockHead)
3492     , m_returnValue(returnValueVR)
3493     , m_didReturn(false)
3494     , m_didEarlyReturn(false)
3495     , m_caller(byteCodeParser->m_inlineStackTop)
3496 {
3497     {
3498         ConcurrentJITLocker locker(m_profiledBlock->m_lock);
3499         m_lazyOperands.initialize(locker, m_profiledBlock->lazyOperandValueProfiles());
3500         m_exitProfile.initialize(locker, profiledBlock->exitProfile());
3501         
3502         // We do this while holding the lock because StructureStubInfos may be added to
3503         // operations concurrently: the profiled block could be in the middle of
3504         // LLInt->JIT tier-up, in which case the infos would be getting added right now.
3505         if (m_profiledBlock->hasBaselineJITProfiling())
3506             m_profiledBlock->getStubInfoMap(locker, m_stubInfos);
3507     }
3508     
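         // Give every argument (including |this|) its own ArgumentPosition, which the
         // graph uses to merge speculation about that argument across all of its uses.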
3509     m_argumentPositions.resize(argumentCountIncludingThis);
3510     for (int i = 0; i < argumentCountIncludingThis; ++i) {
3511         byteCodeParser->m_graph.m_argumentPositions.append(ArgumentPosition());
3512         ArgumentPosition* argumentPosition = &byteCodeParser->m_graph.m_argumentPositions.last();
3513         m_argumentPositions[i] = argumentPosition;
3514     }
3515     
3516     // Track the code-block-global exit sites.
3517     if (m_exitProfile.hasExitSite(ArgumentsEscaped)) {
3518         byteCodeParser->m_graph.m_executablesWhoseArgumentsEscaped.add(
3519             codeBlock->ownerExecutable());
3520     }
3521         
3522     if (m_caller) {
3523         // Inline case.
3524         ASSERT(codeBlock != byteCodeParser->m_codeBlock);
3525         ASSERT(inlineCallFrameStart.isValid());
3526         ASSERT(callsiteBlockHead);
3527         
3528         m_inlineCallFrame = byteCodeParser->m_graph.m_inlineCallFrames->add();
3529