Source/JavaScriptCore/bytecompiler/BytecodeGenerator.cpp
1 /*
2  * Copyright (C) 2008, 2009, 2012, 2013 Apple Inc. All rights reserved.
3  * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4  * Copyright (C) 2012 Igalia, S.L.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  *
10  * 1.  Redistributions of source code must retain the above copyright
11  *     notice, this list of conditions and the following disclaimer.
12  * 2.  Redistributions in binary form must reproduce the above copyright
13  *     notice, this list of conditions and the following disclaimer in the
14  *     documentation and/or other materials provided with the distribution.
15  * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16  *     its contributors may be used to endorse or promote products derived
17  *     from this software without specific prior written permission.
18  *
19  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29  */
30
31 #include "config.h"
32 #include "BytecodeGenerator.h"
33
34 #include "Interpreter.h"
35 #include "JSActivation.h"
36 #include "JSFunction.h"
37 #include "JSNameScope.h"
38 #include "LowLevelInterpreter.h"
39 #include "Operations.h"
40 #include "Options.h"
41 #include "StrongInlines.h"
42 #include "UnlinkedCodeBlock.h"
43 #include <wtf/StdLibExtras.h>
44 #include <wtf/text/WTFString.h>
45
46 using namespace std;
47
48 namespace JSC {
49
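// Binds the label to its final bytecode offset and back-patches the relative
// jump operands of every instruction that referenced the label before it was
// placed.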
50 void Label::setLocation(unsigned location)
51 {
52     m_location = location;
53     
54     unsigned size = m_unresolvedJumps.size();
55     for (unsigned i = 0; i < size; ++i)
56         m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
57 }
58
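// Drives code generation for the whole scope: emits bytecode for the AST,
// converts the recorded try ranges into exception handler entries (skipping
// degenerate ranges, see below), and hands the finished instruction stream to
// the UnlinkedCodeBlock.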
59 ParserError BytecodeGenerator::generate()
60 {
61     SamplingRegion samplingRegion("Bytecode Generation");
62     
63     m_codeBlock->setThisRegister(m_thisRegister.virtualRegister());
64
65     m_scopeNode->emitBytecode(*this);
66
67     m_staticPropertyAnalyzer.kill();
68
69     for (unsigned i = 0; i < m_tryRanges.size(); ++i) {
70         TryRange& range = m_tryRanges[i];
71         int start = range.start->bind();
72         int end = range.end->bind();
73         
74         // This will happen for empty try blocks and for some cases of finally blocks:
75         //
76         // try {
77         //    try {
78         //    } finally {
79         //        return 42;
80         //        // *HERE*
81         //    }
82         // } finally {
83         //    print("things");
84         // }
85         //
86         // The return will pop scopes to execute the outer finally block. But this includes
87         // popping the try context for the inner try. The try context is live in the fall-through
88         // part of the finally block not because we will emit a handler that overlaps the finally,
89         // but because we haven't yet had a chance to plant the catch target. Then when we finish
90         // emitting code for the outer finally block, we repush the try context, this time with a
91         // new start index. But that means that the start index for the try range corresponding
92         // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
93         // than the end index of the try block. This is harmless since end < start handlers will
94         // never get matched in our logic, but we do the runtime a favor and choose to not emit
95         // such handlers at all.
96         if (end <= start)
97             continue;
98         
99         ASSERT(range.tryData->targetScopeDepth != UINT_MAX);
100         UnlinkedHandlerInfo info = {
101             static_cast<uint32_t>(start), static_cast<uint32_t>(end),
102             static_cast<uint32_t>(range.tryData->target->bind()),
103             range.tryData->targetScopeDepth
104         };
105         m_codeBlock->addExceptionHandler(info);
106     }
107     
108     m_codeBlock->instructions() = RefCountedArray<UnlinkedInstruction>(m_instructions);
109
110     m_codeBlock->shrinkToFit();
111
112     if (m_expressionTooDeep)
113         return ParserError(ParserError::OutOfMemory);
114     return ParserError(ParserError::ErrorNone);
115 }
116
117 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
118 {
119     ConcurrentJITLocker locker(symbolTable().m_lock);
120     int index = virtualRegisterForLocal(m_calleeRegisters.size()).offset();
121     SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
122     SymbolTable::Map::AddResult result = symbolTable().add(locker, ident.impl(), newEntry);
123
124     if (!result.isNewEntry) {
125         r0 = &registerFor(result.iterator->value.getIndex());
126         return false;
127     }
128
129     r0 = addVar();
130     return true;
131 }
132
133 void BytecodeGenerator::preserveLastVar()
134 {
135     if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
136         m_lastVar = &m_calleeRegisters.last();
137 }
138
139 BytecodeGenerator::BytecodeGenerator(VM& vm, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
140     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
141     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
142     , m_symbolTable(0)
143     , m_scopeNode(programNode)
144     , m_codeBlock(vm, codeBlock)
145     , m_thisRegister(CallFrame::thisArgumentOffset())
146     , m_emptyValueRegister(0)
147     , m_globalObjectRegister(0)
148     , m_finallyDepth(0)
149     , m_localScopeDepth(0)
150     , m_codeType(GlobalCode)
151     , m_nextConstantOffset(0)
152     , m_globalConstantIndex(0)
153     , m_hasCreatedActivation(true)
154     , m_firstLazyFunction(0)
155     , m_lastLazyFunction(0)
156     , m_staticPropertyAnalyzer(&m_instructions)
157     , m_vm(&vm)
158     , m_lastOpcodeID(op_end)
159 #ifndef NDEBUG
160     , m_lastOpcodePosition(0)
161 #endif
162     , m_stack(vm, wtfThreadData().stack())
163     , m_usesExceptions(false)
164     , m_expressionTooDeep(false)
165 {
166     if (m_shouldEmitDebugHooks)
167         m_codeBlock->setNeedsFullScopeChain(true);
168
169     m_codeBlock->setNumParameters(1); // Allocate space for "this"
170
171     emitOpcode(op_enter);
172
173     const VarStack& varStack = programNode->varStack();
174     const FunctionStack& functionStack = programNode->functionStack();
175
176     for (size_t i = 0; i < functionStack.size(); ++i) {
177         FunctionBodyNode* function = functionStack[i];
178         UnlinkedFunctionExecutable* unlinkedFunction = makeFunction(function);
179         codeBlock->addFunctionDeclaration(*m_vm, function->ident(), unlinkedFunction);
180     }
181
182     for (size_t i = 0; i < varStack.size(); ++i)
183         codeBlock->addVariableDeclaration(varStack[i].first, !!(varStack[i].second & DeclarationStacks::IsConstant));
184
185 }
186
187 BytecodeGenerator::BytecodeGenerator(VM& vm, FunctionBodyNode* functionBody, UnlinkedFunctionCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
188     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
189     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
190     , m_symbolTable(codeBlock->symbolTable())
191     , m_scopeNode(functionBody)
192     , m_codeBlock(vm, codeBlock)
193     , m_activationRegister(0)
194     , m_emptyValueRegister(0)
195     , m_globalObjectRegister(0)
196     , m_finallyDepth(0)
197     , m_localScopeDepth(0)
198     , m_codeType(FunctionCode)
199     , m_nextConstantOffset(0)
200     , m_globalConstantIndex(0)
201     , m_hasCreatedActivation(false)
202     , m_firstLazyFunction(0)
203     , m_lastLazyFunction(0)
204     , m_staticPropertyAnalyzer(&m_instructions)
205     , m_vm(&vm)
206     , m_lastOpcodeID(op_end)
207 #ifndef NDEBUG
208     , m_lastOpcodePosition(0)
209 #endif
210     , m_stack(vm, wtfThreadData().stack())
211     , m_usesExceptions(false)
212     , m_expressionTooDeep(false)
213 {
214     if (m_shouldEmitDebugHooks)
215         m_codeBlock->setNeedsFullScopeChain(true);
216
217     m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
218     Vector<Identifier> boundParameterProperties;
219     FunctionParameters& parameters = *functionBody->parameters();
220     for (size_t i = 0; i < parameters.size(); i++) {
221         auto pattern = parameters.at(i);
222         if (pattern->isBindingNode())
223             continue;
224         pattern->collectBoundIdentifiers(boundParameterProperties);
225         continue;
226     }
227     m_symbolTable->setParameterCountIncludingThis(functionBody->parameters()->size() + 1);
228
229     emitOpcode(op_enter);
230     if (m_codeBlock->needsFullScopeChain()) {
231         m_activationRegister = addVar();
232         emitInitLazyRegister(m_activationRegister);
233         m_codeBlock->setActivationRegister(m_activationRegister->virtualRegister());
234     }
235
236     m_symbolTable->setCaptureStart(virtualRegisterForLocal(m_codeBlock->m_numVars).offset());
237
238     if (functionBody->usesArguments() || codeBlock->usesEval()) { // May reify arguments object.
239         RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
240         RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.
241
242         // We can save a little space by hard-coding the knowledge that the two
243         // 'arguments' values are stored in consecutive registers, and storing
244         // only the index of the assignable one.
245         codeBlock->setArgumentsRegister(argumentsRegister->virtualRegister());
246         ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->virtualRegister() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));
247
248         emitInitLazyRegister(argumentsRegister);
249         emitInitLazyRegister(unmodifiedArgumentsRegister);
250         
251         if (shouldTearOffArgumentsEagerly()) {
252             emitOpcode(op_create_arguments);
253             instructions().append(argumentsRegister->index());
254         }
255     }
256
257     bool shouldCaptureAllTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();
258
259     bool capturesAnyArgumentByName = false;
260     Vector<RegisterID*, 0, UnsafeVectorOverflow> capturedArguments;
261     if (functionBody->hasCapturedVariables() || shouldCaptureAllTheThings) {
262         FunctionParameters& parameters = *functionBody->parameters();
263         capturedArguments.resize(parameters.size());
264         for (size_t i = 0; i < parameters.size(); ++i) {
265             capturedArguments[i] = 0;
266             auto pattern = parameters.at(i);
267             if (!pattern->isBindingNode())
268                 continue;
269             const Identifier& ident = static_cast<const BindingNode*>(pattern)->boundProperty();
270             if (!functionBody->captures(ident) && !shouldCaptureAllTheThings)
271                 continue;
272             capturesAnyArgumentByName = true;
273             capturedArguments[i] = addVar();
274         }
275     }
276
277     if (capturesAnyArgumentByName && !shouldTearOffArgumentsEagerly()) {
278         size_t parameterCount = m_symbolTable->parameterCount();
279         auto slowArguments = std::make_unique<SlowArgument[]>(parameterCount);
280         for (size_t i = 0; i < parameterCount; ++i) {
281             if (!capturedArguments[i]) {
282                 ASSERT(slowArguments[i].status == SlowArgument::Normal);
283                 slowArguments[i].index = CallFrame::argumentOffset(i);
284                 continue;
285             }
286             slowArguments[i].status = SlowArgument::Captured;
287             slowArguments[i].index = capturedArguments[i]->index();
288         }
289         m_symbolTable->setSlowArguments(std::move(slowArguments));
290     }
291
292     RegisterID* calleeRegister = resolveCallee(functionBody); // May push to the scope chain and/or add a captured var.
293
294     const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
295     const DeclarationStacks::VarStack& varStack = functionBody->varStack();
296
297     // Captured variables and functions go first so that activations don't have
298     // to step over the non-captured locals to mark them.
299     m_hasCreatedActivation = false;
300     if (functionBody->hasCapturedVariables()) {
301         for (size_t i = 0; i < functionStack.size(); ++i) {
302             FunctionBodyNode* function = functionStack[i];
303             const Identifier& ident = function->ident();
304             if (functionBody->captures(ident)) {
305                 if (!m_hasCreatedActivation) {
306                     m_hasCreatedActivation = true;
307                     emitOpcode(op_create_activation);
308                     instructions().append(m_activationRegister->index());
309                 }
310                 m_functions.add(ident.impl());
311                 emitNewFunction(addVar(ident, false), function);
312             }
313         }
314         for (size_t i = 0; i < varStack.size(); ++i) {
315             const Identifier& ident = varStack[i].first;
316             if (functionBody->captures(ident))
317                 addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
318         }
319     }
320     bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
321     if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
322         m_hasCreatedActivation = true;
323         emitOpcode(op_create_activation);
324         instructions().append(m_activationRegister->index());
325     }
326
327     m_symbolTable->setCaptureEnd(virtualRegisterForLocal(codeBlock->m_numVars).offset());
328
329     m_firstLazyFunction = codeBlock->m_numVars;
330     for (size_t i = 0; i < functionStack.size(); ++i) {
331         FunctionBodyNode* function = functionStack[i];
332         const Identifier& ident = function->ident();
333         if (!functionBody->captures(ident)) {
334             m_functions.add(ident.impl());
335             RefPtr<RegisterID> reg = addVar(ident, false);
336             // Don't lazily create functions that override the name 'arguments'
337             // as this would complicate lazy instantiation of actual arguments.
338             if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
339                 emitNewFunction(reg.get(), function);
340             else {
341                 emitInitLazyRegister(reg.get());
342                 m_lazyFunctions.set(reg->virtualRegister().toLocal(), function);
343             }
344         }
345     }
346     m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
347     for (size_t i = 0; i < varStack.size(); ++i) {
348         const Identifier& ident = varStack[i].first;
349         if (!functionBody->captures(ident))
350             addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
351     }
352
353     if (shouldCaptureAllTheThings)
354         m_symbolTable->setCaptureEnd(virtualRegisterForLocal(codeBlock->m_numVars).offset());
355
356     m_parameters.grow(parameters.size() + 1); // reserve space for "this"
357
358     // Add "this" as a parameter
359     int nextParameterIndex = CallFrame::thisArgumentOffset();
360     m_thisRegister.setIndex(nextParameterIndex++);
361     m_codeBlock->addParameter();
362     Vector<std::pair<RegisterID*, const DeconstructionPatternNode*>> deconstructedParameters;
363     for (size_t i = 0; i < parameters.size(); ++i, ++nextParameterIndex) {
364         int index = nextParameterIndex;
365         auto pattern = parameters.at(i);
366         if (!pattern->isBindingNode()) {
367             m_codeBlock->addParameter();
368             RegisterID& parameter = registerFor(index);
369             parameter.setIndex(index);
370             deconstructedParameters.append(make_pair(&parameter, pattern));
371             continue;
372         }
373         auto simpleParameter = static_cast<const BindingNode*>(pattern);
374         if (capturedArguments.size() && capturedArguments[i]) {
375             ASSERT((functionBody->hasCapturedVariables() && functionBody->captures(simpleParameter->boundProperty())) || shouldCaptureAllTheThings);
376             index = capturedArguments[i]->index();
377             RegisterID original(nextParameterIndex);
378             emitMove(capturedArguments[i], &original);
379         }
380         addParameter(simpleParameter->boundProperty(), index);
381     }
382     preserveLastVar();
383
384     // We declare the callee's name last because it should lose to a var, function, and/or parameter declaration.
385     addCallee(functionBody, calleeRegister);
386
387     if (isConstructor()) {
388         emitCreateThis(&m_thisRegister);
389     } else if (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks) {
390         m_codeBlock->addPropertyAccessInstruction(instructions().size());
391         emitOpcode(op_to_this);
392         instructions().append(kill(&m_thisRegister));
393         instructions().append(0);
394     }
395     for (size_t i = 0; i < deconstructedParameters.size(); i++) {
396         auto& entry = deconstructedParameters[i];
397         entry.second->emitBytecode(*this, entry.first);
398     }
399 }
400
401 BytecodeGenerator::BytecodeGenerator(VM& vm, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
402     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
403     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
404     , m_symbolTable(codeBlock->symbolTable())
405     , m_scopeNode(evalNode)
406     , m_codeBlock(vm, codeBlock)
407     , m_thisRegister(CallFrame::thisArgumentOffset())
408     , m_emptyValueRegister(0)
409     , m_globalObjectRegister(0)
410     , m_finallyDepth(0)
411     , m_localScopeDepth(0)
412     , m_codeType(EvalCode)
413     , m_nextConstantOffset(0)
414     , m_globalConstantIndex(0)
415     , m_hasCreatedActivation(true)
416     , m_firstLazyFunction(0)
417     , m_lastLazyFunction(0)
418     , m_staticPropertyAnalyzer(&m_instructions)
419     , m_vm(&vm)
420     , m_lastOpcodeID(op_end)
421 #ifndef NDEBUG
422     , m_lastOpcodePosition(0)
423 #endif
424     , m_stack(vm, wtfThreadData().stack())
425     , m_usesExceptions(false)
426     , m_expressionTooDeep(false)
427 {
428     m_codeBlock->setNeedsFullScopeChain(true);
429
430     m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
431     m_codeBlock->setNumParameters(1);
432
433     emitOpcode(op_enter);
434
435     const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
436     for (size_t i = 0; i < functionStack.size(); ++i)
437         m_codeBlock->addFunctionDecl(makeFunction(functionStack[i]));
438
439     const DeclarationStacks::VarStack& varStack = evalNode->varStack();
440     unsigned numVariables = varStack.size();
441     Vector<Identifier, 0, UnsafeVectorOverflow> variables;
442     variables.reserveCapacity(numVariables);
443     for (size_t i = 0; i < numVariables; ++i) {
444         ASSERT(varStack[i].first.impl()->isIdentifier());
445         variables.append(varStack[i].first);
446     }
447     codeBlock->adoptVariables(variables);
448     preserveLastVar();
449 }
450
451 BytecodeGenerator::~BytecodeGenerator()
452 {
453 }
454
455 RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
456 {
457     emitOpcode(op_init_lazy_reg);
458     instructions().append(reg->index());
459     return reg;
460 }
461
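// A function expression's name is visible inside its own body, e.g.
// (function f() { return f; }). resolveCallee() decides where that binding
// lives: the callee slot itself, a pushed name scope when non-strict eval or
// the debugger could observe it, or a captured local when the body captures
// the name.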
462 RegisterID* BytecodeGenerator::resolveCallee(FunctionBodyNode* functionBodyNode)
463 {
464     if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
465         return 0;
466
467     m_calleeRegister.setIndex(JSStack::Callee);
468
469     // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
470     if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks)
471         emitPushNameScope(functionBodyNode->ident(), &m_calleeRegister, ReadOnly | DontDelete);
472
473     if (!functionBodyNode->captures(functionBodyNode->ident()))
474         return &m_calleeRegister;
475
476     // Move the callee into the captured section of the stack.
477     return emitMove(addVar(), &m_calleeRegister);
478 }
479
480 void BytecodeGenerator::addCallee(FunctionBodyNode* functionBodyNode, RegisterID* calleeRegister)
481 {
482     if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
483         return;
484
485     // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
486     if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks)
487         return;
488
489     ASSERT(calleeRegister);
490     symbolTable().add(functionBodyNode->ident().impl(), SymbolTableEntry(calleeRegister->index(), ReadOnly));
491 }
492
493 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
494 {
495     // Parameters overwrite var declarations, but not function declarations.
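    // For example:
    //   function f(x) { var x; return x; }          // 'x' still refers to the parameter
    //   function f(x) { function x() {} return x; } // the function declaration wins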
496     StringImpl* rep = ident.impl();
497     if (!m_functions.contains(rep)) {
498         symbolTable().set(rep, parameterIndex);
499         RegisterID& parameter = registerFor(parameterIndex);
500         parameter.setIndex(parameterIndex);
501     }
502
503     // To maintain the calling convention, we have to allocate unique space for
504     // each parameter, even if the parameter doesn't make it into the symbol table.
505     m_codeBlock->addParameter();
506 }
507
508 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
509 {
510     if (ident != propertyNames().arguments)
511         return false;
512     
513     if (!shouldOptimizeLocals())
514         return false;
515     
516     SymbolTableEntry entry = symbolTable().get(ident.impl());
517     if (entry.isNull())
518         return false;
519
520     if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
521         return true;
522     
523     return false;
524 }
525
526 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
527 {
528     ASSERT(willResolveToArguments(propertyNames().arguments));
529
530     SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
531     ASSERT(!entry.isNull());
532     return &registerFor(entry.getIndex());
533 }
534
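// Locals in the [m_firstLazyFunction, m_lastLazyFunction) range hold function
// declarations whose closures have not been created yet; the first time such a
// local is touched, the closure is materialized via emitLazyNewFunction().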
535 RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
536 {
537     if (!reg->virtualRegister().isLocal())
538         return reg;
539
540     int localVariableNumber = reg->virtualRegister().toLocal();
541
542     if (m_lastLazyFunction <= localVariableNumber || localVariableNumber < m_firstLazyFunction)
543         return reg;
544     emitLazyNewFunction(reg, m_lazyFunctions.get(localVariableNumber));
545     return reg;
546 }
547
548 RegisterID* BytecodeGenerator::newRegister()
549 {
550     m_calleeRegisters.append(virtualRegisterForLocal(m_calleeRegisters.size()));
551     m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
552     return &m_calleeRegisters.last();
553 }
554
555 RegisterID* BytecodeGenerator::newTemporary()
556 {
557     // Reclaim free register IDs.
558     while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
559         m_calleeRegisters.removeLast();
560         
561     RegisterID* result = newRegister();
562     result->setTemporary();
563     return result;
564 }
565
566 LabelScopePtr BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
567 {
568     // Reclaim free label scopes.
569     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
570         m_labelScopes.removeLast();
571
572     // Allocate new label scope.
573     LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
574     m_labelScopes.append(scope);
575     return LabelScopePtr(&m_labelScopes, m_labelScopes.size() - 1);
576 }
577
578 PassRefPtr<Label> BytecodeGenerator::newLabel()
579 {
580     // Reclaim free label IDs.
581     while (m_labels.size() && !m_labels.last().refCount())
582         m_labels.removeLast();
583
584     // Allocate new label ID.
585     m_labels.append(this);
586     return &m_labels.last();
587 }
588
589 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
590 {
591     unsigned newLabelIndex = instructions().size();
592     l0->setLocation(newLabelIndex);
593
594     if (m_codeBlock->numberOfJumpTargets()) {
595         unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
596         ASSERT(lastLabelIndex <= newLabelIndex);
597         if (newLabelIndex == lastLabelIndex) {
598             // Peephole optimizations have already been disabled by emitting the last label
599             return l0;
600         }
601     }
602
603     m_codeBlock->addJumpTarget(newLabelIndex);
604
605     // This disables peephole optimizations when an instruction is a jump target
606     m_lastOpcodeID = op_end;
607     return l0;
608 }
609
610 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
611 {
612 #ifndef NDEBUG
613     size_t opcodePosition = instructions().size();
614     ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
615     m_lastOpcodePosition = opcodePosition;
616 #endif
617     instructions().append(opcodeID);
618     m_lastOpcodeID = opcodeID;
619 }
620
621 UnlinkedArrayProfile BytecodeGenerator::newArrayProfile()
622 {
623 #if ENABLE(VALUE_PROFILER)
624     return m_codeBlock->addArrayProfile();
625 #else
626     return 0;
627 #endif
628 }
629
630 UnlinkedArrayAllocationProfile BytecodeGenerator::newArrayAllocationProfile()
631 {
632 #if ENABLE(VALUE_PROFILER)
633     return m_codeBlock->addArrayAllocationProfile();
634 #else
635     return 0;
636 #endif
637 }
638
639 UnlinkedObjectAllocationProfile BytecodeGenerator::newObjectAllocationProfile()
640 {
641     return m_codeBlock->addObjectAllocationProfile();
642 }
643
644 UnlinkedValueProfile BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
645 {
646 #if ENABLE(VALUE_PROFILER)
647     UnlinkedValueProfile result = m_codeBlock->addValueProfile();
648 #else
649     UnlinkedValueProfile result = 0;
650 #endif
651     emitOpcode(opcodeID);
652     return result;
653 }
654
655 void BytecodeGenerator::emitLoopHint()
656 {
657     emitOpcode(op_loop_hint);
658 }
659
660 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
661 {
662     ASSERT(instructions().size() >= 4);
663     size_t size = instructions().size();
664     dstIndex = instructions().at(size - 3).u.operand;
665     src1Index = instructions().at(size - 2).u.operand;
666     src2Index = instructions().at(size - 1).u.operand;
667 }
668
669 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
670 {
671     ASSERT(instructions().size() >= 3);
672     size_t size = instructions().size();
673     dstIndex = instructions().at(size - 2).u.operand;
674     srcIndex = instructions().at(size - 1).u.operand;
675 }
676
677 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
678 {
679     ASSERT(instructions().size() >= 4);
680     instructions().shrink(instructions().size() - 4);
681     m_lastOpcodeID = op_end;
682 }
683
684 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
685 {
686     ASSERT(instructions().size() >= 3);
687     instructions().shrink(instructions().size() - 3);
688     m_lastOpcodeID = op_end;
689 }
690
691 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
692 {
693     size_t begin = instructions().size();
694     emitOpcode(op_jmp);
695     instructions().append(target->bind(begin, instructions().size()));
696     return target;
697 }
698
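// Peephole: if the condition was just computed by a comparison (or null test)
// into a dead temporary, rewind that instruction and emit a fused
// compare-and-jump instead, e.g. op_less + jump-if-true becomes op_jless.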
699 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
700 {
701     if (m_lastOpcodeID == op_less) {
702         int dstIndex;
703         int src1Index;
704         int src2Index;
705
706         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
707
708         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
709             rewindBinaryOp();
710
711             size_t begin = instructions().size();
712             emitOpcode(op_jless);
713             instructions().append(src1Index);
714             instructions().append(src2Index);
715             instructions().append(target->bind(begin, instructions().size()));
716             return target;
717         }
718     } else if (m_lastOpcodeID == op_lesseq) {
719         int dstIndex;
720         int src1Index;
721         int src2Index;
722
723         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
724
725         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
726             rewindBinaryOp();
727
728             size_t begin = instructions().size();
729             emitOpcode(op_jlesseq);
730             instructions().append(src1Index);
731             instructions().append(src2Index);
732             instructions().append(target->bind(begin, instructions().size()));
733             return target;
734         }
735     } else if (m_lastOpcodeID == op_greater) {
736         int dstIndex;
737         int src1Index;
738         int src2Index;
739
740         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
741
742         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
743             rewindBinaryOp();
744
745             size_t begin = instructions().size();
746             emitOpcode(op_jgreater);
747             instructions().append(src1Index);
748             instructions().append(src2Index);
749             instructions().append(target->bind(begin, instructions().size()));
750             return target;
751         }
752     } else if (m_lastOpcodeID == op_greatereq) {
753         int dstIndex;
754         int src1Index;
755         int src2Index;
756
757         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
758
759         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
760             rewindBinaryOp();
761
762             size_t begin = instructions().size();
763             emitOpcode(op_jgreatereq);
764             instructions().append(src1Index);
765             instructions().append(src2Index);
766             instructions().append(target->bind(begin, instructions().size()));
767             return target;
768         }
769     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
770         int dstIndex;
771         int srcIndex;
772
773         retrieveLastUnaryOp(dstIndex, srcIndex);
774
775         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
776             rewindUnaryOp();
777
778             size_t begin = instructions().size();
779             emitOpcode(op_jeq_null);
780             instructions().append(srcIndex);
781             instructions().append(target->bind(begin, instructions().size()));
782             return target;
783         }
784     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
785         int dstIndex;
786         int srcIndex;
787
788         retrieveLastUnaryOp(dstIndex, srcIndex);
789
790         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
791             rewindUnaryOp();
792
793             size_t begin = instructions().size();
794             emitOpcode(op_jneq_null);
795             instructions().append(srcIndex);
796             instructions().append(target->bind(begin, instructions().size()));
797             return target;
798         }
799     }
800
801     size_t begin = instructions().size();
802
803     emitOpcode(op_jtrue);
804     instructions().append(cond->index());
805     instructions().append(target->bind(begin, instructions().size()));
806     return target;
807 }
808
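// Mirror of the peephole in emitJumpIfTrue with the sense inverted:
// comparisons fuse into their negated jump forms (op_less becomes op_jnless),
// a preceding op_not lets us branch on the original operand with op_jtrue,
// and the null tests swap polarity.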
809 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
810 {
811     if (m_lastOpcodeID == op_less && target->isForward()) {
812         int dstIndex;
813         int src1Index;
814         int src2Index;
815
816         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
817
818         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
819             rewindBinaryOp();
820
821             size_t begin = instructions().size();
822             emitOpcode(op_jnless);
823             instructions().append(src1Index);
824             instructions().append(src2Index);
825             instructions().append(target->bind(begin, instructions().size()));
826             return target;
827         }
828     } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
829         int dstIndex;
830         int src1Index;
831         int src2Index;
832
833         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
834
835         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
836             rewindBinaryOp();
837
838             size_t begin = instructions().size();
839             emitOpcode(op_jnlesseq);
840             instructions().append(src1Index);
841             instructions().append(src2Index);
842             instructions().append(target->bind(begin, instructions().size()));
843             return target;
844         }
845     } else if (m_lastOpcodeID == op_greater && target->isForward()) {
846         int dstIndex;
847         int src1Index;
848         int src2Index;
849
850         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
851
852         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
853             rewindBinaryOp();
854
855             size_t begin = instructions().size();
856             emitOpcode(op_jngreater);
857             instructions().append(src1Index);
858             instructions().append(src2Index);
859             instructions().append(target->bind(begin, instructions().size()));
860             return target;
861         }
862     } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
863         int dstIndex;
864         int src1Index;
865         int src2Index;
866
867         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
868
869         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
870             rewindBinaryOp();
871
872             size_t begin = instructions().size();
873             emitOpcode(op_jngreatereq);
874             instructions().append(src1Index);
875             instructions().append(src2Index);
876             instructions().append(target->bind(begin, instructions().size()));
877             return target;
878         }
879     } else if (m_lastOpcodeID == op_not) {
880         int dstIndex;
881         int srcIndex;
882
883         retrieveLastUnaryOp(dstIndex, srcIndex);
884
885         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
886             rewindUnaryOp();
887
888             size_t begin = instructions().size();
889             emitOpcode(op_jtrue);
890             instructions().append(srcIndex);
891             instructions().append(target->bind(begin, instructions().size()));
892             return target;
893         }
894     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
895         int dstIndex;
896         int srcIndex;
897
898         retrieveLastUnaryOp(dstIndex, srcIndex);
899
900         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
901             rewindUnaryOp();
902
903             size_t begin = instructions().size();
904             emitOpcode(op_jneq_null);
905             instructions().append(srcIndex);
906             instructions().append(target->bind(begin, instructions().size()));
907             return target;
908         }
909     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
910         int dstIndex;
911         int srcIndex;
912
913         retrieveLastUnaryOp(dstIndex, srcIndex);
914
915         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
916             rewindUnaryOp();
917
918             size_t begin = instructions().size();
919             emitOpcode(op_jeq_null);
920             instructions().append(srcIndex);
921             instructions().append(target->bind(begin, instructions().size()));
922             return target;
923         }
924     }
925
926     size_t begin = instructions().size();
927     emitOpcode(op_jfalse);
928     instructions().append(cond->index());
929     instructions().append(target->bind(begin, instructions().size()));
930     return target;
931 }
932
933 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
934 {
935     size_t begin = instructions().size();
936
937     emitOpcode(op_jneq_ptr);
938     instructions().append(cond->index());
939     instructions().append(Special::CallFunction);
940     instructions().append(target->bind(begin, instructions().size()));
941     return target;
942 }
943
944 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
945 {
946     size_t begin = instructions().size();
947
948     emitOpcode(op_jneq_ptr);
949     instructions().append(cond->index());
950     instructions().append(Special::ApplyFunction);
951     instructions().append(target->bind(begin, instructions().size()));
952     return target;
953 }
954
955 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
956 {
957     StringImpl* rep = ident.impl();
958     IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
959     if (result.isNewEntry)
960         m_codeBlock->addIdentifier(Identifier(m_vm, rep));
961
962     return result.iterator->value;
963 }
964
965 // We can't hash JSValue(), so we use a dedicated data member to cache it.
966 RegisterID* BytecodeGenerator::addConstantEmptyValue()
967 {
968     if (!m_emptyValueRegister) {
969         int index = m_nextConstantOffset;
970         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
971         ++m_nextConstantOffset;
972         m_codeBlock->addConstant(JSValue());
973         m_emptyValueRegister = &m_constantPoolRegisters[index];
974     }
975
976     return m_emptyValueRegister;
977 }
978
979 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
980 {
981     if (!v)
982         return addConstantEmptyValue();
983
984     int index = m_nextConstantOffset;
985     JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
986     if (result.isNewEntry) {
987         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
988         ++m_nextConstantOffset;
989         m_codeBlock->addConstant(v);
990     } else
991         index = result.iterator->value;
992     return &m_constantPoolRegisters[index];
993 }
994
995 unsigned BytecodeGenerator::addRegExp(RegExp* r)
996 {
997     return m_codeBlock->addRegExp(r);
998 }
999
1000 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
1001 {
1002     m_staticPropertyAnalyzer.mov(dst->index(), src->index());
1003
1004     emitOpcode(op_mov);
1005     instructions().append(dst->index());
1006     instructions().append(src->index());
1007     return dst;
1008 }
1009
1010 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
1011 {
1012     emitOpcode(opcodeID);
1013     instructions().append(dst->index());
1014     instructions().append(src->index());
1015     return dst;
1016 }
1017
1018 RegisterID* BytecodeGenerator::emitInc(RegisterID* srcDst)
1019 {
1020     emitOpcode(op_inc);
1021     instructions().append(srcDst->index());
1022     return srcDst;
1023 }
1024
1025 RegisterID* BytecodeGenerator::emitDec(RegisterID* srcDst)
1026 {
1027     emitOpcode(op_dec);
1028     instructions().append(srcDst->index());
1029     return srcDst;
1030 }
1031
1032 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1033 {
1034     emitOpcode(opcodeID);
1035     instructions().append(dst->index());
1036     instructions().append(src1->index());
1037     instructions().append(src2->index());
1038
1039     if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
1040         opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
1041         instructions().append(types.toInt());
1042
1043     return dst;
1044 }
1045
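// If the left operand is the freshly computed result of op_typeof and the
// right operand is a constant type-name string, the typeof/compare pair is
// rewound and replaced with a single specialized opcode, e.g.
//   typeof x == "number"  =>  op_is_number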
1046 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1047 {
1048     if (m_lastOpcodeID == op_typeof) {
1049         int dstIndex;
1050         int srcIndex;
1051
1052         retrieveLastUnaryOp(dstIndex, srcIndex);
1053
1054         if (src1->index() == dstIndex
1055             && src1->isTemporary()
1056             && m_codeBlock->isConstantRegisterIndex(src2->index())
1057             && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1058             const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1059             if (value == "undefined") {
1060                 rewindUnaryOp();
1061                 emitOpcode(op_is_undefined);
1062                 instructions().append(dst->index());
1063                 instructions().append(srcIndex);
1064                 return dst;
1065             }
1066             if (value == "boolean") {
1067                 rewindUnaryOp();
1068                 emitOpcode(op_is_boolean);
1069                 instructions().append(dst->index());
1070                 instructions().append(srcIndex);
1071                 return dst;
1072             }
1073             if (value == "number") {
1074                 rewindUnaryOp();
1075                 emitOpcode(op_is_number);
1076                 instructions().append(dst->index());
1077                 instructions().append(srcIndex);
1078                 return dst;
1079             }
1080             if (value == "string") {
1081                 rewindUnaryOp();
1082                 emitOpcode(op_is_string);
1083                 instructions().append(dst->index());
1084                 instructions().append(srcIndex);
1085                 return dst;
1086             }
1087             if (value == "object") {
1088                 rewindUnaryOp();
1089                 emitOpcode(op_is_object);
1090                 instructions().append(dst->index());
1091                 instructions().append(srcIndex);
1092                 return dst;
1093             }
1094             if (value == "function") {
1095                 rewindUnaryOp();
1096                 emitOpcode(op_is_function);
1097                 instructions().append(dst->index());
1098                 instructions().append(srcIndex);
1099                 return dst;
1100             }
1101         }
1102     }
1103
1104     emitOpcode(opcodeID);
1105     instructions().append(dst->index());
1106     instructions().append(src1->index());
1107     instructions().append(src2->index());
1108     return dst;
1109 }
1110
1111 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1112 {
1113     return emitLoad(dst, jsBoolean(b));
1114 }
1115
1116 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1117 {
1118     // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1119     // Later we can do the extra work to handle that like the other cases.  They also don't
1120     // work correctly with NaN as a key.
1121     if (std::isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1122         return emitLoad(dst, jsNumber(number));
1123     JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->value;
1124     if (!valueInMap)
1125         valueInMap = jsNumber(number);
1126     return emitLoad(dst, valueInMap);
1127 }
1128
1129 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1130 {
1131     JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
1132     if (!stringInMap)
1133         stringInMap = jsOwnedString(vm(), identifier.string());
1134     return emitLoad(dst, JSValue(stringInMap));
1135 }
1136
1137 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1138 {
1139     RegisterID* constantID = addConstantValue(v);
1140     if (dst)
1141         return emitMove(dst, constantID);
1142     return constantID;
1143 }
1144
1145 RegisterID* BytecodeGenerator::emitLoadGlobalObject(RegisterID* dst)
1146 {
1147     if (!m_globalObjectRegister) {
1148         int index = m_nextConstantOffset;
1149         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1150         ++m_nextConstantOffset;
1151         m_codeBlock->addConstant(JSValue());
1152         m_globalObjectRegister = &m_constantPoolRegisters[index];
1153         m_codeBlock->setGlobalObjectRegister(VirtualRegister(index));
1154     }
1155     if (dst)
1156         emitMove(dst, m_globalObjectRegister);
1157     return m_globalObjectRegister;
1158 }
1159
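// Resolves an identifier to a register on this frame when the symbol table
// statically knows where it lives; returns a null Local otherwise. Touching
// 'arguments' first makes sure the arguments object will exist.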
1160 Local BytecodeGenerator::local(const Identifier& property)
1161 {
1162     if (property == propertyNames().thisIdentifier)
1163         return Local(thisRegister(), ReadOnly);
1164
1165     if (property == propertyNames().arguments)
1166         createArgumentsIfNecessary();
1167
1168     if (!shouldOptimizeLocals())
1169         return Local();
1170
1171     SymbolTableEntry entry = symbolTable().get(property.impl());
1172     if (entry.isNull())
1173         return Local();
1174
1175     RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1176     return Local(local, entry.getAttributes());
1177 }
1178
1179 Local BytecodeGenerator::constLocal(const Identifier& property)
1180 {
1181     if (m_codeType != FunctionCode)
1182         return Local();
1183
1184     SymbolTableEntry entry = symbolTable().get(property.impl());
1185     if (entry.isNull())
1186         return Local();
1187
1188     RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1189     return Local(local, entry.getAttributes());
1190 }
1191
1192 void BytecodeGenerator::emitCheckHasInstance(RegisterID* dst, RegisterID* value, RegisterID* base, Label* target)
1193 {
1194     size_t begin = instructions().size();
1195     emitOpcode(op_check_has_instance);
1196     instructions().append(dst->index());
1197     instructions().append(value->index());
1198     instructions().append(base->index());
1199     instructions().append(target->bind(begin, instructions().size()));
1200 }
1201
1202 // Indicates the least upper bound of resolve type based on local scope. The bytecode linker
1203 // will start with this ResolveType and compute the least upper bound including intercepting scopes.
1204 ResolveType BytecodeGenerator::resolveType()
1205 {
1206     if (m_localScopeDepth)
1207         return Dynamic;
1208     if (m_symbolTable && m_symbolTable->usesNonStrictEval())
1209         return GlobalPropertyWithVarInjectionChecks;
1210     return GlobalProperty;
1211 }
1212
1213 RegisterID* BytecodeGenerator::emitResolveScope(RegisterID* dst, const Identifier& identifier)
1214 {
1215     ASSERT(!m_symbolTable || !m_symbolTable->contains(identifier.impl()) || resolveType() == Dynamic);
1216
1217     // resolve_scope dst, id, ResolveType, depth
1218     emitOpcode(op_resolve_scope);
1219     instructions().append(kill(dst));
1220     instructions().append(addConstant(identifier));
1221     instructions().append(resolveType());
1222     instructions().append(0);
1223     return dst;
1224 }
1225
1226 RegisterID* BytecodeGenerator::emitGetFromScope(RegisterID* dst, RegisterID* scope, const Identifier& identifier, ResolveMode resolveMode)
1227 {
1228     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1229
1230     // get_from_scope dst, scope, id, ResolveModeAndType, Structure, Operand
1231     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_from_scope);
1232     instructions().append(kill(dst));
1233     instructions().append(scope->index());
1234     instructions().append(addConstant(identifier));
1235     instructions().append(ResolveModeAndType(resolveMode, resolveType()).operand());
1236     instructions().append(0);
1237     instructions().append(0);
1238     instructions().append(profile);
1239     return dst;
1240 }
1241
1242 RegisterID* BytecodeGenerator::emitPutToScope(RegisterID* scope, const Identifier& identifier, RegisterID* value, ResolveMode resolveMode)
1243 {
1244     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1245
1246     // put_to_scope scope, id, value, ResolveModeAndType, Structure, Operand
1247     emitOpcode(op_put_to_scope);
1248     instructions().append(scope->index());
1249     instructions().append(addConstant(identifier));
1250     instructions().append(value->index());
1251     instructions().append(ResolveModeAndType(resolveMode, resolveType()).operand());
1252     instructions().append(0);
1253     instructions().append(0);
1254     return value;
1255 }
1256
1257 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
1258 {
1259     emitOpcode(op_instanceof);
1260     instructions().append(dst->index());
1261     instructions().append(value->index());
1262     instructions().append(basePrototype->index());
1263     return dst;
1264 }
1265
1266 RegisterID* BytecodeGenerator::emitInitGlobalConst(const Identifier& identifier, RegisterID* value)
1267 {
1268     ASSERT(m_codeType == GlobalCode);
1269     emitOpcode(op_init_global_const_nop);
1270     instructions().append(0);
1271     instructions().append(value->index());
1272     instructions().append(0);
1273     instructions().append(addConstant(identifier));
1274     return value;
1275 }
1276
1277 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1278 {
1279     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1280
1281     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_id);
1282     instructions().append(kill(dst));
1283     instructions().append(base->index());
1284     instructions().append(addConstant(property));
1285     instructions().append(0);
1286     instructions().append(0);
1287     instructions().append(0);
1288     instructions().append(0);
1289     instructions().append(profile);
1290     return dst;
1291 }
1292
1293 RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
1294 {
1295     emitOpcode(op_get_arguments_length);
1296     instructions().append(dst->index());
1297     ASSERT(base->virtualRegister() == m_codeBlock->argumentsRegister());
1298     instructions().append(base->index());
1299     instructions().append(addConstant(propertyNames().length));
1300     return dst;
1301 }
1302
1303 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1304 {
1305     unsigned propertyIndex = addConstant(property);
1306
1307     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1308
1309     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1310
1311     emitOpcode(op_put_by_id);
1312     instructions().append(base->index());
1313     instructions().append(propertyIndex);
1314     instructions().append(value->index());
1315     instructions().append(0);
1316     instructions().append(0);
1317     instructions().append(0);
1318     instructions().append(0);
1319     instructions().append(0);
1320     return value;
1321 }
1322
1323 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1324 {
1325     unsigned propertyIndex = addConstant(property);
1326
1327     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1328
1329     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1330     
1331     emitOpcode(op_put_by_id);
1332     instructions().append(base->index());
1333     instructions().append(propertyIndex);
1334     instructions().append(value->index());
1335     instructions().append(0);
1336     instructions().append(0);
1337     instructions().append(0);
1338     instructions().append(0);
1339     instructions().append(
1340         property != m_vm->propertyNames->underscoreProto
1341         && PropertyName(property).asIndex() == PropertyName::NotAnIndex);
1342     return value;
1343 }
1344
1345 void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1346 {
1347     unsigned propertyIndex = addConstant(property);
1348
1349     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1350
1351     emitOpcode(op_put_getter_setter);
1352     instructions().append(base->index());
1353     instructions().append(propertyIndex);
1354     instructions().append(getter->index());
1355     instructions().append(setter->index());
1356 }
1357
1358 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1359 {
1360     emitOpcode(op_del_by_id);
1361     instructions().append(dst->index());
1362     instructions().append(base->index());
1363     instructions().append(addConstant(property));
1364     return dst;
1365 }
1366
1367 RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1368 {
1369     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1370     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_argument_by_val);
1371     instructions().append(kill(dst));
1372     ASSERT(base->virtualRegister() == m_codeBlock->argumentsRegister());
1373     instructions().append(base->index());
1374     instructions().append(property->index());
1375     instructions().append(arrayProfile);
1376     instructions().append(profile);
1377     return dst;
1378 }
1379
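// If the subscript is the property register of an enclosing for-in loop,
// emit op_get_by_pname so the runtime can use the enumeration's cached index
// instead of a generic by-value lookup.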
1380 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1381 {
1382     for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1383         ForInContext& context = m_forInContextStack[i - 1];
1384         if (context.propertyRegister == property) {
1385             emitOpcode(op_get_by_pname);
1386             instructions().append(dst->index());
1387             instructions().append(base->index());
1388             instructions().append(property->index());
1389             instructions().append(context.expectedSubscriptRegister->index());
1390             instructions().append(context.iterRegister->index());
1391             instructions().append(context.indexRegister->index());
1392             return dst;
1393         }
1394     }
1395     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1396     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_val);
1397     instructions().append(kill(dst));
1398     instructions().append(base->index());
1399     instructions().append(property->index());
1400     instructions().append(arrayProfile);
1401     instructions().append(profile);
1402     return dst;
1403 }
1404
1405 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1406 {
1407     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1408     emitOpcode(op_put_by_val);
1409     instructions().append(base->index());
1410     instructions().append(property->index());
1411     instructions().append(value->index());
1412     instructions().append(arrayProfile);
1413     return value;
1414 }
1415
1416 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1417 {
1418     emitOpcode(op_del_by_val);
1419     instructions().append(dst->index());
1420     instructions().append(base->index());
1421     instructions().append(property->index());
1422     return dst;
1423 }
1424
1425 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1426 {
1427     emitOpcode(op_put_by_index);
1428     instructions().append(base->index());
1429     instructions().append(index);
1430     instructions().append(value->index());
1431     return value;
1432 }
1433
1434 RegisterID* BytecodeGenerator::emitCreateThis(RegisterID* dst)
1435 {
1436     RefPtr<RegisterID> func = newTemporary(); 
1437
1438     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1439     emitOpcode(op_get_callee);
1440     instructions().append(func->index());
1441     instructions().append(0);
1442
1443     size_t begin = instructions().size();
1444     m_staticPropertyAnalyzer.createThis(m_thisRegister.index(), begin + 3);
1445
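    // The trailing zero is op_create_this's inline-capacity operand. The static property
    // analyzer was handed its offset (begin + 3) above so it can patch in an estimate of
    // how many property slots to allocate inline once it has seen the puts on 'this'.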
1446     emitOpcode(op_create_this); 
1447     instructions().append(m_thisRegister.index()); 
1448     instructions().append(func->index()); 
1449     instructions().append(0);
1450     return dst;
1451 }
1452
1453 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1454 {
1455     size_t begin = instructions().size();
1456     m_staticPropertyAnalyzer.newObject(dst->index(), begin + 2);
1457
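    // As with op_create_this, the zero operand is an inline-capacity hint that the static
    // property analyzer patches later via the offset (begin + 2) registered above.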
1458     emitOpcode(op_new_object);
1459     instructions().append(dst->index());
1460     instructions().append(0);
1461     instructions().append(newObjectAllocationProfile());
1462     return dst;
1463 }
1464
1465 unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
1466 {
1467     return m_codeBlock->addConstantBuffer(length);
1468 }
1469
1470 JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1471 {
1472     JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
1473     if (!stringInMap) {
1474         stringInMap = jsString(vm(), identifier.string());
1475         addConstantValue(stringInMap);
1476     }
1477     return stringInMap;
1478 }
1479
1480 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
1481 {
1482 #if !ASSERT_DISABLED
1483     unsigned checkLength = 0;
1484 #endif
1485     bool hadVariableExpression = false;
1486     if (length) {
1487         for (ElementNode* n = elements; n; n = n->next()) {
1488             if (!n->value()->isConstant()) {
1489                 hadVariableExpression = true;
1490                 break;
1491             }
1492             if (n->elision())
1493                 break;
1494 #if !ASSERT_DISABLED
1495             checkLength++;
1496 #endif
1497         }
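        // If every element is a constant (and there were no elisions), copy the values into
        // a constant buffer and emit op_new_array_buffer, so the array can be materialized
        // without evaluating each element at runtime.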
1498         if (!hadVariableExpression) {
1499             ASSERT(length == checkLength);
1500             unsigned constantBufferIndex = addConstantBuffer(length);
1501             JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex).data();
1502             unsigned index = 0;
1503             for (ElementNode* n = elements; index < length; n = n->next()) {
1504                 ASSERT(n->value()->isConstant());
1505                 constantBuffer[index++] = static_cast<ConstantNode*>(n->value())->jsValue(*this);
1506             }
1507             emitOpcode(op_new_array_buffer);
1508             instructions().append(dst->index());
1509             instructions().append(constantBufferIndex);
1510             instructions().append(length);
1511             instructions().append(newArrayAllocationProfile());
1512             return dst;
1513         }
1514     }
1515
1516     Vector<RefPtr<RegisterID>, 16, UnsafeVectorOverflow> argv;
1517     for (ElementNode* n = elements; n; n = n->next()) {
1518         if (n->elision())
1519             break;
1520         argv.append(newTemporary());
1521         // op_new_array requires the initial values to be a sequential range of registers
1522         ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() - 1);
1523         emitNode(argv.last().get(), n->value());
1524     }
1525     emitOpcode(op_new_array);
1526     instructions().append(dst->index());
1527     instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1528     instructions().append(argv.size()); // argc
1529     instructions().append(newArrayAllocationProfile());
1530     return dst;
1531 }
1532
1533 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
1534 {
1535     return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(function)), false);
1536 }
1537
1538 RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
1539 {
1540     FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
1541     if (ptr.isNewEntry)
1542         ptr.iterator->value = m_codeBlock->addFunctionDecl(makeFunction(function));
1543     return emitNewFunctionInternal(dst, ptr.iterator->value, true);
1544 }
1545
1546 RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
1547 {
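    // The new function closes over the current scope, so the activation object (if one is
    // needed) must exist before op_new_func captures the scope chain.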
1548     createActivationIfNecessary();
1549     emitOpcode(op_new_func);
1550     instructions().append(dst->index());
1551     instructions().append(index);
1552     instructions().append(doNullCheck);
1553     return dst;
1554 }
1555
1556 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1557 {
1558     emitOpcode(op_new_regexp);
1559     instructions().append(dst->index());
1560     instructions().append(addRegExp(regExp));
1561     return dst;
1562 }
1563
1564 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1565 {
1566     FunctionBodyNode* function = n->body();
1567     unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));
1568     
1569     createActivationIfNecessary();
1570     emitOpcode(op_new_func_exp);
1571     instructions().append(r0->index());
1572     instructions().append(index);
1573     return r0;
1574 }
1575
1576 RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1577 {
1578     return emitCall(op_call, dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd);
1579 }
1580
1581 void BytecodeGenerator::createArgumentsIfNecessary()
1582 {
1583     if (m_codeType != FunctionCode)
1584         return;
1585     
1586     if (!m_codeBlock->usesArguments())
1587         return;
1588
1589     if (shouldTearOffArgumentsEagerly())
1590         return;
1591
1592     emitOpcode(op_create_arguments);
1593     instructions().append(m_codeBlock->argumentsRegister().offset());
1594 }
1595
1596 void BytecodeGenerator::createActivationIfNecessary()
1597 {
1598     if (m_hasCreatedActivation)
1599         return;
1600     if (!m_codeBlock->needsFullScopeChain())
1601         return;
1602     emitOpcode(op_create_activation);
1603     instructions().append(m_activationRegister->index());
1604 }
1605
1606 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1607 {
1608     return emitCall(op_call_eval, dst, func, NoExpectedFunction, callArguments, divot, divotStart, divotEnd);
1609 }
1610
1611 ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
1612 {
1613     if (identifier == m_vm->propertyNames->Object)
1614         return ExpectObjectConstructor;
1615     if (identifier == m_vm->propertyNames->Array)
1616         return ExpectArrayConstructor;
1617     return NoExpectedFunction;
1618 }
1619
1620 ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label* done)
1621 {
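    // Guarded inlining for calls and constructions of the built-in Object and Array
    // constructors: op_jneq_ptr falls through to an inline allocation when the callee really
    // is the expected constructor, and jumps to 'realCall' (the generic call path) otherwise.
    // For example, "new Object()" becomes op_new_object and "new Array(n)" becomes
    // op_new_array_with_size.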
1622     RefPtr<Label> realCall = newLabel();
1623     switch (expectedFunction) {
1624     case ExpectObjectConstructor: {
1625         // If the number of arguments is non-zero, then we can't do anything interesting.
1626         if (callArguments.argumentCountIncludingThis() >= 2)
1627             return NoExpectedFunction;
1628         
1629         size_t begin = instructions().size();
1630         emitOpcode(op_jneq_ptr);
1631         instructions().append(func->index());
1632         instructions().append(Special::ObjectConstructor);
1633         instructions().append(realCall->bind(begin, instructions().size()));
1634         
1635         if (dst != ignoredResult())
1636             emitNewObject(dst);
1637         break;
1638     }
1639         
1640     case ExpectArrayConstructor: {
1641         // If you're doing anything other than "new Array()" or "new Array(foo)" then we
1642         // don't inline it, for now. The only reason is that call arguments are in
1643         // the opposite order of what op_new_array expects, so we'd either need to change
1644         // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
1645         // things sounds like it's worth it.
1646         if (callArguments.argumentCountIncludingThis() > 2)
1647             return NoExpectedFunction;
1648         
1649         size_t begin = instructions().size();
1650         emitOpcode(op_jneq_ptr);
1651         instructions().append(func->index());
1652         instructions().append(Special::ArrayConstructor);
1653         instructions().append(realCall->bind(begin, instructions().size()));
1654         
1655         if (dst != ignoredResult()) {
1656             if (callArguments.argumentCountIncludingThis() == 2) {
1657                 emitOpcode(op_new_array_with_size);
1658                 instructions().append(dst->index());
1659                 instructions().append(callArguments.argumentRegister(0)->index());
1660                 instructions().append(newArrayAllocationProfile());
1661             } else {
1662                 ASSERT(callArguments.argumentCountIncludingThis() == 1);
1663                 emitOpcode(op_new_array);
1664                 instructions().append(dst->index());
1665                 instructions().append(0);
1666                 instructions().append(0);
1667                 instructions().append(newArrayAllocationProfile());
1668             }
1669         }
1670         break;
1671     }
1672         
1673     default:
1674         ASSERT(expectedFunction == NoExpectedFunction);
1675         return NoExpectedFunction;
1676     }
1677     
1678     size_t begin = instructions().size();
1679     emitOpcode(op_jmp);
1680     instructions().append(done->bind(begin, instructions().size()));
1681     emitLabel(realCall.get());
1682     
1683     return expectedFunction;
1684 }
1685
1686 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1687 {
1688     ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1689     ASSERT(func->refCount());
1690
1691     if (m_shouldEmitProfileHooks)
1692         emitMove(callArguments.profileHookRegister(), func);
1693
1694     // Generate code for arguments.
1695     unsigned argument = 0;
1696     for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
1697         emitNode(callArguments.argumentRegister(argument++), n);
1698
1699     // Reserve space for call frame.
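    // These temporaries are never used directly; holding them just keeps the register
    // allocator from handing out the slots that the callee's call frame header will occupy.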
1700     Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
1701     for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1702         callFrame.append(newTemporary());
1703
1704     if (m_shouldEmitProfileHooks) {
1705         emitOpcode(op_profile_will_call);
1706         instructions().append(callArguments.profileHookRegister()->index());
1707     }
1708
1709     emitExpressionInfo(divot, divotStart, divotEnd);
1710
1711     RefPtr<Label> done = newLabel();
1712     expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1713     
1714     // Emit call.
1715     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1716     UnlinkedValueProfile profile = emitProfiledOpcode(opcodeID);
1717     ASSERT(dst);
1718     ASSERT(dst != ignoredResult());
1719     instructions().append(dst->index()); // result
1720     instructions().append(func->index()); // func
1721     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1722     instructions().append(callArguments.registerOffset()); // registerOffset
1723 #if ENABLE(LLINT)
1724     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1725 #else
1726     instructions().append(0);
1727 #endif
1728     instructions().append(arrayProfile);
1729     instructions().append(profile);
1730     
1731     if (expectedFunction != NoExpectedFunction)
1732         emitLabel(done.get());
1733
1734     if (m_shouldEmitProfileHooks) {
1735         emitOpcode(op_profile_did_call);
1736         instructions().append(callArguments.profileHookRegister()->index());
1737     }
1738
1739     return dst;
1740 }
1741
1742 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1743 {
1744     if (m_shouldEmitProfileHooks) {
1745         emitMove(profileHookRegister, func);
1746         emitOpcode(op_profile_will_call);
1747         instructions().append(profileHookRegister->index());
1748     }
1749     
1750     emitExpressionInfo(divot, divotStart, divotEnd);
1751
1752     // Emit call.
1753     UnlinkedValueProfile profile = emitProfiledOpcode(op_call_varargs);
1754     ASSERT(dst != ignoredResult());
1755     instructions().append(dst->index());
1756     instructions().append(func->index());
1757     instructions().append(thisRegister->index());
1758     instructions().append(arguments->index());
1759     instructions().append(firstFreeRegister->index());
1760     instructions().append(0); // Pad to make it as big as an op_call.
1761     instructions().append(profile);
1762     if (m_shouldEmitProfileHooks) {
1763         emitOpcode(op_profile_did_call);
1764         instructions().append(profileHookRegister->index());
1765     }
1766     return dst;
1767 }
1768
1769 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1770 {
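    // Before returning, detach any activation and arguments objects from the register file
    // (tear-off) so that closures and an escaped 'arguments' object remain usable after this
    // frame is torn down.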
1771     if (m_codeBlock->needsFullScopeChain()) {
1772         emitOpcode(op_tear_off_activation);
1773         instructions().append(m_activationRegister->index());
1774     }
1775
1776     if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !isStrictMode()) {
1777         emitOpcode(op_tear_off_arguments);
1778         instructions().append(m_codeBlock->argumentsRegister().offset());
1779         instructions().append(m_activationRegister ? m_activationRegister->index() : emitLoad(0, JSValue())->index());
1780     }
1781
1782     // Constructors use op_ret_object_or_this to check that the result is an
1783     // object, unless we can trivially determine the check is not
1784     // necessary (currently, if the return value is 'this').
1785     if (isConstructor() && (src->index() != m_thisRegister.index())) {
1786         emitOpcode(op_ret_object_or_this);
1787         instructions().append(src->index());
1788         instructions().append(m_thisRegister.index());
1789         return src;
1790     }
1791     return emitUnaryNoDstOp(op_ret, src);
1792 }
1793
1794 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
1795 {
1796     emitOpcode(opcodeID);
1797     instructions().append(src->index());
1798     return src;
1799 }
1800
1801 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1802 {
1803     ASSERT(func->refCount());
1804
1805     if (m_shouldEmitProfileHooks)
1806         emitMove(callArguments.profileHookRegister(), func);
1807
1808     // Generate code for arguments.
1809     unsigned argument = 0;
1810     if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
1811         for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
1812             emitNode(callArguments.argumentRegister(argument++), n);
1813     }
1814
1815     if (m_shouldEmitProfileHooks) {
1816         emitOpcode(op_profile_will_call);
1817         instructions().append(callArguments.profileHookRegister()->index());
1818     }
1819
1820     // Reserve space for call frame.
1821     Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
1822     for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1823         callFrame.append(newTemporary());
1824
1825     emitExpressionInfo(divot, divotStart, divotEnd);
1826     
1827     RefPtr<Label> done = newLabel();
1828     expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1829
1830     UnlinkedValueProfile profile = emitProfiledOpcode(op_construct);
1831     ASSERT(dst != ignoredResult());
1832     instructions().append(dst->index());
1833     instructions().append(func->index()); // func
1834     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1835     instructions().append(callArguments.registerOffset()); // registerOffset
1836 #if ENABLE(LLINT)
1837     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1838 #else
1839     instructions().append(0);
1840 #endif
1841     instructions().append(0);
1842     instructions().append(profile);
1843
1844     if (expectedFunction != NoExpectedFunction)
1845         emitLabel(done.get());
1846
1847     if (m_shouldEmitProfileHooks) {
1848         emitOpcode(op_profile_did_call);
1849         instructions().append(callArguments.profileHookRegister()->index());
1850     }
1851
1852     return dst;
1853 }
1854
1855 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
1856 {
1857     emitOpcode(op_strcat);
1858     instructions().append(dst->index());
1859     instructions().append(src->index());
1860     instructions().append(count);
1861
1862     return dst;
1863 }
1864
1865 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
1866 {
1867     emitOpcode(op_to_primitive);
1868     instructions().append(dst->index());
1869     instructions().append(src->index());
1870 }
1871
1872 RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* scope)
1873 {
1874     ControlFlowContext context;
1875     context.isFinallyBlock = false;
1876     m_scopeContextStack.append(context);
1877     m_localScopeDepth++;
1878
1879     return emitUnaryNoDstOp(op_push_with_scope, scope);
1880 }
1881
1882 void BytecodeGenerator::emitPopScope()
1883 {
1884     ASSERT(m_scopeContextStack.size());
1885     ASSERT(!m_scopeContextStack.last().isFinallyBlock);
1886
1887     emitOpcode(op_pop_scope);
1888
1889     m_scopeContextStack.removeLast();
1890     m_localScopeDepth--;
1891 }
1892
1893 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, unsigned line, unsigned charOffset, unsigned lineStart)
1894 {
1895 #if ENABLE(DEBUG_WITH_BREAKPOINT)
1896     if (debugHookID != DidReachBreakpoint)
1897         return;
1898 #else
1899     if (!m_shouldEmitDebugHooks)
1900         return;
1901 #endif
1902     JSTextPosition divot(line, charOffset, lineStart);
1903     emitExpressionInfo(divot, divot, divot);
1904     emitOpcode(op_debug);
1905     instructions().append(debugHookID);
1906 }
1907
1908 void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
1909 {
1910     ControlFlowContext scope;
1911     scope.isFinallyBlock = true;
1912     FinallyContext context = {
1913         finallyBlock,
1914         static_cast<unsigned>(m_scopeContextStack.size()),
1915         static_cast<unsigned>(m_switchContextStack.size()),
1916         static_cast<unsigned>(m_forInContextStack.size()),
1917         static_cast<unsigned>(m_tryContextStack.size()),
1918         static_cast<unsigned>(m_labelScopes.size()),
1919         m_finallyDepth,
1920         m_localScopeDepth
1921     };
1922     scope.finallyContext = context;
1923     m_scopeContextStack.append(scope);
1924     m_finallyDepth++;
1925 }
1926
1927 void BytecodeGenerator::popFinallyContext()
1928 {
1929     ASSERT(m_scopeContextStack.size());
1930     ASSERT(m_scopeContextStack.last().isFinallyBlock);
1931     ASSERT(m_finallyDepth > 0);
1932     m_scopeContextStack.removeLast();
1933     m_finallyDepth--;
1934 }
1935
1936 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
1937 {
1938     // Reclaim free label scopes.
1939     //
1940     // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
1941     // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
1942     // size 0, leading to segfaulty badness. We have yet to identify a valid cause within our code to
1943     // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
1944     // loop condition is a workaround.
1945     while (m_labelScopes.size()) {
1946         if (m_labelScopes.last().refCount())
1947             break;
1948         m_labelScopes.removeLast();
1949     }
1950
1951     if (!m_labelScopes.size())
1952         return 0;
1953
1954     // We special-case the following, which is a syntax error in Firefox:
1955     // label:
1956     //     break;
1957     if (name.isEmpty()) {
1958         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1959             LabelScope* scope = &m_labelScopes[i];
1960             if (scope->type() != LabelScope::NamedLabel) {
1961                 ASSERT(scope->breakTarget());
1962                 return scope;
1963             }
1964         }
1965         return 0;
1966     }
1967
1968     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1969         LabelScope* scope = &m_labelScopes[i];
1970         if (scope->name() && *scope->name() == name) {
1971             ASSERT(scope->breakTarget());
1972             return scope;
1973         }
1974     }
1975     return 0;
1976 }
1977
1978 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
1979 {
1980     // Reclaim free label scopes.
1981     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
1982         m_labelScopes.removeLast();
1983
1984     if (!m_labelScopes.size())
1985         return 0;
1986
1987     if (name.isEmpty()) {
1988         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1989             LabelScope* scope = &m_labelScopes[i];
1990             if (scope->type() == LabelScope::Loop) {
1991                 ASSERT(scope->continueTarget());
1992                 return scope;
1993             }
1994         }
1995         return 0;
1996     }
1997
1998     // Continue to the loop nested nearest to the label scope that matches
1999     // 'name'.
2000     LabelScope* result = 0;
2001     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2002         LabelScope* scope = &m_labelScopes[i];
2003         if (scope->type() == LabelScope::Loop) {
2004             ASSERT(scope->continueTarget());
2005             result = scope;
2006         }
2007         if (scope->name() && *scope->name() == name)
2008             return result; // may be 0
2009     }
2010     return 0;
2011 }
2012
2013 void BytecodeGenerator::emitComplexPopScopes(ControlFlowContext* topScope, ControlFlowContext* bottomScope)
2014 {
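    // A break, continue, or return that crosses finally blocks cannot simply jump out: each
    // finally body must run on the way. So we walk from the innermost scope outwards, popping
    // dynamic scopes and emitting a fresh inline copy of every finally block we pass, while
    // temporarily restoring the generator state that was current when that finally was pushed.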
2015     while (topScope > bottomScope) {
2016         // First we count the number of dynamic scopes we need to remove to get
2017         // to a finally block.
2018         int nNormalScopes = 0;
2019         while (topScope > bottomScope) {
2020             if (topScope->isFinallyBlock)
2021                 break;
2022             ++nNormalScopes;
2023             --topScope;
2024         }
2025
2026         if (nNormalScopes) {
2027             // We need to remove a number of dynamic scopes to get to the next
2028             // finally block
2029             while (nNormalScopes--)
2030                 emitOpcode(op_pop_scope);
2031
2032             // If topScope == bottomScope then there isn't a finally block left to emit.
2033             if (topScope == bottomScope)
2034                 return;
2035         }
2036         
2037         Vector<ControlFlowContext> savedScopeContextStack;
2038         Vector<SwitchInfo> savedSwitchContextStack;
2039         Vector<ForInContext> savedForInContextStack;
2040         Vector<TryContext> poppedTryContexts;
2041         LabelScopeStore savedLabelScopes;
2042         while (topScope > bottomScope && topScope->isFinallyBlock) {
2043             RefPtr<Label> beforeFinally = emitLabel(newLabel().get());
2044             
2045             // Save the current state of the world while instating the state of the world
2046             // for the finally block.
2047             FinallyContext finallyContext = topScope->finallyContext;
2048             bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
2049             bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
2050             bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
2051             bool flipTries = finallyContext.tryContextStackSize != m_tryContextStack.size();
2052             bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
2053             int topScopeIndex = -1;
2054             int bottomScopeIndex = -1;
2055             if (flipScopes) {
2056                 topScopeIndex = topScope - m_scopeContextStack.begin();
2057                 bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
2058                 savedScopeContextStack = m_scopeContextStack;
2059                 m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
2060             }
2061             if (flipSwitches) {
2062                 savedSwitchContextStack = m_switchContextStack;
2063                 m_switchContextStack.shrink(finallyContext.switchContextStackSize);
2064             }
2065             if (flipForIns) {
2066                 savedForInContextStack = m_forInContextStack;
2067                 m_forInContextStack.shrink(finallyContext.forInContextStackSize);
2068             }
2069             if (flipTries) {
2070                 while (m_tryContextStack.size() != finallyContext.tryContextStackSize) {
2071                     ASSERT(m_tryContextStack.size() > finallyContext.tryContextStackSize);
2072                     TryContext context = m_tryContextStack.last();
2073                     m_tryContextStack.removeLast();
2074                     TryRange range;
2075                     range.start = context.start;
2076                     range.end = beforeFinally;
2077                     range.tryData = context.tryData;
2078                     m_tryRanges.append(range);
2079                     poppedTryContexts.append(context);
2080                 }
2081             }
2082             if (flipLabelScopes) {
2083                 savedLabelScopes = m_labelScopes;
2084                 while (m_labelScopes.size() > finallyContext.labelScopesSize)
2085                     m_labelScopes.removeLast();
2086             }
2087             int savedFinallyDepth = m_finallyDepth;
2088             m_finallyDepth = finallyContext.finallyDepth;
2089             int savedDynamicScopeDepth = m_localScopeDepth;
2090             m_localScopeDepth = finallyContext.dynamicScopeDepth;
2091             
2092             // Emit the finally block.
2093             emitNode(finallyContext.finallyBlock);
2094             
2095             RefPtr<Label> afterFinally = emitLabel(newLabel().get());
2096             
2097             // Restore the state of the world.
2098             if (flipScopes) {
2099                 m_scopeContextStack = savedScopeContextStack;
2100                 topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
2101                 bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1.
2102             }
2103             if (flipSwitches)
2104                 m_switchContextStack = savedSwitchContextStack;
2105             if (flipForIns)
2106                 m_forInContextStack = savedForInContextStack;
2107             if (flipTries) {
2108                 ASSERT(m_tryContextStack.size() == finallyContext.tryContextStackSize);
2109                 for (unsigned i = poppedTryContexts.size(); i--;) {
2110                     TryContext context = poppedTryContexts[i];
2111                     context.start = afterFinally;
2112                     m_tryContextStack.append(context);
2113                 }
2114                 poppedTryContexts.clear();
2115             }
2116             if (flipLabelScopes)
2117                 m_labelScopes = savedLabelScopes;
2118             m_finallyDepth = savedFinallyDepth;
2119             m_localScopeDepth = savedDynamicScopeDepth;
2120             
2121             --topScope;
2122         }
2123     }
2124 }
2125
2126 void BytecodeGenerator::emitPopScopes(int targetScopeDepth)
2127 {
2128     ASSERT(scopeDepth() - targetScopeDepth >= 0);
2129
2130     size_t scopeDelta = scopeDepth() - targetScopeDepth;
2131     ASSERT(scopeDelta <= m_scopeContextStack.size());
2132     if (!scopeDelta)
2133         return;
2134
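    // If no finally blocks intervene we can simply pop the scopes; otherwise defer to
    // emitComplexPopScopes(), which interleaves the pops with inline copies of the finally
    // bodies.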
2135     if (!m_finallyDepth) {
2136         while (scopeDelta--)
2137             emitOpcode(op_pop_scope);
2138         return;
2139     }
2140
2141     emitComplexPopScopes(&m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
2142 }
2143
2144 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
2145 {
2146     size_t begin = instructions().size();
2147
2148     emitOpcode(op_get_pnames);
2149     instructions().append(dst->index());
2150     instructions().append(base->index());
2151     instructions().append(i->index());
2152     instructions().append(size->index());
2153     instructions().append(breakTarget->bind(begin, instructions().size()));
2154     return dst;
2155 }
2156
2157 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
2158 {
2159     size_t begin = instructions().size();
2160
2161     emitOpcode(op_next_pname);
2162     instructions().append(dst->index());
2163     instructions().append(base->index());
2164     instructions().append(i->index());
2165     instructions().append(size->index());
2166     instructions().append(iter->index());
2167     instructions().append(target->bind(begin, instructions().size()));
2168     return dst;
2169 }
2170
2171 TryData* BytecodeGenerator::pushTry(Label* start)
2172 {
2173     TryData tryData;
2174     tryData.target = newLabel();
2175     tryData.targetScopeDepth = UINT_MAX;
2176     m_tryData.append(tryData);
2177     TryData* result = &m_tryData.last();
2178     
2179     TryContext tryContext;
2180     tryContext.start = start;
2181     tryContext.tryData = result;
2182     
2183     m_tryContextStack.append(tryContext);
2184     
2185     return result;
2186 }
2187
2188 RegisterID* BytecodeGenerator::popTryAndEmitCatch(TryData* tryData, RegisterID* targetRegister, Label* end)
2189 {
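    // Close the try range at 'end' and bind the handler target here: op_catch stores the
    // caught exception value into targetRegister when the unwinder transfers control to it.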
2190     m_usesExceptions = true;
2191     
2192     ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);
2193     
2194     TryRange tryRange;
2195     tryRange.start = m_tryContextStack.last().start;
2196     tryRange.end = end;
2197     tryRange.tryData = m_tryContextStack.last().tryData;
2198     m_tryRanges.append(tryRange);
2199     m_tryContextStack.removeLast();
2200     
2201     emitLabel(tryRange.tryData->target.get());
2202     tryRange.tryData->targetScopeDepth = m_localScopeDepth;
2203
2204     emitOpcode(op_catch);
2205     instructions().append(targetRegister->index());
2206     return targetRegister;
2207 }
2208
2209 void BytecodeGenerator::emitThrowReferenceError(const String& message)
2210 {
2211     emitOpcode(op_throw_static_error);
2212     instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, message)))->index());
2213     instructions().append(true);
2214 }
2215
2216 void BytecodeGenerator::emitPushNameScope(const Identifier& property, RegisterID* value, unsigned attributes)
2217 {
2218     ControlFlowContext context;
2219     context.isFinallyBlock = false;
2220     m_scopeContextStack.append(context);
2221     m_localScopeDepth++;
2222
2223     emitOpcode(op_push_name_scope);
2224     instructions().append(addConstant(property));
2225     instructions().append(value->index());
2226     instructions().append(attributes);
2227 }
2228
2229 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
2230 {
2231     SwitchInfo info = { static_cast<uint32_t>(instructions().size()), type };
2232     switch (type) {
2233         case SwitchInfo::SwitchImmediate:
2234             emitOpcode(op_switch_imm);
2235             break;
2236         case SwitchInfo::SwitchCharacter:
2237             emitOpcode(op_switch_char);
2238             break;
2239         case SwitchInfo::SwitchString:
2240             emitOpcode(op_switch_string);
2241             break;
2242         default:
2243             RELEASE_ASSERT_NOT_REACHED();
2244     }
2245
2246     instructions().append(0); // placeholder for table index
2247     instructions().append(0); // placeholder for default target
2248     instructions().append(scrutineeRegister->index());
2249     m_switchContextStack.append(info);
2250 }
2251
2252 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2253 {
2254     UNUSED_PARAM(max);
2255     ASSERT(node->isNumber());
2256     double value = static_cast<NumberNode*>(node)->value();
2257     int32_t key = static_cast<int32_t>(value);
2258     ASSERT(key == value);
2259     ASSERT(key >= min);
2260     ASSERT(key <= max);
2261     return key - min;
2262 }
2263
2264 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2265 {
2266     UNUSED_PARAM(max);
2267     ASSERT(node->isString());
2268     StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2269     ASSERT(clause->length() == 1);
2270     
2271     int32_t key = (*clause)[0];
2272     ASSERT(key >= min);
2273     ASSERT(key <= max);
2274     return key - min;
2275 }
2276
2277 static void prepareJumpTableForSwitch(
2278     UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount,
2279     RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max,
2280     int32_t (*keyGetter)(ExpressionNode*, int32_t min, int32_t max))
2281 {
2282     jumpTable.min = min;
2283     jumpTable.branchOffsets.resize(max - min + 1);
2284     jumpTable.branchOffsets.fill(0);
2285     for (uint32_t i = 0; i < clauseCount; ++i) {
2286         // We're emitting this after the clause labels should have been fixed, so 
2287         // the labels should not be "forward" references
2288         ASSERT(!labels[i]->isForward());
2289         jumpTable.add(keyGetter(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2290     }
2291 }
2292
2293 static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2294 {
2295     for (uint32_t i = 0; i < clauseCount; ++i) {
2296         // We're emitting this after the clause labels should have been fixed, so 
2297         // the labels should not be "forward" references
2298         ASSERT(!labels[i]->isForward());
2299         
2300         ASSERT(nodes[i]->isString());
2301         StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
2302         jumpTable.offsetTable.add(clause, labels[i]->bind(switchAddress, switchAddress + 3));
2303     }
2304 }
2305
2306 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2307 {
2308     SwitchInfo switchInfo = m_switchContextStack.last();
2309     m_switchContextStack.removeLast();
2310     
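    // Patch the two placeholder operands that beginSwitch() emitted: operand 1 becomes the
    // index of the jump table added below, operand 2 the relative jump to the default clause.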
2311     switch (switchInfo.switchType) {
2312     case SwitchInfo::SwitchImmediate:
2313     case SwitchInfo::SwitchCharacter: {
2314         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfSwitchJumpTables();
2315         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2316
2317         UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addSwitchJumpTable();
2318         prepareJumpTableForSwitch(
2319             jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max,
2320             switchInfo.switchType == SwitchInfo::SwitchImmediate
2321                 ? keyForImmediateSwitch
2322                 : keyForCharacterSwitch); 
2323         break;
2324     }
2325         
2326     case SwitchInfo::SwitchString: {
2327         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2328         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2329
2330         UnlinkedStringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2331         prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2332         break;
2333     }
2334         
2335     default:
2336         RELEASE_ASSERT_NOT_REACHED();
2337         break;
2338     }
2339 }
2340
2341 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2342 {
2343     // It would be nice to do an even better job of identifying exactly where the expression is.
2344     // And we could make the caller pass the node pointer in, if there was some way of getting
2345     // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2346     // is still good enough to get us an accurate line number.
2347     m_expressionTooDeep = true;
2348     return newTemporary();
2349 }
2350
2351 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2352 {
2353     m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2354 }
2355
2356 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2357 {
2358     RegisterID* registerID = local(ident).get();
2359     if (!registerID || registerID->index() >= 0)
2360         return false;
2361     return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2362 }
2363
2364 void BytecodeGenerator::emitReadOnlyExceptionIfNeeded()
2365 {
2366     if (!isStrictMode())
2367         return;
2368     emitOpcode(op_throw_static_error);
2369     instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, StrictModeReadonlyPropertyWriteError)))->index());
2370     instructions().append(false);
2371 }
2372
2373 } // namespace JSC