Source/JavaScriptCore/bytecompiler/BytecodeGenerator.cpp
1 /*
2  * Copyright (C) 2008, 2009, 2012, 2013 Apple Inc. All rights reserved.
3  * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4  * Copyright (C) 2012 Igalia, S.L.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  *
10  * 1.  Redistributions of source code must retain the above copyright
11  *     notice, this list of conditions and the following disclaimer.
12  * 2.  Redistributions in binary form must reproduce the above copyright
13  *     notice, this list of conditions and the following disclaimer in the
14  *     documentation and/or other materials provided with the distribution.
15  * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16  *     its contributors may be used to endorse or promote products derived
17  *     from this software without specific prior written permission.
18  *
19  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29  */
30
31 #include "config.h"
32 #include "BytecodeGenerator.h"
33
34 #include "Interpreter.h"
35 #include "JSActivation.h"
36 #include "JSFunction.h"
37 #include "JSNameScope.h"
38 #include "LowLevelInterpreter.h"
39 #include "Operations.h"
40 #include "Options.h"
41 #include "StrongInlines.h"
42 #include "UnlinkedCodeBlock.h"
43 #include <wtf/StdLibExtras.h>
44 #include <wtf/text/WTFString.h>
45
46 using namespace std;
47
48 namespace JSC {
49
50 void Label::setLocation(unsigned location)
51 {
52     m_location = location;
53     
54     unsigned size = m_unresolvedJumps.size();
55     for (unsigned i = 0; i < size; ++i)
56         m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
57 }
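// Note on back-patching (a sketch based on the uses below; exact offsets are made
// up for illustration): a forward jump calls target->bind(begin, operandIndex)
// before the label has a location, and each such call ends up recorded in
// m_unresolvedJumps as the pair (jump opcode offset, operand slot). Once the label
// is placed, setLocation() rewrites every recorded operand with the relative
// displacement (m_location - jump opcode offset).
//
//   10: op_jmp <unresolved>     // bind(10, 11) records the pair (10, 11)
//   ...
//   25: <label bound here>      // setLocation(25) patches operand 11 to 15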
58
59 ParserError BytecodeGenerator::generate()
60 {
61     SamplingRegion samplingRegion("Bytecode Generation");
62     
63     m_codeBlock->setThisRegister(m_thisRegister.virtualRegister());
64
65     m_scopeNode->emitBytecode(*this);
66
67     m_staticPropertyAnalyzer.kill();
68
69     for (unsigned i = 0; i < m_tryRanges.size(); ++i) {
70         TryRange& range = m_tryRanges[i];
71         int start = range.start->bind();
72         int end = range.end->bind();
73         
74         // This will happen for empty try blocks and for some cases of finally blocks:
75         //
76         // try {
77         //    try {
78         //    } finally {
79         //        return 42;
80         //        // *HERE*
81         //    }
82         // } finally {
83         //    print("things");
84         // }
85         //
86         // The return will pop scopes to execute the outer finally block. But this includes
87         // popping the try context for the inner try. The try context is live in the fall-through
88         // part of the finally block not because we will emit a handler that overlaps the finally,
89         // but because we haven't yet had a chance to plant the catch target. Then when we finish
90         // emitting code for the outer finally block, we re-push the try context, this time with a
91         // new start index. But that means that the start index for the try range corresponding
92         // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
93         // than the end index of the try block. This is harmless since end < start handlers will
94         // never get matched in our logic, but we do the runtime a favor and choose to not emit
95         // such handlers at all.
96         if (end <= start)
97             continue;
98         
99         ASSERT(range.tryData->targetScopeDepth != UINT_MAX);
100         UnlinkedHandlerInfo info = {
101             static_cast<uint32_t>(start), static_cast<uint32_t>(end),
102             static_cast<uint32_t>(range.tryData->target->bind()),
103             range.tryData->targetScopeDepth
104         };
105         m_codeBlock->addExceptionHandler(info);
106     }
107     
108     m_codeBlock->instructions() = RefCountedArray<UnlinkedInstruction>(m_instructions);
109
110     m_codeBlock->shrinkToFit();
111
112     if (m_expressionTooDeep)
113         return ParserError(ParserError::OutOfMemory);
114     return ParserError(ParserError::ErrorNone);
115 }
116
117 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
118 {
119     ConcurrentJITLocker locker(symbolTable().m_lock);
120     int index = virtualRegisterForLocal(m_calleeRegisters.size()).offset();
121     SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
122     SymbolTable::Map::AddResult result = symbolTable().add(locker, ident.impl(), newEntry);
123
124     if (!result.isNewEntry) {
125         r0 = &registerFor(result.iterator->value.getIndex());
126         return false;
127     }
128
129     r0 = addVar();
130     return true;
131 }
132
133 void BytecodeGenerator::preserveLastVar()
134 {
135     if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
136         m_lastVar = &m_calleeRegisters.last();
137 }
138
139 BytecodeGenerator::BytecodeGenerator(VM& vm, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
140     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
141     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
142     , m_symbolTable(0)
143     , m_scopeNode(programNode)
144     , m_codeBlock(vm, codeBlock)
145     , m_thisRegister(CallFrame::thisArgumentOffset())
146     , m_emptyValueRegister(0)
147     , m_globalObjectRegister(0)
148     , m_finallyDepth(0)
149     , m_localScopeDepth(0)
150     , m_codeType(GlobalCode)
151     , m_nextConstantOffset(0)
152     , m_globalConstantIndex(0)
153     , m_hasCreatedActivation(true)
154     , m_firstLazyFunction(0)
155     , m_lastLazyFunction(0)
156     , m_staticPropertyAnalyzer(&m_instructions)
157     , m_vm(&vm)
158     , m_lastOpcodeID(op_end)
159 #ifndef NDEBUG
160     , m_lastOpcodePosition(0)
161 #endif
162     , m_stack(vm, wtfThreadData().stack())
163     , m_usesExceptions(false)
164     , m_expressionTooDeep(false)
165 {
166     if (m_shouldEmitDebugHooks)
167         m_codeBlock->setNeedsFullScopeChain(true);
168
169     m_codeBlock->setNumParameters(1); // Allocate space for "this"
170
171     emitOpcode(op_enter);
172
173     const VarStack& varStack = programNode->varStack();
174     const FunctionStack& functionStack = programNode->functionStack();
175
176     for (size_t i = 0; i < functionStack.size(); ++i) {
177         FunctionBodyNode* function = functionStack[i];
178         UnlinkedFunctionExecutable* unlinkedFunction = makeFunction(function);
179         codeBlock->addFunctionDeclaration(*m_vm, function->ident(), unlinkedFunction);
180     }
181
182     for (size_t i = 0; i < varStack.size(); ++i)
183         codeBlock->addVariableDeclaration(varStack[i].first, !!(varStack[i].second & DeclarationStacks::IsConstant));
184
185 }
186
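// Rough map of the prologue the function-code constructor below emits, in the
// order of the statements that follow: op_enter; an activation register that is
// initialized lazily when a full scope chain is needed; the unmodified/assignable
// 'arguments' register pair when the arguments object may be reified; captured
// parameters, functions, and vars (placed first so activations need not step over
// non-captured locals); non-captured functions (possibly created lazily) and vars;
// the parameters and the callee's name; and finally 'this' setup plus bytecode for
// any deconstructed parameters.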
187 BytecodeGenerator::BytecodeGenerator(VM& vm, FunctionBodyNode* functionBody, UnlinkedFunctionCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
188     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
189     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
190     , m_symbolTable(codeBlock->symbolTable())
191     , m_scopeNode(functionBody)
192     , m_codeBlock(vm, codeBlock)
193     , m_activationRegister(0)
194     , m_emptyValueRegister(0)
195     , m_globalObjectRegister(0)
196     , m_finallyDepth(0)
197     , m_localScopeDepth(0)
198     , m_codeType(FunctionCode)
199     , m_nextConstantOffset(0)
200     , m_globalConstantIndex(0)
201     , m_hasCreatedActivation(false)
202     , m_firstLazyFunction(0)
203     , m_lastLazyFunction(0)
204     , m_staticPropertyAnalyzer(&m_instructions)
205     , m_vm(&vm)
206     , m_lastOpcodeID(op_end)
207 #ifndef NDEBUG
208     , m_lastOpcodePosition(0)
209 #endif
210     , m_stack(vm, wtfThreadData().stack())
211     , m_usesExceptions(false)
212     , m_expressionTooDeep(false)
213 {
214     if (m_shouldEmitDebugHooks)
215         m_codeBlock->setNeedsFullScopeChain(true);
216
217     m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
218     Vector<Identifier> boundParameterProperties;
219     FunctionParameters& parameters = *functionBody->parameters();
220     for (size_t i = 0; i < parameters.size(); i++) {
221         auto pattern = parameters.at(i);
222         if (pattern->isBindingNode())
223             continue;
224         pattern->collectBoundIdentifiers(boundParameterProperties);
225         continue;
226     }
227     m_symbolTable->setParameterCountIncludingThis(functionBody->parameters()->size() + 1);
228
229     emitOpcode(op_enter);
230     if (m_codeBlock->needsFullScopeChain()) {
231         m_activationRegister = addVar();
232         emitInitLazyRegister(m_activationRegister);
233         m_codeBlock->setActivationRegister(m_activationRegister->virtualRegister());
234     }
235
236     m_symbolTable->setCaptureStart(virtualRegisterForLocal(m_codeBlock->m_numVars).offset());
237
238     if (functionBody->usesArguments() || codeBlock->usesEval()) { // May reify arguments object.
239         RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
240         RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.
241
242         // We can save a little space by hard-coding the knowledge that the two
243         // 'arguments' values are stored in consecutive registers, and storing
244         // only the index of the assignable one.
245         codeBlock->setArgumentsRegister(argumentsRegister->virtualRegister());
246         ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->virtualRegister() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));
247
248         emitInitLazyRegister(argumentsRegister);
249         emitInitLazyRegister(unmodifiedArgumentsRegister);
250         
251         if (shouldTearOffArgumentsEagerly()) {
252             emitOpcode(op_create_arguments);
253             instructions().append(argumentsRegister->index());
254         }
255     }
256
257     bool shouldCaptureAllTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();
258
259     bool capturesAnyArgumentByName = false;
260     Vector<RegisterID*, 0, UnsafeVectorOverflow> capturedArguments;
261     if (functionBody->hasCapturedVariables() || shouldCaptureAllTheThings) {
262         FunctionParameters& parameters = *functionBody->parameters();
263         capturedArguments.resize(parameters.size());
264         for (size_t i = 0; i < parameters.size(); ++i) {
265             capturedArguments[i] = 0;
266             auto pattern = parameters.at(i);
267             if (!pattern->isBindingNode())
268                 continue;
269             const Identifier& ident = static_cast<const BindingNode*>(pattern)->boundProperty();
270             if (!functionBody->captures(ident) && !shouldCaptureAllTheThings)
271                 continue;
272             capturesAnyArgumentByName = true;
273             capturedArguments[i] = addVar();
274         }
275     }
276
277     if (capturesAnyArgumentByName && !shouldTearOffArgumentsEagerly()) {
278         size_t parameterCount = m_symbolTable->parameterCount();
279         auto slowArguments = std::make_unique<SlowArgument[]>(parameterCount);
280         for (size_t i = 0; i < parameterCount; ++i) {
281             if (!capturedArguments[i]) {
282                 ASSERT(slowArguments[i].status == SlowArgument::Normal);
283                 slowArguments[i].index = CallFrame::argumentOffset(i);
284                 continue;
285             }
286             slowArguments[i].status = SlowArgument::Captured;
287             slowArguments[i].index = capturedArguments[i]->index();
288         }
289         m_symbolTable->setSlowArguments(std::move(slowArguments));
290     }
291
292     RegisterID* calleeRegister = resolveCallee(functionBody); // May push to the scope chain and/or add a captured var.
293
294     const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
295     const DeclarationStacks::VarStack& varStack = functionBody->varStack();
296
297     // Captured variables and functions go first so that activations don't have
298     // to step over the non-captured locals to mark them.
299     m_hasCreatedActivation = false;
300     if (functionBody->hasCapturedVariables()) {
301         for (size_t i = 0; i < functionStack.size(); ++i) {
302             FunctionBodyNode* function = functionStack[i];
303             const Identifier& ident = function->ident();
304             if (functionBody->captures(ident)) {
305                 if (!m_hasCreatedActivation) {
306                     m_hasCreatedActivation = true;
307                     emitOpcode(op_create_activation);
308                     instructions().append(m_activationRegister->index());
309                 }
310                 m_functions.add(ident.impl());
311                 emitNewFunction(addVar(ident, false), function);
312             }
313         }
314         for (size_t i = 0; i < varStack.size(); ++i) {
315             const Identifier& ident = varStack[i].first;
316             if (functionBody->captures(ident))
317                 addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
318         }
319     }
320     bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
321     if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
322         m_hasCreatedActivation = true;
323         emitOpcode(op_create_activation);
324         instructions().append(m_activationRegister->index());
325     }
326
327     m_symbolTable->setCaptureEnd(virtualRegisterForLocal(codeBlock->m_numVars).offset());
328
329     m_firstLazyFunction = codeBlock->m_numVars;
330     for (size_t i = 0; i < functionStack.size(); ++i) {
331         FunctionBodyNode* function = functionStack[i];
332         const Identifier& ident = function->ident();
333         if (!functionBody->captures(ident)) {
334             m_functions.add(ident.impl());
335             RefPtr<RegisterID> reg = addVar(ident, false);
336             // Don't lazily create functions that override the name 'arguments'
337             // as this would complicate lazy instantiation of actual arguments.
338             if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
339                 emitNewFunction(reg.get(), function);
340             else {
341                 emitInitLazyRegister(reg.get());
342                 m_lazyFunctions.set(reg->virtualRegister().toLocal(), function);
343             }
344         }
345     }
346     m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
347     for (size_t i = 0; i < varStack.size(); ++i) {
348         const Identifier& ident = varStack[i].first;
349         if (!functionBody->captures(ident))
350             addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
351     }
352
353     if (shouldCaptureAllTheThings)
354         m_symbolTable->setCaptureEnd(virtualRegisterForLocal(codeBlock->m_numVars).offset());
355
356     m_parameters.grow(parameters.size() + 1); // reserve space for "this"
357
358     // Add "this" as a parameter
359     int nextParameterIndex = CallFrame::thisArgumentOffset();
360     m_thisRegister.setIndex(nextParameterIndex++);
361     m_codeBlock->addParameter();
362     Vector<std::pair<RegisterID*, const DeconstructionPatternNode*>> deconstructedParameters;
363     for (size_t i = 0; i < parameters.size(); ++i, ++nextParameterIndex) {
364         int index = nextParameterIndex;
365         auto pattern = parameters.at(i);
366         if (!pattern->isBindingNode()) {
367             m_codeBlock->addParameter();
368             RegisterID& parameter = registerFor(index);
369             parameter.setIndex(index);
370             deconstructedParameters.append(make_pair(&parameter, pattern));
371             continue;
372         }
373         auto simpleParameter = static_cast<const BindingNode*>(pattern);
374         if (capturedArguments.size() && capturedArguments[i]) {
375             ASSERT((functionBody->hasCapturedVariables() && functionBody->captures(simpleParameter->boundProperty())) || shouldCaptureAllTheThings);
376             index = capturedArguments[i]->index();
377             RegisterID original(nextParameterIndex);
378             emitMove(capturedArguments[i], &original);
379         }
380         addParameter(simpleParameter->boundProperty(), index);
381     }
382     preserveLastVar();
383
384     // We declare the callee's name last because it should lose to a var, function, and/or parameter declaration.
385     addCallee(functionBody, calleeRegister);
386
387     if (isConstructor()) {
388         emitCreateThis(&m_thisRegister);
389     } else if (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks) {
390         emitOpcode(op_to_this);
391         instructions().append(kill(&m_thisRegister));
392         instructions().append(0);
393     }
394     for (size_t i = 0; i < deconstructedParameters.size(); i++) {
395         auto& entry = deconstructedParameters[i];
396         entry.second->emitBytecode(*this, entry.first);
397     }
398 }
399
400 BytecodeGenerator::BytecodeGenerator(VM& vm, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
401     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
402     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
403     , m_symbolTable(codeBlock->symbolTable())
404     , m_scopeNode(evalNode)
405     , m_codeBlock(vm, codeBlock)
406     , m_thisRegister(CallFrame::thisArgumentOffset())
407     , m_emptyValueRegister(0)
408     , m_globalObjectRegister(0)
409     , m_finallyDepth(0)
410     , m_localScopeDepth(0)
411     , m_codeType(EvalCode)
412     , m_nextConstantOffset(0)
413     , m_globalConstantIndex(0)
414     , m_hasCreatedActivation(true)
415     , m_firstLazyFunction(0)
416     , m_lastLazyFunction(0)
417     , m_staticPropertyAnalyzer(&m_instructions)
418     , m_vm(&vm)
419     , m_lastOpcodeID(op_end)
420 #ifndef NDEBUG
421     , m_lastOpcodePosition(0)
422 #endif
423     , m_stack(vm, wtfThreadData().stack())
424     , m_usesExceptions(false)
425     , m_expressionTooDeep(false)
426 {
427     m_codeBlock->setNeedsFullScopeChain(true);
428
429     m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
430     m_codeBlock->setNumParameters(1);
431
432     emitOpcode(op_enter);
433
434     const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
435     for (size_t i = 0; i < functionStack.size(); ++i)
436         m_codeBlock->addFunctionDecl(makeFunction(functionStack[i]));
437
438     const DeclarationStacks::VarStack& varStack = evalNode->varStack();
439     unsigned numVariables = varStack.size();
440     Vector<Identifier, 0, UnsafeVectorOverflow> variables;
441     variables.reserveCapacity(numVariables);
442     for (size_t i = 0; i < numVariables; ++i) {
443         ASSERT(varStack[i].first.impl()->isIdentifier());
444         variables.append(varStack[i].first);
445     }
446     codeBlock->adoptVariables(variables);
447     preserveLastVar();
448 }
449
450 BytecodeGenerator::~BytecodeGenerator()
451 {
452 }
453
454 RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
455 {
456     emitOpcode(op_init_lazy_reg);
457     instructions().append(reg->index());
458     return reg;
459 }
460
461 RegisterID* BytecodeGenerator::resolveCallee(FunctionBodyNode* functionBodyNode)
462 {
463     if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
464         return 0;
465
466     m_calleeRegister.setIndex(JSStack::Callee);
467
468     // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
469     if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks)
470         emitPushNameScope(functionBodyNode->ident(), &m_calleeRegister, ReadOnly | DontDelete);
471
472     if (!functionBodyNode->captures(functionBodyNode->ident()))
473         return &m_calleeRegister;
474
475     // Move the callee into the captured section of the stack.
476     return emitMove(addVar(), &m_calleeRegister);
477 }
478
479 void BytecodeGenerator::addCallee(FunctionBodyNode* functionBodyNode, RegisterID* calleeRegister)
480 {
481     if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
482         return;
483
484     // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
485     if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks)
486         return;
487
488     ASSERT(calleeRegister);
489     symbolTable().add(functionBodyNode->ident().impl(), SymbolTableEntry(calleeRegister->index(), ReadOnly));
490 }
491
492 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
493 {
494     // Parameters overwrite var declarations, but not function declarations.
495     StringImpl* rep = ident.impl();
496     if (!m_functions.contains(rep)) {
497         symbolTable().set(rep, parameterIndex);
498         RegisterID& parameter = registerFor(parameterIndex);
499         parameter.setIndex(parameterIndex);
500     }
501
502     // To maintain the calling convention, we have to allocate unique space for
503     // each parameter, even if the parameter doesn't make it into the symbol table.
504     m_codeBlock->addParameter();
505 }
506
507 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
508 {
509     if (ident != propertyNames().arguments)
510         return false;
511     
512     if (!shouldOptimizeLocals())
513         return false;
514     
515     SymbolTableEntry entry = symbolTable().get(ident.impl());
516     if (entry.isNull())
517         return false;
518
519     if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
520         return true;
521     
522     return false;
523 }
524
525 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
526 {
527     ASSERT(willResolveToArguments(propertyNames().arguments));
528
529     SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
530     ASSERT(!entry.isNull());
531     return &registerFor(entry.getIndex());
532 }
533
534 RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
535 {
536     if (!reg->virtualRegister().isLocal())
537         return reg;
538
539     int localVariableNumber = reg->virtualRegister().toLocal();
540
541     if (m_lastLazyFunction <= localVariableNumber || localVariableNumber < m_firstLazyFunction)
542         return reg;
543     emitLazyNewFunction(reg, m_lazyFunctions.get(localVariableNumber));
544     return reg;
545 }
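// Lazy function materialization: locals in the half-open range
// [m_firstLazyFunction, m_lastLazyFunction) were only marked with
// op_init_lazy_reg in the prologue. The first time such a local is actually
// resolved through local()/constLocal(), the call above materializes the declared
// function (emitLazyNewFunction), so declarations that are never referenced skip
// allocation entirely.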
546
547 RegisterID* BytecodeGenerator::newRegister()
548 {
549     m_calleeRegisters.append(virtualRegisterForLocal(m_calleeRegisters.size()));
550     m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
551     return &m_calleeRegisters.last();
552 }
553
554 RegisterID* BytecodeGenerator::newTemporary()
555 {
556     // Reclaim free register IDs.
557     while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
558         m_calleeRegisters.removeLast();
559         
560     RegisterID* result = newRegister();
561     result->setTemporary();
562     return result;
563 }
564
565 LabelScopePtr BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
566 {
567     // Reclaim free label scopes.
568     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
569         m_labelScopes.removeLast();
570
571     // Allocate new label scope.
572     LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
573     m_labelScopes.append(scope);
574     return LabelScopePtr(&m_labelScopes, m_labelScopes.size() - 1);
575 }
576
577 PassRefPtr<Label> BytecodeGenerator::newLabel()
578 {
579     // Reclaim free label IDs.
580     while (m_labels.size() && !m_labels.last().refCount())
581         m_labels.removeLast();
582
583     // Allocate new label ID.
584     m_labels.append(this);
585     return &m_labels.last();
586 }
587
588 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
589 {
590     unsigned newLabelIndex = instructions().size();
591     l0->setLocation(newLabelIndex);
592
593     if (m_codeBlock->numberOfJumpTargets()) {
594         unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
595         ASSERT(lastLabelIndex <= newLabelIndex);
596         if (newLabelIndex == lastLabelIndex) {
597             // Peephole optimizations have already been disabled by emitting the last label
598             return l0;
599         }
600     }
601
602     m_codeBlock->addJumpTarget(newLabelIndex);
603
604     // This disables peephole optimizations when an instruction is a jump target
605     m_lastOpcodeID = op_end;
606     return l0;
607 }
608
609 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
610 {
611 #ifndef NDEBUG
612     size_t opcodePosition = instructions().size();
613     ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
614     m_lastOpcodePosition = opcodePosition;
615 #endif
616     instructions().append(opcodeID);
617     m_lastOpcodeID = opcodeID;
618 }
619
620 UnlinkedArrayProfile BytecodeGenerator::newArrayProfile()
621 {
622 #if ENABLE(VALUE_PROFILER)
623     return m_codeBlock->addArrayProfile();
624 #else
625     return 0;
626 #endif
627 }
628
629 UnlinkedArrayAllocationProfile BytecodeGenerator::newArrayAllocationProfile()
630 {
631 #if ENABLE(VALUE_PROFILER)
632     return m_codeBlock->addArrayAllocationProfile();
633 #else
634     return 0;
635 #endif
636 }
637
638 UnlinkedObjectAllocationProfile BytecodeGenerator::newObjectAllocationProfile()
639 {
640     return m_codeBlock->addObjectAllocationProfile();
641 }
642
643 UnlinkedValueProfile BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
644 {
645 #if ENABLE(VALUE_PROFILER)
646     UnlinkedValueProfile result = m_codeBlock->addValueProfile();
647 #else
648     UnlinkedValueProfile result = 0;
649 #endif
650     emitOpcode(opcodeID);
651     return result;
652 }
653
654 void BytecodeGenerator::emitLoopHint()
655 {
656     emitOpcode(op_loop_hint);
657 }
658
659 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
660 {
661     ASSERT(instructions().size() >= 4);
662     size_t size = instructions().size();
663     dstIndex = instructions().at(size - 3).u.operand;
664     src1Index = instructions().at(size - 2).u.operand;
665     src2Index = instructions().at(size - 1).u.operand;
666 }
667
668 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
669 {
670     ASSERT(instructions().size() >= 3);
671     size_t size = instructions().size();
672     dstIndex = instructions().at(size - 2).u.operand;
673     srcIndex = instructions().at(size - 1).u.operand;
674 }
675
676 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
677 {
678     ASSERT(instructions().size() >= 4);
679     instructions().shrink(instructions().size() - 4);
680     m_lastOpcodeID = op_end;
681 }
682
683 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
684 {
685     ASSERT(instructions().size() >= 3);
686     instructions().shrink(instructions().size() - 3);
687     m_lastOpcodeID = op_end;
688 }
689
690 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
691 {
692     size_t begin = instructions().size();
693     emitOpcode(op_jmp);
694     instructions().append(target->bind(begin, instructions().size()));
695     return target;
696 }
697
698 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
699 {
700     if (m_lastOpcodeID == op_less) {
701         int dstIndex;
702         int src1Index;
703         int src2Index;
704
705         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
706
707         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
708             rewindBinaryOp();
709
710             size_t begin = instructions().size();
711             emitOpcode(op_jless);
712             instructions().append(src1Index);
713             instructions().append(src2Index);
714             instructions().append(target->bind(begin, instructions().size()));
715             return target;
716         }
717     } else if (m_lastOpcodeID == op_lesseq) {
718         int dstIndex;
719         int src1Index;
720         int src2Index;
721
722         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
723
724         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
725             rewindBinaryOp();
726
727             size_t begin = instructions().size();
728             emitOpcode(op_jlesseq);
729             instructions().append(src1Index);
730             instructions().append(src2Index);
731             instructions().append(target->bind(begin, instructions().size()));
732             return target;
733         }
734     } else if (m_lastOpcodeID == op_greater) {
735         int dstIndex;
736         int src1Index;
737         int src2Index;
738
739         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
740
741         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
742             rewindBinaryOp();
743
744             size_t begin = instructions().size();
745             emitOpcode(op_jgreater);
746             instructions().append(src1Index);
747             instructions().append(src2Index);
748             instructions().append(target->bind(begin, instructions().size()));
749             return target;
750         }
751     } else if (m_lastOpcodeID == op_greatereq) {
752         int dstIndex;
753         int src1Index;
754         int src2Index;
755
756         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
757
758         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
759             rewindBinaryOp();
760
761             size_t begin = instructions().size();
762             emitOpcode(op_jgreatereq);
763             instructions().append(src1Index);
764             instructions().append(src2Index);
765             instructions().append(target->bind(begin, instructions().size()));
766             return target;
767         }
768     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
769         int dstIndex;
770         int srcIndex;
771
772         retrieveLastUnaryOp(dstIndex, srcIndex);
773
774         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
775             rewindUnaryOp();
776
777             size_t begin = instructions().size();
778             emitOpcode(op_jeq_null);
779             instructions().append(srcIndex);
780             instructions().append(target->bind(begin, instructions().size()));
781             return target;
782         }
783     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
784         int dstIndex;
785         int srcIndex;
786
787         retrieveLastUnaryOp(dstIndex, srcIndex);
788
789         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
790             rewindUnaryOp();
791
792             size_t begin = instructions().size();
793             emitOpcode(op_jneq_null);
794             instructions().append(srcIndex);
795             instructions().append(target->bind(begin, instructions().size()));
796             return target;
797         }
798     }
799
800     size_t begin = instructions().size();
801
802     emitOpcode(op_jtrue);
803     instructions().append(cond->index());
804     instructions().append(target->bind(begin, instructions().size()));
805     return target;
806 }
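// Peephole sketch: when the condition is a dead temporary produced by the
// immediately preceding compare, the compare is rewound and fused with the
// branch. Assuming "a < b" was just emitted into temporary t:
//
//   op_less  t, a, b
//   op_jtrue t, target      ==>   op_jless a, b, target
//
// The same rewrite covers op_lesseq/op_greater/op_greatereq, and the
// op_eq_null/op_neq_null forms when the target is a forward label.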
807
808 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
809 {
810     if (m_lastOpcodeID == op_less && target->isForward()) {
811         int dstIndex;
812         int src1Index;
813         int src2Index;
814
815         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
816
817         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
818             rewindBinaryOp();
819
820             size_t begin = instructions().size();
821             emitOpcode(op_jnless);
822             instructions().append(src1Index);
823             instructions().append(src2Index);
824             instructions().append(target->bind(begin, instructions().size()));
825             return target;
826         }
827     } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
828         int dstIndex;
829         int src1Index;
830         int src2Index;
831
832         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
833
834         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
835             rewindBinaryOp();
836
837             size_t begin = instructions().size();
838             emitOpcode(op_jnlesseq);
839             instructions().append(src1Index);
840             instructions().append(src2Index);
841             instructions().append(target->bind(begin, instructions().size()));
842             return target;
843         }
844     } else if (m_lastOpcodeID == op_greater && target->isForward()) {
845         int dstIndex;
846         int src1Index;
847         int src2Index;
848
849         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
850
851         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
852             rewindBinaryOp();
853
854             size_t begin = instructions().size();
855             emitOpcode(op_jngreater);
856             instructions().append(src1Index);
857             instructions().append(src2Index);
858             instructions().append(target->bind(begin, instructions().size()));
859             return target;
860         }
861     } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
862         int dstIndex;
863         int src1Index;
864         int src2Index;
865
866         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
867
868         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
869             rewindBinaryOp();
870
871             size_t begin = instructions().size();
872             emitOpcode(op_jngreatereq);
873             instructions().append(src1Index);
874             instructions().append(src2Index);
875             instructions().append(target->bind(begin, instructions().size()));
876             return target;
877         }
878     } else if (m_lastOpcodeID == op_not) {
879         int dstIndex;
880         int srcIndex;
881
882         retrieveLastUnaryOp(dstIndex, srcIndex);
883
884         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
885             rewindUnaryOp();
886
887             size_t begin = instructions().size();
888             emitOpcode(op_jtrue);
889             instructions().append(srcIndex);
890             instructions().append(target->bind(begin, instructions().size()));
891             return target;
892         }
893     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
894         int dstIndex;
895         int srcIndex;
896
897         retrieveLastUnaryOp(dstIndex, srcIndex);
898
899         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
900             rewindUnaryOp();
901
902             size_t begin = instructions().size();
903             emitOpcode(op_jneq_null);
904             instructions().append(srcIndex);
905             instructions().append(target->bind(begin, instructions().size()));
906             return target;
907         }
908     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
909         int dstIndex;
910         int srcIndex;
911
912         retrieveLastUnaryOp(dstIndex, srcIndex);
913
914         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
915             rewindUnaryOp();
916
917             size_t begin = instructions().size();
918             emitOpcode(op_jeq_null);
919             instructions().append(srcIndex);
920             instructions().append(target->bind(begin, instructions().size()));
921             return target;
922         }
923     }
924
925     size_t begin = instructions().size();
926     emitOpcode(op_jfalse);
927     instructions().append(cond->index());
928     instructions().append(target->bind(begin, instructions().size()));
929     return target;
930 }
931
932 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
933 {
934     size_t begin = instructions().size();
935
936     emitOpcode(op_jneq_ptr);
937     instructions().append(cond->index());
938     instructions().append(Special::CallFunction);
939     instructions().append(target->bind(begin, instructions().size()));
940     return target;
941 }
942
943 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
944 {
945     size_t begin = instructions().size();
946
947     emitOpcode(op_jneq_ptr);
948     instructions().append(cond->index());
949     instructions().append(Special::ApplyFunction);
950     instructions().append(target->bind(begin, instructions().size()));
951     return target;
952 }
953
954 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
955 {
956     StringImpl* rep = ident.impl();
957     IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
958     if (result.isNewEntry)
959         m_codeBlock->addIdentifier(Identifier(m_vm, rep));
960
961     return result.iterator->value;
962 }
963
964 // We can't hash JSValue(), so we use a dedicated data member to cache it.
965 RegisterID* BytecodeGenerator::addConstantEmptyValue()
966 {
967     if (!m_emptyValueRegister) {
968         int index = m_nextConstantOffset;
969         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
970         ++m_nextConstantOffset;
971         m_codeBlock->addConstant(JSValue());
972         m_emptyValueRegister = &m_constantPoolRegisters[index];
973     }
974
975     return m_emptyValueRegister;
976 }
977
978 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
979 {
980     if (!v)
981         return addConstantEmptyValue();
982
983     int index = m_nextConstantOffset;
984     JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
985     if (result.isNewEntry) {
986         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
987         ++m_nextConstantOffset;
988         m_codeBlock->addConstant(v);
989     } else
990         index = result.iterator->value;
991     return &m_constantPoolRegisters[index];
992 }
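// Constant pool note: each distinct JSValue is added once. m_jsValueMap maps the
// encoded value to its pool offset, and the corresponding RegisterID refers to
// virtual register FirstConstantRegisterIndex + offset; re-adding an existing
// constant simply returns the register allocated the first time.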
993
994 unsigned BytecodeGenerator::addRegExp(RegExp* r)
995 {
996     return m_codeBlock->addRegExp(r);
997 }
998
999 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
1000 {
1001     m_staticPropertyAnalyzer.mov(dst->index(), src->index());
1002
1003     emitOpcode(op_mov);
1004     instructions().append(dst->index());
1005     instructions().append(src->index());
1006     return dst;
1007 }
1008
1009 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
1010 {
1011     emitOpcode(opcodeID);
1012     instructions().append(dst->index());
1013     instructions().append(src->index());
1014     return dst;
1015 }
1016
1017 RegisterID* BytecodeGenerator::emitInc(RegisterID* srcDst)
1018 {
1019     emitOpcode(op_inc);
1020     instructions().append(srcDst->index());
1021     return srcDst;
1022 }
1023
1024 RegisterID* BytecodeGenerator::emitDec(RegisterID* srcDst)
1025 {
1026     emitOpcode(op_dec);
1027     instructions().append(srcDst->index());
1028     return srcDst;
1029 }
1030
1031 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1032 {
1033     emitOpcode(opcodeID);
1034     instructions().append(dst->index());
1035     instructions().append(src1->index());
1036     instructions().append(src2->index());
1037
1038     if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
1039         opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
1040         instructions().append(types.toInt());
1041
1042     return dst;
1043 }
1044
1045 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1046 {
1047     if (m_lastOpcodeID == op_typeof) {
1048         int dstIndex;
1049         int srcIndex;
1050
1051         retrieveLastUnaryOp(dstIndex, srcIndex);
1052
1053         if (src1->index() == dstIndex
1054             && src1->isTemporary()
1055             && m_codeBlock->isConstantRegisterIndex(src2->index())
1056             && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1057             const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1058             if (value == "undefined") {
1059                 rewindUnaryOp();
1060                 emitOpcode(op_is_undefined);
1061                 instructions().append(dst->index());
1062                 instructions().append(srcIndex);
1063                 return dst;
1064             }
1065             if (value == "boolean") {
1066                 rewindUnaryOp();
1067                 emitOpcode(op_is_boolean);
1068                 instructions().append(dst->index());
1069                 instructions().append(srcIndex);
1070                 return dst;
1071             }
1072             if (value == "number") {
1073                 rewindUnaryOp();
1074                 emitOpcode(op_is_number);
1075                 instructions().append(dst->index());
1076                 instructions().append(srcIndex);
1077                 return dst;
1078             }
1079             if (value == "string") {
1080                 rewindUnaryOp();
1081                 emitOpcode(op_is_string);
1082                 instructions().append(dst->index());
1083                 instructions().append(srcIndex);
1084                 return dst;
1085             }
1086             if (value == "object") {
1087                 rewindUnaryOp();
1088                 emitOpcode(op_is_object);
1089                 instructions().append(dst->index());
1090                 instructions().append(srcIndex);
1091                 return dst;
1092             }
1093             if (value == "function") {
1094                 rewindUnaryOp();
1095                 emitOpcode(op_is_function);
1096                 instructions().append(dst->index());
1097                 instructions().append(srcIndex);
1098                 return dst;
1099             }
1100         }
1101     }
1102
1103     emitOpcode(opcodeID);
1104     instructions().append(dst->index());
1105     instructions().append(src1->index());
1106     instructions().append(src2->index());
1107     return dst;
1108 }
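// Peephole sketch: an equality test whose left operand is the dead result of the
// immediately preceding op_typeof, compared against a constant string, collapses
// into a dedicated type-check opcode. For example, `typeof x == "number"` becomes
// a single `op_is_number dst, x` instead of op_typeof followed by the generic
// equality opcode.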
1109
1110 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1111 {
1112     return emitLoad(dst, jsBoolean(b));
1113 }
1114
1115 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1116 {
1117     // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1118     // Later we can do the extra work to handle that like the other cases.  They also don't
1119     // work correctly with NaN as a key.
1120     if (std::isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1121         return emitLoad(dst, jsNumber(number));
1122     JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->value;
1123     if (!valueInMap)
1124         valueInMap = jsNumber(number);
1125     return emitLoad(dst, valueInMap);
1126 }
1127
1128 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1129 {
1130     JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
1131     if (!stringInMap)
1132         stringInMap = jsOwnedString(vm(), identifier.string());
1133     return emitLoad(dst, JSValue(stringInMap));
1134 }
1135
1136 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1137 {
1138     RegisterID* constantID = addConstantValue(v);
1139     if (dst)
1140         return emitMove(dst, constantID);
1141     return constantID;
1142 }
1143
1144 RegisterID* BytecodeGenerator::emitLoadGlobalObject(RegisterID* dst)
1145 {
1146     if (!m_globalObjectRegister) {
1147         int index = m_nextConstantOffset;
1148         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1149         ++m_nextConstantOffset;
1150         m_codeBlock->addConstant(JSValue());
1151         m_globalObjectRegister = &m_constantPoolRegisters[index];
1152         m_codeBlock->setGlobalObjectRegister(VirtualRegister(index));
1153     }
1154     if (dst)
1155         emitMove(dst, m_globalObjectRegister);
1156     return m_globalObjectRegister;
1157 }
1158
1159 Local BytecodeGenerator::local(const Identifier& property)
1160 {
1161     if (property == propertyNames().thisIdentifier)
1162         return Local(thisRegister(), ReadOnly);
1163
1164     if (property == propertyNames().arguments)
1165         createArgumentsIfNecessary();
1166
1167     if (!shouldOptimizeLocals())
1168         return Local();
1169
1170     SymbolTableEntry entry = symbolTable().get(property.impl());
1171     if (entry.isNull())
1172         return Local();
1173
1174     RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1175     return Local(local, entry.getAttributes());
1176 }
1177
1178 Local BytecodeGenerator::constLocal(const Identifier& property)
1179 {
1180     if (m_codeType != FunctionCode)
1181         return Local();
1182
1183     SymbolTableEntry entry = symbolTable().get(property.impl());
1184     if (entry.isNull())
1185         return Local();
1186
1187     RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1188     return Local(local, entry.getAttributes());
1189 }
1190
1191 void BytecodeGenerator::emitCheckHasInstance(RegisterID* dst, RegisterID* value, RegisterID* base, Label* target)
1192 {
1193     size_t begin = instructions().size();
1194     emitOpcode(op_check_has_instance);
1195     instructions().append(dst->index());
1196     instructions().append(value->index());
1197     instructions().append(base->index());
1198     instructions().append(target->bind(begin, instructions().size()));
1199 }
1200
1201 // Indicates the least upper bound of resolve type based on local scope. The bytecode linker
1202 // will start with this ResolveType and compute the least upper bound including intercepting scopes.
1203 ResolveType BytecodeGenerator::resolveType()
1204 {
1205     if (m_localScopeDepth)
1206         return Dynamic;
1207     if (m_symbolTable && m_symbolTable->usesNonStrictEval())
1208         return GlobalPropertyWithVarInjectionChecks;
1209     return GlobalProperty;
1210 }
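// Example: at the top level of global code, with no dynamic scope pushed
// (m_localScopeDepth == 0) and no sloppy-mode eval recorded in the symbol table,
// this returns GlobalProperty; the linker may still widen that to a more dynamic
// ResolveType as it walks the real scope chain, per the comment above.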
1211
1212 RegisterID* BytecodeGenerator::emitResolveScope(RegisterID* dst, const Identifier& identifier)
1213 {
1214     ASSERT(!m_symbolTable || !m_symbolTable->contains(identifier.impl()) || resolveType() == Dynamic);
1215
1216     // resolve_scope dst, id, ResolveType, depth
1217     emitOpcode(op_resolve_scope);
1218     instructions().append(kill(dst));
1219     instructions().append(addConstant(identifier));
1220     instructions().append(resolveType());
1221     instructions().append(0);
1222     return dst;
1223 }
1224
1225 RegisterID* BytecodeGenerator::emitGetFromScope(RegisterID* dst, RegisterID* scope, const Identifier& identifier, ResolveMode resolveMode)
1226 {
1227     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1228
1229     // get_from_scope dst, scope, id, ResolveModeAndType, Structure, Operand
1230     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_from_scope);
1231     instructions().append(kill(dst));
1232     instructions().append(scope->index());
1233     instructions().append(addConstant(identifier));
1234     instructions().append(ResolveModeAndType(resolveMode, resolveType()).operand());
1235     instructions().append(0);
1236     instructions().append(0);
1237     instructions().append(profile);
1238     return dst;
1239 }
1240
1241 RegisterID* BytecodeGenerator::emitPutToScope(RegisterID* scope, const Identifier& identifier, RegisterID* value, ResolveMode resolveMode)
1242 {
1243     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1244
1245     // put_to_scope scope, id, value, ResolveModeAndType, Structure, Operand
1246     emitOpcode(op_put_to_scope);
1247     instructions().append(scope->index());
1248     instructions().append(addConstant(identifier));
1249     instructions().append(value->index());
1250     instructions().append(ResolveModeAndType(resolveMode, resolveType()).operand());
1251     instructions().append(0);
1252     instructions().append(0);
1253     return value;
1254 }
1255
1256 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
1257 {
1258     emitOpcode(op_instanceof);
1259     instructions().append(dst->index());
1260     instructions().append(value->index());
1261     instructions().append(basePrototype->index());
1262     return dst;
1263 }
1264
1265 RegisterID* BytecodeGenerator::emitInitGlobalConst(const Identifier& identifier, RegisterID* value)
1266 {
1267     ASSERT(m_codeType == GlobalCode);
1268     emitOpcode(op_init_global_const_nop);
1269     instructions().append(0);
1270     instructions().append(value->index());
1271     instructions().append(0);
1272     instructions().append(addConstant(identifier));
1273     return value;
1274 }
1275
1276 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1277 {
1278     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1279
1280     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_id);
1281     instructions().append(kill(dst));
1282     instructions().append(base->index());
1283     instructions().append(addConstant(property));
1284     instructions().append(0);
1285     instructions().append(0);
1286     instructions().append(0);
1287     instructions().append(0);
1288     instructions().append(profile);
1289     return dst;
1290 }
1291
1292 RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
1293 {
1294     emitOpcode(op_get_arguments_length);
1295     instructions().append(dst->index());
1296     ASSERT(base->virtualRegister() == m_codeBlock->argumentsRegister());
1297     instructions().append(base->index());
1298     instructions().append(addConstant(propertyNames().length));
1299     return dst;
1300 }
1301
1302 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1303 {
1304     unsigned propertyIndex = addConstant(property);
1305
1306     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1307
1308     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1309
1310     emitOpcode(op_put_by_id);
1311     instructions().append(base->index());
1312     instructions().append(propertyIndex);
1313     instructions().append(value->index());
1314     instructions().append(0);
1315     instructions().append(0);
1316     instructions().append(0);
1317     instructions().append(0);
1318     instructions().append(0);
1319     return value;
1320 }
1321
1322 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1323 {
1324     unsigned propertyIndex = addConstant(property);
1325
1326     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1327
1328     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1329     
1330     emitOpcode(op_put_by_id);
1331     instructions().append(base->index());
1332     instructions().append(propertyIndex);
1333     instructions().append(value->index());
1334     instructions().append(0);
1335     instructions().append(0);
1336     instructions().append(0);
1337     instructions().append(0);
1338     instructions().append(
1339         property != m_vm->propertyNames->underscoreProto
1340         && PropertyName(property).asIndex() == PropertyName::NotAnIndex);
1341     return value;
1342 }
1343
1344 void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1345 {
1346     unsigned propertyIndex = addConstant(property);
1347
1348     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1349
1350     emitOpcode(op_put_getter_setter);
1351     instructions().append(base->index());
1352     instructions().append(propertyIndex);
1353     instructions().append(getter->index());
1354     instructions().append(setter->index());
1355 }
1356
1357 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1358 {
1359     emitOpcode(op_del_by_id);
1360     instructions().append(dst->index());
1361     instructions().append(base->index());
1362     instructions().append(addConstant(property));
1363     return dst;
1364 }
1365
1366 RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1367 {
1368     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1369     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_argument_by_val);
1370     instructions().append(kill(dst));
1371     ASSERT(base->virtualRegister() == m_codeBlock->argumentsRegister());
1372     instructions().append(base->index());
1373     instructions().append(property->index());
1374     instructions().append(arrayProfile);
1375     instructions().append(profile);
1376     return dst;
1377 }
1378
1379 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1380 {
1381     for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1382         ForInContext& context = m_forInContextStack[i - 1];
1383         if (context.propertyRegister == property) {
1384             emitOpcode(op_get_by_pname);
1385             instructions().append(dst->index());
1386             instructions().append(base->index());
1387             instructions().append(property->index());
1388             instructions().append(context.expectedSubscriptRegister->index());
1389             instructions().append(context.iterRegister->index());
1390             instructions().append(context.indexRegister->index());
1391             return dst;
1392         }
1393     }
1394     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1395     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_val);
1396     instructions().append(kill(dst));
1397     instructions().append(base->index());
1398     instructions().append(property->index());
1399     instructions().append(arrayProfile);
1400     instructions().append(profile);
1401     return dst;
1402 }
1403
1404 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1405 {
1406     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1407     emitOpcode(op_put_by_val);
1408     instructions().append(base->index());
1409     instructions().append(property->index());
1410     instructions().append(value->index());
1411     instructions().append(arrayProfile);
1412     return value;
1413 }
1414
1415 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1416 {
1417     emitOpcode(op_del_by_val);
1418     instructions().append(dst->index());
1419     instructions().append(base->index());
1420     instructions().append(property->index());
1421     return dst;
1422 }
1423
1424 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1425 {
1426     emitOpcode(op_put_by_index);
1427     instructions().append(base->index());
1428     instructions().append(index);
1429     instructions().append(value->index());
1430     return value;
1431 }
1432
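// Creating 'this' for a constructor: op_get_callee loads the callee into a temporary so that
// op_create_this can allocate 'this' based on that function's .prototype. m_staticPropertyAnalyzer
// is handed the offset of the instruction's final operand (begin + 3) so it can backpatch a size
// hint once it has seen the property stores that follow; that description of its role is an
// interpretation, not something this function spells out.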
1433 RegisterID* BytecodeGenerator::emitCreateThis(RegisterID* dst)
1434 {
1435     RefPtr<RegisterID> func = newTemporary(); 
1436
1437     emitOpcode(op_get_callee);
1438     instructions().append(func->index());
1439     instructions().append(0);
1440
1441     size_t begin = instructions().size();
1442     m_staticPropertyAnalyzer.createThis(m_thisRegister.index(), begin + 3);
1443
1444     emitOpcode(op_create_this); 
1445     instructions().append(m_thisRegister.index()); 
1446     instructions().append(func->index()); 
1447     instructions().append(0);
1448     return dst;
1449 }
1450
1451 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1452 {
1453     size_t begin = instructions().size();
1454     m_staticPropertyAnalyzer.newObject(dst->index(), begin + 2);
1455
1456     emitOpcode(op_new_object);
1457     instructions().append(dst->index());
1458     instructions().append(0);
1459     instructions().append(newObjectAllocationProfile());
1460     return dst;
1461 }
1462
1463 unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
1464 {
1465     return m_codeBlock->addConstantBuffer(length);
1466 }
1467
1468 JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1469 {
1470     JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
1471     if (!stringInMap) {
1472         stringInMap = jsString(vm(), identifier.string());
1473         addConstantValue(stringInMap);
1474     }
1475     return stringInMap;
1476 }
1477
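// Array literals take one of two forms. If the first 'length' elements are all constants, their
// values are copied into a constant buffer and a single op_new_array_buffer materialises the
// array; otherwise each element is evaluated into a run of consecutive temporaries and
// op_new_array consumes that contiguous register range.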
1478 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
1479 {
1480 #if !ASSERT_DISABLED
1481     unsigned checkLength = 0;
1482 #endif
1483     bool hadVariableExpression = false;
1484     if (length) {
1485         for (ElementNode* n = elements; n; n = n->next()) {
1486             if (!n->value()->isConstant()) {
1487                 hadVariableExpression = true;
1488                 break;
1489             }
1490             if (n->elision())
1491                 break;
1492 #if !ASSERT_DISABLED
1493             checkLength++;
1494 #endif
1495         }
1496         if (!hadVariableExpression) {
1497             ASSERT(length == checkLength);
1498             unsigned constantBufferIndex = addConstantBuffer(length);
1499             JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex).data();
1500             unsigned index = 0;
1501             for (ElementNode* n = elements; index < length; n = n->next()) {
1502                 ASSERT(n->value()->isConstant());
1503                 constantBuffer[index++] = static_cast<ConstantNode*>(n->value())->jsValue(*this);
1504             }
1505             emitOpcode(op_new_array_buffer);
1506             instructions().append(dst->index());
1507             instructions().append(constantBufferIndex);
1508             instructions().append(length);
1509             instructions().append(newArrayAllocationProfile());
1510             return dst;
1511         }
1512     }
1513
1514     Vector<RefPtr<RegisterID>, 16, UnsafeVectorOverflow> argv;
1515     for (ElementNode* n = elements; n; n = n->next()) {
1516         if (n->elision())
1517             break;
1518         argv.append(newTemporary());
1519         // op_new_array requires the initial values to be a sequential range of registers
1520         ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() - 1);
1521         emitNode(argv.last().get(), n->value());
1522     }
1523     emitOpcode(op_new_array);
1524     instructions().append(dst->index());
1525     instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1526     instructions().append(argv.size()); // argc
1527     instructions().append(newArrayAllocationProfile());
1528     return dst;
1529 }
1530
1531 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
1532 {
1533     return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(function)), false);
1534 }
1535
1536 RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
1537 {
1538     FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
1539     if (ptr.isNewEntry)
1540         ptr.iterator->value = m_codeBlock->addFunctionDecl(makeFunction(function));
1541     return emitNewFunctionInternal(dst, ptr.iterator->value, true);
1542 }
1543
1544 RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
1545 {
1546     createActivationIfNecessary();
1547     emitOpcode(op_new_func);
1548     instructions().append(dst->index());
1549     instructions().append(index);
1550     instructions().append(doNullCheck);
1551     return dst;
1552 }
1553
1554 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1555 {
1556     emitOpcode(op_new_regexp);
1557     instructions().append(dst->index());
1558     instructions().append(addRegExp(regExp));
1559     return dst;
1560 }
1561
1562 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1563 {
1564     FunctionBodyNode* function = n->body();
1565     unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));
1566     
1567     createActivationIfNecessary();
1568     emitOpcode(op_new_func_exp);
1569     instructions().append(r0->index());
1570     instructions().append(index);
1571     return r0;
1572 }
1573
1574 RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1575 {
1576     return emitCall(op_call, dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd);
1577 }
1578
1579 void BytecodeGenerator::createArgumentsIfNecessary()
1580 {
1581     if (m_codeType != FunctionCode)
1582         return;
1583     
1584     if (!m_codeBlock->usesArguments())
1585         return;
1586
1587     if (shouldTearOffArgumentsEagerly())
1588         return;
1589
1590     emitOpcode(op_create_arguments);
1591     instructions().append(m_codeBlock->argumentsRegister().offset());
1592 }
1593
1594 void BytecodeGenerator::createActivationIfNecessary()
1595 {
1596     if (m_hasCreatedActivation)
1597         return;
1598     if (!m_codeBlock->needsFullScopeChain())
1599         return;
1600     emitOpcode(op_create_activation);
1601     instructions().append(m_activationRegister->index());
1602 }
1603
1604 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1605 {
1606     return emitCall(op_call_eval, dst, func, NoExpectedFunction, callArguments, divot, divotStart, divotEnd);
1607 }
1608
1609 ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
1610 {
1611     if (identifier == m_vm->propertyNames->Object)
1612         return ExpectObjectConstructor;
1613     if (identifier == m_vm->propertyNames->Array)
1614         return ExpectArrayConstructor;
1615     return NoExpectedFunction;
1616 }
1617
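// When the callee was statically guessed to be the Object or Array constructor, this emits an
// op_jneq_ptr guard against the corresponding Special:: constructor and inlines the allocation
// (op_new_object, or op_new_array / op_new_array_with_size) on the fast path. If the guard fails
// at run time, control falls through to 'realCall' and the ordinary call sequence is used instead.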
1618 ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label* done)
1619 {
1620     RefPtr<Label> realCall = newLabel();
1621     switch (expectedFunction) {
1622     case ExpectObjectConstructor: {
1623         // If any arguments are being passed (beyond the implicit 'this'), we can't do anything interesting.
1624         if (callArguments.argumentCountIncludingThis() >= 2)
1625             return NoExpectedFunction;
1626         
1627         size_t begin = instructions().size();
1628         emitOpcode(op_jneq_ptr);
1629         instructions().append(func->index());
1630         instructions().append(Special::ObjectConstructor);
1631         instructions().append(realCall->bind(begin, instructions().size()));
1632         
1633         if (dst != ignoredResult())
1634             emitNewObject(dst);
1635         break;
1636     }
1637         
1638     case ExpectArrayConstructor: {
1639         // If you're doing anything other than "new Array()" or "new Array(foo)" then we
1640         // don't inline it, for now. The only reason is that call arguments are in
1641         // the opposite order of what op_new_array expects, so we'd either need to change
1642         // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
1643         // things sounds like it's worth it.
1644         if (callArguments.argumentCountIncludingThis() > 2)
1645             return NoExpectedFunction;
1646         
1647         size_t begin = instructions().size();
1648         emitOpcode(op_jneq_ptr);
1649         instructions().append(func->index());
1650         instructions().append(Special::ArrayConstructor);
1651         instructions().append(realCall->bind(begin, instructions().size()));
1652         
1653         if (dst != ignoredResult()) {
1654             if (callArguments.argumentCountIncludingThis() == 2) {
1655                 emitOpcode(op_new_array_with_size);
1656                 instructions().append(dst->index());
1657                 instructions().append(callArguments.argumentRegister(0)->index());
1658                 instructions().append(newArrayAllocationProfile());
1659             } else {
1660                 ASSERT(callArguments.argumentCountIncludingThis() == 1);
1661                 emitOpcode(op_new_array);
1662                 instructions().append(dst->index());
1663                 instructions().append(0);
1664                 instructions().append(0);
1665                 instructions().append(newArrayAllocationProfile());
1666             }
1667         }
1668         break;
1669     }
1670         
1671     default:
1672         ASSERT(expectedFunction == NoExpectedFunction);
1673         return NoExpectedFunction;
1674     }
1675     
1676     size_t begin = instructions().size();
1677     emitOpcode(op_jmp);
1678     instructions().append(done->bind(begin, instructions().size()));
1679     emitLabel(realCall.get());
1680     
1681     return expectedFunction;
1682 }
1683
1684 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1685 {
1686     ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1687     ASSERT(func->refCount());
1688
1689     if (m_shouldEmitProfileHooks)
1690         emitMove(callArguments.profileHookRegister(), func);
1691
1692     // Generate code for arguments.
1693     unsigned argument = 0;
1694     for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
1695         emitNode(callArguments.argumentRegister(argument++), n);
1696
1697     // Reserve space for call frame.
1698     Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
1699     for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1700         callFrame.append(newTemporary());
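    // Holding these CallFrameHeaderSize temporaries keeps later register allocations out of the
    // slots where the callee's frame header will be built; that is an inference from the
    // reservation itself rather than something the surrounding code states.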
1701
1702     if (m_shouldEmitProfileHooks) {
1703         emitOpcode(op_profile_will_call);
1704         instructions().append(callArguments.profileHookRegister()->index());
1705     }
1706
1707     emitExpressionInfo(divot, divotStart, divotEnd);
1708
1709     RefPtr<Label> done = newLabel();
1710     expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1711     
1712     // Emit call.
1713     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1714     UnlinkedValueProfile profile = emitProfiledOpcode(opcodeID);
1715     ASSERT(dst);
1716     ASSERT(dst != ignoredResult());
1717     instructions().append(dst->index()); // result
1718     instructions().append(func->index()); // func
1719     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1720     instructions().append(callArguments.registerOffset()); // registerOffset
1721 #if ENABLE(LLINT)
1722     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1723 #else
1724     instructions().append(0);
1725 #endif
1726     instructions().append(arrayProfile);
1727     instructions().append(profile);
1728     
1729     if (expectedFunction != NoExpectedFunction)
1730         emitLabel(done.get());
1731
1732     if (m_shouldEmitProfileHooks) {
1733         emitOpcode(op_profile_did_call);
1734         instructions().append(callArguments.profileHookRegister()->index());
1735     }
1736
1737     return dst;
1738 }
1739
1740 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1741 {
1742     if (m_shouldEmitProfileHooks) {
1743         emitMove(profileHookRegister, func);
1744         emitOpcode(op_profile_will_call);
1745         instructions().append(profileHookRegister->index());
1746     }
1747     
1748     emitExpressionInfo(divot, divotStart, divotEnd);
1749
1750     // Emit call.
1751     UnlinkedValueProfile profile = emitProfiledOpcode(op_call_varargs);
1752     ASSERT(dst != ignoredResult());
1753     instructions().append(dst->index());
1754     instructions().append(func->index());
1755     instructions().append(thisRegister->index());
1756     instructions().append(arguments->index());
1757     instructions().append(firstFreeRegister->index());
1758     instructions().append(0); // Pad to make it as big as an op_call.
1759     instructions().append(profile);
1760     if (m_shouldEmitProfileHooks) {
1761         emitOpcode(op_profile_did_call);
1762         instructions().append(profileHookRegister->index());
1763     }
1764     return dst;
1765 }
1766
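// On return, any activation is torn off first (its captured locals are copied off the stack so
// closures keep working), and a non-strict function that uses 'arguments' detaches the arguments
// object from the live frame. Constructors additionally return through op_ret_object_or_this,
// which substitutes 'this' when the expression being returned is not an object.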
1767 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1768 {
1769     if (m_codeBlock->needsFullScopeChain()) {
1770         emitOpcode(op_tear_off_activation);
1771         instructions().append(m_activationRegister->index());
1772     }
1773
1774     if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !isStrictMode()) {
1775         emitOpcode(op_tear_off_arguments);
1776         instructions().append(m_codeBlock->argumentsRegister().offset());
1777         instructions().append(m_activationRegister ? m_activationRegister->index() : emitLoad(0, JSValue())->index());
1778     }
1779
1780     // Constructors use op_ret_object_or_this to check the result is an
1781     // object, unless we can trivially determine the check is not
1782     // necessary (currently, if the return value is 'this').
1783     if (isConstructor() && (src->index() != m_thisRegister.index())) {
1784         emitOpcode(op_ret_object_or_this);
1785         instructions().append(src->index());
1786         instructions().append(m_thisRegister.index());
1787         return src;
1788     }
1789     return emitUnaryNoDstOp(op_ret, src);
1790 }
1791
1792 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
1793 {
1794     emitOpcode(opcodeID);
1795     instructions().append(src->index());
1796     return src;
1797 }
1798
1799 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1800 {
1801     ASSERT(func->refCount());
1802
1803     if (m_shouldEmitProfileHooks)
1804         emitMove(callArguments.profileHookRegister(), func);
1805
1806     // Generate code for arguments.
1807     unsigned argument = 0;
1808     if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
1809         for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
1810             emitNode(callArguments.argumentRegister(argument++), n);
1811     }
1812
1813     if (m_shouldEmitProfileHooks) {
1814         emitOpcode(op_profile_will_call);
1815         instructions().append(callArguments.profileHookRegister()->index());
1816     }
1817
1818     // Reserve space for call frame.
1819     Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
1820     for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1821         callFrame.append(newTemporary());
1822
1823     emitExpressionInfo(divot, divotStart, divotEnd);
1824     
1825     RefPtr<Label> done = newLabel();
1826     expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1827
1828     UnlinkedValueProfile profile = emitProfiledOpcode(op_construct);
1829     ASSERT(dst != ignoredResult());
1830     instructions().append(dst->index());
1831     instructions().append(func->index()); // func
1832     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1833     instructions().append(callArguments.registerOffset()); // registerOffset
1834 #if ENABLE(LLINT)
1835     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1836 #else
1837     instructions().append(0);
1838 #endif
1839     instructions().append(0);
1840     instructions().append(profile);
1841
1842     if (expectedFunction != NoExpectedFunction)
1843         emitLabel(done.get());
1844
1845     if (m_shouldEmitProfileHooks) {
1846         emitOpcode(op_profile_did_call);
1847         instructions().append(callArguments.profileHookRegister()->index());
1848     }
1849
1850     return dst;
1851 }
1852
1853 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
1854 {
1855     emitOpcode(op_strcat);
1856     instructions().append(dst->index());
1857     instructions().append(src->index());
1858     instructions().append(count);
1859
1860     return dst;
1861 }
1862
1863 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
1864 {
1865     emitOpcode(op_to_primitive);
1866     instructions().append(dst->index());
1867     instructions().append(src->index());
1868 }
1869
1870 RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* scope)
1871 {
1872     ControlFlowContext context;
1873     context.isFinallyBlock = false;
1874     m_scopeContextStack.append(context);
1875     m_localScopeDepth++;
1876
1877     return emitUnaryNoDstOp(op_push_with_scope, scope);
1878 }
1879
1880 void BytecodeGenerator::emitPopScope()
1881 {
1882     ASSERT(m_scopeContextStack.size());
1883     ASSERT(!m_scopeContextStack.last().isFinallyBlock);
1884
1885     emitOpcode(op_pop_scope);
1886
1887     m_scopeContextStack.removeLast();
1888     m_localScopeDepth--;
1889 }
1890
1891 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, unsigned line, unsigned charOffset, unsigned lineStart)
1892 {
1893 #if ENABLE(DEBUG_WITH_BREAKPOINT)
1894     if (debugHookID != DidReachBreakpoint)
1895         return;
1896 #else
1897     if (!m_shouldEmitDebugHooks)
1898         return;
1899 #endif
1900     JSTextPosition divot(line, charOffset, lineStart);
1901     emitExpressionInfo(divot, divot, divot);
1902     emitOpcode(op_debug);
1903     instructions().append(debugHookID);
1904 }
1905
1906 void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
1907 {
1908     ControlFlowContext scope;
1909     scope.isFinallyBlock = true;
1910     FinallyContext context = {
1911         finallyBlock,
1912         static_cast<unsigned>(m_scopeContextStack.size()),
1913         static_cast<unsigned>(m_switchContextStack.size()),
1914         static_cast<unsigned>(m_forInContextStack.size()),
1915         static_cast<unsigned>(m_tryContextStack.size()),
1916         static_cast<unsigned>(m_labelScopes.size()),
1917         m_finallyDepth,
1918         m_localScopeDepth
1919     };
1920     scope.finallyContext = context;
1921     m_scopeContextStack.append(scope);
1922     m_finallyDepth++;
1923 }
1924
1925 void BytecodeGenerator::popFinallyContext()
1926 {
1927     ASSERT(m_scopeContextStack.size());
1928     ASSERT(m_scopeContextStack.last().isFinallyBlock);
1929     ASSERT(m_finallyDepth > 0);
1930     m_scopeContextStack.removeLast();
1931     m_finallyDepth--;
1932 }
1933
1934 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
1935 {
1936     // Reclaim free label scopes.
1937     //
1938     // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
1939     // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
1940     // size 0, leading to segfaulty badness. We have yet to identify a valid cause within our code for
1941     // the GCC codegen to misbehave in this fashion, so the following refactoring of the loop
1942     // condition is a workaround.
1943     while (m_labelScopes.size()) {
1944         if (m_labelScopes.last().refCount())
1945             break;
1946         m_labelScopes.removeLast();
1947     }
1948
1949     if (!m_labelScopes.size())
1950         return 0;
1951
1952     // We special-case the following, which is a syntax error in Firefox:
1953     // label:
1954     //     break;
1955     if (name.isEmpty()) {
1956         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1957             LabelScope* scope = &m_labelScopes[i];
1958             if (scope->type() != LabelScope::NamedLabel) {
1959                 ASSERT(scope->breakTarget());
1960                 return scope;
1961             }
1962         }
1963         return 0;
1964     }
1965
1966     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1967         LabelScope* scope = &m_labelScopes[i];
1968         if (scope->name() && *scope->name() == name) {
1969             ASSERT(scope->breakTarget());
1970             return scope;
1971         }
1972     }
1973     return 0;
1974 }
1975
1976 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
1977 {
1978     // Reclaim free label scopes.
1979     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
1980         m_labelScopes.removeLast();
1981
1982     if (!m_labelScopes.size())
1983         return 0;
1984
1985     if (name.isEmpty()) {
1986         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1987             LabelScope* scope = &m_labelScopes[i];
1988             if (scope->type() == LabelScope::Loop) {
1989                 ASSERT(scope->continueTarget());
1990                 return scope;
1991             }
1992         }
1993         return 0;
1994     }
1995
1996     // Continue to the loop nested nearest to the label scope that matches
1997     // 'name'.
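    // For example, given "outer: while (a) { while (b) { continue outer; } }", the scan below
    // walks from the innermost scope outward, remembering the most recent loop it has seen, so by
    // the time the scope named 'outer' is reached 'result' already holds the outer while loop
    // (an illustrative example, not taken from the surrounding code).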
1998     LabelScope* result = 0;
1999     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2000         LabelScope* scope = &m_labelScopes[i];
2001         if (scope->type() == LabelScope::Loop) {
2002             ASSERT(scope->continueTarget());
2003             result = scope;
2004         }
2005         if (scope->name() && *scope->name() == name)
2006             return result; // may be 0
2007     }
2008     return 0;
2009 }
2010
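// Pops dynamic scopes from topScope down to bottomScope, re-emitting each intervening finally
// block inline. Around every re-emitted finally block the generator's bookkeeping (the scope,
// switch, for-in, try and label-scope stacks plus the depth counters) is rolled back to what it
// was when that finally was pushed and restored afterwards, and any try ranges spanning the block
// are closed at 'beforeFinally' and reopened at 'afterFinally' so exception handling stays correct.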
2011 void BytecodeGenerator::emitComplexPopScopes(ControlFlowContext* topScope, ControlFlowContext* bottomScope)
2012 {
2013     while (topScope > bottomScope) {
2014         // First we count the number of dynamic scopes we need to remove to get
2015         // to a finally block.
2016         int nNormalScopes = 0;
2017         while (topScope > bottomScope) {
2018             if (topScope->isFinallyBlock)
2019                 break;
2020             ++nNormalScopes;
2021             --topScope;
2022         }
2023
2024         if (nNormalScopes) {
2025             // We need to remove a number of dynamic scopes to get to the next
2026             // finally block
2027             while (nNormalScopes--)
2028                 emitOpcode(op_pop_scope);
2029
2030             // If topScope == bottomScope then there isn't a finally block left to emit.
2031             if (topScope == bottomScope)
2032                 return;
2033         }
2034         
2035         Vector<ControlFlowContext> savedScopeContextStack;
2036         Vector<SwitchInfo> savedSwitchContextStack;
2037         Vector<ForInContext> savedForInContextStack;
2038         Vector<TryContext> poppedTryContexts;
2039         LabelScopeStore savedLabelScopes;
2040         while (topScope > bottomScope && topScope->isFinallyBlock) {
2041             RefPtr<Label> beforeFinally = emitLabel(newLabel().get());
2042             
2043             // Save the current state of the world while instating the state of the world
2044             // for the finally block.
2045             FinallyContext finallyContext = topScope->finallyContext;
2046             bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
2047             bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
2048             bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
2049             bool flipTries = finallyContext.tryContextStackSize != m_tryContextStack.size();
2050             bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
2051             int topScopeIndex = -1;
2052             int bottomScopeIndex = -1;
2053             if (flipScopes) {
2054                 topScopeIndex = topScope - m_scopeContextStack.begin();
2055                 bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
2056                 savedScopeContextStack = m_scopeContextStack;
2057                 m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
2058             }
2059             if (flipSwitches) {
2060                 savedSwitchContextStack = m_switchContextStack;
2061                 m_switchContextStack.shrink(finallyContext.switchContextStackSize);
2062             }
2063             if (flipForIns) {
2064                 savedForInContextStack = m_forInContextStack;
2065                 m_forInContextStack.shrink(finallyContext.forInContextStackSize);
2066             }
2067             if (flipTries) {
2068                 while (m_tryContextStack.size() != finallyContext.tryContextStackSize) {
2069                     ASSERT(m_tryContextStack.size() > finallyContext.tryContextStackSize);
2070                     TryContext context = m_tryContextStack.last();
2071                     m_tryContextStack.removeLast();
2072                     TryRange range;
2073                     range.start = context.start;
2074                     range.end = beforeFinally;
2075                     range.tryData = context.tryData;
2076                     m_tryRanges.append(range);
2077                     poppedTryContexts.append(context);
2078                 }
2079             }
2080             if (flipLabelScopes) {
2081                 savedLabelScopes = m_labelScopes;
2082                 while (m_labelScopes.size() > finallyContext.labelScopesSize)
2083                     m_labelScopes.removeLast();
2084             }
2085             int savedFinallyDepth = m_finallyDepth;
2086             m_finallyDepth = finallyContext.finallyDepth;
2087             int savedDynamicScopeDepth = m_localScopeDepth;
2088             m_localScopeDepth = finallyContext.dynamicScopeDepth;
2089             
2090             // Emit the finally block.
2091             emitNode(finallyContext.finallyBlock);
2092             
2093             RefPtr<Label> afterFinally = emitLabel(newLabel().get());
2094             
2095             // Restore the state of the world.
2096             if (flipScopes) {
2097                 m_scopeContextStack = savedScopeContextStack;
2098                 topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
2099             bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1.
2100             }
2101             if (flipSwitches)
2102                 m_switchContextStack = savedSwitchContextStack;
2103             if (flipForIns)
2104                 m_forInContextStack = savedForInContextStack;
2105             if (flipTries) {
2106                 ASSERT(m_tryContextStack.size() == finallyContext.tryContextStackSize);
2107                 for (unsigned i = poppedTryContexts.size(); i--;) {
2108                     TryContext context = poppedTryContexts[i];
2109                     context.start = afterFinally;
2110                     m_tryContextStack.append(context);
2111                 }
2112                 poppedTryContexts.clear();
2113             }
2114             if (flipLabelScopes)
2115                 m_labelScopes = savedLabelScopes;
2116             m_finallyDepth = savedFinallyDepth;
2117             m_localScopeDepth = savedDynamicScopeDepth;
2118             
2119             --topScope;
2120         }
2121     }
2122 }
2123
2124 void BytecodeGenerator::emitPopScopes(int targetScopeDepth)
2125 {
2126     ASSERT(scopeDepth() - targetScopeDepth >= 0);
2127
2128     size_t scopeDelta = scopeDepth() - targetScopeDepth;
2129     ASSERT(scopeDelta <= m_scopeContextStack.size());
2130     if (!scopeDelta)
2131         return;
2132
2133     if (!m_finallyDepth) {
2134         while (scopeDelta--)
2135             emitOpcode(op_pop_scope);
2136         return;
2137     }
2138
2139     emitComplexPopScopes(&m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
2140 }
2141
2142 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
2143 {
2144     size_t begin = instructions().size();
2145
2146     emitOpcode(op_get_pnames);
2147     instructions().append(dst->index());
2148     instructions().append(base->index());
2149     instructions().append(i->index());
2150     instructions().append(size->index());
2151     instructions().append(breakTarget->bind(begin, instructions().size()));
2152     return dst;
2153 }
2154
2155 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
2156 {
2157     size_t begin = instructions().size();
2158
2159     emitOpcode(op_next_pname);
2160     instructions().append(dst->index());
2161     instructions().append(base->index());
2162     instructions().append(i->index());
2163     instructions().append(size->index());
2164     instructions().append(iter->index());
2165     instructions().append(target->bind(begin, instructions().size()));
2166     return dst;
2167 }
2168
2169 TryData* BytecodeGenerator::pushTry(Label* start)
2170 {
2171     TryData tryData;
2172     tryData.target = newLabel();
2173     tryData.targetScopeDepth = UINT_MAX;
2174     m_tryData.append(tryData);
2175     TryData* result = &m_tryData.last();
2176     
2177     TryContext tryContext;
2178     tryContext.start = start;
2179     tryContext.tryData = result;
2180     
2181     m_tryContextStack.append(tryContext);
2182     
2183     return result;
2184 }
2185
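// Closes the try range opened by pushTry() at 'end', records it for handler table generation, and
// then emits the handler itself: op_catch leaves the caught exception in 'targetRegister' when the
// unwinder transfers control to the handler label.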
2186 RegisterID* BytecodeGenerator::popTryAndEmitCatch(TryData* tryData, RegisterID* targetRegister, Label* end)
2187 {
2188     m_usesExceptions = true;
2189     
2190     ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);
2191     
2192     TryRange tryRange;
2193     tryRange.start = m_tryContextStack.last().start;
2194     tryRange.end = end;
2195     tryRange.tryData = m_tryContextStack.last().tryData;
2196     m_tryRanges.append(tryRange);
2197     m_tryContextStack.removeLast();
2198     
2199     emitLabel(tryRange.tryData->target.get());
2200     tryRange.tryData->targetScopeDepth = m_localScopeDepth;
2201
2202     emitOpcode(op_catch);
2203     instructions().append(targetRegister->index());
2204     return targetRegister;
2205 }
2206
2207 void BytecodeGenerator::emitThrowReferenceError(const String& message)
2208 {
2209     emitOpcode(op_throw_static_error);
2210     instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, message)))->index());
2211     instructions().append(true);
2212 }
2213
2214 void BytecodeGenerator::emitPushNameScope(const Identifier& property, RegisterID* value, unsigned attributes)
2215 {
2216     ControlFlowContext context;
2217     context.isFinallyBlock = false;
2218     m_scopeContextStack.append(context);
2219     m_localScopeDepth++;
2220
2221     emitOpcode(op_push_name_scope);
2222     instructions().append(addConstant(property));
2223     instructions().append(value->index());
2224     instructions().append(attributes);
2225 }
2226
2227 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
2228 {
2229     SwitchInfo info = { static_cast<uint32_t>(instructions().size()), type };
2230     switch (type) {
2231         case SwitchInfo::SwitchImmediate:
2232             emitOpcode(op_switch_imm);
2233             break;
2234         case SwitchInfo::SwitchCharacter:
2235             emitOpcode(op_switch_char);
2236             break;
2237         case SwitchInfo::SwitchString:
2238             emitOpcode(op_switch_string);
2239             break;
2240         default:
2241             RELEASE_ASSERT_NOT_REACHED();
2242     }
2243
2244     instructions().append(0); // placeholder for table index
2245     instructions().append(0); // placeholder for default target
2246     instructions().append(scrutineeRegister->index());
2247     m_switchContextStack.append(info);
2248 }
2249
2250 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2251 {
2252     UNUSED_PARAM(max);
2253     ASSERT(node->isNumber());
2254     double value = static_cast<NumberNode*>(node)->value();
2255     int32_t key = static_cast<int32_t>(value);
2256     ASSERT(key == value);
2257     ASSERT(key >= min);
2258     ASSERT(key <= max);
2259     return key - min;
2260 }
2261
2262 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2263 {
2264     UNUSED_PARAM(max);
2265     ASSERT(node->isString());
2266     StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2267     ASSERT(clause->length() == 1);
2268     
2269     int32_t key = (*clause)[0];
2270     ASSERT(key >= min);
2271     ASSERT(key <= max);
2272     return key - min;
2273 }
2274
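// Builds the dense jump table for an immediate or character switch: branchOffsets is indexed by
// (key - min), and slots that never get a case keep the value 0, which presumably tells the
// interpreter to fall through to the switch's default target.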
2275 static void prepareJumpTableForSwitch(
2276     UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount,
2277     RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max,
2278     int32_t (*keyGetter)(ExpressionNode*, int32_t min, int32_t max))
2279 {
2280     jumpTable.min = min;
2281     jumpTable.branchOffsets.resize(max - min + 1);
2282     jumpTable.branchOffsets.fill(0);
2283     for (uint32_t i = 0; i < clauseCount; ++i) {
2284         // We're emitting this after the clause labels should have been fixed, so 
2285         // the labels should not be "forward" references
2286         ASSERT(!labels[i]->isForward());
2287         jumpTable.add(keyGetter(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2288     }
2289 }
2290
2291 static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2292 {
2293     for (uint32_t i = 0; i < clauseCount; ++i) {
2294         // We're emitting this after the clause labels should have been fixed, so 
2295         // the labels should not be "forward" references
2296         ASSERT(!labels[i]->isForward());
2297         
2298         ASSERT(nodes[i]->isString());
2299         StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
2300         jumpTable.offsetTable.add(clause, labels[i]->bind(switchAddress, switchAddress + 3));
2301     }
2302 }
2303
2304 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2305 {
2306     SwitchInfo switchInfo = m_switchContextStack.last();
2307     m_switchContextStack.removeLast();
2308     
2309     switch (switchInfo.switchType) {
2310     case SwitchInfo::SwitchImmediate:
2311     case SwitchInfo::SwitchCharacter: {
2312         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfSwitchJumpTables();
2313         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2314
2315         UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addSwitchJumpTable();
2316         prepareJumpTableForSwitch(
2317             jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max,
2318             switchInfo.switchType == SwitchInfo::SwitchImmediate
2319                 ? keyForImmediateSwitch
2320                 : keyForCharacterSwitch); 
2321         break;
2322     }
2323         
2324     case SwitchInfo::SwitchString: {
2325         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2326         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2327
2328         UnlinkedStringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2329         prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2330         break;
2331     }
2332         
2333     default:
2334         RELEASE_ASSERT_NOT_REACHED();
2335         break;
2336     }
2337 }
2338
2339 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2340 {
2341     // It would be nice to do an even better job of identifying exactly where the expression is.
2342     // And we could make the caller pass the node pointer in, if there was some way of getting
2343     // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2344     // is still good enough to get us an accurate line number.
2345     m_expressionTooDeep = true;
2346     return newTemporary();
2347 }
2348
2349 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2350 {
2351     m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2352 }
2353
2354 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2355 {
2356     RegisterID* registerID = local(ident).get();
2357     if (!registerID || registerID->index() >= 0)
2358         return false;
2359     return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2360 }
2361
2362 void BytecodeGenerator::emitReadOnlyExceptionIfNeeded()
2363 {
2364     if (!isStrictMode())
2365         return;
2366     emitOpcode(op_throw_static_error);
2367     instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, StrictModeReadonlyPropertyWriteError)))->index());
2368     instructions().append(false);
2369 }
2370
2371 } // namespace JSC