Source/JavaScriptCore/bytecompiler/BytecodeGenerator.cpp
1 /*
2  * Copyright (C) 2008, 2009, 2012 Apple Inc. All rights reserved.
3  * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4  * Copyright (C) 2012 Igalia, S.L.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  *
10  * 1.  Redistributions of source code must retain the above copyright
11  *     notice, this list of conditions and the following disclaimer.
12  * 2.  Redistributions in binary form must reproduce the above copyright
13  *     notice, this list of conditions and the following disclaimer in the
14  *     documentation and/or other materials provided with the distribution.
15  * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16  *     its contributors may be used to endorse or promote products derived
17  *     from this software without specific prior written permission.
18  *
19  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29  */
30
31 #include "config.h"
32 #include "BytecodeGenerator.h"
33
34 #include "BatchedTransitionOptimizer.h"
35 #include "Interpreter.h"
36 #include "JSActivation.h"
37 #include "JSFunction.h"
38 #include "JSNameScope.h"
39 #include "LowLevelInterpreter.h"
40 #include "Operations.h"
41 #include "Options.h"
42 #include "StrongInlines.h"
43 #include <wtf/text/WTFString.h>
44
45 using namespace std;
46
47 namespace JSC {
48
49 /*
50     The layout of a register frame looks like this:
51
52     For
53
54     function f(x, y) {
55         var v1;
56         function g() { }
57         var v2;
58         return (x) * (y);
59     }
60
61     assuming (x) and (y) generated temporaries t1 and t2, you would have
62
63     ------------------------------------
64     |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
65     ------------------------------------
66     | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
67     ------------------------------------
68     | params->|<-locals      | temps->
69
70     Because temporary registers are allocated in a stack-like fashion, we
71     can reclaim them with a simple popping algorithm. The same goes for labels.
72     (We never reclaim parameter or local registers, because parameters and
73     locals are DontDelete.)
74
75     The register layout before a function call looks like this:
76
77     For
78
79     function f(x, y)
80     {
81     }
82
83     f(1);
84
85     >                        <------------------------------
86     <                        >  reserved: call frame  |  1 | <-- value held
87     >         >snip<         <------------------------------
88     <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
89     >                        <------------------------------
90     | params->|<-locals      | temps->
91
92     The call instruction fills in the "call frame" registers. It also pads
93     missing arguments at the end of the call:
94
95     >                        <-----------------------------------
96     <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
97     >         >snip<         <-----------------------------------
98     <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
99     >                        <-----------------------------------
100     | params->|<-locals      | temps->
101
102     After filling in missing arguments, the call instruction sets up the new
103     stack frame to overlap the end of the old stack frame:
104
105                              |---------------------------------->                        <
106                              |  reserved: call frame  |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
107                              |---------------------------------->         >snip<         <
108                              | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
109                              |---------------------------------->                        <
110                              |                        | params->|<-locals       | temps->
111
112     That way, arguments are "copied" into the callee's stack frame for free.
113
114     If the caller supplies too many arguments, this trick doesn't work. The
115     extra arguments protrude into space reserved for locals and temporaries.
116     In that case, the call instruction makes a real copy of the call frame header,
117     along with just the arguments expected by the callee, leaving the original
118     call frame header and arguments behind. (The call instruction can't just discard
119     extra arguments, because the "arguments" object may access them later.)
120     This copying strategy ensures that all named values will be at the indices
121     expected by the callee.
122 */
123
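// Fixes a label to its final instruction offset and backpatches every jump
// emitted before the label was placed: each unresolved jump records the index
// of its offset operand, which is rewritten to the relative distance from the
// jump to this label.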
124 void Label::setLocation(unsigned location)
125 {
126     m_location = location;
127     
128     unsigned size = m_unresolvedJumps.size();
129     for (unsigned i = 0; i < size; ++i)
130         m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
131 }
132
133 #ifndef NDEBUG
134 void ResolveResult::checkValidity()
135 {
136     switch (m_type) {
137     case Register:
138     case ReadOnlyRegister:
139         ASSERT(m_local);
140         return;
141     case Dynamic:
142         ASSERT(!m_local);
143         return;
144     case Lexical:
145     case ReadOnlyLexical:
146         ASSERT(!m_local);
147         return;
148     default:
149         RELEASE_ASSERT_NOT_REACHED();
150     }
151 }
152 #endif
153
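// Emits bytecode for the whole scope node, converts the accumulated try
// ranges into the code block's exception handler table (dropping the
// degenerate end <= start ranges explained below), and hands the finished
// instruction stream to the unlinked code block.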
154 ParserError BytecodeGenerator::generate()
155 {
156     SamplingRegion samplingRegion("Bytecode Generation");
157     
158     m_codeBlock->setThisRegister(m_thisRegister.index());
159
160     m_scopeNode->emitBytecode(*this);
161
162     m_staticPropertyAnalyzer.kill();
163
164     for (unsigned i = 0; i < m_tryRanges.size(); ++i) {
165         TryRange& range = m_tryRanges[i];
166         int start = range.start->bind();
167         int end = range.end->bind();
168         
169         // This will happen for empty try blocks and for some cases of finally blocks:
170         //
171         // try {
172         //    try {
173         //    } finally {
174         //        return 42;
175         //        // *HERE*
176         //    }
177         // } finally {
178         //    print("things");
179         // }
180         //
181         // The return will pop scopes to execute the outer finally block. But this includes
182         // popping the try context for the inner try. The try context is live in the fall-through
183         // part of the finally block not because we will emit a handler that overlaps the finally,
184         // but because we haven't yet had a chance to plant the catch target. Then when we finish
185         // emitting code for the outer finally block, we re-push the try context, this time with a
186         // new start index. But that means that the start index for the try range corresponding
187         // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
188         // than the end index of the try block. This is harmless since end < start handlers will
189         // never get matched in our logic, but we do the runtime a favor and choose to not emit
190         // such handlers at all.
191         if (end <= start)
192             continue;
193         
194         ASSERT(range.tryData->targetScopeDepth != UINT_MAX);
195         UnlinkedHandlerInfo info = {
196             static_cast<uint32_t>(start), static_cast<uint32_t>(end),
197             static_cast<uint32_t>(range.tryData->target->bind()),
198             range.tryData->targetScopeDepth
199         };
200         m_codeBlock->addExceptionHandler(info);
201     }
202     
203     m_codeBlock->instructions() = RefCountedArray<UnlinkedInstruction>(m_instructions);
204
205     m_codeBlock->shrinkToFit();
206
207     if (m_expressionTooDeep)
208         return ParserError::OutOfMemory;
209     return ParserError::ErrorNone;
210 }
211
212 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
213 {
214     int index = m_calleeRegisters.size();
215     SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
216     SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
217
218     if (!result.isNewEntry) {
219         r0 = &registerFor(result.iterator->value.getIndex());
220         return false;
221     }
222
223     r0 = addVar();
224     return true;
225 }
226
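// Records where the declared variables end: m_firstConstantIndex marks the
// first register index past the vars, and m_lastVar remembers the last
// variable register, if any.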
227 void BytecodeGenerator::preserveLastVar()
228 {
229     if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
230         m_lastVar = &m_calleeRegisters.last();
231 }
232
233 BytecodeGenerator::BytecodeGenerator(JSGlobalData& globalData, JSScope*, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
234     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
235     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
236     , m_symbolTable(0)
237     , m_scopeNode(programNode)
238     , m_codeBlock(globalData, codeBlock)
239     , m_thisRegister(CallFrame::thisArgumentOffset())
240     , m_emptyValueRegister(0)
241     , m_globalObjectRegister(0)
242     , m_finallyDepth(0)
243     , m_dynamicScopeDepth(0)
244     , m_codeType(GlobalCode)
245     , m_nextConstantOffset(0)
246     , m_globalConstantIndex(0)
247     , m_hasCreatedActivation(true)
248     , m_firstLazyFunction(0)
249     , m_lastLazyFunction(0)
250     , m_staticPropertyAnalyzer(&m_instructions)
251     , m_globalData(&globalData)
252     , m_lastOpcodeID(op_end)
253 #ifndef NDEBUG
254     , m_lastOpcodePosition(0)
255 #endif
256     , m_stack(wtfThreadData().stack())
257     , m_usesExceptions(false)
258     , m_expressionTooDeep(false)
259 {
260     if (m_shouldEmitDebugHooks)
261         m_codeBlock->setNeedsFullScopeChain(true);
262
263     m_codeBlock->setNumParameters(1); // Allocate space for "this"
264
265     emitOpcode(op_enter);
266
267     const VarStack& varStack = programNode->varStack();
268     const FunctionStack& functionStack = programNode->functionStack();
269
270     for (size_t i = 0; i < functionStack.size(); ++i) {
271         FunctionBodyNode* function = functionStack[i];
272         UnlinkedFunctionExecutable* unlinkedFunction = makeFunction(function);
273         codeBlock->addFunctionDeclaration(*m_globalData, function->ident(), unlinkedFunction);
274     }
275
276     for (size_t i = 0; i < varStack.size(); ++i)
277         codeBlock->addVariableDeclaration(*varStack[i].first, !!(varStack[i].second & DeclarationStacks::IsConstant));
278
279 }
280
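// Function-code constructor. Roughly, it lays out the callee frame in the
// order later passes expect: the optional activation and arguments registers,
// captured parameters and locals (so the activation can mark them without
// stepping over uncaptured ones), lazily creatable function declarations, the
// remaining vars, and finally the parameters, `this`, and the callee's name.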
281 BytecodeGenerator::BytecodeGenerator(JSGlobalData& globalData, JSScope* scope, FunctionBodyNode* functionBody, UnlinkedFunctionCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
282     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
283     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
284     , m_symbolTable(codeBlock->symbolTable())
285     , m_scopeNode(functionBody)
286     , m_scope(globalData, scope)
287     , m_codeBlock(globalData, codeBlock)
288     , m_activationRegister(0)
289     , m_emptyValueRegister(0)
290     , m_globalObjectRegister(0)
291     , m_finallyDepth(0)
292     , m_dynamicScopeDepth(0)
293     , m_codeType(FunctionCode)
294     , m_nextConstantOffset(0)
295     , m_globalConstantIndex(0)
296     , m_hasCreatedActivation(false)
297     , m_firstLazyFunction(0)
298     , m_lastLazyFunction(0)
299     , m_staticPropertyAnalyzer(&m_instructions)
300     , m_globalData(&globalData)
301     , m_lastOpcodeID(op_end)
302 #ifndef NDEBUG
303     , m_lastOpcodePosition(0)
304 #endif
305     , m_stack(wtfThreadData().stack())
306     , m_usesExceptions(false)
307     , m_expressionTooDeep(false)
308 {
309     if (m_shouldEmitDebugHooks)
310         m_codeBlock->setNeedsFullScopeChain(true);
311
312     m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
313     m_symbolTable->setParameterCountIncludingThis(functionBody->parameters()->size() + 1);
314
315     emitOpcode(op_enter);
316     if (m_codeBlock->needsFullScopeChain()) {
317         m_activationRegister = addVar();
318         emitInitLazyRegister(m_activationRegister);
319         m_codeBlock->setActivationRegister(m_activationRegister->index());
320     }
321
322     m_symbolTable->setCaptureStart(m_codeBlock->m_numVars);
323
324     if (functionBody->usesArguments() || codeBlock->usesEval() || m_shouldEmitDebugHooks) { // May reify arguments object.
325         RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
326         RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.
327
328         // We can save a little space by hard-coding the knowledge that the two
329         // 'arguments' values are stored in consecutive registers, and storing
330         // only the index of the assignable one.
331         codeBlock->setArgumentsRegister(argumentsRegister->index());
332         ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));
333
334         emitInitLazyRegister(argumentsRegister);
335         emitInitLazyRegister(unmodifiedArgumentsRegister);
336         
337         if (m_codeBlock->isStrictMode()) {
338             emitOpcode(op_create_arguments);
339             instructions().append(argumentsRegister->index());
340         }
341
342         // The debugger currently retrieves the arguments object from an activation rather than pulling
343         // it from a call frame.  In the long-term it should stop doing that (<rdar://problem/6911886>),
344         // but for now we force eager creation of the arguments object when debugging.
345         if (m_shouldEmitDebugHooks) {
346             emitOpcode(op_create_arguments);
347             instructions().append(argumentsRegister->index());
348         }
349     }
350
351     bool shouldCaptureAllTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();
352
353     bool capturesAnyArgumentByName = false;
354     Vector<RegisterID*> capturedArguments;
355     if (functionBody->hasCapturedVariables() || shouldCaptureAllTheThings) {
356         FunctionParameters& parameters = *functionBody->parameters();
357         capturedArguments.resize(parameters.size());
358         for (size_t i = 0; i < parameters.size(); ++i) {
359             capturedArguments[i] = 0;
360             if (!functionBody->captures(parameters.at(i)) && !shouldCaptureAllTheThings)
361                 continue;
362             capturesAnyArgumentByName = true;
363             capturedArguments[i] = addVar();
364         }
365     }
366
367     if (capturesAnyArgumentByName && !codeBlock->isStrictMode()) {
368         size_t parameterCount = m_symbolTable->parameterCount();
369         OwnArrayPtr<SlowArgument> slowArguments = adoptArrayPtr(new SlowArgument[parameterCount]);
370         for (size_t i = 0; i < parameterCount; ++i) {
371             if (!capturedArguments[i]) {
372                 ASSERT(slowArguments[i].status == SlowArgument::Normal);
373                 slowArguments[i].index = CallFrame::argumentOffset(i);
374                 continue;
375             }
376             slowArguments[i].status = SlowArgument::Captured;
377             slowArguments[i].index = capturedArguments[i]->index();
378         }
379         m_symbolTable->setSlowArguments(slowArguments.release());
380     }
381
382     RegisterID* calleeRegister = resolveCallee(functionBody); // May push to the scope chain and/or add a captured var.
383
384     const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
385     const DeclarationStacks::VarStack& varStack = functionBody->varStack();
386
387     // Captured variables and functions go first so that activations don't have
388     // to step over the non-captured locals to mark them.
389     m_hasCreatedActivation = false;
390     if (functionBody->hasCapturedVariables()) {
391         for (size_t i = 0; i < functionStack.size(); ++i) {
392             FunctionBodyNode* function = functionStack[i];
393             const Identifier& ident = function->ident();
394             if (functionBody->captures(ident)) {
395                 if (!m_hasCreatedActivation) {
396                     m_hasCreatedActivation = true;
397                     emitOpcode(op_create_activation);
398                     instructions().append(m_activationRegister->index());
399                 }
400                 m_functions.add(ident.impl());
401                 emitNewFunction(addVar(ident, false), function);
402             }
403         }
404         for (size_t i = 0; i < varStack.size(); ++i) {
405             const Identifier& ident = *varStack[i].first;
406             if (functionBody->captures(ident))
407                 addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
408         }
409     }
410     bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
411     if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
412         m_hasCreatedActivation = true;
413         emitOpcode(op_create_activation);
414         instructions().append(m_activationRegister->index());
415     }
416
417     m_symbolTable->setCaptureEnd(codeBlock->m_numVars);
418
419     m_firstLazyFunction = codeBlock->m_numVars;
420     for (size_t i = 0; i < functionStack.size(); ++i) {
421         FunctionBodyNode* function = functionStack[i];
422         const Identifier& ident = function->ident();
423         if (!functionBody->captures(ident)) {
424             m_functions.add(ident.impl());
425             RefPtr<RegisterID> reg = addVar(ident, false);
426             // Don't lazily create functions that override the name 'arguments'
427             // as this would complicate lazy instantiation of actual arguments.
428             if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
429                 emitNewFunction(reg.get(), function);
430             else {
431                 emitInitLazyRegister(reg.get());
432                 m_lazyFunctions.set(reg->index(), function);
433             }
434         }
435     }
436     m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
437     for (size_t i = 0; i < varStack.size(); ++i) {
438         const Identifier& ident = *varStack[i].first;
439         if (!functionBody->captures(ident))
440             addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
441     }
442
443     if (shouldCaptureAllTheThings)
444         m_symbolTable->setCaptureEnd(codeBlock->m_numVars);
445
446     FunctionParameters& parameters = *functionBody->parameters();
447     m_parameters.grow(parameters.size() + 1); // reserve space for "this"
448
449     // Add "this" as a parameter
450     int nextParameterIndex = CallFrame::thisArgumentOffset();
451     m_thisRegister.setIndex(nextParameterIndex--);
452     m_codeBlock->addParameter();
453     
454     for (size_t i = 0; i < parameters.size(); ++i, --nextParameterIndex) {
455         int index = nextParameterIndex;
456         if (capturedArguments.size() && capturedArguments[i]) {
457             ASSERT((functionBody->hasCapturedVariables() && functionBody->captures(parameters.at(i))) || shouldCaptureAllTheThings);
458             index = capturedArguments[i]->index();
459             RegisterID original(nextParameterIndex);
460             emitMove(capturedArguments[i], &original);
461         }
462         addParameter(parameters.at(i), index);
463     }
464     preserveLastVar();
465
466     // We declare the callee's name last because it should lose to a var, function, and/or parameter declaration.
467     addCallee(functionBody, calleeRegister);
468
469     if (isConstructor()) {
470         emitCreateThis(&m_thisRegister);
471     } else if (!codeBlock->isStrictMode() && (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks)) {
472         UnlinkedValueProfile profile = emitProfiledOpcode(op_convert_this);
473         instructions().append(kill(&m_thisRegister));
474         instructions().append(profile);
475     }
476 }
477
478 BytecodeGenerator::BytecodeGenerator(JSGlobalData& globalData, JSScope* scope, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
479     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
480     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
481     , m_symbolTable(codeBlock->symbolTable())
482     , m_scopeNode(evalNode)
483     , m_scope(globalData, scope)
484     , m_codeBlock(globalData, codeBlock)
485     , m_thisRegister(CallFrame::thisArgumentOffset())
486     , m_emptyValueRegister(0)
487     , m_globalObjectRegister(0)
488     , m_finallyDepth(0)
489     , m_dynamicScopeDepth(0)
490     , m_codeType(EvalCode)
491     , m_nextConstantOffset(0)
492     , m_globalConstantIndex(0)
493     , m_hasCreatedActivation(true)
494     , m_firstLazyFunction(0)
495     , m_lastLazyFunction(0)
496     , m_staticPropertyAnalyzer(&m_instructions)
497     , m_globalData(&globalData)
498     , m_lastOpcodeID(op_end)
499 #ifndef NDEBUG
500     , m_lastOpcodePosition(0)
501 #endif
502     , m_stack(wtfThreadData().stack())
503     , m_usesExceptions(false)
504     , m_expressionTooDeep(false)
505 {
506     m_codeBlock->setNeedsFullScopeChain(true);
507
508     m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
509     m_codeBlock->setNumParameters(1);
510
511     emitOpcode(op_enter);
512
513     const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
514     for (size_t i = 0; i < functionStack.size(); ++i)
515         m_codeBlock->addFunctionDecl(makeFunction(functionStack[i]));
516
517     const DeclarationStacks::VarStack& varStack = evalNode->varStack();
518     unsigned numVariables = varStack.size();
519     Vector<Identifier> variables;
520     variables.reserveCapacity(numVariables);
521     for (size_t i = 0; i < numVariables; ++i)
522         variables.append(*varStack[i].first);
523     codeBlock->adoptVariables(variables);
524     preserveLastVar();
525 }
526
527 BytecodeGenerator::~BytecodeGenerator()
528 {
529 }
530
531 RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
532 {
533     emitOpcode(op_init_lazy_reg);
534     instructions().append(reg->index());
535     return reg;
536 }
537
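// Decides where the callee's own name lives for a named function. Returns 0
// when no binding is needed or when non-strict eval / debugging forces a
// separate name scope object; otherwise the name binds to the callee register
// itself, or to a captured copy if the function body captures its own name.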
538 RegisterID* BytecodeGenerator::resolveCallee(FunctionBodyNode* functionBodyNode)
539 {
540     if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
541         return 0;
542
543     m_calleeRegister.setIndex(JSStack::Callee);
544
545     // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
546     if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks) {
547         emitOpcode(op_push_name_scope);
548         instructions().append(addConstant(functionBodyNode->ident()));
549         instructions().append(m_calleeRegister.index());
550         instructions().append(ReadOnly | DontDelete);
551         return 0;
552     }
553
554     if (!functionBodyNode->captures(functionBodyNode->ident()))
555         return &m_calleeRegister;
556
557     // Move the callee into the captured section of the stack.
558     return emitMove(addVar(), &m_calleeRegister);
559 }
560
561 void BytecodeGenerator::addCallee(FunctionBodyNode* functionBodyNode, RegisterID* calleeRegister)
562 {
563     if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
564         return;
565
566     // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
567     if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks)
568         return;
569
570     ASSERT(calleeRegister);
571     symbolTable().add(functionBodyNode->ident().impl(), SymbolTableEntry(calleeRegister->index(), ReadOnly));
572 }
573
574 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
575 {
576     // Parameters overwrite var declarations, but not function declarations.
577     StringImpl* rep = ident.impl();
578     if (!m_functions.contains(rep)) {
579         symbolTable().set(rep, parameterIndex);
580         RegisterID& parameter = registerFor(parameterIndex);
581         parameter.setIndex(parameterIndex);
582     }
583
584     // To maintain the calling convention, we have to allocate unique space for
585     // each parameter, even if the parameter doesn't make it into the symbol table.
586     m_codeBlock->addParameter();
587 }
588
589 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
590 {
591     if (ident != propertyNames().arguments)
592         return false;
593     
594     if (!shouldOptimizeLocals())
595         return false;
596     
597     SymbolTableEntry entry = symbolTable().get(ident.impl());
598     if (entry.isNull())
599         return false;
600
601     if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
602         return true;
603     
604     return false;
605 }
606
607 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
608 {
609     ASSERT(willResolveToArguments(propertyNames().arguments));
610
611     SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
612     ASSERT(!entry.isNull());
613     return &registerFor(entry.getIndex());
614 }
615
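// Uncaptured function declarations can be materialized lazily: the register
// is only tagged at declaration time, and the closure-creating opcode is
// emitted here, when the register is actually resolved for use.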
616 RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
617 {
618     if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
619         return reg;
620     emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
621     return reg;
622 }
623
624 RegisterID* BytecodeGenerator::newRegister()
625 {
626     m_calleeRegisters.append(m_calleeRegisters.size());
627     m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
628     return &m_calleeRegisters.last();
629 }
630
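// Temporaries are allocated stack-style (see the register layout comment at
// the top of this file): trailing registers whose ref count has dropped to
// zero are reclaimed before a fresh one is handed out.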
631 RegisterID* BytecodeGenerator::newTemporary()
632 {
633     // Reclaim free register IDs.
634     while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
635         m_calleeRegisters.removeLast();
636         
637     RegisterID* result = newRegister();
638     result->setTemporary();
639     return result;
640 }
641
642 LabelScopePtr BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
643 {
644     // Reclaim free label scopes.
645     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
646         m_labelScopes.removeLast();
647
648     // Allocate new label scope.
649     LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
650     m_labelScopes.append(scope);
651     return LabelScopePtr(&m_labelScopes, m_labelScopes.size() - 1);
652 }
653
654 PassRefPtr<Label> BytecodeGenerator::newLabel()
655 {
656     // Reclaim free label IDs.
657     while (m_labels.size() && !m_labels.last().refCount())
658         m_labels.removeLast();
659
660     // Allocate new label ID.
661     m_labels.append(this);
662     return &m_labels.last();
663 }
664
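// Binds a label to the current instruction offset and records it as a jump
// target; jump targets disable peephole fusion across the label by resetting
// m_lastOpcodeID.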
665 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
666 {
667     unsigned newLabelIndex = instructions().size();
668     l0->setLocation(newLabelIndex);
669
670     if (m_codeBlock->numberOfJumpTargets()) {
671         unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
672         ASSERT(lastLabelIndex <= newLabelIndex);
673         if (newLabelIndex == lastLabelIndex) {
674             // Peephole optimizations have already been disabled by emitting the last label
675             return l0;
676         }
677     }
678
679     m_codeBlock->addJumpTarget(newLabelIndex);
680
681     // This disables peephole optimizations when an instruction is a jump target
682     m_lastOpcodeID = op_end;
683     return l0;
684 }
685
686 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
687 {
688 #ifndef NDEBUG
689     size_t opcodePosition = instructions().size();
690     ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
691     m_lastOpcodePosition = opcodePosition;
692 #endif
693     instructions().append(opcodeID);
694     m_lastOpcodeID = opcodeID;
695 }
696
697 UnlinkedArrayProfile BytecodeGenerator::newArrayProfile()
698 {
699 #if ENABLE(VALUE_PROFILER)
700     return m_codeBlock->addArrayProfile();
701 #else
702     return 0;
703 #endif
704 }
705
706 UnlinkedArrayAllocationProfile BytecodeGenerator::newArrayAllocationProfile()
707 {
708 #if ENABLE(VALUE_PROFILER)
709     return m_codeBlock->addArrayAllocationProfile();
710 #else
711     return 0;
712 #endif
713 }
714
715 UnlinkedObjectAllocationProfile BytecodeGenerator::newObjectAllocationProfile()
716 {
717     return m_codeBlock->addObjectAllocationProfile();
718 }
719
720 UnlinkedValueProfile BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
721 {
722 #if ENABLE(VALUE_PROFILER)
723     UnlinkedValueProfile result = m_codeBlock->addValueProfile();
724 #else
725     UnlinkedValueProfile result = 0;
726 #endif
727     emitOpcode(opcodeID);
728     return result;
729 }
730
731 void BytecodeGenerator::emitLoopHint()
732 {
733 #if ENABLE(DFG_JIT)
734     emitOpcode(op_loop_hint);
735 #endif
736 }
737
738 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
739 {
740     ASSERT(instructions().size() >= 4);
741     size_t size = instructions().size();
742     dstIndex = instructions().at(size - 3).u.operand;
743     src1Index = instructions().at(size - 2).u.operand;
744     src2Index = instructions().at(size - 1).u.operand;
745 }
746
747 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
748 {
749     ASSERT(instructions().size() >= 3);
750     size_t size = instructions().size();
751     dstIndex = instructions().at(size - 2).u.operand;
752     srcIndex = instructions().at(size - 1).u.operand;
753 }
754
755 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
756 {
757     ASSERT(instructions().size() >= 4);
758     instructions().shrink(instructions().size() - 4);
759     m_lastOpcodeID = op_end;
760 }
761
762 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
763 {
764     ASSERT(instructions().size() >= 3);
765     instructions().shrink(instructions().size() - 3);
766     m_lastOpcodeID = op_end;
767 }
768
769 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
770 {
771     size_t begin = instructions().size();
772     emitOpcode(target->isForward() ? op_jmp : op_loop);
773     instructions().append(target->bind(begin, instructions().size()));
774     return target;
775 }
776
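// Peephole-fuses a preceding comparison or null test with the conditional
// jump when its result is a dead temporary: the compare is rewound and
// replaced by a fused opcode (e.g. op_less followed by a branch becomes
// op_jless); otherwise a plain op_jtrue / op_loop_if_true is emitted.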
777 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
778 {
779     if (m_lastOpcodeID == op_less) {
780         int dstIndex;
781         int src1Index;
782         int src2Index;
783
784         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
785
786         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
787             rewindBinaryOp();
788
789             size_t begin = instructions().size();
790             emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
791             instructions().append(src1Index);
792             instructions().append(src2Index);
793             instructions().append(target->bind(begin, instructions().size()));
794             return target;
795         }
796     } else if (m_lastOpcodeID == op_lesseq) {
797         int dstIndex;
798         int src1Index;
799         int src2Index;
800
801         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
802
803         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
804             rewindBinaryOp();
805
806             size_t begin = instructions().size();
807             emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
808             instructions().append(src1Index);
809             instructions().append(src2Index);
810             instructions().append(target->bind(begin, instructions().size()));
811             return target;
812         }
813     } else if (m_lastOpcodeID == op_greater) {
814         int dstIndex;
815         int src1Index;
816         int src2Index;
817
818         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
819
820         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
821             rewindBinaryOp();
822
823             size_t begin = instructions().size();
824             emitOpcode(target->isForward() ? op_jgreater : op_loop_if_greater);
825             instructions().append(src1Index);
826             instructions().append(src2Index);
827             instructions().append(target->bind(begin, instructions().size()));
828             return target;
829         }
830     } else if (m_lastOpcodeID == op_greatereq) {
831         int dstIndex;
832         int src1Index;
833         int src2Index;
834
835         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
836
837         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
838             rewindBinaryOp();
839
840             size_t begin = instructions().size();
841             emitOpcode(target->isForward() ? op_jgreatereq : op_loop_if_greatereq);
842             instructions().append(src1Index);
843             instructions().append(src2Index);
844             instructions().append(target->bind(begin, instructions().size()));
845             return target;
846         }
847     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
848         int dstIndex;
849         int srcIndex;
850
851         retrieveLastUnaryOp(dstIndex, srcIndex);
852
853         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
854             rewindUnaryOp();
855
856             size_t begin = instructions().size();
857             emitOpcode(op_jeq_null);
858             instructions().append(srcIndex);
859             instructions().append(target->bind(begin, instructions().size()));
860             return target;
861         }
862     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
863         int dstIndex;
864         int srcIndex;
865
866         retrieveLastUnaryOp(dstIndex, srcIndex);
867
868         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
869             rewindUnaryOp();
870
871             size_t begin = instructions().size();
872             emitOpcode(op_jneq_null);
873             instructions().append(srcIndex);
874             instructions().append(target->bind(begin, instructions().size()));
875             return target;
876         }
877     }
878
879     size_t begin = instructions().size();
880
881     emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
882     instructions().append(cond->index());
883     instructions().append(target->bind(begin, instructions().size()));
884     return target;
885 }
886
887 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
888 {
889     if (m_lastOpcodeID == op_less && target->isForward()) {
890         int dstIndex;
891         int src1Index;
892         int src2Index;
893
894         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
895
896         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
897             rewindBinaryOp();
898
899             size_t begin = instructions().size();
900             emitOpcode(op_jnless);
901             instructions().append(src1Index);
902             instructions().append(src2Index);
903             instructions().append(target->bind(begin, instructions().size()));
904             return target;
905         }
906     } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
907         int dstIndex;
908         int src1Index;
909         int src2Index;
910
911         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
912
913         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
914             rewindBinaryOp();
915
916             size_t begin = instructions().size();
917             emitOpcode(op_jnlesseq);
918             instructions().append(src1Index);
919             instructions().append(src2Index);
920             instructions().append(target->bind(begin, instructions().size()));
921             return target;
922         }
923     } else if (m_lastOpcodeID == op_greater && target->isForward()) {
924         int dstIndex;
925         int src1Index;
926         int src2Index;
927
928         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
929
930         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
931             rewindBinaryOp();
932
933             size_t begin = instructions().size();
934             emitOpcode(op_jngreater);
935             instructions().append(src1Index);
936             instructions().append(src2Index);
937             instructions().append(target->bind(begin, instructions().size()));
938             return target;
939         }
940     } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
941         int dstIndex;
942         int src1Index;
943         int src2Index;
944
945         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
946
947         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
948             rewindBinaryOp();
949
950             size_t begin = instructions().size();
951             emitOpcode(op_jngreatereq);
952             instructions().append(src1Index);
953             instructions().append(src2Index);
954             instructions().append(target->bind(begin, instructions().size()));
955             return target;
956         }
957     } else if (m_lastOpcodeID == op_not) {
958         int dstIndex;
959         int srcIndex;
960
961         retrieveLastUnaryOp(dstIndex, srcIndex);
962
963         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
964             rewindUnaryOp();
965
966             size_t begin = instructions().size();
967             emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
968             instructions().append(srcIndex);
969             instructions().append(target->bind(begin, instructions().size()));
970             return target;
971         }
972     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
973         int dstIndex;
974         int srcIndex;
975
976         retrieveLastUnaryOp(dstIndex, srcIndex);
977
978         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
979             rewindUnaryOp();
980
981             size_t begin = instructions().size();
982             emitOpcode(op_jneq_null);
983             instructions().append(srcIndex);
984             instructions().append(target->bind(begin, instructions().size()));
985             return target;
986         }
987     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
988         int dstIndex;
989         int srcIndex;
990
991         retrieveLastUnaryOp(dstIndex, srcIndex);
992
993         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
994             rewindUnaryOp();
995
996             size_t begin = instructions().size();
997             emitOpcode(op_jeq_null);
998             instructions().append(srcIndex);
999             instructions().append(target->bind(begin, instructions().size()));
1000             return target;
1001         }
1002     }
1003
1004     size_t begin = instructions().size();
1005     emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
1006     instructions().append(cond->index());
1007     instructions().append(target->bind(begin, instructions().size()));
1008     return target;
1009 }
1010
1011 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
1012 {
1013     size_t begin = instructions().size();
1014
1015     emitOpcode(op_jneq_ptr);
1016     instructions().append(cond->index());
1017     instructions().append(Special::CallFunction);
1018     instructions().append(target->bind(begin, instructions().size()));
1019     return target;
1020 }
1021
1022 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
1023 {
1024     size_t begin = instructions().size();
1025
1026     emitOpcode(op_jneq_ptr);
1027     instructions().append(cond->index());
1028     instructions().append(Special::ApplyFunction);
1029     instructions().append(target->bind(begin, instructions().size()));
1030     return target;
1031 }
1032
1033 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
1034 {
1035     StringImpl* rep = ident.impl();
1036     IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
1037     if (result.isNewEntry)
1038         m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
1039
1040     return result.iterator->value;
1041 }
1042
1043 // We can't hash JSValue(), so we use a dedicated data member to cache it.
1044 RegisterID* BytecodeGenerator::addConstantEmptyValue()
1045 {
1046     if (!m_emptyValueRegister) {
1047         int index = m_nextConstantOffset;
1048         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1049         ++m_nextConstantOffset;
1050         m_codeBlock->addConstant(JSValue());
1051         m_emptyValueRegister = &m_constantPoolRegisters[index];
1052     }
1053
1054     return m_emptyValueRegister;
1055 }
1056
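// Interns a JSValue in the constant pool: each distinct value gets a single
// constant register, and later loads of an equal value reuse it.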
1057 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
1058 {
1059     if (!v)
1060         return addConstantEmptyValue();
1061
1062     int index = m_nextConstantOffset;
1063     JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
1064     if (result.isNewEntry) {
1065         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1066         ++m_nextConstantOffset;
1067         m_codeBlock->addConstant(v);
1068     } else
1069         index = result.iterator->value;
1070     return &m_constantPoolRegisters[index];
1071 }
1072
1073 unsigned BytecodeGenerator::addRegExp(RegExp* r)
1074 {
1075     return m_codeBlock->addRegExp(r);
1076 }
1077
1078 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
1079 {
1080     m_staticPropertyAnalyzer.mov(dst->index(), src->index());
1081
1082     emitOpcode(op_mov);
1083     instructions().append(dst->index());
1084     instructions().append(src->index());
1085     return dst;
1086 }
1087
1088 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
1089 {
1090     emitOpcode(opcodeID);
1091     instructions().append(dst->index());
1092     instructions().append(src->index());
1093     return dst;
1094 }
1095
1096 RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
1097 {
1098     emitOpcode(op_pre_inc);
1099     instructions().append(srcDst->index());
1100     return srcDst;
1101 }
1102
1103 RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
1104 {
1105     emitOpcode(op_pre_dec);
1106     instructions().append(srcDst->index());
1107     return srcDst;
1108 }
1109
1110 RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
1111 {
1112     emitOpcode(op_post_inc);
1113     instructions().append(dst->index());
1114     instructions().append(srcDst->index());
1115     return dst;
1116 }
1117
1118 RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
1119 {
1120     emitOpcode(op_post_dec);
1121     instructions().append(dst->index());
1122     instructions().append(srcDst->index());
1123     return dst;
1124 }
1125
1126 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1127 {
1128     emitOpcode(opcodeID);
1129     instructions().append(dst->index());
1130     instructions().append(src1->index());
1131     instructions().append(src2->index());
1132
1133     if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
1134         opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
1135         instructions().append(types.toInt());
1136
1137     return dst;
1138 }
1139
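// Recognizes the `typeof x == "type"` idiom: when the left operand is the
// dead result of an op_typeof and the right operand is a constant string
// naming a known type, the comparison collapses into a single op_is_*
// instruction.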
1140 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1141 {
1142     if (m_lastOpcodeID == op_typeof) {
1143         int dstIndex;
1144         int srcIndex;
1145
1146         retrieveLastUnaryOp(dstIndex, srcIndex);
1147
1148         if (src1->index() == dstIndex
1149             && src1->isTemporary()
1150             && m_codeBlock->isConstantRegisterIndex(src2->index())
1151             && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1152             const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1153             if (value == "undefined") {
1154                 rewindUnaryOp();
1155                 emitOpcode(op_is_undefined);
1156                 instructions().append(dst->index());
1157                 instructions().append(srcIndex);
1158                 return dst;
1159             }
1160             if (value == "boolean") {
1161                 rewindUnaryOp();
1162                 emitOpcode(op_is_boolean);
1163                 instructions().append(dst->index());
1164                 instructions().append(srcIndex);
1165                 return dst;
1166             }
1167             if (value == "number") {
1168                 rewindUnaryOp();
1169                 emitOpcode(op_is_number);
1170                 instructions().append(dst->index());
1171                 instructions().append(srcIndex);
1172                 return dst;
1173             }
1174             if (value == "string") {
1175                 rewindUnaryOp();
1176                 emitOpcode(op_is_string);
1177                 instructions().append(dst->index());
1178                 instructions().append(srcIndex);
1179                 return dst;
1180             }
1181             if (value == "object") {
1182                 rewindUnaryOp();
1183                 emitOpcode(op_is_object);
1184                 instructions().append(dst->index());
1185                 instructions().append(srcIndex);
1186                 return dst;
1187             }
1188             if (value == "function") {
1189                 rewindUnaryOp();
1190                 emitOpcode(op_is_function);
1191                 instructions().append(dst->index());
1192                 instructions().append(srcIndex);
1193                 return dst;
1194             }
1195         }
1196     }
1197
1198     emitOpcode(opcodeID);
1199     instructions().append(dst->index());
1200     instructions().append(src1->index());
1201     instructions().append(src2->index());
1202     return dst;
1203 }
1204
1205 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1206 {
1207     return emitLoad(dst, jsBoolean(b));
1208 }
1209
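// Numeric literals are cached per generator so repeated constants share one
// pool entry; NaN and the hash table's empty/deleted sentinel values cannot
// be used as keys, so those numbers get a fresh constant each time.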
1210 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1211 {
1212     // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1213     // Later we can do the extra work to handle that like the other cases.  They also don't
1214     // work correctly with NaN as a key.
1215     if (std::isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1216         return emitLoad(dst, jsNumber(number));
1217     JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->value;
1218     if (!valueInMap)
1219         valueInMap = jsNumber(number);
1220     return emitLoad(dst, valueInMap);
1221 }
1222
1223 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1224 {
1225     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->value;
1226     if (!stringInMap)
1227         stringInMap = jsOwnedString(globalData(), identifier.string());
1228     return emitLoad(dst, JSValue(stringInMap));
1229 }
1230
1231 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1232 {
1233     RegisterID* constantID = addConstantValue(v);
1234     if (dst)
1235         return emitMove(dst, constantID);
1236     return constantID;
1237 }
1238
1239 RegisterID* BytecodeGenerator::emitLoadGlobalObject(RegisterID* dst)
1240 {
1241     if (!m_globalObjectRegister) {
1242         int index = m_nextConstantOffset;
1243         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1244         ++m_nextConstantOffset;
1245         m_codeBlock->addConstant(JSValue());
1246         m_globalObjectRegister = &m_constantPoolRegisters[index];
1247         m_codeBlock->setGlobalObjectRegister(index);
1248     }
1249     if (dst)
1250         emitMove(dst, m_globalObjectRegister);
1251     return m_globalObjectRegister;
1252 }
1253
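// Classifies how an identifier can be resolved at bytecode generation time:
// a local register (possibly read-only), a lexical slot at a known scope
// depth, or a fully dynamic lookup when `arguments`, eval, dynamic scopes, or
// the code type rule out static knowledge.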
1254 ResolveResult BytecodeGenerator::resolve(const Identifier& property)
1255 {
1256     if (property == propertyNames().thisIdentifier)
1257         return ResolveResult::registerResolve(thisRegister(), ResolveResult::ReadOnlyFlag);
1258
1259     // Check if the property should be allocated in a register.
1260     if (m_codeType != GlobalCode && shouldOptimizeLocals() && m_symbolTable) {
1261         SymbolTableEntry entry = symbolTable().get(property.impl());
1262         if (!entry.isNull()) {
1263             if (property == propertyNames().arguments)
1264                 createArgumentsIfNecessary();
1265             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1266             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1267             return ResolveResult::registerResolve(local, flags);
1268         }
1269     }
1270     // Cases where we cannot statically optimize the lookup.
1271     if (property == propertyNames().arguments || !canOptimizeNonLocals())
1272         return ResolveResult::dynamicResolve();
1273
1274     if (!m_scope || m_codeType != FunctionCode)
1275         return ResolveResult::dynamicResolve();
1276
1277     ScopeChainIterator iter = m_scope->begin();
1278     ScopeChainIterator end = m_scope->end();
1279     size_t depth = m_codeBlock->needsFullScopeChain();
1280     unsigned flags = 0;
1281     for (; iter != end; ++iter, ++depth) {
1282         JSObject* currentScope = iter.get();
1283         if (!currentScope->isVariableObject())
1284             return ResolveResult::dynamicResolve();
1285
1286         JSSymbolTableObject* currentVariableObject = jsCast<JSSymbolTableObject*>(currentScope);
1287         SymbolTableEntry entry = currentVariableObject->symbolTable()->get(property.impl());
1288
1289         // Found the property
1290         if (!entry.isNull()) {
1291             if (entry.isReadOnly())
1292                 flags |= ResolveResult::ReadOnlyFlag;
1293             if (++iter == end)
1294                 return ResolveResult::dynamicResolve();
1295 #if !ASSERT_DISABLED
1296             if (JSActivation* activation = jsDynamicCast<JSActivation*>(currentVariableObject))
1297                 ASSERT(activation->isValid(entry));
1298 #endif
1299             return ResolveResult::lexicalResolve(entry.getIndex(), depth, flags);
1300         }
1301         bool scopeRequiresDynamicChecks = false;
1302         if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
1303             break;
1304         if (scopeRequiresDynamicChecks)
1305             flags |= ResolveResult::DynamicFlag;
1306     }
1307
1308     return ResolveResult::dynamicResolve();
1309 }
1310
1311 ResolveResult BytecodeGenerator::resolveConstDecl(const Identifier& property)
1312 {
1313     // Register-allocated const declarations.
1314     if (m_codeType == FunctionCode && m_symbolTable) {
1315         SymbolTableEntry entry = symbolTable().get(property.impl());
1316         if (!entry.isNull()) {
1317             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1318             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1319             return ResolveResult::registerResolve(local, flags);
1320         }
1321     }
1322
1323     return ResolveResult::dynamicResolve();
1324 }
1325
1326 void BytecodeGenerator::emitCheckHasInstance(RegisterID* dst, RegisterID* value, RegisterID* base, Label* target)
1327 {
1328     size_t begin = instructions().size();
1329     emitOpcode(op_check_has_instance);
1330     instructions().append(dst->index());
1331     instructions().append(value->index());
1332     instructions().append(base->index());
1333     instructions().append(target->bind(begin, instructions().size()));
1334 }
1335
1336 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
1337 {
1338     emitOpcode(op_instanceof);
1339     instructions().append(dst->index());
1340     instructions().append(value->index());
1341     instructions().append(basePrototype->index());
1342     return dst;
1343 }
1344
1345 bool BytecodeGenerator::shouldAvoidResolveGlobal()
1346 {
1347     return !m_labelScopes.size();
1348 }
1349
1350 RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1351 {
1352
1353     if (resolveResult.isStatic())
1354         return emitGetStaticVar(dst, resolveResult, property);
1355
1356     UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve);
1357     instructions().append(kill(dst));
1358     instructions().append(addConstant(property));
1359     instructions().append(getResolveOperations(property));
1360     instructions().append(profile);
1361     return dst;
1362 }
1363
1364 RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1365 {
1366     if (!resolveResult.isDynamic()) {
1367         // Global object is the base
1368         return emitLoadGlobalObject(dst);
1369     }
1370
1371     // We can't optimise at all :-(
1372     UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_base);
1373     instructions().append(kill(dst));
1374     instructions().append(addConstant(property));
1375     instructions().append(false);
1376     instructions().append(getResolveBaseOperations(property));
1377     instructions().append(0);
1378     instructions().append(profile);
1379     return dst;
1380 }
1381
1382 RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const ResolveResult&, const Identifier& property, NonlocalResolveInfo& verifier)
1383 {
1384     // We can't optimise at all :-(
1385     UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_base);
1386     instructions().append(kill(dst));
1387     instructions().append(addConstant(property));
1388     instructions().append(m_codeBlock->isStrictMode());
1389     uint32_t putToBaseIndex = 0;
1390     instructions().append(getResolveBaseForPutOperations(property, putToBaseIndex));
1391     verifier.resolved(putToBaseIndex);
1392     instructions().append(putToBaseIndex);
1393     instructions().append(profile);
1394     return dst;
1395 }
1396
1397 RegisterID* BytecodeGenerator::emitResolveWithBaseForPut(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property, NonlocalResolveInfo& verifier)
1398 {
1399     ASSERT_UNUSED(resolveResult, !resolveResult.isStatic());
1400     UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_with_base);
1401     instructions().append(kill(baseDst));
1402     instructions().append(propDst->index());
1403     instructions().append(addConstant(property));
1404     uint32_t putToBaseIndex = 0;
1405     instructions().append(getResolveWithBaseForPutOperations(property, putToBaseIndex));
1406     verifier.resolved(putToBaseIndex);
1407     instructions().append(putToBaseIndex);
1408     instructions().append(profile);
1409     return baseDst;
1410 }
1411
1412 RegisterID* BytecodeGenerator::emitResolveWithThis(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1413 {
1414     if (resolveResult.isStatic()) {
1415         emitLoad(baseDst, jsUndefined());
1416         emitGetStaticVar(propDst, resolveResult, property);
1417         return baseDst;
1418     }
1419
1420     UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_with_this);
1421     instructions().append(kill(baseDst));
1422     instructions().append(propDst->index());
1423     instructions().append(addConstant(property));
1424     instructions().append(getResolveWithThisOperations(property));
1425     instructions().append(profile);
1426     return baseDst;
1427 }
1428
1429 RegisterID* BytecodeGenerator::emitGetStaticVar(RegisterID* dst, const ResolveResult& resolveResult, const Identifier&)
1430 {
1431     ASSERT(m_codeType == FunctionCode);
1432     switch (resolveResult.type()) {
1433     case ResolveResult::Register:
1434     case ResolveResult::ReadOnlyRegister:
1435         if (dst == ignoredResult())
1436             return 0;
1437         return moveToDestinationIfNeeded(dst, resolveResult.local());
1438
1439     case ResolveResult::Lexical:
1440     case ResolveResult::ReadOnlyLexical: {
1441         UnlinkedValueProfile profile = emitProfiledOpcode(op_get_scoped_var);
1442         instructions().append(dst->index());
1443         instructions().append(resolveResult.index());
1444         instructions().append(resolveResult.depth());
1445         instructions().append(profile);
1446         return dst;
1447     }
1448
1449     default:
1450         RELEASE_ASSERT_NOT_REACHED();
1451         return 0;
1452     }
1453 }
1454
1455 RegisterID* BytecodeGenerator::emitInitGlobalConst(const Identifier& identifier, RegisterID* value)
1456 {
1457     ASSERT(m_codeType == GlobalCode);
1458     emitOpcode(op_init_global_const_nop);
1459     instructions().append(0);
1460     instructions().append(value->index());
1461     instructions().append(0);
1462     instructions().append(addConstant(identifier));
1463     return value;
1464 }
1465
1466 RegisterID* BytecodeGenerator::emitPutStaticVar(const ResolveResult& resolveResult, const Identifier&, RegisterID* value)
1467 {
1468     ASSERT(m_codeType == FunctionCode);
1469     switch (resolveResult.type()) {
1470     case ResolveResult::Register:
1471     case ResolveResult::ReadOnlyRegister:
1472         return moveToDestinationIfNeeded(resolveResult.local(), value);
1473
1474     case ResolveResult::Lexical:
1475     case ResolveResult::ReadOnlyLexical:
1476         emitOpcode(op_put_scoped_var);
1477         instructions().append(resolveResult.index());
1478         instructions().append(resolveResult.depth());
1479         instructions().append(value->index());
1480         return value;
1481
1482     default:
1483         RELEASE_ASSERT_NOT_REACHED();
1484         return 0;
1485     }
1486 }
1487
1488 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1489 {
1490     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1491
1492     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_id);
1493     instructions().append(kill(dst));
1494     instructions().append(base->index());
1495     instructions().append(addConstant(property));
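    // The next four operands are left as zero here; they are reserved slots
    // that the engine fills in at link/run time for get_by_id property-access
    // caching.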
1496     instructions().append(0);
1497     instructions().append(0);
1498     instructions().append(0);
1499     instructions().append(0);
1500     instructions().append(profile);
1501     return dst;
1502 }
1503
1504 RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
1505 {
1506     emitOpcode(op_get_arguments_length);
1507     instructions().append(dst->index());
1508     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1509     instructions().append(base->index());
1510     instructions().append(addConstant(propertyNames().length));
1511     return dst;
1512 }
1513
1514 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1515 {
1516     unsigned propertyIndex = addConstant(property);
1517
1518     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1519
1520     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1521
1522     emitOpcode(op_put_by_id);
1523     instructions().append(base->index());
1524     instructions().append(propertyIndex);
1525     instructions().append(value->index());
1526     instructions().append(0);
1527     instructions().append(0);
1528     instructions().append(0);
1529     instructions().append(0);
1530     instructions().append(0);
1531     return value;
1532 }
1533
1534 RegisterID* BytecodeGenerator::emitPutToBase(RegisterID* base, const Identifier& property, RegisterID* value, NonlocalResolveInfo& resolveInfo)
1535 {
1536     emitOpcode(op_put_to_base);
1537     instructions().append(base->index());
1538     instructions().append(addConstant(property));
1539     instructions().append(value->index());
1540     instructions().append(resolveInfo.put());
1541     return value;
1542 }
1543
1544 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1545 {
1546     unsigned propertyIndex = addConstant(property);
1547
1548     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1549
1550     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1551     
1552     emitOpcode(op_put_by_id);
1553     instructions().append(base->index());
1554     instructions().append(propertyIndex);
1555     instructions().append(value->index());
1556     instructions().append(0);
1557     instructions().append(0);
1558     instructions().append(0);
1559     instructions().append(0);
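    // The final operand is a flag that is true only for ordinary non-index
    // property names other than __proto__ (the cases where the direct put can
    // take the fast path); emitPutById above always passes 0 in this slot.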
1560     instructions().append(
1561         property != m_globalData->propertyNames->underscoreProto
1562         && PropertyName(property).asIndex() == PropertyName::NotAnIndex);
1563     return value;
1564 }
1565
1566 void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1567 {
1568     unsigned propertyIndex = addConstant(property);
1569
1570     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1571
1572     emitOpcode(op_put_getter_setter);
1573     instructions().append(base->index());
1574     instructions().append(propertyIndex);
1575     instructions().append(getter->index());
1576     instructions().append(setter->index());
1577 }
1578
1579 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1580 {
1581     emitOpcode(op_del_by_id);
1582     instructions().append(dst->index());
1583     instructions().append(base->index());
1584     instructions().append(addConstant(property));
1585     return dst;
1586 }
1587
1588 RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1589 {
1590     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1591     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_argument_by_val);
1592     instructions().append(kill(dst));
1593     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1594     instructions().append(base->index());
1595     instructions().append(property->index());
1596     instructions().append(arrayProfile);
1597     instructions().append(profile);
1598     return dst;
1599 }
1600
1601 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1602 {
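    // If 'property' is the register holding the current name of an enclosing
    // for-in enumeration, emit op_get_by_pname instead: it carries the
    // enumeration's expected base, iterator and index registers so the lookup
    // can reuse the cached enumeration index.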
1603     for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1604         ForInContext& context = m_forInContextStack[i - 1];
1605         if (context.propertyRegister == property) {
1606             emitOpcode(op_get_by_pname);
1607             instructions().append(dst->index());
1608             instructions().append(base->index());
1609             instructions().append(property->index());
1610             instructions().append(context.expectedSubscriptRegister->index());
1611             instructions().append(context.iterRegister->index());
1612             instructions().append(context.indexRegister->index());
1613             return dst;
1614         }
1615     }
1616     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1617     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_val);
1618     instructions().append(kill(dst));
1619     instructions().append(base->index());
1620     instructions().append(property->index());
1621     instructions().append(arrayProfile);
1622     instructions().append(profile);
1623     return dst;
1624 }
1625
1626 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1627 {
1628     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1629     emitOpcode(op_put_by_val);
1630     instructions().append(base->index());
1631     instructions().append(property->index());
1632     instructions().append(value->index());
1633     instructions().append(arrayProfile);
1634     return value;
1635 }
1636
1637 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1638 {
1639     emitOpcode(op_del_by_val);
1640     instructions().append(dst->index());
1641     instructions().append(base->index());
1642     instructions().append(property->index());
1643     return dst;
1644 }
1645
1646 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1647 {
1648     emitOpcode(op_put_by_index);
1649     instructions().append(base->index());
1650     instructions().append(index);
1651     instructions().append(value->index());
1652     return value;
1653 }
1654
1655 RegisterID* BytecodeGenerator::emitCreateThis(RegisterID* dst)
1656 {
1657     RefPtr<RegisterID> func = newTemporary(); 
1658
1659     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_callee);
1660     instructions().append(func->index());
1661     instructions().append(profile);
1662
1663     size_t begin = instructions().size();
1664     m_staticPropertyAnalyzer.createThis(m_thisRegister.index(), begin + 3);
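    // 'begin + 3' is the offset of op_create_this's trailing zero operand,
    // recorded so the StaticPropertyAnalyzer can later back-patch an
    // inline-capacity hint into that slot (emitNewObject below does the same
    // with 'begin + 2').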
1665
1666     emitOpcode(op_create_this); 
1667     instructions().append(m_thisRegister.index()); 
1668     instructions().append(func->index()); 
1669     instructions().append(0);
1670     return dst;
1671 }
1672
1673 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1674 {
1675     size_t begin = instructions().size();
1676     m_staticPropertyAnalyzer.newObject(dst->index(), begin + 2);
1677
1678     emitOpcode(op_new_object);
1679     instructions().append(dst->index());
1680     instructions().append(0);
1681     instructions().append(newObjectAllocationProfile());
1682     return dst;
1683 }
1684
1685 unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
1686 {
1687     return m_codeBlock->addConstantBuffer(length);
1688 }
1689
1690 JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1691 {
1692     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->value;
1693     if (!stringInMap) {
1694         stringInMap = jsString(globalData(), identifier.string());
1695         addConstantValue(stringInMap);
1696     }
1697     return stringInMap;
1698 }
1699
1700 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
1701 {
1702 #if !ASSERT_DISABLED
1703     unsigned checkLength = 0;
1704 #endif
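    // Fast path: an array literal whose elements (up to 'length') are all
    // numeric or string constants with no elisions can be emitted as
    // op_new_array_buffer backed by a constant buffer. Anything else falls
    // through to the generic op_new_array emission below.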
1705     bool hadVariableExpression = false;
1706     if (length) {
1707         for (ElementNode* n = elements; n; n = n->next()) {
1708             if (!n->value()->isNumber() && !n->value()->isString()) {
1709                 hadVariableExpression = true;
1710                 break;
1711             }
1712             if (n->elision())
1713                 break;
1714 #if !ASSERT_DISABLED
1715             checkLength++;
1716 #endif
1717         }
1718         if (!hadVariableExpression) {
1719             ASSERT(length == checkLength);
1720             unsigned constantBufferIndex = addConstantBuffer(length);
1721             JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex).data();
1722             unsigned index = 0;
1723             for (ElementNode* n = elements; index < length; n = n->next()) {
1724                 if (n->value()->isNumber())
1725                     constantBuffer[index++] = jsNumber(static_cast<NumberNode*>(n->value())->value());
1726                 else {
1727                     ASSERT(n->value()->isString());
1728                     constantBuffer[index++] = addStringConstant(static_cast<StringNode*>(n->value())->value());
1729                 }
1730             }
1731             emitOpcode(op_new_array_buffer);
1732             instructions().append(dst->index());
1733             instructions().append(constantBufferIndex);
1734             instructions().append(length);
1735             instructions().append(newArrayAllocationProfile());
1736             return dst;
1737         }
1738     }
1739
1740     Vector<RefPtr<RegisterID>, 16> argv;
1741     for (ElementNode* n = elements; n; n = n->next()) {
1742         if (n->elision())
1743             break;
1744         argv.append(newTemporary());
1745         // op_new_array requires the initial values to be a sequential range of registers
1746         ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1747         emitNode(argv.last().get(), n->value());
1748     }
1749     emitOpcode(op_new_array);
1750     instructions().append(dst->index());
1751     instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1752     instructions().append(argv.size()); // argc
1753     instructions().append(newArrayAllocationProfile());
1754     return dst;
1755 }
1756
1757 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
1758 {
1759     return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(function)), false);
1760 }
1761
1762 RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
1763 {
1764     FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
1765     if (ptr.isNewEntry)
1766         ptr.iterator->value = m_codeBlock->addFunctionDecl(makeFunction(function));
1767     return emitNewFunctionInternal(dst, ptr.iterator->value, true);
1768 }
1769
1770 RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
1771 {
1772     createActivationIfNecessary();
1773     emitOpcode(op_new_func);
1774     instructions().append(dst->index());
1775     instructions().append(index);
1776     instructions().append(doNullCheck);
1777     return dst;
1778 }
1779
1780 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1781 {
1782     emitOpcode(op_new_regexp);
1783     instructions().append(dst->index());
1784     instructions().append(addRegExp(regExp));
1785     return dst;
1786 }
1787
1788 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1789 {
1790     FunctionBodyNode* function = n->body();
1791     unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));
1792     
1793     createActivationIfNecessary();
1794     emitOpcode(op_new_func_exp);
1795     instructions().append(r0->index());
1796     instructions().append(index);
1797     return r0;
1798 }
1799
1800 RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1801 {
1802     return emitCall(op_call, dst, func, expectedFunction, callArguments, divot, startOffset, endOffset);
1803 }
1804
1805 void BytecodeGenerator::createArgumentsIfNecessary()
1806 {
1807     if (m_codeType != FunctionCode)
1808         return;
1809     
1810     if (!m_codeBlock->usesArguments())
1811         return;
1812
1813     // If we're in strict mode, we tear off the arguments on function
1814     // entry, so there's no need to check whether we need to create
1815     // them now.
1816     if (m_codeBlock->isStrictMode())
1817         return;
1818
1819     emitOpcode(op_create_arguments);
1820     instructions().append(m_codeBlock->argumentsRegister());
1821 }
1822
1823 void BytecodeGenerator::createActivationIfNecessary()
1824 {
1825     if (m_hasCreatedActivation)
1826         return;
1827     if (!m_codeBlock->needsFullScopeChain())
1828         return;
1829     emitOpcode(op_create_activation);
1830     instructions().append(m_activationRegister->index());
1831 }
1832
1833 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1834 {
1835     return emitCall(op_call_eval, dst, func, NoExpectedFunction, callArguments, divot, startOffset, endOffset);
1836 }
1837
1838 ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
1839 {
1840     if (identifier == m_globalData->propertyNames->Object)
1841         return ExpectObjectConstructor;
1842     if (identifier == m_globalData->propertyNames->Array)
1843         return ExpectArrayConstructor;
1844     return NoExpectedFunction;
1845 }
1846
1847 ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label* done)
1848 {
1849     RefPtr<Label> realCall = newLabel();
1850     switch (expectedFunction) {
1851     case ExpectObjectConstructor: {
1852         // If the number of arguments is non-zero, then we can't do anything interesting.
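        // For example, a bare "new Object()" can be replaced with op_new_object
        // (guarded by op_jneq_ptr below), whereas "new Object(x)" has to take
        // the real call path.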
1853         if (callArguments.argumentCountIncludingThis() >= 2)
1854             return NoExpectedFunction;
1855         
1856         size_t begin = instructions().size();
1857         emitOpcode(op_jneq_ptr);
1858         instructions().append(func->index());
1859         instructions().append(Special::ObjectConstructor);
1860         instructions().append(realCall->bind(begin, instructions().size()));
1861         
1862         if (dst != ignoredResult())
1863             emitNewObject(dst);
1864         break;
1865     }
1866         
1867     case ExpectArrayConstructor: {
1868         // If you're doing anything other than "new Array()" or "new Array(foo)" then we
1869         // don't inline it, for now. The only reason is that call arguments are in
1870         // the opposite order of what op_new_array expects, so we'd either need to change
1871         // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
1872         // things sounds like it's worth it.
1873         if (callArguments.argumentCountIncludingThis() > 2)
1874             return NoExpectedFunction;
1875         
1876         size_t begin = instructions().size();
1877         emitOpcode(op_jneq_ptr);
1878         instructions().append(func->index());
1879         instructions().append(Special::ArrayConstructor);
1880         instructions().append(realCall->bind(begin, instructions().size()));
1881         
1882         if (dst != ignoredResult()) {
1883             if (callArguments.argumentCountIncludingThis() == 2) {
1884                 emitOpcode(op_new_array_with_size);
1885                 instructions().append(dst->index());
1886                 instructions().append(callArguments.argumentRegister(0)->index());
1887                 instructions().append(newArrayAllocationProfile());
1888             } else {
1889                 ASSERT(callArguments.argumentCountIncludingThis() == 1);
1890                 emitOpcode(op_new_array);
1891                 instructions().append(dst->index());
1892                 instructions().append(0);
1893                 instructions().append(0);
1894                 instructions().append(newArrayAllocationProfile());
1895             }
1896         }
1897         break;
1898     }
1899         
1900     default:
1901         ASSERT(expectedFunction == NoExpectedFunction);
1902         return NoExpectedFunction;
1903     }
1904     
1905     size_t begin = instructions().size();
1906     emitOpcode(op_jmp);
1907     instructions().append(done->bind(begin, instructions().size()));
1908     emitLabel(realCall.get());
1909     
1910     return expectedFunction;
1911 }
1912
1913 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1914 {
1915     ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1916     ASSERT(func->refCount());
1917
1918     if (m_shouldEmitProfileHooks)
1919         emitMove(callArguments.profileHookRegister(), func);
1920
1921     // Generate code for arguments.
1922     unsigned argument = 0;
1923     for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
1924         emitNode(callArguments.argumentRegister(argument++), n);
1925
1926     // Reserve space for call frame.
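    // The temporaries below do nothing except hold JSStack::CallFrameHeaderSize
    // registers open next to the arguments, so nothing else is allocated where
    // the callee's call frame header will be built.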
1927     Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize> callFrame;
1928     for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1929         callFrame.append(newTemporary());
1930
1931     if (m_shouldEmitProfileHooks) {
1932         emitOpcode(op_profile_will_call);
1933         instructions().append(callArguments.profileHookRegister()->index());
1934     }
1935
1936     emitExpressionInfo(divot, startOffset, endOffset);
1937
1938     RefPtr<Label> done = newLabel();
1939     expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1940     
1941     // Emit call.
1942     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1943     emitOpcode(opcodeID);
1944     instructions().append(func->index()); // func
1945     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1946     instructions().append(callArguments.registerOffset()); // registerOffset
1947 #if ENABLE(LLINT)
1948     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1949 #else
1950     instructions().append(0);
1951 #endif
1952     instructions().append(arrayProfile);
1953     if (dst != ignoredResult()) {
1954         UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
1955         instructions().append(kill(dst));
1956         instructions().append(profile);
1957     }
1958     
1959     if (expectedFunction != NoExpectedFunction)
1960         emitLabel(done.get());
1961
1962     if (m_shouldEmitProfileHooks) {
1963         emitOpcode(op_profile_did_call);
1964         instructions().append(callArguments.profileHookRegister()->index());
1965     }
1966
1967     return dst;
1968 }
1969
1970 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
1971 {
1972     if (m_shouldEmitProfileHooks) {
1973         emitMove(profileHookRegister, func);
1974         emitOpcode(op_profile_will_call);
1975         instructions().append(profileHookRegister->index());
1976     }
1977     
1978     emitExpressionInfo(divot, startOffset, endOffset);
1979
1980     // Emit call.
1981     emitOpcode(op_call_varargs);
1982     instructions().append(func->index());
1983     instructions().append(thisRegister->index());
1984     instructions().append(arguments->index());
1985     instructions().append(firstFreeRegister->index());
1986     if (dst != ignoredResult()) {
1987         UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
1988         instructions().append(kill(dst));
1989         instructions().append(profile);
1990     }
1991     if (m_shouldEmitProfileHooks) {
1992         emitOpcode(op_profile_did_call);
1993         instructions().append(profileHookRegister->index());
1994     }
1995     return dst;
1996 }
1997
1998 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1999 {
2000     if (m_codeBlock->needsFullScopeChain()) {
2001         emitOpcode(op_tear_off_activation);
2002         instructions().append(m_activationRegister->index());
2003     }
2004
2005     if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !m_codeBlock->isStrictMode()) {
2006         emitOpcode(op_tear_off_arguments);
2007         instructions().append(m_codeBlock->argumentsRegister());
2008         instructions().append(m_activationRegister ? m_activationRegister->index() : emitLoad(0, JSValue())->index());
2009     }
2010
2011     // Constructors use op_ret_object_or_this to check that the result is an
2012     // object, unless we can trivially determine the check is not
2013     // necessary (currently, if the return value is 'this').
2014     if (isConstructor() && (src->index() != m_thisRegister.index())) {
2015         emitOpcode(op_ret_object_or_this);
2016         instructions().append(src->index());
2017         instructions().append(m_thisRegister.index());
2018         return src;
2019     }
2020     return emitUnaryNoDstOp(op_ret, src);
2021 }
2022
2023 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
2024 {
2025     emitOpcode(opcodeID);
2026     instructions().append(src->index());
2027     return src;
2028 }
2029
2030 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
2031 {
2032     ASSERT(func->refCount());
2033
2034     if (m_shouldEmitProfileHooks)
2035         emitMove(callArguments.profileHookRegister(), func);
2036
2037     // Generate code for arguments.
2038     unsigned argument = 0;
2039     if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
2040         for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
2041             emitNode(callArguments.argumentRegister(argument++), n);
2042     }
2043
2044     if (m_shouldEmitProfileHooks) {
2045         emitOpcode(op_profile_will_call);
2046         instructions().append(callArguments.profileHookRegister()->index());
2047     }
2048
2049     // Reserve space for call frame.
2050     Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize> callFrame;
2051     for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
2052         callFrame.append(newTemporary());
2053
2054     emitExpressionInfo(divot, startOffset, endOffset);
2055     
2056     RefPtr<Label> done = newLabel();
2057     expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
2058
2059     emitOpcode(op_construct);
2060     instructions().append(func->index()); // func
2061     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
2062     instructions().append(callArguments.registerOffset()); // registerOffset
2063 #if ENABLE(LLINT)
2064     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
2065 #else
2066     instructions().append(0);
2067 #endif
2068     instructions().append(0);
2069     if (dst != ignoredResult()) {
2070         UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
2071         instructions().append(kill(dst));
2072         instructions().append(profile);
2073     }
2074
2075     if (expectedFunction != NoExpectedFunction)
2076         emitLabel(done.get());
2077
2078     if (m_shouldEmitProfileHooks) {
2079         emitOpcode(op_profile_did_call);
2080         instructions().append(callArguments.profileHookRegister()->index());
2081     }
2082
2083     return dst;
2084 }
2085
2086 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
2087 {
2088     emitOpcode(op_strcat);
2089     instructions().append(dst->index());
2090     instructions().append(src->index());
2091     instructions().append(count);
2092
2093     return dst;
2094 }
2095
2096 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
2097 {
2098     emitOpcode(op_to_primitive);
2099     instructions().append(dst->index());
2100     instructions().append(src->index());
2101 }
2102
2103 RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* scope)
2104 {
2105     ControlFlowContext context;
2106     context.isFinallyBlock = false;
2107     m_scopeContextStack.append(context);
2108     m_dynamicScopeDepth++;
2109
2110     return emitUnaryNoDstOp(op_push_with_scope, scope);
2111 }
2112
2113 void BytecodeGenerator::emitPopScope()
2114 {
2115     ASSERT(m_scopeContextStack.size());
2116     ASSERT(!m_scopeContextStack.last().isFinallyBlock);
2117
2118     emitOpcode(op_pop_scope);
2119
2120     m_scopeContextStack.removeLast();
2121     m_dynamicScopeDepth--;
2122 }
2123
2124 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine, int column)
2125 {
2126 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2127     if (debugHookID != DidReachBreakpoint)
2128         return;
2129 #else
2130     if (!m_shouldEmitDebugHooks)
2131         return;
2132 #endif
2133     emitOpcode(op_debug);
2134     instructions().append(debugHookID);
2135     instructions().append(firstLine);
2136     instructions().append(lastLine);
2137     instructions().append(column);
2138 }
2139
2140 void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
2141 {
2142     ControlFlowContext scope;
2143     scope.isFinallyBlock = true;
2144     FinallyContext context = {
2145         finallyBlock,
2146         static_cast<unsigned>(m_scopeContextStack.size()),
2147         static_cast<unsigned>(m_switchContextStack.size()),
2148         static_cast<unsigned>(m_forInContextStack.size()),
2149         static_cast<unsigned>(m_tryContextStack.size()),
2150         static_cast<unsigned>(m_labelScopes.size()),
2151         m_finallyDepth,
2152         m_dynamicScopeDepth
2153     };
2154     scope.finallyContext = context;
2155     m_scopeContextStack.append(scope);
2156     m_finallyDepth++;
2157 }
2158
2159 void BytecodeGenerator::popFinallyContext()
2160 {
2161     ASSERT(m_scopeContextStack.size());
2162     ASSERT(m_scopeContextStack.last().isFinallyBlock);
2163     ASSERT(m_finallyDepth > 0);
2164     m_scopeContextStack.removeLast();
2165     m_finallyDepth--;
2166 }
2167
2168 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
2169 {
2170     // Reclaim free label scopes.
2171     //
2172     // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
2173     // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
2174     // size 0, leading to segfaulty badness.  We have yet to identify anything in our code that would
2175     // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
2176     // loop condition is a workaround.
2177     while (m_labelScopes.size()) {
2178         if (m_labelScopes.last().refCount())
2179             break;
2180         m_labelScopes.removeLast();
2181     }
2182
2183     if (!m_labelScopes.size())
2184         return 0;
2185
2186     // We special-case the following, which is a syntax error in Firefox:
2187     // label:
2188     //     break;
2189     if (name.isEmpty()) {
2190         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2191             LabelScope* scope = &m_labelScopes[i];
2192             if (scope->type() != LabelScope::NamedLabel) {
2193                 ASSERT(scope->breakTarget());
2194                 return scope;
2195             }
2196         }
2197         return 0;
2198     }
2199
2200     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2201         LabelScope* scope = &m_labelScopes[i];
2202         if (scope->name() && *scope->name() == name) {
2203             ASSERT(scope->breakTarget());
2204             return scope;
2205         }
2206     }
2207     return 0;
2208 }
2209
2210 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
2211 {
2212     // Reclaim free label scopes.
2213     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
2214         m_labelScopes.removeLast();
2215
2216     if (!m_labelScopes.size())
2217         return 0;
2218
2219     if (name.isEmpty()) {
2220         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2221             LabelScope* scope = &m_labelScopes[i];
2222             if (scope->type() == LabelScope::Loop) {
2223                 ASSERT(scope->continueTarget());
2224                 return scope;
2225             }
2226         }
2227         return 0;
2228     }
2229
2230     // Continue to the loop nested nearest to the label scope that matches
2231     // 'name'.
2232     LabelScope* result = 0;
2233     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2234         LabelScope* scope = &m_labelScopes[i];
2235         if (scope->type() == LabelScope::Loop) {
2236             ASSERT(scope->continueTarget());
2237             result = scope;
2238         }
2239         if (scope->name() && *scope->name() == name)
2240             return result; // may be 0
2241     }
2242     return 0;
2243 }
2244
2245 PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
2246 {
2247     while (topScope > bottomScope) {
2248         // First we count the number of dynamic scopes we need to remove to get
2249         // to a finally block.
2250         int nNormalScopes = 0;
2251         while (topScope > bottomScope) {
2252             if (topScope->isFinallyBlock)
2253                 break;
2254             ++nNormalScopes;
2255             --topScope;
2256         }
2257
2258         if (nNormalScopes) {
2259             size_t begin = instructions().size();
2260
2261             // We need to remove a number of dynamic scopes to get to the next
2262             // finally block
2263             emitOpcode(op_jmp_scopes);
2264             instructions().append(nNormalScopes);
2265
2266             // If topScope == bottomScope then there isn't actually a finally block
2267             // left to emit, so make the jmp_scopes jump directly to the target label
2268             if (topScope == bottomScope) {
2269                 instructions().append(target->bind(begin, instructions().size()));
2270                 return target;
2271             }
2272
2273             // Otherwise we just use jmp_scopes to pop a group of scopes and go
2274             // to the next instruction
2275             RefPtr<Label> nextInsn = newLabel();
2276             instructions().append(nextInsn->bind(begin, instructions().size()));
2277             emitLabel(nextInsn.get());
2278         }
2279         
2280         Vector<ControlFlowContext> savedScopeContextStack;
2281         Vector<SwitchInfo> savedSwitchContextStack;
2282         Vector<ForInContext> savedForInContextStack;
2283         Vector<TryContext> poppedTryContexts;
2284         LabelScopeStore savedLabelScopes;
2285         while (topScope > bottomScope && topScope->isFinallyBlock) {
2286             RefPtr<Label> beforeFinally = emitLabel(newLabel().get());
2287             
2288             // Save the current state of the world while instating the state of the world
2289             // for the finally block.
2290             FinallyContext finallyContext = topScope->finallyContext;
2291             bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
2292             bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
2293             bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
2294             bool flipTries = finallyContext.tryContextStackSize != m_tryContextStack.size();
2295             bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
2296             int topScopeIndex = -1;
2297             int bottomScopeIndex = -1;
2298             if (flipScopes) {
2299                 topScopeIndex = topScope - m_scopeContextStack.begin();
2300                 bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
2301                 savedScopeContextStack = m_scopeContextStack;
2302                 m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
2303             }
2304             if (flipSwitches) {
2305                 savedSwitchContextStack = m_switchContextStack;
2306                 m_switchContextStack.shrink(finallyContext.switchContextStackSize);
2307             }
2308             if (flipForIns) {
2309                 savedForInContextStack = m_forInContextStack;
2310                 m_forInContextStack.shrink(finallyContext.forInContextStackSize);
2311             }
2312             if (flipTries) {
2313                 while (m_tryContextStack.size() != finallyContext.tryContextStackSize) {
2314                     ASSERT(m_tryContextStack.size() > finallyContext.tryContextStackSize);
2315                     TryContext context = m_tryContextStack.last();
2316                     m_tryContextStack.removeLast();
2317                     TryRange range;
2318                     range.start = context.start;
2319                     range.end = beforeFinally;
2320                     range.tryData = context.tryData;
2321                     m_tryRanges.append(range);
2322                     poppedTryContexts.append(context);
2323                 }
2324             }
2325             if (flipLabelScopes) {
2326                 savedLabelScopes = m_labelScopes;
2327                 while (m_labelScopes.size() > finallyContext.labelScopesSize)
2328                     m_labelScopes.removeLast();
2329             }
2330             int savedFinallyDepth = m_finallyDepth;
2331             m_finallyDepth = finallyContext.finallyDepth;
2332             int savedDynamicScopeDepth = m_dynamicScopeDepth;
2333             m_dynamicScopeDepth = finallyContext.dynamicScopeDepth;
2334             
2335             // Emit the finally block.
2336             emitNode(finallyContext.finallyBlock);
2337             
2338             RefPtr<Label> afterFinally = emitLabel(newLabel().get());
2339             
2340             // Restore the state of the world.
2341             if (flipScopes) {
2342                 m_scopeContextStack = savedScopeContextStack;
2343                 topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
2344                 bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1.
2345             }
2346             if (flipSwitches)
2347                 m_switchContextStack = savedSwitchContextStack;
2348             if (flipForIns)
2349                 m_forInContextStack = savedForInContextStack;
2350             if (flipTries) {
2351                 ASSERT(m_tryContextStack.size() == finallyContext.tryContextStackSize);
2352                 for (unsigned i = poppedTryContexts.size(); i--;) {
2353                     TryContext context = poppedTryContexts[i];
2354                     context.start = afterFinally;
2355                     m_tryContextStack.append(context);
2356                 }
2357                 poppedTryContexts.clear();
2358             }
2359             if (flipLabelScopes)
2360                 m_labelScopes = savedLabelScopes;
2361             m_finallyDepth = savedFinallyDepth;
2362             m_dynamicScopeDepth = savedDynamicScopeDepth;
2363             
2364             --topScope;
2365         }
2366     }
2367     return emitJump(target);
2368 }
2369
2370 PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
2371 {
2372     ASSERT(scopeDepth() - targetScopeDepth >= 0);
2373     ASSERT(target->isForward());
2374
2375     size_t scopeDelta = scopeDepth() - targetScopeDepth;
2376     ASSERT(scopeDelta <= m_scopeContextStack.size());
2377     if (!scopeDelta)
2378         return emitJump(target);
2379
2380     if (m_finallyDepth)
2381         return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
2382
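    // No finally blocks stand between here and the target, so a single
    // op_jmp_scopes can pop 'scopeDelta' dynamic scopes and jump straight to it.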
2383     size_t begin = instructions().size();
2384
2385     emitOpcode(op_jmp_scopes);
2386     instructions().append(scopeDelta);
2387     instructions().append(target->bind(begin, instructions().size()));
2388     return target;
2389 }
2390
2391 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
2392 {
2393     size_t begin = instructions().size();
2394
2395     emitOpcode(op_get_pnames);
2396     instructions().append(dst->index());
2397     instructions().append(base->index());
2398     instructions().append(i->index());
2399     instructions().append(size->index());
2400     instructions().append(breakTarget->bind(begin, instructions().size()));
2401     return dst;
2402 }
2403
2404 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
2405 {
2406     size_t begin = instructions().size();
2407
2408     emitOpcode(op_next_pname);
2409     instructions().append(dst->index());
2410     instructions().append(base->index());
2411     instructions().append(i->index());
2412     instructions().append(size->index());
2413     instructions().append(iter->index());
2414     instructions().append(target->bind(begin, instructions().size()));
2415     return dst;
2416 }
2417
2418 TryData* BytecodeGenerator::pushTry(Label* start)
2419 {
2420     TryData tryData;
2421     tryData.target = newLabel();
2422     tryData.targetScopeDepth = UINT_MAX;
2423     m_tryData.append(tryData);
2424     TryData* result = &m_tryData.last();
2425     
2426     TryContext tryContext;
2427     tryContext.start = start;
2428     tryContext.tryData = result;
2429     
2430     m_tryContextStack.append(tryContext);
2431     
2432     return result;
2433 }
2434
2435 RegisterID* BytecodeGenerator::popTryAndEmitCatch(TryData* tryData, RegisterID* targetRegister, Label* end)
2436 {
2437     m_usesExceptions = true;
2438     
2439     ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);
2440     
2441     TryRange tryRange;
2442     tryRange.start = m_tryContextStack.last().start;
2443     tryRange.end = end;
2444     tryRange.tryData = m_tryContextStack.last().tryData;
2445     m_tryRanges.append(tryRange);
2446     m_tryContextStack.removeLast();
2447     
2448     emitLabel(tryRange.tryData->target.get());
2449     tryRange.tryData->targetScopeDepth = m_dynamicScopeDepth;
2450
2451     emitOpcode(op_catch);
2452     instructions().append(targetRegister->index());
2453     return targetRegister;
2454 }
2455
2456 void BytecodeGenerator::emitThrowReferenceError(const String& message)
2457 {
2458     emitOpcode(op_throw_static_error);
2459     instructions().append(addConstantValue(jsString(globalData(), message))->index());
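    // The trailing flag selects the error class: true here produces a
    // ReferenceError, while emitReadOnlyExceptionIfNeeded below passes false
    // for the strict-mode readonly-write error (a TypeError).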
2460     instructions().append(true);
2461 }
2462
2463 void BytecodeGenerator::emitPushNameScope(const Identifier& property, RegisterID* value, unsigned attributes)
2464 {
2465     ControlFlowContext context;
2466     context.isFinallyBlock = false;
2467     m_scopeContextStack.append(context);
2468     m_dynamicScopeDepth++;
2469
2470     emitOpcode(op_push_name_scope);
2471     instructions().append(addConstant(property));
2472     instructions().append(value->index());
2473     instructions().append(attributes);
2474 }
2475
2476 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
2477 {
2478     SwitchInfo info = { static_cast<uint32_t>(instructions().size()), type };
2479     switch (type) {
2480         case SwitchInfo::SwitchImmediate:
2481             emitOpcode(op_switch_imm);
2482             break;
2483         case SwitchInfo::SwitchCharacter:
2484             emitOpcode(op_switch_char);
2485             break;
2486         case SwitchInfo::SwitchString:
2487             emitOpcode(op_switch_string);
2488             break;
2489         default:
2490             RELEASE_ASSERT_NOT_REACHED();
2491     }
2492
2493     instructions().append(0); // placeholder for table index
2494     instructions().append(0); // placeholder for default target
2495     instructions().append(scrutineeRegister->index());
2496     m_switchContextStack.append(info);
2497 }
2498
2499 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2500 {
2501     UNUSED_PARAM(max);
2502     ASSERT(node->isNumber());
2503     double value = static_cast<NumberNode*>(node)->value();
2504     int32_t key = static_cast<int32_t>(value);
2505     ASSERT(key == value);
2506     ASSERT(key >= min);
2507     ASSERT(key <= max);
2508     return key - min;
2509 }
2510
2511 static void prepareJumpTableForImmediateSwitch(UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2512 {
2513     jumpTable.min = min;
2514     jumpTable.branchOffsets.resize(max - min + 1);
2515     jumpTable.branchOffsets.fill(0);
2516     for (uint32_t i = 0; i < clauseCount; ++i) {
2517         // We're emitting this after the clause labels should have been fixed, so 
2518         // the labels should not be "forward" references
2519         ASSERT(!labels[i]->isForward());
2520         jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2521     }
2522 }
2523
2524 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2525 {
2526     UNUSED_PARAM(max);
2527     ASSERT(node->isString());
2528     StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2529     ASSERT(clause->length() == 1);
2530     
2531     int32_t key = (*clause)[0];
2532     ASSERT(key >= min);
2533     ASSERT(key <= max);
2534     return key - min;
2535 }
2536
2537 static void prepareJumpTableForCharacterSwitch(UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2538 {
2539     jumpTable.min = min;
2540     jumpTable.branchOffsets.resize(max - min + 1);
2541     jumpTable.branchOffsets.fill(0);
2542     for (uint32_t i = 0; i < clauseCount; ++i) {
2543         // We're emitting this after the clause labels should have been fixed, so 
2544         // the labels should not be "forward" references
2545         ASSERT(!labels[i]->isForward());
2546         jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2547     }
2548 }
2549
2550 static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2551 {
2552     for (uint32_t i = 0; i < clauseCount; ++i) {
2553         // We're emitting this after the clause labels should have been fixed, so 
2554         // the labels should not be "forward" references
2555         ASSERT(!labels[i]->isForward());
2556         
2557         ASSERT(nodes[i]->isString());
2558         StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
2559         jumpTable.offsetTable.add(clause, labels[i]->bind(switchAddress, switchAddress + 3));
2560     }
2561 }
2562
2563 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2564 {
2565     SwitchInfo switchInfo = m_switchContextStack.last();
2566     m_switchContextStack.removeLast();
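    // Back-patch the two placeholders emitted by beginSwitch: operand 1 gets
    // the index of the jump table added below, operand 2 the offset of the
    // default target.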
2567     if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
2568         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
2569         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2570
2571         UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
2572         prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2573     } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
2574         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
2575         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2576         
2577         UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
2578         prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2579     } else {
2580         ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
2581         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2582         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2583
2584         UnlinkedStringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2585         prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2586     }
2587 }
2588
2589 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2590 {
2591     // It would be nice to do an even better job of identifying exactly where the expression is.
2592     // And we could make the caller pass the node pointer in, if there was some way of getting
2593     // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2594     // is still good enough to get us an accurate line number.
2595     m_expressionTooDeep = true;
2596     return newTemporary();
2597 }
2598
2599 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2600 {
2601     m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2602 }
2603
2604 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2605 {
2606     RegisterID* registerID = resolve(ident).local();
2607     if (!registerID || registerID->index() >= 0)
2608         return false;
2609     return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2610 }
2611
2612 void BytecodeGenerator::emitReadOnlyExceptionIfNeeded()
2613 {
2614     if (!isStrictMode())
2615         return;
2616     emitOpcode(op_throw_static_error);
2617     instructions().append(addConstantValue(jsString(globalData(), StrictModeReadonlyPropertyWriteError))->index());
2618     instructions().append(false);
2619 }
2620
2621 } // namespace JSC