1 /*
2  * Copyright (C) 2008, 2009, 2012 Apple Inc. All rights reserved.
3  * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4  * Copyright (C) 2012 Igalia, S.L.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  *
10  * 1.  Redistributions of source code must retain the above copyright
11  *     notice, this list of conditions and the following disclaimer.
12  * 2.  Redistributions in binary form must reproduce the above copyright
13  *     notice, this list of conditions and the following disclaimer in the
14  *     documentation and/or other materials provided with the distribution.
15  * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16  *     its contributors may be used to endorse or promote products derived
17  *     from this software without specific prior written permission.
18  *
19  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29  */
30
31 #include "config.h"
32 #include "BytecodeGenerator.h"
33
34 #include "BatchedTransitionOptimizer.h"
35 #include "Comment.h"
36 #include "Interpreter.h"
37 #include "JSActivation.h"
38 #include "JSFunction.h"
39 #include "JSNameScope.h"
40 #include "LowLevelInterpreter.h"
41 #include "Operations.h"
42 #include "Options.h"
43 #include "StrongInlines.h"
44 #include <wtf/text/WTFString.h>
45
46 using namespace std;
47
48 namespace JSC {
49
50 /*
51     The layout of a register frame looks like this:
52
53     For
54
55     function f(x, y) {
56         var v1;
57         function g() { }
58         var v2;
59         return (x) * (y);
60     }
61
62     assuming (x) and (y) generated temporaries t1 and t2, you would have
63
64     ------------------------------------
65     |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
66     ------------------------------------
67     | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
68     ------------------------------------
69     | params->|<-locals      | temps->
70
71     Because temporary registers are allocated in a stack-like fashion, we
72     can reclaim them with a simple popping algorithm. The same goes for labels.
73     (We never reclaim parameter or local registers, because parameters and
74     locals are DontDelete.)
75
76     The register layout before a function call looks like this:
77
78     For
79
80     function f(x, y)
81     {
82     }
83
84     f(1);
85
86     >                        <------------------------------
87     <                        >  reserved: call frame  |  1 | <-- value held
88     >         >snip<         <------------------------------
89     <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
90     >                        <------------------------------
91     | params->|<-locals      | temps->
92
93     The call instruction fills in the "call frame" registers. It also pads
94     missing arguments at the end of the call:
95
96     >                        <-----------------------------------
97     <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
98     >         >snip<         <-----------------------------------
99     <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
100     >                        <-----------------------------------
101     | params->|<-locals      | temps->
102
103     After filling in missing arguments, the call instruction sets up the new
104     stack frame to overlap the end of the old stack frame:
105
106                              |---------------------------------->                        <
107                              |  reserved: call frame  |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
108                              |---------------------------------->         >snip<         <
109                              | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
110                              |---------------------------------->                        <
111                              |                        | params->|<-locals       | temps->
112
113     That way, arguments are "copied" into the callee's stack frame for free.
114
115     If the caller supplies too many arguments, this trick doesn't work. The
116     extra arguments protrude into space reserved for locals and temporaries.
117     In that case, the call instruction makes a real copy of the call frame header,
118     along with just the arguments expected by the callee, leaving the original
119     call frame header and arguments behind. (The call instruction can't just discard
120     extra arguments, because the "arguments" object may access them later.)
121     This copying strategy ensures that all named values will be at the indices
122     expected by the callee.
123 */
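/*
    Illustrative example (a sketch, not taken from the original sources) of the
    "too many arguments" case described above:

        function f(x) { return arguments[1]; }
        f(1, 2);

    Here the call supplies one more argument than f declares, so the overlap trick
    cannot be used directly. The call instruction copies the call frame header plus
    the single expected argument into a fresh frame, leaving the original header and
    the extra argument behind where the 'arguments' object can still reach them.
*/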
124
125 void Label::setLocation(unsigned location)
126 {
127     m_location = location;
128     
129     unsigned size = m_unresolvedJumps.size();
130     for (unsigned i = 0; i < size; ++i)
131         m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
132 }
133
134 #ifndef NDEBUG
135 void ResolveResult::checkValidity()
136 {
137     switch (m_type) {
138     case Register:
139     case ReadOnlyRegister:
140         ASSERT(m_local);
141         return;
142     case Dynamic:
143         ASSERT(!m_local);
144         return;
145     default:
146         RELEASE_ASSERT_NOT_REACHED();
147     }
148 }
149 #endif
150
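// Emits bytecode for the scope node, records the exception handler ranges collected
// during generation (skipping empty or inverted try ranges), transfers the finished
// instruction stream to the unlinked code block, and reports OutOfMemory if
// expression nesting became too deep.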
151 ParserError BytecodeGenerator::generate()
152 {
153     SamplingRegion samplingRegion("Bytecode Generation");
154     
155     m_codeBlock->setThisRegister(m_thisRegister.index());
156
157     m_scopeNode->emitBytecode(*this);
158
159     m_staticPropertyAnalyzer.kill();
160
161     for (unsigned i = 0; i < m_tryRanges.size(); ++i) {
162         TryRange& range = m_tryRanges[i];
163         int start = range.start->bind();
164         int end = range.end->bind();
165         
166         // This will happen for empty try blocks and for some cases of finally blocks:
167         //
168         // try {
169         //    try {
170         //    } finally {
171         //        return 42;
172         //        // *HERE*
173         //    }
174         // } finally {
175         //    print("things");
176         // }
177         //
178         // The return will pop scopes to execute the outer finally block. But this includes
179         // popping the try context for the inner try. The try context is live in the fall-through
180         // part of the finally block not because we will emit a handler that overlaps the finally,
181         // but because we haven't yet had a chance to plant the catch target. Then when we finish
182         // emitting code for the outer finally block, we repush the try context, this time with a
183         // new start index. But that means that the start index for the try range corresponding
184         // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
185         // than the end index of the try block. This is harmless since end < start handlers will
186         // never get matched in our logic, but we do the runtime a favor and choose to not emit
187         // such handlers at all.
188         if (end <= start)
189             continue;
190         
191         ASSERT(range.tryData->targetScopeDepth != UINT_MAX);
192         UnlinkedHandlerInfo info = {
193             static_cast<uint32_t>(start), static_cast<uint32_t>(end),
194             static_cast<uint32_t>(range.tryData->target->bind()),
195             range.tryData->targetScopeDepth
196         };
197         m_codeBlock->addExceptionHandler(info);
198     }
199     
200     m_codeBlock->instructions() = RefCountedArray<UnlinkedInstruction>(m_instructions);
201
202     m_codeBlock->shrinkToFit();
203
204     if (m_expressionTooDeep)
205         return ParserError::OutOfMemory;
206     return ParserError::ErrorNone;
207 }
208
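// Declares a named variable in the symbol table. If the name was already present,
// hands back its existing register and returns false; otherwise allocates a fresh
// callee register for it and returns true.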
209 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
210 {
211     int index = m_calleeRegisters.size();
212     SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
213     SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
214
215     if (!result.isNewEntry) {
216         r0 = &registerFor(result.iterator->value.getIndex());
217         return false;
218     }
219
220     r0 = addVar();
221     return true;
222 }
223
224 void BytecodeGenerator::preserveLastVar()
225 {
226     if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
227         m_lastVar = &m_calleeRegisters.last();
228 }
229
230 BytecodeGenerator::BytecodeGenerator(JSGlobalData& globalData, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
231     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
232     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
233 #if ENABLE(BYTECODE_COMMENTS)
234     , m_currentCommentString(0)
235 #endif
236     , m_symbolTable(0)
237     , m_scopeNode(programNode)
238     , m_codeBlock(globalData, codeBlock)
239     , m_thisRegister(CallFrame::thisArgumentOffset())
240     , m_emptyValueRegister(0)
241     , m_finallyDepth(0)
242     , m_dynamicScopeDepth(0)
243     , m_codeType(GlobalCode)
244     , m_nextConstantOffset(0)
245     , m_globalConstantIndex(0)
246     , m_hasCreatedActivation(true)
247     , m_firstLazyFunction(0)
248     , m_lastLazyFunction(0)
249     , m_staticPropertyAnalyzer(&m_instructions)
250     , m_globalData(&globalData)
251     , m_lastOpcodeID(op_end)
252 #ifndef NDEBUG
253     , m_lastOpcodePosition(0)
254 #endif
255     , m_stack(wtfThreadData().stack())
256     , m_usesExceptions(false)
257     , m_expressionTooDeep(false)
258 {
259     if (m_shouldEmitDebugHooks)
260         m_codeBlock->setNeedsFullScopeChain(true);
261
262     m_codeBlock->setNumParameters(1); // Allocate space for "this"
263
264     prependComment("entering Program block");
265     emitOpcode(op_enter);
266
267     const VarStack& varStack = programNode->varStack();
268     const FunctionStack& functionStack = programNode->functionStack();
269
270     for (size_t i = 0; i < functionStack.size(); ++i) {
271         FunctionBodyNode* function = functionStack[i];
272         UnlinkedFunctionExecutable* unlinkedFunction = makeFunction(function);
273         codeBlock->addFunctionDeclaration(*m_globalData, function->ident(), unlinkedFunction);
274     }
275
276     for (size_t i = 0; i < varStack.size(); ++i)
277         codeBlock->addVariableDeclaration(*varStack[i].first, !!(varStack[i].second & DeclarationStacks::IsConstant));
278
279 }
280
281 BytecodeGenerator::BytecodeGenerator(JSGlobalData& globalData, FunctionBodyNode* functionBody, UnlinkedFunctionCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
282     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
283     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
284     , m_symbolTable(codeBlock->symbolTable())
285 #if ENABLE(BYTECODE_COMMENTS)
286     , m_currentCommentString(0)
287 #endif
288     , m_scopeNode(functionBody)
289     , m_codeBlock(globalData, codeBlock)
290     , m_activationRegister(0)
291     , m_emptyValueRegister(0)
292     , m_finallyDepth(0)
293     , m_dynamicScopeDepth(0)
294     , m_codeType(FunctionCode)
295     , m_nextConstantOffset(0)
296     , m_globalConstantIndex(0)
297     , m_hasCreatedActivation(false)
298     , m_firstLazyFunction(0)
299     , m_lastLazyFunction(0)
300     , m_staticPropertyAnalyzer(&m_instructions)
301     , m_globalData(&globalData)
302     , m_lastOpcodeID(op_end)
303 #ifndef NDEBUG
304     , m_lastOpcodePosition(0)
305 #endif
306     , m_stack(wtfThreadData().stack())
307     , m_usesExceptions(false)
308     , m_expressionTooDeep(false)
309 {
310     if (m_shouldEmitDebugHooks)
311         m_codeBlock->setNeedsFullScopeChain(true);
312
313     m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
314     m_symbolTable->setParameterCountIncludingThis(functionBody->parameters()->size() + 1);
315
316     prependComment("entering Function block");
317     emitOpcode(op_enter);
318     if (m_codeBlock->needsFullScopeChain()) {
319         m_activationRegister = addVar();
320         prependComment("activation for Full Scope Chain");
321         emitInitLazyRegister(m_activationRegister);
322         m_codeBlock->setActivationRegister(m_activationRegister->index());
323     }
324
325     m_symbolTable->setCaptureStart(m_codeBlock->m_numVars);
326
327     if (functionBody->usesArguments() || codeBlock->usesEval() || m_shouldEmitDebugHooks) { // May reify arguments object.
328         RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
329         RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.
330
331         // We can save a little space by hard-coding the knowledge that the two
332         // 'arguments' values are stored in consecutive registers, and storing
333         // only the index of the assignable one.
334         codeBlock->setArgumentsRegister(argumentsRegister->index());
335         ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));
336
337         prependComment("arguments for Full Scope Chain");
338         emitInitLazyRegister(argumentsRegister);
339         prependComment("unmodified arguments for Full Scope Chain");
340         emitInitLazyRegister(unmodifiedArgumentsRegister);
341         
342         if (m_codeBlock->isStrictMode()) {
343             prependComment("create arguments for strict mode");
344             emitOpcode(op_create_arguments);
345             instructions().append(argumentsRegister->index());
346         }
347
348         // The debugger currently retrieves the arguments object from an activation rather than pulling
349         // it from a call frame.  In the long-term it should stop doing that (<rdar://problem/6911886>),
350         // but for now we force eager creation of the arguments object when debugging.
351         if (m_shouldEmitDebugHooks) {
352             prependComment("create arguments for debug hooks");
353             emitOpcode(op_create_arguments);
354             instructions().append(argumentsRegister->index());
355         }
356     }
357
358     bool shouldCaptureAllTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();
359
360     bool capturesAnyArgumentByName = false;
361     Vector<RegisterID*> capturedArguments;
362     if (functionBody->hasCapturedVariables() || shouldCaptureAllTheThings) {
363         FunctionParameters& parameters = *functionBody->parameters();
364         capturedArguments.resize(parameters.size());
365         for (size_t i = 0; i < parameters.size(); ++i) {
366             capturedArguments[i] = 0;
367             if (!functionBody->captures(parameters.at(i)) && !shouldCaptureAllTheThings)
368                 continue;
369             capturesAnyArgumentByName = true;
370             capturedArguments[i] = addVar();
371         }
372     }
373
374     if (capturesAnyArgumentByName && !codeBlock->isStrictMode()) {
375         size_t parameterCount = m_symbolTable->parameterCount();
376         OwnArrayPtr<SlowArgument> slowArguments = adoptArrayPtr(new SlowArgument[parameterCount]);
377         for (size_t i = 0; i < parameterCount; ++i) {
378             if (!capturedArguments[i]) {
379                 ASSERT(slowArguments[i].status == SlowArgument::Normal);
380                 slowArguments[i].index = CallFrame::argumentOffset(i);
381                 continue;
382             }
383             slowArguments[i].status = SlowArgument::Captured;
384             slowArguments[i].index = capturedArguments[i]->index();
385         }
386         m_symbolTable->setSlowArguments(slowArguments.release());
387     }
388
389     RegisterID* calleeRegister = resolveCallee(functionBody); // May push to the scope chain and/or add a captured var.
390
391     const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
392     const DeclarationStacks::VarStack& varStack = functionBody->varStack();
393
394     // Captured variables and functions go first so that activations don't have
395     // to step over the non-captured locals to mark them.
396     m_hasCreatedActivation = false;
397     if (functionBody->hasCapturedVariables()) {
398         for (size_t i = 0; i < functionStack.size(); ++i) {
399             FunctionBodyNode* function = functionStack[i];
400             const Identifier& ident = function->ident();
401             if (functionBody->captures(ident)) {
402                 if (!m_hasCreatedActivation) {
403                     m_hasCreatedActivation = true;
404                     prependComment("activation for captured vars");
405                     emitOpcode(op_create_activation);
406                     instructions().append(m_activationRegister->index());
407                 }
408                 m_functions.add(ident.impl());
409                 prependComment("captured function var");
410                 emitNewFunction(addVar(ident, false), function);
411             }
412         }
413         for (size_t i = 0; i < varStack.size(); ++i) {
414             const Identifier& ident = *varStack[i].first;
415             if (functionBody->captures(ident))
416                 addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
417         }
418     }
419     bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
420     if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
421         m_hasCreatedActivation = true;
422         prependComment("cannot lazily create functions");
423         emitOpcode(op_create_activation);
424         instructions().append(m_activationRegister->index());
425     }
426
427     m_symbolTable->setCaptureEnd(codeBlock->m_numVars);
428
429     m_firstLazyFunction = codeBlock->m_numVars;
430     for (size_t i = 0; i < functionStack.size(); ++i) {
431         FunctionBodyNode* function = functionStack[i];
432         const Identifier& ident = function->ident();
433         if (!functionBody->captures(ident)) {
434             m_functions.add(ident.impl());
435             RefPtr<RegisterID> reg = addVar(ident, false);
436             // Don't lazily create functions that override the name 'arguments'
437             // as this would complicate lazy instantiation of actual arguments.
438             prependComment("a function that overrides 'arguments'");
439             if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
440                 emitNewFunction(reg.get(), function);
441             else {
442                 emitInitLazyRegister(reg.get());
443                 m_lazyFunctions.set(reg->index(), function);
444             }
445         }
446     }
447     m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
448     for (size_t i = 0; i < varStack.size(); ++i) {
449         const Identifier& ident = *varStack[i].first;
450         if (!functionBody->captures(ident))
451             addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
452     }
453
454     if (shouldCaptureAllTheThings)
455         m_symbolTable->setCaptureEnd(codeBlock->m_numVars);
456
457     FunctionParameters& parameters = *functionBody->parameters();
458     m_parameters.grow(parameters.size() + 1); // reserve space for "this"
459
460     // Add "this" as a parameter
461     int nextParameterIndex = CallFrame::thisArgumentOffset();
462     m_thisRegister.setIndex(nextParameterIndex--);
463     m_codeBlock->addParameter();
464     
465     for (size_t i = 0; i < parameters.size(); ++i, --nextParameterIndex) {
466         int index = nextParameterIndex;
467         if (capturedArguments.size() && capturedArguments[i]) {
468             ASSERT((functionBody->hasCapturedVariables() && functionBody->captures(parameters.at(i))) || shouldCaptureAllTheThings);
469             index = capturedArguments[i]->index();
470             RegisterID original(nextParameterIndex);
471             emitMove(capturedArguments[i], &original);
472         }
473         addParameter(parameters.at(i), index);
474     }
475     preserveLastVar();
476
477     // We declare the callee's name last because it should lose to a var, function, and/or parameter declaration.
478     addCallee(functionBody, calleeRegister);
479
480     if (isConstructor()) {
481         prependComment("'this' because we are a Constructor function");
482         emitCreateThis(&m_thisRegister);
483     } else if (!codeBlock->isStrictMode() && (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks)) {
484         UnlinkedValueProfile profile = emitProfiledOpcode(op_convert_this);
485         instructions().append(kill(&m_thisRegister));
486         instructions().append(profile);
487     }
488 }
489
490 BytecodeGenerator::BytecodeGenerator(JSGlobalData& globalData, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
491     : m_shouldEmitDebugHooks(debuggerMode == DebuggerOn)
492     , m_shouldEmitProfileHooks(profilerMode == ProfilerOn)
493     , m_symbolTable(codeBlock->symbolTable())
494 #if ENABLE(BYTECODE_COMMENTS)
495     , m_currentCommentString(0)
496 #endif
497     , m_scopeNode(evalNode)
498     , m_codeBlock(globalData, codeBlock)
499     , m_thisRegister(CallFrame::thisArgumentOffset())
500     , m_emptyValueRegister(0)
501     , m_finallyDepth(0)
502     , m_dynamicScopeDepth(0)
503     , m_codeType(EvalCode)
504     , m_nextConstantOffset(0)
505     , m_globalConstantIndex(0)
506     , m_hasCreatedActivation(true)
507     , m_firstLazyFunction(0)
508     , m_lastLazyFunction(0)
509     , m_staticPropertyAnalyzer(&m_instructions)
510     , m_globalData(&globalData)
511     , m_lastOpcodeID(op_end)
512 #ifndef NDEBUG
513     , m_lastOpcodePosition(0)
514 #endif
515     , m_stack(wtfThreadData().stack())
516     , m_usesExceptions(false)
517     , m_expressionTooDeep(false)
518 {
519     m_codeBlock->setNeedsFullScopeChain(true);
520
521     m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
522     m_codeBlock->setNumParameters(1);
523
524     prependComment("entering Eval block");
525     emitOpcode(op_enter);
526
527     const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
528     for (size_t i = 0; i < functionStack.size(); ++i)
529         m_codeBlock->addFunctionDecl(makeFunction(functionStack[i]));
530
531     const DeclarationStacks::VarStack& varStack = evalNode->varStack();
532     unsigned numVariables = varStack.size();
533     Vector<Identifier> variables;
534     variables.reserveCapacity(numVariables);
535     for (size_t i = 0; i < numVariables; ++i)
536         variables.append(*varStack[i].first);
537     codeBlock->adoptVariables(variables);
538     preserveLastVar();
539 }
540
541 BytecodeGenerator::~BytecodeGenerator()
542 {
543 }
544
545 RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
546 {
547     emitOpcode(op_init_lazy_reg);
548     instructions().append(reg->index());
549     return reg;
550 }
551
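// Decides where the callee's own name lives for a named function expression:
// nowhere if the function is anonymous or its name is not in scope; in a separate
// name scope object when non-strict eval or the debugger is involved; directly in
// the callee register when the name is not captured; otherwise moved into the
// captured section of the stack.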
552 RegisterID* BytecodeGenerator::resolveCallee(FunctionBodyNode* functionBodyNode)
553 {
554     if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
555         return 0;
556
557     m_calleeRegister.setIndex(JSStack::Callee);
558
559     // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
560     if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks) {
561         emitOpcode(op_push_name_scope);
562         instructions().append(addConstant(functionBodyNode->ident()));
563         instructions().append(m_calleeRegister.index());
564         instructions().append(ReadOnly | DontDelete);
565         return 0;
566     }
567
568     if (!functionBodyNode->captures(functionBodyNode->ident()))
569         return &m_calleeRegister;
570
571     // Move the callee into the captured section of the stack.
572     return emitMove(addVar(), &m_calleeRegister);
573 }
574
575 void BytecodeGenerator::addCallee(FunctionBodyNode* functionBodyNode, RegisterID* calleeRegister)
576 {
577     if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
578         return;
579
580     // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
581     if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks)
582         return;
583
584     ASSERT(calleeRegister);
585     symbolTable().add(functionBodyNode->ident().impl(), SymbolTableEntry(calleeRegister->index(), ReadOnly));
586 }
587
588 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
589 {
590     // Parameters overwrite var declarations, but not function declarations.
591     StringImpl* rep = ident.impl();
592     if (!m_functions.contains(rep)) {
593         symbolTable().set(rep, parameterIndex);
594         RegisterID& parameter = registerFor(parameterIndex);
595         parameter.setIndex(parameterIndex);
596     }
597
598     // To maintain the calling convention, we have to allocate unique space for
599     // each parameter, even if the parameter doesn't make it into the symbol table.
600     m_codeBlock->addParameter();
601 }
602
603 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
604 {
605     if (ident != propertyNames().arguments)
606         return false;
607     
608     if (!shouldOptimizeLocals())
609         return false;
610     
611     SymbolTableEntry entry = symbolTable().get(ident.impl());
612     if (entry.isNull())
613         return false;
614
615     if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
616         return true;
617     
618     return false;
619 }
620
621 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
622 {
623     ASSERT(willResolveToArguments(propertyNames().arguments));
624
625     SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
626     ASSERT(!entry.isNull());
627     return &registerFor(entry.getIndex());
628 }
629
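// Registers in the range [m_firstLazyFunction, m_lastLazyFunction) hold function
// declarations whose creation was deferred; touching one of them forces the
// function object to be created here.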
630 RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
631 {
632     if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
633         return reg;
634     emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
635     return reg;
636 }
637
638 RegisterID* BytecodeGenerator::newRegister()
639 {
640     m_calleeRegisters.append(m_calleeRegisters.size());
641     m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
642     return &m_calleeRegisters.last();
643 }
644
645 RegisterID* BytecodeGenerator::newTemporary()
646 {
647     // Reclaim free register IDs.
648     while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
649         m_calleeRegisters.removeLast();
650         
651     RegisterID* result = newRegister();
652     result->setTemporary();
653     return result;
654 }
655
656 PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
657 {
658     // Reclaim free label scopes.
659     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
660         m_labelScopes.removeLast();
661
662     // Allocate new label scope.
663     LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
664     m_labelScopes.append(scope);
665     return &m_labelScopes.last();
666 }
667
668 PassRefPtr<Label> BytecodeGenerator::newLabel()
669 {
670     // Reclaim free label IDs.
671     while (m_labels.size() && !m_labels.last().refCount())
672         m_labels.removeLast();
673
674     // Allocate new label ID.
675     m_labels.append(this);
676     return &m_labels.last();
677 }
678
679 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
680 {
681     unsigned newLabelIndex = instructions().size();
682     l0->setLocation(newLabelIndex);
683
684     if (m_codeBlock->numberOfJumpTargets()) {
685         unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
686         ASSERT(lastLabelIndex <= newLabelIndex);
687         if (newLabelIndex == lastLabelIndex) {
688             // Peephole optimizations have already been disabled by emitting the last label
689             return l0;
690         }
691     }
692
693     m_codeBlock->addJumpTarget(newLabelIndex);
694
695     // This disables peephole optimizations when an instruction is a jump target
696     m_lastOpcodeID = op_end;
697     return l0;
698 }
699
700 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
701 {
702 #ifndef NDEBUG
703     size_t opcodePosition = instructions().size();
704     ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
705     m_lastOpcodePosition = opcodePosition;
706 #endif
707     emitComment();
708     instructions().append(opcodeID);
709     m_lastOpcodeID = opcodeID;
710 }
711
712 #if ENABLE(BYTECODE_COMMENTS)
713 // Record a comment in the CodeBlock's comments list for the current opcode
714 // that is about to be emitted.
715 void BytecodeGenerator::emitComment()
716 {
717     if (m_currentCommentString) {
718         size_t opcodePosition = instructions().size();
719         Comment comment = { opcodePosition, m_currentCommentString };
720         m_codeBlock->bytecodeComments().append(comment);
721         m_currentCommentString = 0;
722     }
723 }
724
725 // Register a comment to be associated with the next opcode that will be emitted.
726 void BytecodeGenerator::prependComment(const char* string)
727 {
728     m_currentCommentString = string;
729 }
730 #endif
731
732 UnlinkedArrayProfile BytecodeGenerator::newArrayProfile()
733 {
734 #if ENABLE(VALUE_PROFILER)
735     return m_codeBlock->addArrayProfile();
736 #else
737     return 0;
738 #endif
739 }
740
741 UnlinkedArrayAllocationProfile BytecodeGenerator::newArrayAllocationProfile()
742 {
743 #if ENABLE(VALUE_PROFILER)
744     return m_codeBlock->addArrayAllocationProfile();
745 #else
746     return 0;
747 #endif
748 }
749
750 UnlinkedObjectAllocationProfile BytecodeGenerator::newObjectAllocationProfile()
751 {
752     return m_codeBlock->addObjectAllocationProfile();
753 }
754
755 UnlinkedValueProfile BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
756 {
757 #if ENABLE(VALUE_PROFILER)
758     UnlinkedValueProfile result = m_codeBlock->addValueProfile();
759 #else
760     UnlinkedValueProfile result = 0;
761 #endif
762     emitOpcode(opcodeID);
763     return result;
764 }
765
766 void BytecodeGenerator::emitLoopHint()
767 {
768 #if ENABLE(DFG_JIT)
769     emitOpcode(op_loop_hint);
770 #endif
771 }
772
773 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
774 {
775     ASSERT(instructions().size() >= 4);
776     size_t size = instructions().size();
777     dstIndex = instructions().at(size - 3).u.operand;
778     src1Index = instructions().at(size - 2).u.operand;
779     src2Index = instructions().at(size - 1).u.operand;
780 }
781
782 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
783 {
784     ASSERT(instructions().size() >= 3);
785     size_t size = instructions().size();
786     dstIndex = instructions().at(size - 2).u.operand;
787     srcIndex = instructions().at(size - 1).u.operand;
788 }
789
790 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
791 {
792     ASSERT(instructions().size() >= 4);
793     instructions().shrink(instructions().size() - 4);
794     m_lastOpcodeID = op_end;
795 }
796
797 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
798 {
799     ASSERT(instructions().size() >= 3);
800     instructions().shrink(instructions().size() - 3);
801     m_lastOpcodeID = op_end;
802 }
803
804 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
805 {
806     size_t begin = instructions().size();
807     emitOpcode(target->isForward() ? op_jmp : op_loop);
808     instructions().append(target->bind(begin, instructions().size()));
809     return target;
810 }
811
812 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
813 {
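    // Peephole: if the condition was just produced by a comparison (or a null check)
    // into a dead temporary, rewind that opcode and emit a fused compare-and-jump.
    // Illustrative source pattern (a sketch): "if (a < b) { ... }" compiles to a
    // single op_jless rather than op_less followed by op_jtrue.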
814     if (m_lastOpcodeID == op_less) {
815         int dstIndex;
816         int src1Index;
817         int src2Index;
818
819         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
820
821         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
822             rewindBinaryOp();
823
824             size_t begin = instructions().size();
825             emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
826             instructions().append(src1Index);
827             instructions().append(src2Index);
828             instructions().append(target->bind(begin, instructions().size()));
829             return target;
830         }
831     } else if (m_lastOpcodeID == op_lesseq) {
832         int dstIndex;
833         int src1Index;
834         int src2Index;
835
836         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
837
838         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
839             rewindBinaryOp();
840
841             size_t begin = instructions().size();
842             emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
843             instructions().append(src1Index);
844             instructions().append(src2Index);
845             instructions().append(target->bind(begin, instructions().size()));
846             return target;
847         }
848     } else if (m_lastOpcodeID == op_greater) {
849         int dstIndex;
850         int src1Index;
851         int src2Index;
852
853         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
854
855         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
856             rewindBinaryOp();
857
858             size_t begin = instructions().size();
859             emitOpcode(target->isForward() ? op_jgreater : op_loop_if_greater);
860             instructions().append(src1Index);
861             instructions().append(src2Index);
862             instructions().append(target->bind(begin, instructions().size()));
863             return target;
864         }
865     } else if (m_lastOpcodeID == op_greatereq) {
866         int dstIndex;
867         int src1Index;
868         int src2Index;
869
870         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
871
872         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
873             rewindBinaryOp();
874
875             size_t begin = instructions().size();
876             emitOpcode(target->isForward() ? op_jgreatereq : op_loop_if_greatereq);
877             instructions().append(src1Index);
878             instructions().append(src2Index);
879             instructions().append(target->bind(begin, instructions().size()));
880             return target;
881         }
882     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
883         int dstIndex;
884         int srcIndex;
885
886         retrieveLastUnaryOp(dstIndex, srcIndex);
887
888         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
889             rewindUnaryOp();
890
891             size_t begin = instructions().size();
892             emitOpcode(op_jeq_null);
893             instructions().append(srcIndex);
894             instructions().append(target->bind(begin, instructions().size()));
895             return target;
896         }
897     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
898         int dstIndex;
899         int srcIndex;
900
901         retrieveLastUnaryOp(dstIndex, srcIndex);
902
903         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
904             rewindUnaryOp();
905
906             size_t begin = instructions().size();
907             emitOpcode(op_jneq_null);
908             instructions().append(srcIndex);
909             instructions().append(target->bind(begin, instructions().size()));
910             return target;
911         }
912     }
913
914     size_t begin = instructions().size();
915
916     emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
917     instructions().append(cond->index());
918     instructions().append(target->bind(begin, instructions().size()));
919     return target;
920 }
921
922 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
923 {
924     if (m_lastOpcodeID == op_less && target->isForward()) {
925         int dstIndex;
926         int src1Index;
927         int src2Index;
928
929         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
930
931         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
932             rewindBinaryOp();
933
934             size_t begin = instructions().size();
935             emitOpcode(op_jnless);
936             instructions().append(src1Index);
937             instructions().append(src2Index);
938             instructions().append(target->bind(begin, instructions().size()));
939             return target;
940         }
941     } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
942         int dstIndex;
943         int src1Index;
944         int src2Index;
945
946         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
947
948         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
949             rewindBinaryOp();
950
951             size_t begin = instructions().size();
952             emitOpcode(op_jnlesseq);
953             instructions().append(src1Index);
954             instructions().append(src2Index);
955             instructions().append(target->bind(begin, instructions().size()));
956             return target;
957         }
958     } else if (m_lastOpcodeID == op_greater && target->isForward()) {
959         int dstIndex;
960         int src1Index;
961         int src2Index;
962
963         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
964
965         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
966             rewindBinaryOp();
967
968             size_t begin = instructions().size();
969             emitOpcode(op_jngreater);
970             instructions().append(src1Index);
971             instructions().append(src2Index);
972             instructions().append(target->bind(begin, instructions().size()));
973             return target;
974         }
975     } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
976         int dstIndex;
977         int src1Index;
978         int src2Index;
979
980         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
981
982         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
983             rewindBinaryOp();
984
985             size_t begin = instructions().size();
986             emitOpcode(op_jngreatereq);
987             instructions().append(src1Index);
988             instructions().append(src2Index);
989             instructions().append(target->bind(begin, instructions().size()));
990             return target;
991         }
992     } else if (m_lastOpcodeID == op_not) {
993         int dstIndex;
994         int srcIndex;
995
996         retrieveLastUnaryOp(dstIndex, srcIndex);
997
998         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
999             rewindUnaryOp();
1000
1001             size_t begin = instructions().size();
1002             emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
1003             instructions().append(srcIndex);
1004             instructions().append(target->bind(begin, instructions().size()));
1005             return target;
1006         }
1007     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
1008         int dstIndex;
1009         int srcIndex;
1010
1011         retrieveLastUnaryOp(dstIndex, srcIndex);
1012
1013         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
1014             rewindUnaryOp();
1015
1016             size_t begin = instructions().size();
1017             emitOpcode(op_jneq_null);
1018             instructions().append(srcIndex);
1019             instructions().append(target->bind(begin, instructions().size()));
1020             return target;
1021         }
1022     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
1023         int dstIndex;
1024         int srcIndex;
1025
1026         retrieveLastUnaryOp(dstIndex, srcIndex);
1027
1028         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
1029             rewindUnaryOp();
1030
1031             size_t begin = instructions().size();
1032             emitOpcode(op_jeq_null);
1033             instructions().append(srcIndex);
1034             instructions().append(target->bind(begin, instructions().size()));
1035             return target;
1036         }
1037     }
1038
1039     size_t begin = instructions().size();
1040     emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
1041     instructions().append(cond->index());
1042     instructions().append(target->bind(begin, instructions().size()));
1043     return target;
1044 }
1045
1046 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
1047 {
1048     size_t begin = instructions().size();
1049
1050     emitOpcode(op_jneq_ptr);
1051     instructions().append(cond->index());
1052     instructions().append(Special::CallFunction);
1053     instructions().append(target->bind(begin, instructions().size()));
1054     return target;
1055 }
1056
1057 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
1058 {
1059     size_t begin = instructions().size();
1060
1061     emitOpcode(op_jneq_ptr);
1062     instructions().append(cond->index());
1063     instructions().append(Special::ApplyFunction);
1064     instructions().append(target->bind(begin, instructions().size()));
1065     return target;
1066 }
1067
1068 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
1069 {
1070     StringImpl* rep = ident.impl();
1071     IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
1072     if (result.isNewEntry)
1073         m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
1074
1075     return result.iterator->value;
1076 }
1077
1078 // We can't hash JSValue(), so we use a dedicated data member to cache it.
1079 RegisterID* BytecodeGenerator::addConstantEmptyValue()
1080 {
1081     if (!m_emptyValueRegister) {
1082         int index = m_nextConstantOffset;
1083         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1084         ++m_nextConstantOffset;
1085         m_codeBlock->addConstant(JSValue());
1086         m_emptyValueRegister = &m_constantPoolRegisters[index];
1087     }
1088
1089     return m_emptyValueRegister;
1090 }
1091
1092 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
1093 {
1094     if (!v)
1095         return addConstantEmptyValue();
1096
1097     int index = m_nextConstantOffset;
1098     JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
1099     if (result.isNewEntry) {
1100         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1101         ++m_nextConstantOffset;
1102         m_codeBlock->addConstant(v);
1103     } else
1104         index = result.iterator->value;
1105     return &m_constantPoolRegisters[index];
1106 }
1107
1108 unsigned BytecodeGenerator::addRegExp(RegExp* r)
1109 {
1110     return m_codeBlock->addRegExp(r);
1111 }
1112
1113 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
1114 {
1115     m_staticPropertyAnalyzer.mov(dst->index(), src->index());
1116
1117     emitOpcode(op_mov);
1118     instructions().append(dst->index());
1119     instructions().append(src->index());
1120     return dst;
1121 }
1122
1123 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
1124 {
1125     emitOpcode(opcodeID);
1126     instructions().append(dst->index());
1127     instructions().append(src->index());
1128     return dst;
1129 }
1130
1131 RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
1132 {
1133     emitOpcode(op_pre_inc);
1134     instructions().append(srcDst->index());
1135     return srcDst;
1136 }
1137
1138 RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
1139 {
1140     emitOpcode(op_pre_dec);
1141     instructions().append(srcDst->index());
1142     return srcDst;
1143 }
1144
1145 RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
1146 {
1147     emitOpcode(op_post_inc);
1148     instructions().append(dst->index());
1149     instructions().append(srcDst->index());
1150     return dst;
1151 }
1152
1153 RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
1154 {
1155     emitOpcode(op_post_dec);
1156     instructions().append(dst->index());
1157     instructions().append(srcDst->index());
1158     return dst;
1159 }
1160
1161 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1162 {
1163     emitOpcode(opcodeID);
1164     instructions().append(dst->index());
1165     instructions().append(src1->index());
1166     instructions().append(src2->index());
1167
1168     if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
1169         opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
1170         instructions().append(types.toInt());
1171
1172     return dst;
1173 }
1174
1175 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1176 {
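    // Peephole: fuse "typeof x == <type name literal>" into a dedicated type check.
    // Illustrative source pattern (a sketch): "typeof x == 'number'" compiles to a
    // single op_is_number on x instead of op_typeof followed by an equality test.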
1177     if (m_lastOpcodeID == op_typeof) {
1178         int dstIndex;
1179         int srcIndex;
1180
1181         retrieveLastUnaryOp(dstIndex, srcIndex);
1182
1183         if (src1->index() == dstIndex
1184             && src1->isTemporary()
1185             && m_codeBlock->isConstantRegisterIndex(src2->index())
1186             && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1187             const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1188             if (value == "undefined") {
1189                 rewindUnaryOp();
1190                 emitOpcode(op_is_undefined);
1191                 instructions().append(dst->index());
1192                 instructions().append(srcIndex);
1193                 return dst;
1194             }
1195             if (value == "boolean") {
1196                 rewindUnaryOp();
1197                 emitOpcode(op_is_boolean);
1198                 instructions().append(dst->index());
1199                 instructions().append(srcIndex);
1200                 return dst;
1201             }
1202             if (value == "number") {
1203                 rewindUnaryOp();
1204                 emitOpcode(op_is_number);
1205                 instructions().append(dst->index());
1206                 instructions().append(srcIndex);
1207                 return dst;
1208             }
1209             if (value == "string") {
1210                 rewindUnaryOp();
1211                 emitOpcode(op_is_string);
1212                 instructions().append(dst->index());
1213                 instructions().append(srcIndex);
1214                 return dst;
1215             }
1216             if (value == "object") {
1217                 rewindUnaryOp();
1218                 emitOpcode(op_is_object);
1219                 instructions().append(dst->index());
1220                 instructions().append(srcIndex);
1221                 return dst;
1222             }
1223             if (value == "function") {
1224                 rewindUnaryOp();
1225                 emitOpcode(op_is_function);
1226                 instructions().append(dst->index());
1227                 instructions().append(srcIndex);
1228                 return dst;
1229             }
1230         }
1231     }
1232
1233     emitOpcode(opcodeID);
1234     instructions().append(dst->index());
1235     instructions().append(src1->index());
1236     instructions().append(src2->index());
1237     return dst;
1238 }
1239
1240 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1241 {
1242     return emitLoad(dst, jsBoolean(b));
1243 }
1244
1245 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1246 {
1247     // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1248     // Later we can do the extra work to handle that like the other cases.  They also don't
1249     // work correctly with NaN as a key.
1250     if (std::isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1251         return emitLoad(dst, jsNumber(number));
1252     JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->value;
1253     if (!valueInMap)
1254         valueInMap = jsNumber(number);
1255     return emitLoad(dst, valueInMap);
1256 }
1257
1258 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1259 {
1260     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->value;
1261     if (!stringInMap)
1262         stringInMap = jsOwnedString(globalData(), identifier.string());
1263     return emitLoad(dst, JSValue(stringInMap));
1264 }
1265
1266 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1267 {
1268     RegisterID* constantID = addConstantValue(v);
1269     if (dst)
1270         return emitMove(dst, constantID);
1271     return constantID;
1272 }
1273
1274 ResolveResult BytecodeGenerator::resolve(const Identifier& property)
1275 {
1276     if (property == propertyNames().thisIdentifier)
1277         return ResolveResult::registerResolve(thisRegister(), ResolveResult::ReadOnlyFlag);
1278
1279     // Check if the property should be allocated in a register.
1280     if (m_codeType != GlobalCode && shouldOptimizeLocals() && m_symbolTable) {
1281         SymbolTableEntry entry = symbolTable().get(property.impl());
1282         if (!entry.isNull()) {
1283             if (property == propertyNames().arguments)
1284                 createArgumentsIfNecessary();
1285             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1286             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1287             return ResolveResult::registerResolve(local, flags);
1288         }
1289     }
1290     return ResolveResult::dynamicResolve();
1291 }
1292
1293 ResolveResult BytecodeGenerator::resolveConstDecl(const Identifier& property)
1294 {
1295     // Register-allocated const declarations.
1296     if (m_codeType != EvalCode && m_codeType != GlobalCode && m_symbolTable) {
1297         SymbolTableEntry entry = symbolTable().get(property.impl());
1298         if (!entry.isNull()) {
1299             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1300             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1301             return ResolveResult::registerResolve(local, flags);
1302         }
1303     }
1304
1305     return ResolveResult::dynamicResolve();
1306 }
1307
1308 void BytecodeGenerator::emitCheckHasInstance(RegisterID* dst, RegisterID* value, RegisterID* base, Label* target)
1309 {
1310     size_t begin = instructions().size();
1311     emitOpcode(op_check_has_instance);
1312     instructions().append(dst->index());
1313     instructions().append(value->index());
1314     instructions().append(base->index());
1315     instructions().append(target->bind(begin, instructions().size()));
1316 }
1317
1318 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
1319 {
1320     emitOpcode(op_instanceof);
1321     instructions().append(dst->index());
1322     instructions().append(value->index());
1323     instructions().append(basePrototype->index());
1324     return dst;
1325 }
1326
1327 bool BytecodeGenerator::shouldAvoidResolveGlobal()
1328 {
1329     return !m_labelScopes.size();
1330 }
1331
1332 RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1333 {
1334
1335     if (resolveResult.isRegister())
1336         return emitGetLocalVar(dst, resolveResult, property);
1337
1338     UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve);
1339     instructions().append(kill(dst));
1340     instructions().append(addConstant(property));
1341     instructions().append(getResolveOperations(property));
1342     instructions().append(profile);
1343     return dst;
1344 }
1345
1346 RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1347 {
1348     ASSERT_UNUSED(resolveResult, !resolveResult.isRegister());
1349     // We can't optimise at all :-(
1350     UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_base);
1351     instructions().append(kill(dst));
1352     instructions().append(addConstant(property));
1353     instructions().append(false);
1354     instructions().append(getResolveBaseOperations(property));
1355     instructions().append(0);
1356     instructions().append(profile);
1357     return dst;
1358 }
1359
1360 RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property, NonlocalResolveInfo& verifier)
1361 {
1362     ASSERT_UNUSED(resolveResult, !resolveResult.isRegister());
1363     // We can't optimise at all :-(
1364     UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_base);
1365     instructions().append(kill(dst));
1366     instructions().append(addConstant(property));
1367     instructions().append(m_codeBlock->isStrictMode());
1368     uint32_t putToBaseIndex = 0;
1369     instructions().append(getResolveBaseForPutOperations(property, putToBaseIndex));
1370     verifier.resolved(putToBaseIndex);
1371     instructions().append(putToBaseIndex);
1372     instructions().append(profile);
1373     return dst;
1374 }
1375
1376 RegisterID* BytecodeGenerator::emitResolveWithBaseForPut(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property, NonlocalResolveInfo& verifier)
1377 {
1378     ASSERT_UNUSED(resolveResult, !resolveResult.isRegister());
1379     UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_with_base);
1380     instructions().append(kill(baseDst));
1381     instructions().append(propDst->index());
1382     instructions().append(addConstant(property));
1383     uint32_t putToBaseIndex = 0;
1384     instructions().append(getResolveWithBaseForPutOperations(property, putToBaseIndex));
1385     verifier.resolved(putToBaseIndex);
1386     instructions().append(putToBaseIndex);
1387     instructions().append(profile);
1388     return baseDst;
1389 }
1390
1391 RegisterID* BytecodeGenerator::emitResolveWithThis(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1392 {
1393     if (resolveResult.isRegister()) {
1394         emitLoad(baseDst, jsUndefined());
1395         emitGetLocalVar(propDst, resolveResult, property);
1396         return baseDst;
1397     }
1398
1399     UnlinkedValueProfile profile = emitProfiledOpcode(op_resolve_with_this);
1400     instructions().append(kill(baseDst));
1401     instructions().append(propDst->index());
1402     instructions().append(addConstant(property));
1403     instructions().append(getResolveWithThisOperations(property));
1404     instructions().append(profile);
1405     return baseDst;
1406 }
1407
1408 RegisterID* BytecodeGenerator::emitGetLocalVar(RegisterID* dst, const ResolveResult& resolveResult, const Identifier&)
1409 {
1410     switch (resolveResult.type()) {
1411     case ResolveResult::Register:
1412     case ResolveResult::ReadOnlyRegister:
1413         if (dst == ignoredResult())
1414             return 0;
1415         return moveToDestinationIfNeeded(dst, resolveResult.local());
1416
1417     default:
1418         RELEASE_ASSERT_NOT_REACHED();
1419         return 0;
1420     }
1421 }
1422
1423 RegisterID* BytecodeGenerator::emitInitGlobalConst(const Identifier& identifier, RegisterID* value)
1424 {
1425     ASSERT(m_codeType == GlobalCode);
1426     emitOpcode(op_init_global_const_nop);
1427     instructions().append(0);
1428     instructions().append(value->index());
1429     instructions().append(0);
1430     instructions().append(addConstant(identifier));
1431     return value;
1432 }
1433
1434 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1435 {
1436     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1437
1438     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_id);
1439     instructions().append(kill(dst));
1440     instructions().append(base->index());
1441     instructions().append(addConstant(property));
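    // The four zeroed operands below are reserved slots that this generator
    // never fills in; they appear to serve as the inline property-access
    // cache for get_by_id in later stages.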
1442     instructions().append(0);
1443     instructions().append(0);
1444     instructions().append(0);
1445     instructions().append(0);
1446     instructions().append(profile);
1447     return dst;
1448 }
1449
1450 RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
1451 {
1452     emitOpcode(op_get_arguments_length);
1453     instructions().append(dst->index());
1454     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1455     instructions().append(base->index());
1456     instructions().append(addConstant(propertyNames().length));
1457     return dst;
1458 }
1459
1460 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1461 {
1462     unsigned propertyIndex = addConstant(property);
1463
1464     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1465
1466     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1467
1468     emitOpcode(op_put_by_id);
1469     instructions().append(base->index());
1470     instructions().append(propertyIndex);
1471     instructions().append(value->index());
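    // As with get_by_id, the zeroed operands are reserved cache slots; the
    // last one doubles as the "direct put" flag that emitDirectPutById()
    // below computes explicitly.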
1472     instructions().append(0);
1473     instructions().append(0);
1474     instructions().append(0);
1475     instructions().append(0);
1476     instructions().append(0);
1477     return value;
1478 }
1479
1480 RegisterID* BytecodeGenerator::emitPutToBase(RegisterID* base, const Identifier& property, RegisterID* value, NonlocalResolveInfo& resolveInfo)
1481 {
1482     emitOpcode(op_put_to_base);
1483     instructions().append(base->index());
1484     instructions().append(addConstant(property));
1485     instructions().append(value->index());
1486     instructions().append(resolveInfo.put());
1487     return value;
1488 }
1489
1490 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1491 {
1492     unsigned propertyIndex = addConstant(property);
1493
1494     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1495
1496     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1497     
1498     emitOpcode(op_put_by_id);
1499     instructions().append(base->index());
1500     instructions().append(propertyIndex);
1501     instructions().append(value->index());
1502     instructions().append(0);
1503     instructions().append(0);
1504     instructions().append(0);
1505     instructions().append(0);
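    // The final operand marks the put as "direct", but only when the property
    // is neither __proto__ nor an array index; presumably those cases still
    // need the generic handling.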
1506     instructions().append(
1507         property != m_globalData->propertyNames->underscoreProto
1508         && PropertyName(property).asIndex() == PropertyName::NotAnIndex);
1509     return value;
1510 }
1511
1512 void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1513 {
1514     unsigned propertyIndex = addConstant(property);
1515
1516     m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);
1517
1518     emitOpcode(op_put_getter_setter);
1519     instructions().append(base->index());
1520     instructions().append(propertyIndex);
1521     instructions().append(getter->index());
1522     instructions().append(setter->index());
1523 }
1524
1525 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1526 {
1527     emitOpcode(op_del_by_id);
1528     instructions().append(dst->index());
1529     instructions().append(base->index());
1530     instructions().append(addConstant(property));
1531     return dst;
1532 }
1533
1534 RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1535 {
1536     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1537     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_argument_by_val);
1538     instructions().append(kill(dst));
1539     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1540     instructions().append(base->index());
1541     instructions().append(property->index());
1542     instructions().append(arrayProfile);
1543     instructions().append(profile);
1544     return dst;
1545 }
1546
1547 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1548 {
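    // Fast path for for-in loops: if the subscript is the current enumeration
    // property, emit op_get_by_pname, which can use the cached enumeration
    // state (expected subscript, iterator, index) instead of a generic lookup.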
1549     for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1550         ForInContext& context = m_forInContextStack[i - 1];
1551         if (context.propertyRegister == property) {
1552             emitOpcode(op_get_by_pname);
1553             instructions().append(dst->index());
1554             instructions().append(base->index());
1555             instructions().append(property->index());
1556             instructions().append(context.expectedSubscriptRegister->index());
1557             instructions().append(context.iterRegister->index());
1558             instructions().append(context.indexRegister->index());
1559             return dst;
1560         }
1561     }
1562     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1563     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_val);
1564     instructions().append(kill(dst));
1565     instructions().append(base->index());
1566     instructions().append(property->index());
1567     instructions().append(arrayProfile);
1568     instructions().append(profile);
1569     return dst;
1570 }
1571
1572 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1573 {
1574     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1575     emitOpcode(op_put_by_val);
1576     instructions().append(base->index());
1577     instructions().append(property->index());
1578     instructions().append(value->index());
1579     instructions().append(arrayProfile);
1580     return value;
1581 }
1582
1583 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1584 {
1585     emitOpcode(op_del_by_val);
1586     instructions().append(dst->index());
1587     instructions().append(base->index());
1588     instructions().append(property->index());
1589     return dst;
1590 }
1591
1592 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1593 {
1594     emitOpcode(op_put_by_index);
1595     instructions().append(base->index());
1596     instructions().append(index);
1597     instructions().append(value->index());
1598     return value;
1599 }
1600
1601 RegisterID* BytecodeGenerator::emitCreateThis(RegisterID* dst)
1602 {
1603     RefPtr<RegisterID> func = newTemporary(); 
1604
1605     UnlinkedValueProfile profile = emitProfiledOpcode(op_get_callee);
1606     instructions().append(func->index());
1607     instructions().append(profile);
1608
1609     size_t begin = instructions().size();
1610     m_staticPropertyAnalyzer.createThis(m_thisRegister.index(), begin + 3);
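    // begin + 3 is the offset of the zero operand emitted below;
    // m_staticPropertyAnalyzer appears to patch a predicted inline capacity
    // into that slot once it has seen the putById's performed on this
    // register (emitNewObject does the same at begin + 2).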
1611
1612     emitOpcode(op_create_this); 
1613     instructions().append(m_thisRegister.index()); 
1614     instructions().append(func->index()); 
1615     instructions().append(0);
1616     return dst;
1617 }
1618
1619 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1620 {
1621     size_t begin = instructions().size();
1622     m_staticPropertyAnalyzer.newObject(dst->index(), begin + 2);
1623
1624     emitOpcode(op_new_object);
1625     instructions().append(dst->index());
1626     instructions().append(0);
1627     instructions().append(newObjectAllocationProfile());
1628     return dst;
1629 }
1630
1631 unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
1632 {
1633     return m_codeBlock->addConstantBuffer(length);
1634 }
1635
1636 JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1637 {
1638     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->value;
1639     if (!stringInMap) {
1640         stringInMap = jsString(globalData(), identifier.string());
1641         addConstantValue(stringInMap);
1642     }
1643     return stringInMap;
1644 }
1645
1646 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
1647 {
1648 #if !ASSERT_DISABLED
1649     unsigned checkLength = 0;
1650 #endif
1651     bool hadVariableExpression = false;
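    // Fast path: an array literal made up purely of number and string
    // literals with no elisions (e.g. [1, 2, "x"]) is emitted as a single
    // op_new_array_buffer over a pre-built constant buffer; anything else
    // falls through to the generic op_new_array path below.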
1652     if (length) {
1653         for (ElementNode* n = elements; n; n = n->next()) {
1654             if (!n->value()->isNumber() && !n->value()->isString()) {
1655                 hadVariableExpression = true;
1656                 break;
1657             }
1658             if (n->elision())
1659                 break;
1660 #if !ASSERT_DISABLED
1661             checkLength++;
1662 #endif
1663         }
1664         if (!hadVariableExpression) {
1665             ASSERT(length == checkLength);
1666             unsigned constantBufferIndex = addConstantBuffer(length);
1667             JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex).data();
1668             unsigned index = 0;
1669             for (ElementNode* n = elements; index < length; n = n->next()) {
1670                 if (n->value()->isNumber())
1671                     constantBuffer[index++] = jsNumber(static_cast<NumberNode*>(n->value())->value());
1672                 else {
1673                     ASSERT(n->value()->isString());
1674                     constantBuffer[index++] = addStringConstant(static_cast<StringNode*>(n->value())->value());
1675                 }
1676             }
1677             emitOpcode(op_new_array_buffer);
1678             instructions().append(dst->index());
1679             instructions().append(constantBufferIndex);
1680             instructions().append(length);
1681             instructions().append(newArrayAllocationProfile());
1682             return dst;
1683         }
1684     }
1685
1686     Vector<RefPtr<RegisterID>, 16> argv;
1687     for (ElementNode* n = elements; n; n = n->next()) {
1688         if (n->elision())
1689             break;
1690         argv.append(newTemporary());
1691         // op_new_array requires the initial values to be a sequential range of registers
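        // (e.g. for [a, b, c] the three values land in consecutive
        // temporaries), so the opcode only needs the first register and a
        // count; the ASSERT below checks that invariant.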
1692         ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1693         emitNode(argv.last().get(), n->value());
1694     }
1695     emitOpcode(op_new_array);
1696     instructions().append(dst->index());
1697     instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1698     instructions().append(argv.size()); // argc
1699     instructions().append(newArrayAllocationProfile());
1700     return dst;
1701 }
1702
1703 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
1704 {
1705     return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(function)), false);
1706 }
1707
1708 RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
1709 {
1710     FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
1711     if (ptr.isNewEntry)
1712         ptr.iterator->value = m_codeBlock->addFunctionDecl(makeFunction(function));
1713     return emitNewFunctionInternal(dst, ptr.iterator->value, true);
1714 }
1715
1716 RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
1717 {
1718     createActivationIfNecessary();
1719     emitOpcode(op_new_func);
1720     instructions().append(dst->index());
1721     instructions().append(index);
1722     instructions().append(doNullCheck);
1723     return dst;
1724 }
1725
1726 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1727 {
1728     emitOpcode(op_new_regexp);
1729     instructions().append(dst->index());
1730     instructions().append(addRegExp(regExp));
1731     return dst;
1732 }
1733
1734 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1735 {
1736     FunctionBodyNode* function = n->body();
1737     unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));
1738     
1739     createActivationIfNecessary();
1740     emitOpcode(op_new_func_exp);
1741     instructions().append(r0->index());
1742     instructions().append(index);
1743     return r0;
1744 }
1745
1746 RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1747 {
1748     return emitCall(op_call, dst, func, expectedFunction, callArguments, divot, startOffset, endOffset);
1749 }
1750
1751 void BytecodeGenerator::createArgumentsIfNecessary()
1752 {
1753     if (m_codeType != FunctionCode)
1754         return;
1755     
1756     if (!m_codeBlock->usesArguments())
1757         return;
1758
1759     // If we're in strict mode we tear off the arguments on function
1760     // entry, so there's no need to check if we need to create them
1761     // now
1762     if (m_codeBlock->isStrictMode())
1763         return;
1764
1765     emitOpcode(op_create_arguments);
1766     instructions().append(m_codeBlock->argumentsRegister());
1767 }
1768
1769 void BytecodeGenerator::createActivationIfNecessary()
1770 {
1771     if (m_hasCreatedActivation)
1772         return;
1773     if (!m_codeBlock->needsFullScopeChain())
1774         return;
1775     emitOpcode(op_create_activation);
1776     instructions().append(m_activationRegister->index());
1777 }
1778
1779 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1780 {
1781     return emitCall(op_call_eval, dst, func, NoExpectedFunction, callArguments, divot, startOffset, endOffset);
1782 }
1783
1784 ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
1785 {
1786     if (identifier == m_globalData->propertyNames->Object)
1787         return ExpectObjectConstructor;
1788     if (identifier == m_globalData->propertyNames->Array)
1789         return ExpectArrayConstructor;
1790     return NoExpectedFunction;
1791 }
1792
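// Guarded inlining of "new Object()", "new Array()" and "new Array(size)":
// emit an op_jneq_ptr against the well-known constructor, allocate directly
// (op_new_object / op_new_array / op_new_array_with_size) when the callee
// really is that constructor and jump to 'done'; otherwise fall through to
// 'realCall' and let the caller emit the generic call.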
1793 ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label* done)
1794 {
1795     RefPtr<Label> realCall = newLabel();
1796     switch (expectedFunction) {
1797     case ExpectObjectConstructor: {
1798         // If the number of arguments is non-zero, then we can't do anything interesting.
1799         if (callArguments.argumentCountIncludingThis() >= 2)
1800             return NoExpectedFunction;
1801         
1802         size_t begin = instructions().size();
1803         emitOpcode(op_jneq_ptr);
1804         instructions().append(func->index());
1805         instructions().append(Special::ObjectConstructor);
1806         instructions().append(realCall->bind(begin, instructions().size()));
1807         
1808         if (dst != ignoredResult())
1809             emitNewObject(dst);
1810         break;
1811     }
1812         
1813     case ExpectArrayConstructor: {
1814         // If you're doing anything other than "new Array()" or "new Array(foo)" then we
1815         // don't inline it, for now. The only reason is that call arguments are in
1816         // the opposite order of what op_new_array expects, so we'd either need to change
1817         // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
1818         // things sounds like it's worth it.
1819         if (callArguments.argumentCountIncludingThis() > 2)
1820             return NoExpectedFunction;
1821         
1822         size_t begin = instructions().size();
1823         emitOpcode(op_jneq_ptr);
1824         instructions().append(func->index());
1825         instructions().append(Special::ArrayConstructor);
1826         instructions().append(realCall->bind(begin, instructions().size()));
1827         
1828         if (dst != ignoredResult()) {
1829             if (callArguments.argumentCountIncludingThis() == 2) {
1830                 emitOpcode(op_new_array_with_size);
1831                 instructions().append(dst->index());
1832                 instructions().append(callArguments.argumentRegister(0)->index());
1833                 instructions().append(newArrayAllocationProfile());
1834             } else {
1835                 ASSERT(callArguments.argumentCountIncludingThis() == 1);
1836                 emitOpcode(op_new_array);
1837                 instructions().append(dst->index());
1838                 instructions().append(0);
1839                 instructions().append(0);
1840                 instructions().append(newArrayAllocationProfile());
1841             }
1842         }
1843         break;
1844     }
1845         
1846     default:
1847         ASSERT(expectedFunction == NoExpectedFunction);
1848         return NoExpectedFunction;
1849     }
1850     
1851     size_t begin = instructions().size();
1852     emitOpcode(op_jmp);
1853     instructions().append(done->bind(begin, instructions().size()));
1854     emitLabel(realCall.get());
1855     
1856     return expectedFunction;
1857 }
1858
1859 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1860 {
1861     ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1862     ASSERT(func->refCount());
1863
1864     if (m_shouldEmitProfileHooks)
1865         emitMove(callArguments.profileHookRegister(), func);
1866
1867     // Generate code for arguments.
1868     unsigned argument = 0;
1869     for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
1870         emitNode(callArguments.argumentRegister(argument++), n);
1871
1872     // Reserve space for call frame.
1873     Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize> callFrame;
1874     for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1875         callFrame.append(newTemporary());
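    // These temporaries are never written here; reserving them simply keeps
    // the register allocator from reusing the slots that the call frame
    // header will occupy at call time (a rough description).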
1876
1877     if (m_shouldEmitProfileHooks) {
1878         emitOpcode(op_profile_will_call);
1879         instructions().append(callArguments.profileHookRegister()->index());
1880     }
1881
1882     emitExpressionInfo(divot, startOffset, endOffset);
1883
1884     RefPtr<Label> done = newLabel();
1885     expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1886     
1887     // Emit call.
1888     UnlinkedArrayProfile arrayProfile = newArrayProfile();
1889     emitOpcode(opcodeID);
1890     instructions().append(func->index()); // func
1891     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1892     instructions().append(callArguments.registerOffset()); // registerOffset
1893 #if ENABLE(LLINT)
1894     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1895 #else
1896     instructions().append(0);
1897 #endif
1898     instructions().append(arrayProfile);
1899     if (dst != ignoredResult()) {
1900         UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
1901         instructions().append(kill(dst));
1902         instructions().append(profile);
1903     }
1904     
1905     if (expectedFunction != NoExpectedFunction)
1906         emitLabel(done.get());
1907
1908     if (m_shouldEmitProfileHooks) {
1909         emitOpcode(op_profile_did_call);
1910         instructions().append(callArguments.profileHookRegister()->index());
1911     }
1912
1913     return dst;
1914 }
1915
1916 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
1917 {
1918     if (m_shouldEmitProfileHooks) {
1919         emitMove(profileHookRegister, func);
1920         emitOpcode(op_profile_will_call);
1921         instructions().append(profileHookRegister->index());
1922     }
1923     
1924     emitExpressionInfo(divot, startOffset, endOffset);
1925
1926     // Emit call.
1927     emitOpcode(op_call_varargs);
1928     instructions().append(func->index());
1929     instructions().append(thisRegister->index());
1930     instructions().append(arguments->index());
1931     instructions().append(firstFreeRegister->index());
1932     if (dst != ignoredResult()) {
1933         UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
1934         instructions().append(kill(dst));
1935         instructions().append(profile);
1936     }
1937     if (m_shouldEmitProfileHooks) {
1938         emitOpcode(op_profile_did_call);
1939         instructions().append(profileHookRegister->index());
1940     }
1941     return dst;
1942 }
1943
1944 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1945 {
1946     if (m_codeBlock->needsFullScopeChain()) {
1947         emitOpcode(op_tear_off_activation);
1948         instructions().append(m_activationRegister->index());
1949     }
1950
1951     if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !m_codeBlock->isStrictMode()) {
1952         emitOpcode(op_tear_off_arguments);
1953         instructions().append(m_codeBlock->argumentsRegister());
1954         instructions().append(m_activationRegister ? m_activationRegister->index() : emitLoad(0, JSValue())->index());
1955     }
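    // Tearing off the activation / arguments roughly means copying captured
    // locals and arguments out of the stack frame into their heap objects so
    // they remain valid after this frame returns.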
1956
1957     // Constructors use op_ret_object_or_this to check the result is an
1958     // object, unless we can trivially determine the check is not
1959     // necessary (currently, if the return value is 'this').
1960     if (isConstructor() && (src->index() != m_thisRegister.index())) {
1961         emitOpcode(op_ret_object_or_this);
1962         instructions().append(src->index());
1963         instructions().append(m_thisRegister.index());
1964         return src;
1965     }
1966     return emitUnaryNoDstOp(op_ret, src);
1967 }
1968
1969 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
1970 {
1971     emitOpcode(opcodeID);
1972     instructions().append(src->index());
1973     return src;
1974 }
1975
1976 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1977 {
1978     ASSERT(func->refCount());
1979
1980     if (m_shouldEmitProfileHooks)
1981         emitMove(callArguments.profileHookRegister(), func);
1982
1983     // Generate code for arguments.
1984     unsigned argument = 0;
1985     if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
1986         for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
1987             emitNode(callArguments.argumentRegister(argument++), n);
1988     }
1989
1990     if (m_shouldEmitProfileHooks) {
1991         emitOpcode(op_profile_will_call);
1992         instructions().append(callArguments.profileHookRegister()->index());
1993     }
1994
1995     // Reserve space for call frame.
1996     Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize> callFrame;
1997     for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1998         callFrame.append(newTemporary());
1999
2000     emitExpressionInfo(divot, startOffset, endOffset);
2001     
2002     RefPtr<Label> done = newLabel();
2003     expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
2004
2005     emitOpcode(op_construct);
2006     instructions().append(func->index()); // func
2007     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
2008     instructions().append(callArguments.registerOffset()); // registerOffset
2009 #if ENABLE(LLINT)
2010     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
2011 #else
2012     instructions().append(0);
2013 #endif
2014     instructions().append(0);
2015     if (dst != ignoredResult()) {
2016         UnlinkedValueProfile profile = emitProfiledOpcode(op_call_put_result);
2017         instructions().append(kill(dst));
2018         instructions().append(profile);
2019     }
2020
2021     if (expectedFunction != NoExpectedFunction)
2022         emitLabel(done.get());
2023
2024     if (m_shouldEmitProfileHooks) {
2025         emitOpcode(op_profile_did_call);
2026         instructions().append(callArguments.profileHookRegister()->index());
2027     }
2028
2029     return dst;
2030 }
2031
2032 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
2033 {
2034     emitOpcode(op_strcat);
2035     instructions().append(dst->index());
2036     instructions().append(src->index());
2037     instructions().append(count);
2038
2039     return dst;
2040 }
2041
2042 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
2043 {
2044     emitOpcode(op_to_primitive);
2045     instructions().append(dst->index());
2046     instructions().append(src->index());
2047 }
2048
2049 RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* scope)
2050 {
2051     ControlFlowContext context;
2052     context.isFinallyBlock = false;
2053     m_scopeContextStack.append(context);
2054     m_dynamicScopeDepth++;
2055
2056     return emitUnaryNoDstOp(op_push_with_scope, scope);
2057 }
2058
2059 void BytecodeGenerator::emitPopScope()
2060 {
2061     ASSERT(m_scopeContextStack.size());
2062     ASSERT(!m_scopeContextStack.last().isFinallyBlock);
2063
2064     emitOpcode(op_pop_scope);
2065
2066     m_scopeContextStack.removeLast();
2067     m_dynamicScopeDepth--;
2068 }
2069
2070 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine, int column)
2071 {
2072 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2073     if (debugHookID != DidReachBreakpoint)
2074         return;
2075 #else
2076     if (!m_shouldEmitDebugHooks)
2077         return;
2078 #endif
2079     emitOpcode(op_debug);
2080     instructions().append(debugHookID);
2081     instructions().append(firstLine);
2082     instructions().append(lastLine);
2083     instructions().append(column);
2084 }
2085
2086 void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
2087 {
2088     ControlFlowContext scope;
2089     scope.isFinallyBlock = true;
2090     FinallyContext context = {
2091         finallyBlock,
2092         static_cast<unsigned>(m_scopeContextStack.size()),
2093         static_cast<unsigned>(m_switchContextStack.size()),
2094         static_cast<unsigned>(m_forInContextStack.size()),
2095         static_cast<unsigned>(m_tryContextStack.size()),
2096         static_cast<unsigned>(m_labelScopes.size()),
2097         m_finallyDepth,
2098         m_dynamicScopeDepth
2099     };
2100     scope.finallyContext = context;
2101     m_scopeContextStack.append(scope);
2102     m_finallyDepth++;
2103 }
2104
2105 void BytecodeGenerator::popFinallyContext()
2106 {
2107     ASSERT(m_scopeContextStack.size());
2108     ASSERT(m_scopeContextStack.last().isFinallyBlock);
2109     ASSERT(m_finallyDepth > 0);
2110     m_scopeContextStack.removeLast();
2111     m_finallyDepth--;
2112 }
2113
2114 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
2115 {
2116     // Reclaim free label scopes.
2117     //
2118     // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
2119     // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
2120     // size 0, leading to segfaulty badness. We have yet to identify a valid cause within our code that
2121     // makes the GCC codegen misbehave in this fashion, and as such the following refactoring of the
2122     // loop condition is a workaround.
2123     while (m_labelScopes.size()) {
2124         if (m_labelScopes.last().refCount())
2125             break;
2126         m_labelScopes.removeLast();
2127     }
2128
2129     if (!m_labelScopes.size())
2130         return 0;
2131
2132     // We special-case the following, which is a syntax error in Firefox:
2133     // label:
2134     //     break;
2135     if (name.isEmpty()) {
2136         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2137             LabelScope* scope = &m_labelScopes[i];
2138             if (scope->type() != LabelScope::NamedLabel) {
2139                 ASSERT(scope->breakTarget());
2140                 return scope;
2141             }
2142         }
2143         return 0;
2144     }
2145
2146     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2147         LabelScope* scope = &m_labelScopes[i];
2148         if (scope->name() && *scope->name() == name) {
2149             ASSERT(scope->breakTarget());
2150             return scope;
2151         }
2152     }
2153     return 0;
2154 }
2155
2156 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
2157 {
2158     // Reclaim free label scopes.
2159     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
2160         m_labelScopes.removeLast();
2161
2162     if (!m_labelScopes.size())
2163         return 0;
2164
2165     if (name.isEmpty()) {
2166         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2167             LabelScope* scope = &m_labelScopes[i];
2168             if (scope->type() == LabelScope::Loop) {
2169                 ASSERT(scope->continueTarget());
2170                 return scope;
2171             }
2172         }
2173         return 0;
2174     }
2175
2176     // Continue to the loop nested nearest to the label scope that matches
2177     // 'name'.
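    // For example, given "outer: for (;;) { for (;;) continue outer; }",
    // walking outward records each loop in 'result', so by the time the scope
    // named 'outer' is found, 'result' is the loop that label wraps (assuming
    // the label is pushed as its own scope around the loop).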
2178     LabelScope* result = 0;
2179     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2180         LabelScope* scope = &m_labelScopes[i];
2181         if (scope->type() == LabelScope::Loop) {
2182             ASSERT(scope->continueTarget());
2183             result = scope;
2184         }
2185         if (scope->name() && *scope->name() == name)
2186             return result; // may be 0
2187     }
2188     return 0;
2189 }
2190
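// Jumping across finally blocks cannot be a single branch: each intervening
// finally body is emitted inline at the jump site, with the generator's
// bookkeeping stacks temporarily rolled back to the state they had when that
// finally was pushed, and op_jmp_scopes pops the dynamic scopes in between.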
2191 PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
2192 {
2193     while (topScope > bottomScope) {
2194         // First we count the number of dynamic scopes we need to remove to get
2195         // to a finally block.
2196         int nNormalScopes = 0;
2197         while (topScope > bottomScope) {
2198             if (topScope->isFinallyBlock)
2199                 break;
2200             ++nNormalScopes;
2201             --topScope;
2202         }
2203
2204         if (nNormalScopes) {
2205             size_t begin = instructions().size();
2206
2207             // We need to remove a number of dynamic scopes to get to the next
2208             // finally block
2209             emitOpcode(op_jmp_scopes);
2210             instructions().append(nNormalScopes);
2211
2212             // If topScope == bottomScope then there isn't actually a finally block
2213             // left to emit, so make the jmp_scopes jump directly to the target label
2214             if (topScope == bottomScope) {
2215                 instructions().append(target->bind(begin, instructions().size()));
2216                 return target;
2217             }
2218
2219             // Otherwise we just use jmp_scopes to pop a group of scopes and go
2220             // to the next instruction
2221             RefPtr<Label> nextInsn = newLabel();
2222             instructions().append(nextInsn->bind(begin, instructions().size()));
2223             emitLabel(nextInsn.get());
2224         }
2225         
2226         Vector<ControlFlowContext> savedScopeContextStack;
2227         Vector<SwitchInfo> savedSwitchContextStack;
2228         Vector<ForInContext> savedForInContextStack;
2229         Vector<TryContext> poppedTryContexts;
2230         SegmentedVector<LabelScope, 8> savedLabelScopes;
2231         while (topScope > bottomScope && topScope->isFinallyBlock) {
2232             RefPtr<Label> beforeFinally = emitLabel(newLabel().get());
2233             
2234             // Save the current state of the world while instating the state of the world
2235             // for the finally block.
2236             FinallyContext finallyContext = topScope->finallyContext;
2237             bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
2238             bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
2239             bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
2240             bool flipTries = finallyContext.tryContextStackSize != m_tryContextStack.size();
2241             bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
2242             int topScopeIndex = -1;
2243             int bottomScopeIndex = -1;
2244             if (flipScopes) {
2245                 topScopeIndex = topScope - m_scopeContextStack.begin();
2246                 bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
2247                 savedScopeContextStack = m_scopeContextStack;
2248                 m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
2249             }
2250             if (flipSwitches) {
2251                 savedSwitchContextStack = m_switchContextStack;
2252                 m_switchContextStack.shrink(finallyContext.switchContextStackSize);
2253             }
2254             if (flipForIns) {
2255                 savedForInContextStack = m_forInContextStack;
2256                 m_forInContextStack.shrink(finallyContext.forInContextStackSize);
2257             }
2258             if (flipTries) {
2259                 while (m_tryContextStack.size() != finallyContext.tryContextStackSize) {
2260                     ASSERT(m_tryContextStack.size() > finallyContext.tryContextStackSize);
2261                     TryContext context = m_tryContextStack.last();
2262                     m_tryContextStack.removeLast();
2263                     TryRange range;
2264                     range.start = context.start;
2265                     range.end = beforeFinally;
2266                     range.tryData = context.tryData;
2267                     m_tryRanges.append(range);
2268                     poppedTryContexts.append(context);
2269                 }
2270             }
2271             if (flipLabelScopes) {
2272                 savedLabelScopes = m_labelScopes;
2273                 while (m_labelScopes.size() > finallyContext.labelScopesSize)
2274                     m_labelScopes.removeLast();
2275             }
2276             int savedFinallyDepth = m_finallyDepth;
2277             m_finallyDepth = finallyContext.finallyDepth;
2278             int savedDynamicScopeDepth = m_dynamicScopeDepth;
2279             m_dynamicScopeDepth = finallyContext.dynamicScopeDepth;
2280             
2281             // Emit the finally block.
2282             emitNode(finallyContext.finallyBlock);
2283             
2284             RefPtr<Label> afterFinally = emitLabel(newLabel().get());
2285             
2286             // Restore the state of the world.
2287             if (flipScopes) {
2288                 m_scopeContextStack = savedScopeContextStack;
2289                 topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
2290                 bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1.
2291             }
2292             if (flipSwitches)
2293                 m_switchContextStack = savedSwitchContextStack;
2294             if (flipForIns)
2295                 m_forInContextStack = savedForInContextStack;
2296             if (flipTries) {
2297                 ASSERT(m_tryContextStack.size() == finallyContext.tryContextStackSize);
2298                 for (unsigned i = poppedTryContexts.size(); i--;) {
2299                     TryContext context = poppedTryContexts[i];
2300                     context.start = afterFinally;
2301                     m_tryContextStack.append(context);
2302                 }
2303                 poppedTryContexts.clear();
2304             }
2305             if (flipLabelScopes)
2306                 m_labelScopes = savedLabelScopes;
2307             m_finallyDepth = savedFinallyDepth;
2308             m_dynamicScopeDepth = savedDynamicScopeDepth;
2309             
2310             --topScope;
2311         }
2312     }
2313     return emitJump(target);
2314 }
2315
2316 PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
2317 {
2318     ASSERT(scopeDepth() - targetScopeDepth >= 0);
2319     ASSERT(target->isForward());
2320
2321     size_t scopeDelta = scopeDepth() - targetScopeDepth;
2322     ASSERT(scopeDelta <= m_scopeContextStack.size());
2323     if (!scopeDelta)
2324         return emitJump(target);
2325
2326     if (m_finallyDepth)
2327         return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
2328
2329     size_t begin = instructions().size();
2330
2331     emitOpcode(op_jmp_scopes);
2332     instructions().append(scopeDelta);
2333     instructions().append(target->bind(begin, instructions().size()));
2334     return target;
2335 }
2336
2337 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
2338 {
2339     size_t begin = instructions().size();
2340
2341     emitOpcode(op_get_pnames);
2342     instructions().append(dst->index());
2343     instructions().append(base->index());
2344     instructions().append(i->index());
2345     instructions().append(size->index());
2346     instructions().append(breakTarget->bind(begin, instructions().size()));
2347     return dst;
2348 }
2349
2350 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
2351 {
2352     size_t begin = instructions().size();
2353
2354     emitOpcode(op_next_pname);
2355     instructions().append(dst->index());
2356     instructions().append(base->index());
2357     instructions().append(i->index());
2358     instructions().append(size->index());
2359     instructions().append(iter->index());
2360     instructions().append(target->bind(begin, instructions().size()));
2361     return dst;
2362 }
2363
2364 TryData* BytecodeGenerator::pushTry(Label* start)
2365 {
2366     TryData tryData;
2367     tryData.target = newLabel();
2368     tryData.targetScopeDepth = UINT_MAX;
2369     m_tryData.append(tryData);
2370     TryData* result = &m_tryData.last();
2371     
2372     TryContext tryContext;
2373     tryContext.start = start;
2374     tryContext.tryData = result;
2375     
2376     m_tryContextStack.append(tryContext);
2377     
2378     return result;
2379 }
2380
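// Closing a try: the region from the recorded start label to 'end' becomes a
// TryRange pointing at this handler, the handler label is bound here, and
// op_catch delivers the caught value into targetRegister.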
2381 RegisterID* BytecodeGenerator::popTryAndEmitCatch(TryData* tryData, RegisterID* targetRegister, Label* end)
2382 {
2383     m_usesExceptions = true;
2384     
2385     ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);
2386     
2387     TryRange tryRange;
2388     tryRange.start = m_tryContextStack.last().start;
2389     tryRange.end = end;
2390     tryRange.tryData = m_tryContextStack.last().tryData;
2391     m_tryRanges.append(tryRange);
2392     m_tryContextStack.removeLast();
2393     
2394     emitLabel(tryRange.tryData->target.get());
2395     tryRange.tryData->targetScopeDepth = m_dynamicScopeDepth;
2396
2397     emitOpcode(op_catch);
2398     instructions().append(targetRegister->index());
2399     return targetRegister;
2400 }
2401
2402 void BytecodeGenerator::emitThrowReferenceError(const String& message)
2403 {
2404     emitOpcode(op_throw_static_error);
2405     instructions().append(addConstantValue(jsString(globalData(), message))->index());
2406     instructions().append(true);
2407 }
2408
2409 void BytecodeGenerator::emitPushNameScope(const Identifier& property, RegisterID* value, unsigned attributes)
2410 {
2411     ControlFlowContext context;
2412     context.isFinallyBlock = false;
2413     m_scopeContextStack.append(context);
2414     m_dynamicScopeDepth++;
2415
2416     emitOpcode(op_push_name_scope);
2417     instructions().append(addConstant(property));
2418     instructions().append(value->index());
2419     instructions().append(attributes);
2420 }
2421
2422 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
2423 {
2424     SwitchInfo info = { static_cast<uint32_t>(instructions().size()), type };
2425     switch (type) {
2426         case SwitchInfo::SwitchImmediate:
2427             emitOpcode(op_switch_imm);
2428             break;
2429         case SwitchInfo::SwitchCharacter:
2430             emitOpcode(op_switch_char);
2431             break;
2432         case SwitchInfo::SwitchString:
2433             emitOpcode(op_switch_string);
2434             break;
2435         default:
2436             RELEASE_ASSERT_NOT_REACHED();
2437     }
2438
2439     instructions().append(0); // placeholder for table index
2440     instructions().append(0); // placeholder for default target
2441     instructions().append(scrutineeRegister->index());
2442     m_switchContextStack.append(info);
2443 }
2444
2445 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2446 {
2447     UNUSED_PARAM(max);
2448     ASSERT(node->isNumber());
2449     double value = static_cast<NumberNode*>(node)->value();
2450     int32_t key = static_cast<int32_t>(value);
2451     ASSERT(key == value);
2452     ASSERT(key >= min);
2453     ASSERT(key <= max);
2454     return key - min;
2455 }
2456
2457 static void prepareJumpTableForImmediateSwitch(UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2458 {
2459     jumpTable.min = min;
2460     jumpTable.branchOffsets.resize(max - min + 1);
2461     jumpTable.branchOffsets.fill(0);
2462     for (uint32_t i = 0; i < clauseCount; ++i) {
2463         // We're emitting this after the clause labels should have been fixed, so 
2464         // the labels should not be "forward" references
2465         ASSERT(!labels[i]->isForward());
2466         jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2467     }
2468 }
2469
2470 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2471 {
2472     UNUSED_PARAM(max);
2473     ASSERT(node->isString());
2474     StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2475     ASSERT(clause->length() == 1);
2476     
2477     int32_t key = (*clause)[0];
2478     ASSERT(key >= min);
2479     ASSERT(key <= max);
2480     return key - min;
2481 }
2482
2483 static void prepareJumpTableForCharacterSwitch(UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2484 {
2485     jumpTable.min = min;
2486     jumpTable.branchOffsets.resize(max - min + 1);
2487     jumpTable.branchOffsets.fill(0);
2488     for (uint32_t i = 0; i < clauseCount; ++i) {
2489         // We're emitting this after the clause labels should have been fixed, so 
2490         // the labels should not be "forward" references
2491         ASSERT(!labels[i]->isForward());
2492         jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2493     }
2494 }
2495
2496 static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2497 {
2498     for (uint32_t i = 0; i < clauseCount; ++i) {
2499         // We're emitting this after the clause labels should have been fixed, so 
2500         // the labels should not be "forward" references
2501         ASSERT(!labels[i]->isForward());
2502         
2503         ASSERT(nodes[i]->isString());
2504         StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
2505         jumpTable.offsetTable.add(clause, labels[i]->bind(switchAddress, switchAddress + 3));
2506     }
2507 }
2508
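// endSwitch patches the two placeholder operands left by beginSwitch (the
// jump table index and the default target) and builds the table itself,
// mapping each clause key (an immediate integer, a single character, or a
// string) to a branch offset bound against the switch opcode's offset.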
2509 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2510 {
2511     SwitchInfo switchInfo = m_switchContextStack.last();
2512     m_switchContextStack.removeLast();
2513     if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
2514         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
2515         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2516
2517         UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
2518         prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2519     } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
2520         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
2521         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2522         
2523         UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
2524         prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2525     } else {
2526         ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
2527         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2528         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2529
2530         UnlinkedStringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2531         prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2532     }
2533 }
2534
2535 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2536 {
2537     // It would be nice to do an even better job of identifying exactly where the expression is.
2538     // And we could make the caller pass the node pointer in, if there was some way of getting
2539     // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2540     // is still good enough to get us an accurate line number.
2541     m_expressionTooDeep = true;
2542     return newTemporary();
2543 }
2544
2545 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2546 {
2547     m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2548 }
2549
2550 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2551 {
2552     RegisterID* registerID = resolve(ident).local();
2553     if (!registerID || registerID->index() >= 0)
2554         return false;
2555     return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2556 }
2557
2558 void BytecodeGenerator::emitReadOnlyExceptionIfNeeded()
2559 {
2560     if (!isStrictMode())
2561         return;
2562     emitOpcode(op_throw_static_error);
2563     instructions().append(addConstantValue(jsString(globalData(), StrictModeReadonlyPropertyWriteError))->index());
2564     instructions().append(false);
2565 }
2566
2567 } // namespace JSC