dumpCallFrame is broken in ToT
Source/JavaScriptCore/bytecompiler/BytecodeGenerator.cpp
1 /*
2  * Copyright (C) 2008, 2009, 2012 Apple Inc. All rights reserved.
3  * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4  * Copyright (C) 2012 Igalia, S.L.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  *
10  * 1.  Redistributions of source code must retain the above copyright
11  *     notice, this list of conditions and the following disclaimer.
12  * 2.  Redistributions in binary form must reproduce the above copyright
13  *     notice, this list of conditions and the following disclaimer in the
14  *     documentation and/or other materials provided with the distribution.
15  * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16  *     its contributors may be used to endorse or promote products derived
17  *     from this software without specific prior written permission.
18  *
19  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29  */
30
31 #include "config.h"
32 #include "BytecodeGenerator.h"
33
34 #include "BatchedTransitionOptimizer.h"
35 #include "Comment.h"
36 #include "JSActivation.h"
37 #include "JSFunction.h"
38 #include "Interpreter.h"
39 #include "LowLevelInterpreter.h"
40 #include "ScopeChain.h"
41 #include "StrongInlines.h"
42 #include "UString.h"
43
44 using namespace std;
45
46 namespace JSC {
47
48 /*
49     The layout of a register frame looks like this:
50
51     For
52
53     function f(x, y) {
54         var v1;
55         function g() { }
56         var v2;
57         return (x) * (y);
58     }
59
60     assuming (x) and (y) generated temporaries t1 and t2, you would have
61
62     ------------------------------------
63     |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
64     ------------------------------------
65     | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
66     ------------------------------------
67     | params->|<-locals      | temps->
68
69     Because temporary registers are allocated in a stack-like fashion, we
70     can reclaim them with a simple popping algorithm. The same goes for labels.
71     (We never reclaim parameter or local registers, because parameters and
72     locals are DontDelete.)
73
74     The register layout before a function call looks like this:
75
76     For
77
78     function f(x, y)
79     {
80     }
81
82     f(1);
83
84     >                        <------------------------------
85     <                        >  reserved: call frame  |  1 | <-- value held
86     >         >snip<         <------------------------------
87     <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
88     >                        <------------------------------
89     | params->|<-locals      | temps->
90
91     The call instruction fills in the "call frame" registers. It also pads
92     missing arguments at the end of the call:
93
94     >                        <-----------------------------------
95     <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
96     >         >snip<         <-----------------------------------
97     <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
98     >                        <-----------------------------------
99     | params->|<-locals      | temps->
100
101     After filling in missing arguments, the call instruction sets up the new
102     stack frame to overlap the end of the old stack frame:
103
104                              |---------------------------------->                        <
105                              |  reserved: call frame  |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
106                              |---------------------------------->         >snip<         <
107                              | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
108                              |---------------------------------->                        <
109                              |                        | params->|<-locals       | temps->
110
111     That way, arguments are "copied" into the callee's stack frame for free.
112
113     If the caller supplies too many arguments, this trick doesn't work. The
114     extra arguments protrude into space reserved for locals and temporaries.
115     In that case, the call instruction makes a real copy of the call frame header,
116     along with just the arguments expected by the callee, leaving the original
117     call frame header and arguments behind. (The call instruction can't just discard
118     extra arguments, because the "arguments" object may access them later.)
119     This copying strategy ensures that all named values will be at the indices
120     expected by the callee.
121 */
122
123 void Label::setLocation(unsigned location)
124 {
125     m_location = location;
126     
127     unsigned size = m_unresolvedJumps.size();
128     for (unsigned i = 0; i < size; ++i)
129         m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
130 }
131
132 #ifndef NDEBUG
133 void ResolveResult::checkValidity()
134 {
135     switch (m_type) {
136     case Register:
137     case ReadOnlyRegister:
138         ASSERT(m_local);
139         return;
140     case Lexical:
141     case ReadOnlyLexical:
142     case DynamicLexical:
143     case DynamicReadOnlyLexical:
144         ASSERT(m_index != missingSymbolMarker());
145         return;
146     case Global:
147     case DynamicGlobal:
148         ASSERT(m_globalObject);
149         return;
150     case IndexedGlobal:
151     case ReadOnlyIndexedGlobal:
152     case WatchedIndexedGlobal:
153     case DynamicIndexedGlobal:
154     case DynamicReadOnlyIndexedGlobal:
155         ASSERT(m_index != missingSymbolMarker());
156         ASSERT(m_globalObject);
157         return;
158     case Dynamic:
159         return;
160     default:
161         ASSERT_NOT_REACHED();
162     }
163 }
164 #endif
165
166 WriteBarrier<Unknown>* ResolveResult::registerPointer() const
167 {
168     return &jsCast<JSGlobalObject*>(globalObject())->registerAt(index());
169 }
170
171 static bool s_dumpsGeneratedCode = false;
172
173 void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
174 {
175     s_dumpsGeneratedCode = dumpsGeneratedCode;
176 }
177
178 bool BytecodeGenerator::dumpsGeneratedCode()
179 {
180     return s_dumpsGeneratedCode;
181 }
182
183 JSObject* BytecodeGenerator::generate()
184 {
185     SamplingRegion samplingRegion("Bytecode Generation");
186     
187     m_codeBlock->setThisRegister(m_thisRegister.index());
188
189     m_scopeNode->emitBytecode(*this);
190     
191     m_codeBlock->instructions() = RefCountedArray<Instruction>(m_instructions);
192
193     if (s_dumpsGeneratedCode)
194         m_codeBlock->dump(m_scopeChain->globalObject->globalExec());
195
196 #ifdef NDEBUG
197     if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
198         symbolTable().clear();
199 #endif
200
201     m_codeBlock->shrinkToFit(CodeBlock::EarlyShrink);
202
203     if (m_expressionTooDeep)
204         return createOutOfMemoryError(m_scopeChain->globalObject.get());
205     return 0;
206 }
207
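// Adds a named local variable to the symbol table and allocates a register for it.
// Returns false (and hands back the existing register) if the name was already declared.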
208 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
209 {
210     int index = m_calleeRegisters.size();
211     SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
212     SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
213
214     if (!result.isNewEntry) {
215         r0 = &registerFor(result.iterator->second.getIndex());
216         return false;
217     }
218
219     r0 = addVar();
220     return true;
221 }
222
223 int BytecodeGenerator::addGlobalVar(
224     const Identifier& ident, ConstantMode constantMode, FunctionMode functionMode)
225 {
226     UNUSED_PARAM(functionMode);
227     int index = symbolTable().size();
228     SymbolTableEntry newEntry(index, (constantMode == IsConstant) ? ReadOnly : 0);
229     if (functionMode == IsFunctionToSpecialize)
230         newEntry.attemptToWatch();
231     SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
232     if (!result.isNewEntry) {
233         result.iterator->second.notifyWrite();
234         index = result.iterator->second.getIndex();
235     }
236     return index;
237 }
238
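// Records where the named locals end: registers allocated from here on are constants
// and temporaries, and m_lastVar remembers the last variable register.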
239 void BytecodeGenerator::preserveLastVar()
240 {
241     if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
242         m_lastVar = &m_calleeRegisters.last();
243 }
244
245 BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock, CompilationKind compilationKind)
246     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
247     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
248     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
249     , m_scopeChain(*scopeChain->globalData, scopeChain)
250     , m_symbolTable(symbolTable)
251 #if ENABLE(BYTECODE_COMMENTS)
252     , m_currentCommentString(0)
253 #endif
254     , m_scopeNode(programNode)
255     , m_codeBlock(codeBlock)
256     , m_thisRegister(CallFrame::thisArgumentOffset())
257     , m_finallyDepth(0)
258     , m_dynamicScopeDepth(0)
259     , m_baseScopeDepth(0)
260     , m_codeType(GlobalCode)
261     , m_nextConstantOffset(0)
262     , m_globalConstantIndex(0)
263     , m_hasCreatedActivation(true)
264     , m_firstLazyFunction(0)
265     , m_lastLazyFunction(0)
266     , m_globalData(scopeChain->globalData)
267     , m_lastOpcodeID(op_end)
268 #ifndef NDEBUG
269     , m_lastOpcodePosition(0)
270 #endif
271     , m_stack(wtfThreadData().stack())
272     , m_usesExceptions(false)
273     , m_expressionTooDeep(false)
274 {
275     m_globalData->startedCompiling(m_codeBlock);
276     if (m_shouldEmitDebugHooks)
277         m_codeBlock->setNeedsFullScopeChain(true);
278
279     prependComment("entering Program block");
280     emitOpcode(op_enter);
281     codeBlock->setGlobalData(m_globalData);
282
283     // FIXME: Move code that modifies the global object to Interpreter::execute.
284     
285     m_codeBlock->setNumParameters(1); // Allocate space for "this"
286     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
287     
288     if (compilationKind == OptimizingCompilation)
289         return;
290
291     JSGlobalObject* globalObject = scopeChain->globalObject.get();
292     ExecState* exec = globalObject->globalExec();
293     
294     BatchedTransitionOptimizer optimizer(*m_globalData, globalObject);
295
296     const VarStack& varStack = programNode->varStack();
297     const FunctionStack& functionStack = programNode->functionStack();
298
299     size_t newGlobals = varStack.size() + functionStack.size();
300     if (!newGlobals)
301         return;
302     globalObject->addRegisters(newGlobals);
303
304     for (size_t i = 0; i < functionStack.size(); ++i) {
305         FunctionBodyNode* function = functionStack[i];
306         bool propertyDidExist = 
307             globalObject->removeDirect(*m_globalData, function->ident()); // Newly declared functions overwrite existing properties.
308         
309         JSValue value = JSFunction::create(exec, makeFunction(exec, function), scopeChain);
310         int index = addGlobalVar(
311             function->ident(), IsVariable,
312             !propertyDidExist ? IsFunctionToSpecialize : NotFunctionOrNotSpecializable);
313         globalObject->registerAt(index).set(*m_globalData, globalObject, value);
314     }
315
316     for (size_t i = 0; i < varStack.size(); ++i) {
317         if (globalObject->hasProperty(exec, *varStack[i].first))
318             continue;
319         addGlobalVar(
320             *varStack[i].first,
321             (varStack[i].second & DeclarationStacks::IsConstant) ? IsConstant : IsVariable,
322             NotFunctionOrNotSpecializable);
323     }
324 }
325
326 BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, ScopeChainNode* scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock, CompilationKind)
327     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
328     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
329     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
330     , m_scopeChain(*scopeChain->globalData, scopeChain)
331     , m_symbolTable(symbolTable)
332 #if ENABLE(BYTECODE_COMMENTS)
333     , m_currentCommentString(0)
334 #endif
335     , m_scopeNode(functionBody)
336     , m_codeBlock(codeBlock)
337     , m_activationRegister(0)
338     , m_finallyDepth(0)
339     , m_dynamicScopeDepth(0)
340     , m_baseScopeDepth(0)
341     , m_codeType(FunctionCode)
342     , m_nextConstantOffset(0)
343     , m_globalConstantIndex(0)
344     , m_hasCreatedActivation(false)
345     , m_firstLazyFunction(0)
346     , m_lastLazyFunction(0)
347     , m_globalData(scopeChain->globalData)
348     , m_lastOpcodeID(op_end)
349 #ifndef NDEBUG
350     , m_lastOpcodePosition(0)
351 #endif
352     , m_stack(wtfThreadData().stack())
353     , m_usesExceptions(false)
354     , m_expressionTooDeep(false)
355 {
356     m_globalData->startedCompiling(m_codeBlock);
357     if (m_shouldEmitDebugHooks)
358         m_codeBlock->setNeedsFullScopeChain(true);
359
360     codeBlock->setGlobalData(m_globalData);
361     
362     prependComment("entering Function block");
363     emitOpcode(op_enter);
364     if (m_codeBlock->needsFullScopeChain()) {
365         m_activationRegister = addVar();
366         prependComment("activation for Full Scope Chain");
367         emitInitLazyRegister(m_activationRegister);
368         m_codeBlock->setActivationRegister(m_activationRegister->index());
369     }
370
371     // Both op_tear_off_activation and op_tear_off_arguments tear off the 'arguments'
372     // object, if created.
373     if (m_codeBlock->needsFullScopeChain() || functionBody->usesArguments()) {
374         RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
375         RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.
376
377         // We can save a little space by hard-coding the knowledge that the two
378         // 'arguments' values are stored in consecutive registers, and storing
379         // only the index of the assignable one.
380         codeBlock->setArgumentsRegister(argumentsRegister->index());
381         ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));
382
383         prependComment("arguments for Full Scope Chain");
384         emitInitLazyRegister(argumentsRegister);
385         prependComment("unmodified arguments for Full Scope Chain");
386         emitInitLazyRegister(unmodifiedArgumentsRegister);
387         
388         if (m_codeBlock->isStrictMode()) {
389             prependComment("create arguments for strict mode");
390             emitOpcode(op_create_arguments);
391             instructions().append(argumentsRegister->index());
392         }
393
394         // The debugger currently retrieves the arguments object from an activation rather than pulling
395         // it from a call frame.  In the long-term it should stop doing that (<rdar://problem/6911886>),
396         // but for now we force eager creation of the arguments object when debugging.
397         if (m_shouldEmitDebugHooks) {
398             prependComment("create arguments for debug hooks");
399             emitOpcode(op_create_arguments);
400             instructions().append(argumentsRegister->index());
401         }
402     }
403
404     const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
405     const DeclarationStacks::VarStack& varStack = functionBody->varStack();
406
407     // Captured variables and functions go first so that activations don't have
408     // to step over the non-captured locals to mark them.
409     m_hasCreatedActivation = false;
410     if (functionBody->hasCapturedVariables()) {
411         for (size_t i = 0; i < functionStack.size(); ++i) {
412             FunctionBodyNode* function = functionStack[i];
413             const Identifier& ident = function->ident();
414             if (functionBody->captures(ident)) {
415                 if (!m_hasCreatedActivation) {
416                     m_hasCreatedActivation = true;
417                     prependComment("activation for captured vars");
418                     emitOpcode(op_create_activation);
419                     instructions().append(m_activationRegister->index());
420                 }
421                 m_functions.add(ident.impl());
422                 prependComment("captured function var");
423                 emitNewFunction(addVar(ident, false), function);
424             }
425         }
426         for (size_t i = 0; i < varStack.size(); ++i) {
427             const Identifier& ident = *varStack[i].first;
428             if (functionBody->captures(ident))
429                 addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
430         }
431     }
432     bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
433     if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
434         m_hasCreatedActivation = true;
435         prependComment("cannot lazily create functions");
436         emitOpcode(op_create_activation);
437         instructions().append(m_activationRegister->index());
438     }
439
440     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
441     m_firstLazyFunction = codeBlock->m_numVars;
442     for (size_t i = 0; i < functionStack.size(); ++i) {
443         FunctionBodyNode* function = functionStack[i];
444         const Identifier& ident = function->ident();
445         if (!functionBody->captures(ident)) {
446             m_functions.add(ident.impl());
447             RefPtr<RegisterID> reg = addVar(ident, false);
448             // Don't lazily create functions that override the name 'arguments'
449             // as this would complicate lazy instantiation of actual arguments.
450             prependComment("a function that override 'arguments'");
451             if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
452                 emitNewFunction(reg.get(), function);
453             else {
454                 emitInitLazyRegister(reg.get());
455                 m_lazyFunctions.set(reg->index(), function);
456             }
457         }
458     }
459     m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
460     for (size_t i = 0; i < varStack.size(); ++i) {
461         const Identifier& ident = *varStack[i].first;
462         if (!functionBody->captures(ident))
463             addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
464     }
465
466     if (m_shouldEmitDebugHooks)
467         codeBlock->m_numCapturedVars = codeBlock->m_numVars;
468
469     FunctionParameters& parameters = *functionBody->parameters();
470     m_parameters.grow(parameters.size() + 1); // reserve space for "this"
471
472     // Add "this" as a parameter
473     int nextParameterIndex = CallFrame::thisArgumentOffset();
474     m_thisRegister.setIndex(nextParameterIndex--);
475     m_codeBlock->addParameter();
476     
477     for (size_t i = 0; i < parameters.size(); ++i)
478         addParameter(parameters[i], nextParameterIndex--);
479
480     preserveLastVar();
481
482     if (isConstructor()) {
483         prependComment("'this' because we are a Constructor function");
484         emitOpcode(op_create_this);
485         instructions().append(m_thisRegister.index());
486     } else if (!codeBlock->isStrictMode() && (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks)) {
487         ValueProfile* profile = emitProfiledOpcode(op_convert_this);
488         instructions().append(m_thisRegister.index());
489         instructions().append(profile);
490     }
491 }
492
493 BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock, CompilationKind)
494     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
495     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
496     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
497     , m_scopeChain(*scopeChain->globalData, scopeChain)
498     , m_symbolTable(symbolTable)
499 #if ENABLE(BYTECODE_COMMENTS)
500     , m_currentCommentString(0)
501 #endif
502     , m_scopeNode(evalNode)
503     , m_codeBlock(codeBlock)
504     , m_thisRegister(CallFrame::thisArgumentOffset())
505     , m_finallyDepth(0)
506     , m_dynamicScopeDepth(0)
507     , m_baseScopeDepth(codeBlock->baseScopeDepth())
508     , m_codeType(EvalCode)
509     , m_nextConstantOffset(0)
510     , m_globalConstantIndex(0)
511     , m_hasCreatedActivation(true)
512     , m_firstLazyFunction(0)
513     , m_lastLazyFunction(0)
514     , m_globalData(scopeChain->globalData)
515     , m_lastOpcodeID(op_end)
516 #ifndef NDEBUG
517     , m_lastOpcodePosition(0)
518 #endif
519     , m_stack(wtfThreadData().stack())
520     , m_usesExceptions(false)
521     , m_expressionTooDeep(false)
522 {
523     m_globalData->startedCompiling(m_codeBlock);
524     if (m_shouldEmitDebugHooks || m_baseScopeDepth)
525         m_codeBlock->setNeedsFullScopeChain(true);
526
527     prependComment("entering Eval block");
528     emitOpcode(op_enter);
529     codeBlock->setGlobalData(m_globalData);
530     m_codeBlock->setNumParameters(1);
531
532     const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
533     for (size_t i = 0; i < functionStack.size(); ++i)
534         m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));
535
536     const DeclarationStacks::VarStack& varStack = evalNode->varStack();
537     unsigned numVariables = varStack.size();
538     Vector<Identifier> variables;
539     variables.reserveCapacity(numVariables);
540     for (size_t i = 0; i < numVariables; ++i)
541         variables.append(*varStack[i].first);
542     codeBlock->adoptVariables(variables);
543     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
544     preserveLastVar();
545 }
546
547 BytecodeGenerator::~BytecodeGenerator()
548 {
549     m_globalData->finishedCompiling(m_codeBlock);
550 }
551
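// Emits op_init_lazy_reg for a register whose value (the activation, 'arguments', or a
// lazily created function) will only be materialized if it turns out to be needed.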
552 RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
553 {
554     emitOpcode(op_init_lazy_reg);
555     instructions().append(reg->index());
556     return reg;
557 }
558
559 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
560 {
561     // Parameters overwrite var declarations, but not function declarations.
562     StringImpl* rep = ident.impl();
563     if (!m_functions.contains(rep)) {
564         symbolTable().set(rep, parameterIndex);
565         RegisterID& parameter = registerFor(parameterIndex);
566         parameter.setIndex(parameterIndex);
567     }
568
569     // To maintain the calling convention, we have to allocate unique space for
570     // each parameter, even if the parameter doesn't make it into the symbol table.
571     m_codeBlock->addParameter();
572 }
573
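// Returns true only when 'arguments' is guaranteed to resolve to the local arguments
// register: local optimization must be allowed and this function code must use 'arguments'.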
574 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
575 {
576     if (ident != propertyNames().arguments)
577         return false;
578     
579     if (!shouldOptimizeLocals())
580         return false;
581     
582     SymbolTableEntry entry = symbolTable().get(ident.impl());
583     if (entry.isNull())
584         return false;
585     
586     if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
587         return true;
588     
589     return false;
590 }
591
592 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
593 {
594     ASSERT(willResolveToArguments(propertyNames().arguments));
595
596     SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
597     ASSERT(!entry.isNull());
598     return &registerFor(entry.getIndex());
599 }
600
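// If reg falls inside the range of lazily created function registers, force creation of
// the corresponding function object now; otherwise the register is returned untouched.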
601 RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
602 {
603     if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
604         return reg;
605     emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
606     return reg;
607 }
608
609 RegisterID* BytecodeGenerator::newRegister()
610 {
611     m_calleeRegisters.append(m_calleeRegisters.size());
612     m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
613     return &m_calleeRegisters.last();
614 }
615
616 RegisterID* BytecodeGenerator::newTemporary()
617 {
618     // Reclaim free register IDs.
619     while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
620         m_calleeRegisters.removeLast();
621         
622     RegisterID* result = newRegister();
623     result->setTemporary();
624     return result;
625 }
626
627 RegisterID* BytecodeGenerator::highestUsedRegister()
628 {
629     size_t count = m_codeBlock->m_numCalleeRegisters;
630     while (m_calleeRegisters.size() < count)
631         newRegister();
632     return &m_calleeRegisters.last();
633 }
634
635 PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
636 {
637     // Reclaim free label scopes.
638     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
639         m_labelScopes.removeLast();
640
641     // Allocate new label scope.
642     LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
643     m_labelScopes.append(scope);
644     return &m_labelScopes.last();
645 }
646
647 PassRefPtr<Label> BytecodeGenerator::newLabel()
648 {
649     // Reclaim free label IDs.
650     while (m_labels.size() && !m_labels.last().refCount())
651         m_labels.removeLast();
652
653     // Allocate new label ID.
654     m_labels.append(this);
655     return &m_labels.last();
656 }
657
658 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
659 {
660     unsigned newLabelIndex = instructions().size();
661     l0->setLocation(newLabelIndex);
662
663     if (m_codeBlock->numberOfJumpTargets()) {
664         unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
665         ASSERT(lastLabelIndex <= newLabelIndex);
666         if (newLabelIndex == lastLabelIndex) {
667             // Peephole optimizations have already been disabled by emitting the last label
668             return l0;
669         }
670     }
671
672     m_codeBlock->addJumpTarget(newLabelIndex);
673
674     // This disables peephole optimizations when an instruction is a jump target
675     m_lastOpcodeID = op_end;
676     return l0;
677 }
678
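// Appends the opcode to the instruction stream. In debug builds, verifies that the
// previous opcode occupied exactly the number of slots its opcode length claims.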
679 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
680 {
681 #ifndef NDEBUG
682     size_t opcodePosition = instructions().size();
683     ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
684     m_lastOpcodePosition = opcodePosition;
685 #endif
686     emitComment();
687     instructions().append(globalData()->interpreter->getOpcode(opcodeID));
688     m_lastOpcodeID = opcodeID;
689 }
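// Typical emit sequence, as used throughout this file: the opcode goes first, and its
// operands are then appended directly to the instruction stream, e.g.
//
//     emitOpcode(op_mov);
//     instructions().append(dst->index());
//     instructions().append(src->index());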
690
691 #if ENABLE(BYTECODE_COMMENTS)
692 // Record a comment in the CodeBlock's comments list for the current opcode
693 // that is about to be emitted.
694 void BytecodeGenerator::emitComment()
695 {
696     if (m_currentCommentString) {
697         size_t opcodePosition = instructions().size();
698         Comment comment = { opcodePosition, m_currentCommentString };
699         m_codeBlock->bytecodeComments().append(comment);
700         m_currentCommentString = 0;
701     }
702 }
703
704 // Register a comment to be associated with the next opcode that will be emitted.
705 void BytecodeGenerator::prependComment(const char* string)
706 {
707     m_currentCommentString = string;
708 }
709 #endif
710
711 ValueProfile* BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
712 {
713 #if ENABLE(VALUE_PROFILER)
714     ValueProfile* result = m_codeBlock->addValueProfile(instructions().size());
715 #else
716     ValueProfile* result = 0;
717 #endif
718     emitOpcode(opcodeID);
719     return result;
720 }
721
722 void BytecodeGenerator::emitLoopHint()
723 {
724 #if ENABLE(DFG_JIT)
725     emitOpcode(op_loop_hint);
726 #endif
727 }
728
729 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
730 {
731     ASSERT(instructions().size() >= 4);
732     size_t size = instructions().size();
733     dstIndex = instructions().at(size - 3).u.operand;
734     src1Index = instructions().at(size - 2).u.operand;
735     src2Index = instructions().at(size - 1).u.operand;
736 }
737
738 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
739 {
740     ASSERT(instructions().size() >= 3);
741     size_t size = instructions().size();
742     dstIndex = instructions().at(size - 2).u.operand;
743     srcIndex = instructions().at(size - 1).u.operand;
744 }
745
746 void BytecodeGenerator::retrieveLastUnaryOp(WriteBarrier<Unknown>*& dstPointer, int& srcIndex)
747 {
748     ASSERT(instructions().size() >= 3);
749     size_t size = instructions().size();
750     dstPointer = instructions().at(size - 2).u.registerPointer;
751     srcIndex = instructions().at(size - 1).u.operand;
752 }
753
754 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
755 {
756     ASSERT(instructions().size() >= 4);
757     instructions().shrink(instructions().size() - 4);
758     m_lastOpcodeID = op_end;
759 }
760
761 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
762 {
763     ASSERT(instructions().size() >= 3);
764     instructions().shrink(instructions().size() - 3);
765     m_lastOpcodeID = op_end;
766 }
767
768 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
769 {
770     size_t begin = instructions().size();
771     emitOpcode(target->isForward() ? op_jmp : op_loop);
772     instructions().append(target->bind(begin, instructions().size()));
773     return target;
774 }
775
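// Peephole: if the condition was just produced by a comparison (or null test) into a dead
// temporary, rewind that instruction and emit a fused compare-and-jump opcode instead of a
// separate comparison followed by op_jtrue.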
776 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
777 {
778     if (m_lastOpcodeID == op_less) {
779         int dstIndex;
780         int src1Index;
781         int src2Index;
782
783         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
784
785         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
786             rewindBinaryOp();
787
788             size_t begin = instructions().size();
789             emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
790             instructions().append(src1Index);
791             instructions().append(src2Index);
792             instructions().append(target->bind(begin, instructions().size()));
793             return target;
794         }
795     } else if (m_lastOpcodeID == op_lesseq) {
796         int dstIndex;
797         int src1Index;
798         int src2Index;
799
800         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
801
802         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
803             rewindBinaryOp();
804
805             size_t begin = instructions().size();
806             emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
807             instructions().append(src1Index);
808             instructions().append(src2Index);
809             instructions().append(target->bind(begin, instructions().size()));
810             return target;
811         }
812     } else if (m_lastOpcodeID == op_greater) {
813         int dstIndex;
814         int src1Index;
815         int src2Index;
816
817         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
818
819         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
820             rewindBinaryOp();
821
822             size_t begin = instructions().size();
823             emitOpcode(target->isForward() ? op_jgreater : op_loop_if_greater);
824             instructions().append(src1Index);
825             instructions().append(src2Index);
826             instructions().append(target->bind(begin, instructions().size()));
827             return target;
828         }
829     } else if (m_lastOpcodeID == op_greatereq) {
830         int dstIndex;
831         int src1Index;
832         int src2Index;
833
834         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
835
836         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
837             rewindBinaryOp();
838
839             size_t begin = instructions().size();
840             emitOpcode(target->isForward() ? op_jgreatereq : op_loop_if_greatereq);
841             instructions().append(src1Index);
842             instructions().append(src2Index);
843             instructions().append(target->bind(begin, instructions().size()));
844             return target;
845         }
846     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
847         int dstIndex;
848         int srcIndex;
849
850         retrieveLastUnaryOp(dstIndex, srcIndex);
851
852         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
853             rewindUnaryOp();
854
855             size_t begin = instructions().size();
856             emitOpcode(op_jeq_null);
857             instructions().append(srcIndex);
858             instructions().append(target->bind(begin, instructions().size()));
859             return target;
860         }
861     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
862         int dstIndex;
863         int srcIndex;
864
865         retrieveLastUnaryOp(dstIndex, srcIndex);
866
867         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
868             rewindUnaryOp();
869
870             size_t begin = instructions().size();
871             emitOpcode(op_jneq_null);
872             instructions().append(srcIndex);
873             instructions().append(target->bind(begin, instructions().size()));
874             return target;
875         }
876     }
877
878     size_t begin = instructions().size();
879
880     emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
881     instructions().append(cond->index());
882     instructions().append(target->bind(begin, instructions().size()));
883     return target;
884 }
885
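// Mirror of emitJumpIfTrue: fuses a preceding comparison, op_not, or null test into the
// appropriately inverted jump opcode when the condition register is a dead temporary.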
886 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
887 {
888     if (m_lastOpcodeID == op_less && target->isForward()) {
889         int dstIndex;
890         int src1Index;
891         int src2Index;
892
893         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
894
895         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
896             rewindBinaryOp();
897
898             size_t begin = instructions().size();
899             emitOpcode(op_jnless);
900             instructions().append(src1Index);
901             instructions().append(src2Index);
902             instructions().append(target->bind(begin, instructions().size()));
903             return target;
904         }
905     } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
906         int dstIndex;
907         int src1Index;
908         int src2Index;
909
910         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
911
912         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
913             rewindBinaryOp();
914
915             size_t begin = instructions().size();
916             emitOpcode(op_jnlesseq);
917             instructions().append(src1Index);
918             instructions().append(src2Index);
919             instructions().append(target->bind(begin, instructions().size()));
920             return target;
921         }
922     } else if (m_lastOpcodeID == op_greater && target->isForward()) {
923         int dstIndex;
924         int src1Index;
925         int src2Index;
926
927         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
928
929         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
930             rewindBinaryOp();
931
932             size_t begin = instructions().size();
933             emitOpcode(op_jngreater);
934             instructions().append(src1Index);
935             instructions().append(src2Index);
936             instructions().append(target->bind(begin, instructions().size()));
937             return target;
938         }
939     } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
940         int dstIndex;
941         int src1Index;
942         int src2Index;
943
944         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
945
946         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
947             rewindBinaryOp();
948
949             size_t begin = instructions().size();
950             emitOpcode(op_jngreatereq);
951             instructions().append(src1Index);
952             instructions().append(src2Index);
953             instructions().append(target->bind(begin, instructions().size()));
954             return target;
955         }
956     } else if (m_lastOpcodeID == op_not) {
957         int dstIndex;
958         int srcIndex;
959
960         retrieveLastUnaryOp(dstIndex, srcIndex);
961
962         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
963             rewindUnaryOp();
964
965             size_t begin = instructions().size();
966             emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
967             instructions().append(srcIndex);
968             instructions().append(target->bind(begin, instructions().size()));
969             return target;
970         }
971     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
972         int dstIndex;
973         int srcIndex;
974
975         retrieveLastUnaryOp(dstIndex, srcIndex);
976
977         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
978             rewindUnaryOp();
979
980             size_t begin = instructions().size();
981             emitOpcode(op_jneq_null);
982             instructions().append(srcIndex);
983             instructions().append(target->bind(begin, instructions().size()));
984             return target;
985         }
986     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
987         int dstIndex;
988         int srcIndex;
989
990         retrieveLastUnaryOp(dstIndex, srcIndex);
991
992         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
993             rewindUnaryOp();
994
995             size_t begin = instructions().size();
996             emitOpcode(op_jeq_null);
997             instructions().append(srcIndex);
998             instructions().append(target->bind(begin, instructions().size()));
999             return target;
1000         }
1001     }
1002
1003     size_t begin = instructions().size();
1004     emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
1005     instructions().append(cond->index());
1006     instructions().append(target->bind(begin, instructions().size()));
1007     return target;
1008 }
1009
1010 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
1011 {
1012     size_t begin = instructions().size();
1013
1014     emitOpcode(op_jneq_ptr);
1015     instructions().append(cond->index());
1016     instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->callFunction()));
1017     instructions().append(target->bind(begin, instructions().size()));
1018     return target;
1019 }
1020
1021 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
1022 {
1023     size_t begin = instructions().size();
1024
1025     emitOpcode(op_jneq_ptr);
1026     instructions().append(cond->index());
1027     instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->applyFunction()));
1028     instructions().append(target->bind(begin, instructions().size()));
1029     return target;
1030 }
1031
1032 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
1033 {
1034     StringImpl* rep = ident.impl();
1035     IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
1036     if (result.isNewEntry)
1037         m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
1038
1039     return result.iterator->second;
1040 }
1041
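// Returns the constant-pool register holding v, adding v to the CodeBlock's constant pool
// only the first time the value is seen.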
1042 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
1043 {
1044     int index = m_nextConstantOffset;
1045
1046     JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
1047     if (result.isNewEntry) {
1048         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1049         ++m_nextConstantOffset;
1050         m_codeBlock->addConstant(JSValue(v));
1051     } else
1052         index = result.iterator->second;
1053
1054     return &m_constantPoolRegisters[index];
1055 }
1056
1057 unsigned BytecodeGenerator::addRegExp(RegExp* r)
1058 {
1059     return m_codeBlock->addRegExp(r);
1060 }
1061
1062 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
1063 {
1064     emitOpcode(op_mov);
1065     instructions().append(dst->index());
1066     instructions().append(src->index());
1067     return dst;
1068 }
1069
1070 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
1071 {
1072     emitOpcode(opcodeID);
1073     instructions().append(dst->index());
1074     instructions().append(src->index());
1075     return dst;
1076 }
1077
1078 RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
1079 {
1080     emitOpcode(op_pre_inc);
1081     instructions().append(srcDst->index());
1082     return srcDst;
1083 }
1084
1085 RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
1086 {
1087     emitOpcode(op_pre_dec);
1088     instructions().append(srcDst->index());
1089     return srcDst;
1090 }
1091
1092 RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
1093 {
1094     emitOpcode(op_post_inc);
1095     instructions().append(dst->index());
1096     instructions().append(srcDst->index());
1097     return dst;
1098 }
1099
1100 RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
1101 {
1102     emitOpcode(op_post_dec);
1103     instructions().append(dst->index());
1104     instructions().append(srcDst->index());
1105     return dst;
1106 }
1107
1108 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1109 {
1110     emitOpcode(opcodeID);
1111     instructions().append(dst->index());
1112     instructions().append(src1->index());
1113     instructions().append(src2->index());
1114
1115     if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
1116         opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
1117         instructions().append(types.toInt());
1118
1119     return dst;
1120 }
1121
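// Peephole: comparisons of the form (typeof x == "undefined"), "boolean", "number", etc.
// against a constant string are collapsed into the dedicated op_is_* opcodes.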
1122 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1123 {
1124     if (m_lastOpcodeID == op_typeof) {
1125         int dstIndex;
1126         int srcIndex;
1127
1128         retrieveLastUnaryOp(dstIndex, srcIndex);
1129
1130         if (src1->index() == dstIndex
1131             && src1->isTemporary()
1132             && m_codeBlock->isConstantRegisterIndex(src2->index())
1133             && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1134             const UString& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1135             if (value == "undefined") {
1136                 rewindUnaryOp();
1137                 emitOpcode(op_is_undefined);
1138                 instructions().append(dst->index());
1139                 instructions().append(srcIndex);
1140                 return dst;
1141             }
1142             if (value == "boolean") {
1143                 rewindUnaryOp();
1144                 emitOpcode(op_is_boolean);
1145                 instructions().append(dst->index());
1146                 instructions().append(srcIndex);
1147                 return dst;
1148             }
1149             if (value == "number") {
1150                 rewindUnaryOp();
1151                 emitOpcode(op_is_number);
1152                 instructions().append(dst->index());
1153                 instructions().append(srcIndex);
1154                 return dst;
1155             }
1156             if (value == "string") {
1157                 rewindUnaryOp();
1158                 emitOpcode(op_is_string);
1159                 instructions().append(dst->index());
1160                 instructions().append(srcIndex);
1161                 return dst;
1162             }
1163             if (value == "object") {
1164                 rewindUnaryOp();
1165                 emitOpcode(op_is_object);
1166                 instructions().append(dst->index());
1167                 instructions().append(srcIndex);
1168                 return dst;
1169             }
1170             if (value == "function") {
1171                 rewindUnaryOp();
1172                 emitOpcode(op_is_function);
1173                 instructions().append(dst->index());
1174                 instructions().append(srcIndex);
1175                 return dst;
1176             }
1177         }
1178     }
1179
1180     emitOpcode(opcodeID);
1181     instructions().append(dst->index());
1182     instructions().append(src1->index());
1183     instructions().append(src2->index());
1184     return dst;
1185 }
1186
1187 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1188 {
1189     return emitLoad(dst, jsBoolean(b));
1190 }
1191
1192 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1193 {
1194     // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1195     // Later we can do the extra work to handle that like the other cases.  They also don't
1196     // work correctly with NaN as a key.
1197     if (isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1198         return emitLoad(dst, jsNumber(number));
1199     JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->second;
1200     if (!valueInMap)
1201         valueInMap = jsNumber(number);
1202     return emitLoad(dst, valueInMap);
1203 }
1204
1205 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1206 {
1207     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
1208     if (!stringInMap)
1209         stringInMap = jsOwnedString(globalData(), identifier.ustring());
1210     return emitLoad(dst, JSValue(stringInMap));
1211 }
1212
1213 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1214 {
1215     RegisterID* constantID = addConstantValue(v);
1216     if (dst)
1217         return emitMove(dst, constantID);
1218     return constantID;
1219 }
1220
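// Determines how the identifier can be resolved: as a local register, a (possibly watched)
// indexed global, a lexically addressed scope slot, or, failing static analysis, dynamically
// at runtime.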
1221 ResolveResult BytecodeGenerator::resolve(const Identifier& property)
1222 {
1223     if (property == propertyNames().thisIdentifier)
1224         return ResolveResult::registerResolve(thisRegister(), ResolveResult::ReadOnlyFlag);
1225
1226     // Check if the property should be allocated in a register.
1227     if (m_codeType != GlobalCode && shouldOptimizeLocals()) {
1228         SymbolTableEntry entry = symbolTable().get(property.impl());
1229         if (!entry.isNull()) {
1230             if (property == propertyNames().arguments)
1231                 createArgumentsIfNecessary();
1232             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1233             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1234             return ResolveResult::registerResolve(local, flags);
1235         }
1236     }
1237
1238     // Cases where we cannot statically optimize the lookup.
1239     if (property == propertyNames().arguments || !canOptimizeNonLocals())
1240         return ResolveResult::dynamicResolve(0);
1241
1242     ScopeChainIterator iter = m_scopeChain->begin();
1243     ScopeChainIterator end = m_scopeChain->end();
1244     size_t depth = 0;
1245     size_t depthOfFirstScopeWithDynamicChecks = 0;
1246     unsigned flags = 0;
1247     for (; iter != end; ++iter, ++depth) {
1248         JSObject* currentScope = iter->get();
1249         if (!currentScope->isVariableObject()) {
1250             flags |= ResolveResult::DynamicFlag;
1251             break;
1252         }        
1253         JSSymbolTableObject* currentVariableObject = jsCast<JSSymbolTableObject*>(currentScope);
1254         SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());
1255
1256         // Found the property
1257         if (!entry.isNull()) {
1258             if (entry.isReadOnly())
1259                 flags |= ResolveResult::ReadOnlyFlag;
1260             depth += m_codeBlock->needsFullScopeChain();
1261             if (++iter == end) {
1262                 if (flags & ResolveResult::DynamicFlag)
1263                     return ResolveResult::dynamicIndexedGlobalResolve(entry.getIndex(), depth, currentScope, flags);
1264                 return ResolveResult::indexedGlobalResolve(
1265                     entry.getIndex(), currentScope,
1266                     flags | (entry.couldBeWatched() ? ResolveResult::WatchedFlag : 0));
1267             }
1268 #if !ASSERT_DISABLED
1269             if (JSActivation* activation = jsDynamicCast<JSActivation*>(currentVariableObject))
1270                 ASSERT(activation->isValidScopedLookup(entry.getIndex()));
1271 #endif
1272             return ResolveResult::lexicalResolve(entry.getIndex(), depth, flags);
1273         }
1274         bool scopeRequiresDynamicChecks = false;
1275         if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
1276             break;
1277         if (!(flags & ResolveResult::DynamicFlag)) {
1278             if (scopeRequiresDynamicChecks)
1279                 flags |= ResolveResult::DynamicFlag;
1280             else
1281                 ++depthOfFirstScopeWithDynamicChecks;
1282         }
1283     }
1284
1285     // Can't locate the property but we're able to avoid a few lookups.
1286     JSObject* scope = iter->get();
1287     // Step over the function's activation, if it needs one. At this point we
1288     // know there is no dynamic scope in the function itself, so this is safe to
1289     // do.
1290     depth += m_codeBlock->needsFullScopeChain();
1291     depthOfFirstScopeWithDynamicChecks += m_codeBlock->needsFullScopeChain();
1292     if (++iter == end) {
1293         if ((flags & ResolveResult::DynamicFlag) && depth)
1294             return ResolveResult::dynamicGlobalResolve(depth, scope);
1295         return ResolveResult::globalResolve(scope);
1296     }
1297     return ResolveResult::dynamicResolve(depthOfFirstScopeWithDynamicChecks);
1298 }
1299
1300 ResolveResult BytecodeGenerator::resolveConstDecl(const Identifier& property)
1301 {
1302     // Register-allocated const declarations.
1303     if (m_codeType != EvalCode && m_codeType != GlobalCode) {
1304         SymbolTableEntry entry = symbolTable().get(property.impl());
1305         if (!entry.isNull()) {
1306             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1307             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1308             return ResolveResult::registerResolve(local, flags);
1309         }
1310     }
1311
1312     // Const declarations in eval code or global code.
1313     ScopeChainIterator iter = scopeChain()->begin();
1314     ScopeChainIterator end = scopeChain()->end();
1315     size_t depth = 0;
1316     for (; iter != end; ++iter, ++depth) {
1317         JSObject* currentScope = iter->get();
1318         if (!currentScope->isVariableObject())
1319             continue;
1320         JSSymbolTableObject* currentVariableObject = jsCast<JSSymbolTableObject*>(currentScope);
1321         SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());
1322         if (entry.isNull())
1323             continue;
1324         if (++iter == end)
1325             return ResolveResult::indexedGlobalResolve(entry.getIndex(), currentVariableObject, 0);
1326         return ResolveResult::lexicalResolve(entry.getIndex(), depth + scopeDepth(), 0);
1327     }
1328
1329     // FIXME: While this code should only be hit in an eval block, it will assign
1330     // to the wrong base if property exists in an intervening with scope.
1331     return ResolveResult::dynamicResolve(scopeDepth());
1332 }
1333
1334 void BytecodeGenerator::emitCheckHasInstance(RegisterID* base)
1335 {
1336     emitOpcode(op_check_has_instance);
1337     instructions().append(base->index());
1338 }
1339
1340 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
1341 {
1342     emitOpcode(op_instanceof);
1343     instructions().append(dst->index());
1344     instructions().append(value->index());
1345     instructions().append(base->index());
1346     instructions().append(basePrototype->index());
1347     return dst;
1348 }
1349
1350 static const unsigned maxGlobalResolves = 128;
1351
1352 bool BytecodeGenerator::shouldAvoidResolveGlobal()
1353 {
1354     return m_codeBlock->globalResolveInfoCount() > maxGlobalResolves && !m_labelScopes.size();
1355 }
1356
1357 RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1358 {
1359     if (resolveResult.isStatic())
1360         return emitGetStaticVar(dst, resolveResult, property);
1361     
1362     if (resolveResult.isGlobal() && !shouldAvoidResolveGlobal()) {
1363 #if ENABLE(JIT)
1364         m_codeBlock->addGlobalResolveInfo(instructions().size());
1365 #endif
1366         m_codeBlock->addGlobalResolveInstruction(instructions().size());
1367         bool dynamic = resolveResult.isDynamic() && resolveResult.depth();
1368         ValueProfile* profile = emitProfiledOpcode(dynamic ? op_resolve_global_dynamic : op_resolve_global);
1369         instructions().append(dst->index());
1370         instructions().append(addConstant(property));
1371         instructions().append(0);
1372         instructions().append(0);
1373         if (dynamic)
1374             instructions().append(resolveResult.depth());
1375         instructions().append(profile);
1376         return dst;
1377     }
1378         
1379     if (resolveResult.type() == ResolveResult::Dynamic && resolveResult.depth()) {
1380         // In this case we are at least able to drop a few scope chains from the
1381         // lookup chain, although we still need to hash from then on.
1382         ValueProfile* profile = emitProfiledOpcode(op_resolve_skip);
1383         instructions().append(dst->index());
1384         instructions().append(addConstant(property));
1385         instructions().append(resolveResult.depth());
1386         instructions().append(profile);
1387         return dst;
1388     }
1389
1390     ValueProfile* profile = emitProfiledOpcode(op_resolve);
1391     instructions().append(dst->index());
1392     instructions().append(addConstant(property));
1393     instructions().append(profile);
1394     return dst;
1395 }
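/*
    Approximate shapes of the instructions emitted above (operand slots are
    illustrative; the zeroed operands of resolve_global act as caches filled
    in at run time):

        resolve_global[_dynamic]  dst, "name", 0, 0 [, skipDepth], profile
        resolve_skip              dst, "name", depth, profile
        resolve                   dst, "name", profile
*/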
1396
1397 RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1398 {
1399     if (resolveResult.isGlobal() && !resolveResult.isDynamic())
1400         // Global object is the base
1401         return emitLoad(dst, JSValue(resolveResult.globalObject()));
1402
1403     // We can't optimise at all :-(
1404     ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1405     instructions().append(dst->index());
1406     instructions().append(addConstant(property));
1407     instructions().append(false);
1408     instructions().append(profile);
1409     return dst;
1410 }
1411
1412 RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1413 {
1414     if (!m_codeBlock->isStrictMode())
1415         return emitResolveBase(dst, resolveResult, property);
1416
1417     if (resolveResult.isGlobal() && !resolveResult.isDynamic()) {
1418         // Global object is the base
1419         RefPtr<RegisterID> result = emitLoad(dst, JSValue(resolveResult.globalObject()));
1420         emitOpcode(op_ensure_property_exists);
1421         instructions().append(dst->index());
1422         instructions().append(addConstant(property));
1423         return result.get();
1424     }
1425
1426     // We can't optimise at all :-(
1427     ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1428     instructions().append(dst->index());
1429     instructions().append(addConstant(property));
1430     instructions().append(true);
1431     instructions().append(profile);
1432     return dst;
1433 }
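/*
    For illustration: when the base for a put statically resolves to the
    global object, strict mode adds an op_ensure_property_exists check before
    the put, so an assignment such as

        "use strict";
        notDeclaredAnywhere = 1;

    can throw a ReferenceError rather than silently creating a global
    property. (Sketch only; the actual check happens in the runtime.)
*/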
1434
1435 RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1436 {
1437     if (resolveResult.isGlobal() && !resolveResult.isDynamic()) {
1438         // Global object is the base
1439         emitLoad(baseDst, JSValue(resolveResult.globalObject()));
1440
1441         if (resolveResult.isStatic()) {
1442             // Directly index the property lookup across multiple scopes.
1443             emitGetStaticVar(propDst, resolveResult, property);
1444             return baseDst;
1445         }
1446
1447         if (shouldAvoidResolveGlobal()) {
1448             ValueProfile* profile = emitProfiledOpcode(op_resolve);
1449             instructions().append(propDst->index());
1450             instructions().append(addConstant(property));
1451             instructions().append(profile);
1452             return baseDst;
1453         }
1454
1455 #if ENABLE(JIT)
1456         m_codeBlock->addGlobalResolveInfo(instructions().size());
1457 #endif
1458 #if ENABLE(CLASSIC_INTERPRETER)
1459         m_codeBlock->addGlobalResolveInstruction(instructions().size());
1460 #endif
1461         ValueProfile* profile = emitProfiledOpcode(op_resolve_global);
1462         instructions().append(propDst->index());
1463         instructions().append(addConstant(property));
1464         instructions().append(0);
1465         instructions().append(0);
1466         instructions().append(profile);
1467         return baseDst;
1468     }
1469
1470     ValueProfile* profile = emitProfiledOpcode(op_resolve_with_base);
1471     instructions().append(baseDst->index());
1472     instructions().append(propDst->index());
1473     instructions().append(addConstant(property));
1474     instructions().append(profile);
1475     return baseDst;
1476 }
1477
1478 RegisterID* BytecodeGenerator::emitResolveWithThis(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1479 {
1480     if (resolveResult.isStatic()) {
1481         emitLoad(baseDst, jsUndefined());
1482         emitGetStaticVar(propDst, resolveResult, property);
1483         return baseDst;
1484     }
1485
1486     if (resolveResult.type() == ResolveResult::Dynamic) {
1487         // We can't optimise at all :-(
1488         ValueProfile* profile = emitProfiledOpcode(op_resolve_with_this);
1489         instructions().append(baseDst->index());
1490         instructions().append(propDst->index());
1491         instructions().append(addConstant(property));
1492         instructions().append(profile);
1493         return baseDst;
1494     }
1495
1496     emitLoad(baseDst, jsUndefined());
1497     return emitResolve(propDst, resolveResult, property);
1498 }
1499
1500 RegisterID* BytecodeGenerator::emitGetStaticVar(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& identifier)
1501 {
1502     ValueProfile* profile = 0;
1503
1504     switch (resolveResult.type()) {
1505     case ResolveResult::Register:
1506     case ResolveResult::ReadOnlyRegister:
1507         if (dst == ignoredResult())
1508             return 0;
1509         return moveToDestinationIfNeeded(dst, resolveResult.local());
1510
1511     case ResolveResult::Lexical:
1512     case ResolveResult::ReadOnlyLexical:
1513         profile = emitProfiledOpcode(op_get_scoped_var);
1514         instructions().append(dst->index());
1515         instructions().append(resolveResult.index());
1516         instructions().append(resolveResult.depth());
1517         instructions().append(profile);
1518         return dst;
1519
1520     case ResolveResult::IndexedGlobal:
1521     case ResolveResult::ReadOnlyIndexedGlobal:
1522         if (m_lastOpcodeID == op_put_global_var) {
1523             WriteBarrier<Unknown>* dstPointer;
1524             int srcIndex;
1525             retrieveLastUnaryOp(dstPointer, srcIndex);
1526             if (dstPointer == resolveResult.registerPointer() && srcIndex == dst->index())
1527                 return dst;
1528         }
1529
1530         profile = emitProfiledOpcode(op_get_global_var);
1531         instructions().append(dst->index());
1532         instructions().append(resolveResult.registerPointer());
1533         instructions().append(profile);
1534         return dst;
1535
1536     case ResolveResult::WatchedIndexedGlobal:
1537         // Skip the peephole for now. It's not clear that it's profitable given
1538         // the DFG's capabilities, and the fact that if it's watchable then we
1539         // don't expect to see any put_global_var's anyway.
1540         profile = emitProfiledOpcode(op_get_global_var_watchable);
1541         instructions().append(dst->index());
1542         instructions().append(resolveResult.registerPointer());
1543         instructions().append(addConstant(identifier)); // For the benefit of the DFG.
1544         instructions().append(profile);
1545         return dst;
1546
1547     default:
1548         ASSERT_NOT_REACHED();
1549         return 0;
1550     }
1551 }
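/*
    The IndexedGlobal peephole above elides a redundant load when the value
    was just stored from the same register, roughly:

        put_global_var  gX, r0
        get_global_var  r0, gX      // skipped: r0 already holds the value
*/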
1552
1553 RegisterID* BytecodeGenerator::emitPutStaticVar(const ResolveResult& resolveResult, const Identifier& identifier, RegisterID* value)
1554 {
1555     switch (resolveResult.type()) {
1556     case ResolveResult::Register:
1557     case ResolveResult::ReadOnlyRegister:
1558         return moveToDestinationIfNeeded(resolveResult.local(), value);
1559
1560     case ResolveResult::Lexical:
1561     case ResolveResult::ReadOnlyLexical:
1562         emitOpcode(op_put_scoped_var);
1563         instructions().append(resolveResult.index());
1564         instructions().append(resolveResult.depth());
1565         instructions().append(value->index());
1566         return value;
1567
1568     case ResolveResult::IndexedGlobal:
1569     case ResolveResult::ReadOnlyIndexedGlobal:
1570         emitOpcode(op_put_global_var);
1571         instructions().append(resolveResult.registerPointer());
1572         instructions().append(value->index());
1573         return value;
1574         
1575     case ResolveResult::WatchedIndexedGlobal:
1576         emitOpcode(op_put_global_var_check);
1577         instructions().append(resolveResult.registerPointer());
1578         instructions().append(value->index());
1579         instructions().append(jsCast<JSGlobalObject*>(resolveResult.globalObject())->symbolTable().get(identifier.impl()).addressOfIsWatched());
1580         instructions().append(addConstant(identifier));
1581         return value;
1582
1583     default:
1584         ASSERT_NOT_REACHED();
1585         return 0;
1586     }
1587 }
1588
1589 void BytecodeGenerator::emitMethodCheck()
1590 {
1591     emitOpcode(op_method_check);
1592 }
1593
1594 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1595 {
1596     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1597
1598     ValueProfile* profile = emitProfiledOpcode(op_get_by_id);
1599     instructions().append(dst->index());
1600     instructions().append(base->index());
1601     instructions().append(addConstant(property));
1602     instructions().append(0);
1603     instructions().append(0);
1604     instructions().append(0);
1605     instructions().append(0);
1606     instructions().append(profile);
1607     return dst;
1608 }
1609
1610 RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
1611 {
1612     emitOpcode(op_get_arguments_length);
1613     instructions().append(dst->index());
1614     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1615     instructions().append(base->index());
1616     instructions().append(addConstant(propertyNames().length));
1617     return dst;
1618 }
1619
1620 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1621 {
1622     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1623
1624     emitOpcode(op_put_by_id);
1625     instructions().append(base->index());
1626     instructions().append(addConstant(property));
1627     instructions().append(value->index());
1628     instructions().append(0);
1629     instructions().append(0);
1630     instructions().append(0);
1631     instructions().append(0);
1632     instructions().append(0);
1633     return value;
1634 }
1635
1636 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1637 {
1638     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1639     
1640     emitOpcode(op_put_by_id);
1641     instructions().append(base->index());
1642     instructions().append(addConstant(property));
1643     instructions().append(value->index());
1644     instructions().append(0);
1645     instructions().append(0);
1646     instructions().append(0);
1647     instructions().append(0);
1648     instructions().append(property != m_globalData->propertyNames->underscoreProto);
1649     return value;
1650 }
1651
1652 void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1653 {
1654     emitOpcode(op_put_getter_setter);
1655     instructions().append(base->index());
1656     instructions().append(addConstant(property));
1657     instructions().append(getter->index());
1658     instructions().append(setter->index());
1659 }
1660
1661 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1662 {
1663     emitOpcode(op_del_by_id);
1664     instructions().append(dst->index());
1665     instructions().append(base->index());
1666     instructions().append(addConstant(property));
1667     return dst;
1668 }
1669
1670 RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1671 {
1672     ValueProfile* profile = emitProfiledOpcode(op_get_argument_by_val);
1673     instructions().append(dst->index());
1674     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1675     instructions().append(base->index());
1676     instructions().append(property->index());
1677     instructions().append(profile);
1678     return dst;
1679 }
1680
1681 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1682 {
1683     for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1684         ForInContext& context = m_forInContextStack[i - 1];
1685         if (context.propertyRegister == property) {
1686             emitOpcode(op_get_by_pname);
1687             instructions().append(dst->index());
1688             instructions().append(base->index());
1689             instructions().append(property->index());
1690             instructions().append(context.expectedSubscriptRegister->index());
1691             instructions().append(context.iterRegister->index());
1692             instructions().append(context.indexRegister->index());
1693             return dst;
1694         }
1695     }
1696     ValueProfile* profile = emitProfiledOpcode(op_get_by_val);
1697     instructions().append(dst->index());
1698     instructions().append(base->index());
1699     instructions().append(property->index());
1700     instructions().append(profile);
1701     return dst;
1702 }
1703
1704 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1705 {
1706     emitOpcode(op_put_by_val);
1707     instructions().append(base->index());
1708     instructions().append(property->index());
1709     instructions().append(value->index());
1710     return value;
1711 }
1712
1713 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1714 {
1715     emitOpcode(op_del_by_val);
1716     instructions().append(dst->index());
1717     instructions().append(base->index());
1718     instructions().append(property->index());
1719     return dst;
1720 }
1721
1722 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1723 {
1724     emitOpcode(op_put_by_index);
1725     instructions().append(base->index());
1726     instructions().append(index);
1727     instructions().append(value->index());
1728     return value;
1729 }
1730
1731 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1732 {
1733     emitOpcode(op_new_object);
1734     instructions().append(dst->index());
1735     return dst;
1736 }
1737
1738 unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
1739 {
1740     return m_codeBlock->addConstantBuffer(length);
1741 }
1742
1743 JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1744 {
1745     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
1746     if (!stringInMap) {
1747         stringInMap = jsString(globalData(), identifier.ustring());
1748         addConstantValue(stringInMap);
1749     }
1750     return stringInMap;
1751 }
1752
1753 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
1754 {
1755 #if !ASSERT_DISABLED
1756     unsigned checkLength = 0;
1757 #endif
1758     bool hadVariableExpression = false;
1759     if (length) {
1760         for (ElementNode* n = elements; n; n = n->next()) {
1761             if (!n->value()->isNumber() && !n->value()->isString()) {
1762                 hadVariableExpression = true;
1763                 break;
1764             }
1765             if (n->elision())
1766                 break;
1767 #if !ASSERT_DISABLED
1768             checkLength++;
1769 #endif
1770         }
1771         if (!hadVariableExpression) {
1772             ASSERT(length == checkLength);
1773             unsigned constantBufferIndex = addConstantBuffer(length);
1774             JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex);
1775             unsigned index = 0;
1776             for (ElementNode* n = elements; index < length; n = n->next()) {
1777                 if (n->value()->isNumber())
1778                     constantBuffer[index++] = jsNumber(static_cast<NumberNode*>(n->value())->value());
1779                 else {
1780                     ASSERT(n->value()->isString());
1781                     constantBuffer[index++] = addStringConstant(static_cast<StringNode*>(n->value())->value());
1782                 }
1783             }
1784             emitOpcode(op_new_array_buffer);
1785             instructions().append(dst->index());
1786             instructions().append(constantBufferIndex);
1787             instructions().append(length);
1788             return dst;
1789         }
1790     }
1791
1792     Vector<RefPtr<RegisterID>, 16> argv;
1793     for (ElementNode* n = elements; n; n = n->next()) {
1794         if (n->elision())
1795             break;
1796         argv.append(newTemporary());
1797         // op_new_array requires the initial values to be a sequential range of registers
1798         ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1799         emitNode(argv.last().get(), n->value());
1800     }
1801     emitOpcode(op_new_array);
1802     instructions().append(dst->index());
1803     instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1804     instructions().append(argv.size()); // argc
1805     return dst;
1806 }
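/*
    Roughly how the two paths above divide (illustrative):

        var a = [1, 2, "three"];   // all numeric/string literals, no elision:
                                   // op_new_array_buffer over a constant buffer
        var b = [x, y + 1];        // variable expressions: each element goes
                                   // into the next temporary, then op_new_array
                                   // consumes that contiguous register range
*/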
1807
1808 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
1809 {
1810     return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function)), false);
1811 }
1812
1813 RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
1814 {
1815     FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
1816     if (ptr.isNewEntry)
1817         ptr.iterator->second = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function));
1818     return emitNewFunctionInternal(dst, ptr.iterator->second, true);
1819 }
1820
1821 RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
1822 {
1823     createActivationIfNecessary();
1824     emitOpcode(op_new_func);
1825     instructions().append(dst->index());
1826     instructions().append(index);
1827     instructions().append(doNullCheck);
1828     return dst;
1829 }
1830
1831 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1832 {
1833     emitOpcode(op_new_regexp);
1834     instructions().append(dst->index());
1835     instructions().append(addRegExp(regExp));
1836     return dst;
1837 }
1838
1839 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1840 {
1841     FunctionBodyNode* function = n->body();
1842     unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function));
1843     
1844     createActivationIfNecessary();
1845     emitOpcode(op_new_func_exp);
1846     instructions().append(r0->index());
1847     instructions().append(index);
1848     return r0;
1849 }
1850
1851 RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1852 {
1853     return emitCall(op_call, dst, func, callArguments, divot, startOffset, endOffset);
1854 }
1855
1856 void BytecodeGenerator::createArgumentsIfNecessary()
1857 {
1858     if (m_codeType != FunctionCode)
1859         return;
1860     
1861     if (!m_codeBlock->usesArguments())
1862         return;
1863
1864     // If we're in strict mode we tear off the arguments on function
1865     // entry, so there's no need to check whether we need to create
1866     // them now.
1867     if (m_codeBlock->isStrictMode())
1868         return;
1869
1870     emitOpcode(op_create_arguments);
1871     instructions().append(m_codeBlock->argumentsRegister());
1872 }
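/*
    Sketch of when op_create_arguments is emitted here: non-strict function
    code that uses 'arguments', e.g.

        function f() { return arguments.length; }

    Strict functions tear off their arguments on entry instead, so this
    helper emits nothing for them.
*/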
1873
1874 void BytecodeGenerator::createActivationIfNecessary()
1875 {
1876     if (m_hasCreatedActivation)
1877         return;
1878     if (!m_codeBlock->needsFullScopeChain())
1879         return;
1880     emitOpcode(op_create_activation);
1881     instructions().append(m_activationRegister->index());
1882 }
1883
1884 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1885 {
1886     return emitCall(op_call_eval, dst, func, callArguments, divot, startOffset, endOffset);
1887 }
1888
1889 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1890 {
1891     ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1892     ASSERT(func->refCount());
1893
1894     if (m_shouldEmitProfileHooks)
1895         emitMove(callArguments.profileHookRegister(), func);
1896
1897     // Generate code for arguments.
1898     unsigned argument = 0;
1899     for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
1900         emitNode(callArguments.argumentRegister(argument++), n);
1901
1902     // Reserve space for call frame.
1903     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
1904     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
1905         callFrame.append(newTemporary());
1906
1907     if (m_shouldEmitProfileHooks) {
1908         emitOpcode(op_profile_will_call);
1909         instructions().append(callArguments.profileHookRegister()->index());
1910     }
1911
1912     emitExpressionInfo(divot, startOffset, endOffset);
1913
1914     // Emit call.
1915     emitOpcode(opcodeID);
1916     instructions().append(func->index()); // func
1917     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1918     instructions().append(callArguments.registerOffset()); // registerOffset
1919 #if ENABLE(LLINT)
1920     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1921 #else
1922     instructions().append(0);
1923 #endif
1924     instructions().append(0);
1925     if (dst != ignoredResult()) {
1926         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
1927         instructions().append(dst->index()); // dst
1928         instructions().append(profile);
1929     }
1930
1931     if (m_shouldEmitProfileHooks) {
1932         emitOpcode(op_profile_did_call);
1933         instructions().append(callArguments.profileHookRegister()->index());
1934     }
1935
1936     return dst;
1937 }
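/*
    Approximate shape of the sequence emitted for an ordinary call (profile
    hooks omitted; operands are illustrative):

        ...                       // arguments evaluated into consecutive registers
        call             func, argCountIncludingThis, registerOffset, 0, 0
        call_put_result  dst, profile     // only when the result is used
*/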
1938
1939 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
1940 {
1941     if (m_shouldEmitProfileHooks) {
1942         emitMove(profileHookRegister, func);
1943         emitOpcode(op_profile_will_call);
1944         instructions().append(profileHookRegister->index());
1945     }
1946     
1947     emitExpressionInfo(divot, startOffset, endOffset);
1948
1949     // Emit call.
1950     emitOpcode(op_call_varargs);
1951     instructions().append(func->index());
1952     instructions().append(thisRegister->index());
1953     instructions().append(arguments->index());
1954     instructions().append(firstFreeRegister->index());
1955     if (dst != ignoredResult()) {
1956         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
1957         instructions().append(dst->index());
1958         instructions().append(profile);
1959     }
1960     if (m_shouldEmitProfileHooks) {
1961         emitOpcode(op_profile_did_call);
1962         instructions().append(profileHookRegister->index());
1963     }
1964     return dst;
1965 }
1966
1967 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1968 {
1969     if (m_codeBlock->needsFullScopeChain()) {
1970         emitOpcode(op_tear_off_activation);
1971         instructions().append(m_activationRegister->index());
1972         instructions().append(m_codeBlock->argumentsRegister());
1973     } else if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !m_codeBlock->isStrictMode()) {
1974         emitOpcode(op_tear_off_arguments);
1975         instructions().append(m_codeBlock->argumentsRegister());
1976     }
1977
1978     // Constructors use op_ret_object_or_this to check that the result is an
1979     // object, unless we can trivially determine the check is not
1980     // necessary (currently, if the return value is 'this').
1981     if (isConstructor() && (src->index() != m_thisRegister.index())) {
1982         emitOpcode(op_ret_object_or_this);
1983         instructions().append(src->index());
1984         instructions().append(m_thisRegister.index());
1985         return src;
1986     }
1987     return emitUnaryNoDstOp(op_ret, src);
1988 }
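/*
    For example (sketch):

        function C() { return 42; }    // src != this: op_ret_object_or_this,
                                       // so the primitive result is replaced
                                       // by 'this' at runtime
        function D() { return this; }  // trivially returns 'this': plain op_ret
*/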
1989
1990 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
1991 {
1992     emitOpcode(opcodeID);
1993     instructions().append(src->index());
1994     return src;
1995 }
1996
1997 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1998 {
1999     ASSERT(func->refCount());
2000
2001     if (m_shouldEmitProfileHooks)
2002         emitMove(callArguments.profileHookRegister(), func);
2003
2004     // Generate code for arguments.
2005     unsigned argument = 0;
2006     if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
2007         for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
2008             emitNode(callArguments.argumentRegister(argument++), n);
2009     }
2010
2011     if (m_shouldEmitProfileHooks) {
2012         emitOpcode(op_profile_will_call);
2013         instructions().append(callArguments.profileHookRegister()->index());
2014     }
2015
2016     // Reserve space for call frame.
2017     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
2018     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
2019         callFrame.append(newTemporary());
2020
2021     emitExpressionInfo(divot, startOffset, endOffset);
2022
2023     emitOpcode(op_construct);
2024     instructions().append(func->index()); // func
2025     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
2026     instructions().append(callArguments.registerOffset()); // registerOffset
2027 #if ENABLE(LLINT)
2028     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
2029 #else
2030     instructions().append(0);
2031 #endif
2032     instructions().append(0);
2033     if (dst != ignoredResult()) {
2034         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
2035         instructions().append(dst->index()); // dst
2036         instructions().append(profile);
2037     }
2038
2039     if (m_shouldEmitProfileHooks) {
2040         emitOpcode(op_profile_did_call);
2041         instructions().append(callArguments.profileHookRegister()->index());
2042     }
2043
2044     return dst;
2045 }
2046
2047 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
2048 {
2049     emitOpcode(op_strcat);
2050     instructions().append(dst->index());
2051     instructions().append(src->index());
2052     instructions().append(count);
2053
2054     return dst;
2055 }
2056
2057 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
2058 {
2059     emitOpcode(op_to_primitive);
2060     instructions().append(dst->index());
2061     instructions().append(src->index());
2062 }
2063
2064 RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
2065 {
2066     ASSERT(scope->isTemporary());
2067     ControlFlowContext context;
2068     context.isFinallyBlock = false;
2069     m_scopeContextStack.append(context);
2070     m_dynamicScopeDepth++;
2071
2072     return emitUnaryNoDstOp(op_push_scope, scope);
2073 }
2074
2075 void BytecodeGenerator::emitPopScope()
2076 {
2077     ASSERT(m_scopeContextStack.size());
2078     ASSERT(!m_scopeContextStack.last().isFinallyBlock);
2079
2080     emitOpcode(op_pop_scope);
2081
2082     m_scopeContextStack.removeLast();
2083     m_dynamicScopeDepth--;
2084 }
2085
2086 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
2087 {
2088 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2089     if (debugHookID != DidReachBreakpoint)
2090         return;
2091 #else
2092     if (!m_shouldEmitDebugHooks)
2093         return;
2094 #endif
2095     emitOpcode(op_debug);
2096     instructions().append(debugHookID);
2097     instructions().append(firstLine);
2098     instructions().append(lastLine);
2099 }
2100
2101 void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
2102 {
2103     ControlFlowContext scope;
2104     scope.isFinallyBlock = true;
2105     FinallyContext context = {
2106         finallyBlock,
2107         m_scopeContextStack.size(),
2108         m_switchContextStack.size(),
2109         m_forInContextStack.size(),
2110         m_labelScopes.size(),
2111         m_finallyDepth,
2112         m_dynamicScopeDepth
2113     };
2114     scope.finallyContext = context;
2115     m_scopeContextStack.append(scope);
2116     m_finallyDepth++;
2117 }
2118
2119 void BytecodeGenerator::popFinallyContext()
2120 {
2121     ASSERT(m_scopeContextStack.size());
2122     ASSERT(m_scopeContextStack.last().isFinallyBlock);
2123     ASSERT(m_finallyDepth > 0);
2124     m_scopeContextStack.removeLast();
2125     m_finallyDepth--;
2126 }
2127
2128 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
2129 {
2130     // Reclaim free label scopes.
2131     //
2132     // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
2133     // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
2134         // size 0, leading to segfaulty badness.  We have yet to identify a cause within our code that
2135         // makes the GCC codegen misbehave in this fashion, and as such the following refactoring of the
2136         // loop condition is a workaround.
2137     while (m_labelScopes.size()) {
2138         if (m_labelScopes.last().refCount())
2139             break;
2140         m_labelScopes.removeLast();
2141     }
2142
2143     if (!m_labelScopes.size())
2144         return 0;
2145
2146     // We special-case the following, which is a syntax error in Firefox:
2147     // label:
2148     //     break;
2149     if (name.isEmpty()) {
2150         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2151             LabelScope* scope = &m_labelScopes[i];
2152             if (scope->type() != LabelScope::NamedLabel) {
2153                 ASSERT(scope->breakTarget());
2154                 return scope;
2155             }
2156         }
2157         return 0;
2158     }
2159
2160     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2161         LabelScope* scope = &m_labelScopes[i];
2162         if (scope->name() && *scope->name() == name) {
2163             ASSERT(scope->breakTarget());
2164             return scope;
2165         }
2166     }
2167     return 0;
2168 }
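/*
    Illustrative lookup behaviour for the two cases above:

        for (;;) { break; }                 // empty name: nearest scope that is
                                            // not a NamedLabel (a loop or switch)
        outer: for (;;) { break outer; }    // named: nearest scope whose name
                                            // matches 'outer'
*/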
2169
2170 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
2171 {
2172     // Reclaim free label scopes.
2173     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
2174         m_labelScopes.removeLast();
2175
2176     if (!m_labelScopes.size())
2177         return 0;
2178
2179     if (name.isEmpty()) {
2180         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2181             LabelScope* scope = &m_labelScopes[i];
2182             if (scope->type() == LabelScope::Loop) {
2183                 ASSERT(scope->continueTarget());
2184                 return scope;
2185             }
2186         }
2187         return 0;
2188     }
2189
2190     // Continue to the loop nested nearest to the label scope that matches
2191     // 'name'.
2192     LabelScope* result = 0;
2193     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2194         LabelScope* scope = &m_labelScopes[i];
2195         if (scope->type() == LabelScope::Loop) {
2196             ASSERT(scope->continueTarget());
2197             result = scope;
2198         }
2199         if (scope->name() && *scope->name() == name)
2200             return result; // may be 0
2201     }
2202     return 0;
2203 }
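/*
    Example of the named-continue rule above (sketch):

        outer: for (var i = 0; i < 3; ++i)
            for (var j = 0; j < 3; ++j)
                continue outer;   // resolves to the loop nested nearest to the
                                  // 'outer' label scope, i.e. the i loop
*/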
2204
2205 PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
2206 {
2207     while (topScope > bottomScope) {
2208         // First we count the number of dynamic scopes we need to remove to get
2209         // to a finally block.
2210         int nNormalScopes = 0;
2211         while (topScope > bottomScope) {
2212             if (topScope->isFinallyBlock)
2213                 break;
2214             ++nNormalScopes;
2215             --topScope;
2216         }
2217
2218         if (nNormalScopes) {
2219             size_t begin = instructions().size();
2220
2221             // We need to remove a number of dynamic scopes to get to the next
2222             // finally block
2223             emitOpcode(op_jmp_scopes);
2224             instructions().append(nNormalScopes);
2225
2226             // If topScope == bottomScope then there isn't actually a finally block
2227             // left to emit, so make the jmp_scopes jump directly to the target label
2228             if (topScope == bottomScope) {
2229                 instructions().append(target->bind(begin, instructions().size()));
2230                 return target;
2231             }
2232
2233             // Otherwise we just use jmp_scopes to pop a group of scopes and go
2234             // to the next instruction
2235             RefPtr<Label> nextInsn = newLabel();
2236             instructions().append(nextInsn->bind(begin, instructions().size()));
2237             emitLabel(nextInsn.get());
2238         }
2239         
2240         Vector<ControlFlowContext> savedScopeContextStack;
2241         Vector<SwitchInfo> savedSwitchContextStack;
2242         Vector<ForInContext> savedForInContextStack;
2243         SegmentedVector<LabelScope, 8> savedLabelScopes;
2244         while (topScope > bottomScope && topScope->isFinallyBlock) {
2245             // Save the current state of the world while instating the state of the world
2246             // for the finally block.
2247             FinallyContext finallyContext = topScope->finallyContext;
2248             bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
2249             bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
2250             bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
2251             bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
2252             int topScopeIndex = -1;
2253             int bottomScopeIndex = -1;
2254             if (flipScopes) {
2255                 topScopeIndex = topScope - m_scopeContextStack.begin();
2256                 bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
2257                 savedScopeContextStack = m_scopeContextStack;
2258                 m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
2259             }
2260             if (flipSwitches) {
2261                 savedSwitchContextStack = m_switchContextStack;
2262                 m_switchContextStack.shrink(finallyContext.switchContextStackSize);
2263             }
2264             if (flipForIns) {
2265                 savedForInContextStack = m_forInContextStack;
2266                 m_forInContextStack.shrink(finallyContext.forInContextStackSize);
2267             }
2268             if (flipLabelScopes) {
2269                 savedLabelScopes = m_labelScopes;
2270                 while (m_labelScopes.size() > finallyContext.labelScopesSize)
2271                     m_labelScopes.removeLast();
2272             }
2273             int savedFinallyDepth = m_finallyDepth;
2274             m_finallyDepth = finallyContext.finallyDepth;
2275             int savedDynamicScopeDepth = m_dynamicScopeDepth;
2276             m_dynamicScopeDepth = finallyContext.dynamicScopeDepth;
2277             
2278             // Emit the finally block.
2279             emitNode(finallyContext.finallyBlock);
2280             
2281             // Restore the state of the world.
2282             if (flipScopes) {
2283                 m_scopeContextStack = savedScopeContextStack;
2284                 topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
2285                 bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1.
2286             }
2287             if (flipSwitches)
2288                 m_switchContextStack = savedSwitchContextStack;
2289             if (flipForIns)
2290                 m_forInContextStack = savedForInContextStack;
2291             if (flipLabelScopes)
2292                 m_labelScopes = savedLabelScopes;
2293             m_finallyDepth = savedFinallyDepth;
2294             m_dynamicScopeDepth = savedDynamicScopeDepth;
2295             
2296             --topScope;
2297         }
2298     }
2299     return emitJump(target);
2300 }
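/*
    Sketch of source that exercises the path above (names are illustrative):

        lab: try {
            with (o)
                break lab;   // must pop the dynamic 'with' scope and run the
        } finally {          // finally block on the way to 'lab'
            cleanup();
        }

    The break is compiled as op_jmp_scopes for the dynamic scopes plus an
    inline copy of the finally block, rather than as a plain jump.
*/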
2301
2302 PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
2303 {
2304     ASSERT(scopeDepth() - targetScopeDepth >= 0);
2305     ASSERT(target->isForward());
2306
2307     size_t scopeDelta = scopeDepth() - targetScopeDepth;
2308     ASSERT(scopeDelta <= m_scopeContextStack.size());
2309     if (!scopeDelta)
2310         return emitJump(target);
2311
2312     if (m_finallyDepth)
2313         return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
2314
2315     size_t begin = instructions().size();
2316
2317     emitOpcode(op_jmp_scopes);
2318     instructions().append(scopeDelta);
2319     instructions().append(target->bind(begin, instructions().size()));
2320     return target;
2321 }
2322
2323 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
2324 {
2325     size_t begin = instructions().size();
2326
2327     emitOpcode(op_get_pnames);
2328     instructions().append(dst->index());
2329     instructions().append(base->index());
2330     instructions().append(i->index());
2331     instructions().append(size->index());
2332     instructions().append(breakTarget->bind(begin, instructions().size()));
2333     return dst;
2334 }
2335
2336 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
2337 {
2338     size_t begin = instructions().size();
2339
2340     emitOpcode(op_next_pname);
2341     instructions().append(dst->index());
2342     instructions().append(base->index());
2343     instructions().append(i->index());
2344     instructions().append(size->index());
2345     instructions().append(iter->index());
2346     instructions().append(target->bind(begin, instructions().size()));
2347     return dst;
2348 }
2349
2350 RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
2351 {
2352     m_usesExceptions = true;
2353 #if ENABLE(JIT)
2354 #if ENABLE(LLINT)
2355     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(bitwise_cast<void*>(&llint_op_catch))) };
2356 #else
2357     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
2358 #endif
2359 #else
2360     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
2361 #endif
2362
2363     m_codeBlock->addExceptionHandler(info);
2364     emitOpcode(op_catch);
2365     instructions().append(targetRegister->index());
2366     return targetRegister;
2367 }
2368
2369 void BytecodeGenerator::emitThrowReferenceError(const UString& message)
2370 {
2371     emitOpcode(op_throw_reference_error);
2372     instructions().append(addConstantValue(jsString(globalData(), message))->index());
2373 }
2374
2375 void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value)
2376 {
2377     ControlFlowContext context;
2378     context.isFinallyBlock = false;
2379     m_scopeContextStack.append(context);
2380     m_dynamicScopeDepth++;
2381
2382     emitOpcode(op_push_new_scope);
2383     instructions().append(dst->index());
2384     instructions().append(addConstant(property));
2385     instructions().append(value->index());
2386 }
2387
2388 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
2389 {
2390     SwitchInfo info = { instructions().size(), type };
2391     switch (type) {
2392         case SwitchInfo::SwitchImmediate:
2393             emitOpcode(op_switch_imm);
2394             break;
2395         case SwitchInfo::SwitchCharacter:
2396             emitOpcode(op_switch_char);
2397             break;
2398         case SwitchInfo::SwitchString:
2399             emitOpcode(op_switch_string);
2400             break;
2401         default:
2402             ASSERT_NOT_REACHED();
2403     }
2404
2405     instructions().append(0); // placeholder for table index
2406     instructions().append(0); // placeholder for default target
2407     instructions().append(scrutineeRegister->index());
2408     m_switchContextStack.append(info);
2409 }
2410
2411 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2412 {
2413     UNUSED_PARAM(max);
2414     ASSERT(node->isNumber());
2415     double value = static_cast<NumberNode*>(node)->value();
2416     int32_t key = static_cast<int32_t>(value);
2417     ASSERT(key == value);
2418     ASSERT(key >= min);
2419     ASSERT(key <= max);
2420     return key - min;
2421 }
2422
2423 static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2424 {
2425     jumpTable.min = min;
2426     jumpTable.branchOffsets.resize(max - min + 1);
2427     jumpTable.branchOffsets.fill(0);
2428     for (uint32_t i = 0; i < clauseCount; ++i) {
2429         // By the time we emit this the clause labels should have been fixed,
2430         // so they should not be "forward" references.
2431         ASSERT(!labels[i]->isForward());
2432         jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2433     }
2434 }
2435
2436 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2437 {
2438     UNUSED_PARAM(max);
2439     ASSERT(node->isString());
2440     StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2441     ASSERT(clause->length() == 1);
2442     
2443     int32_t key = (*clause)[0];
2444     ASSERT(key >= min);
2445     ASSERT(key <= max);
2446     return key - min;
2447 }
2448
2449 static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2450 {
2451     jumpTable.min = min;
2452     jumpTable.branchOffsets.resize(max - min + 1);
2453     jumpTable.branchOffsets.fill(0);
2454     for (uint32_t i = 0; i < clauseCount; ++i) {
2455         // By the time we emit this the clause labels should have been fixed,
2456         // so they should not be "forward" references.
2457         ASSERT(!labels[i]->isForward());
2458         jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2459     }
2460 }
2461
2462 static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2463 {
2464     for (uint32_t i = 0; i < clauseCount; ++i) {
2465         // By the time we emit this the clause labels should have been fixed,
2466         // so they should not be "forward" references.
2467         ASSERT(!labels[i]->isForward());
2468         
2469         ASSERT(nodes[i]->isString());
2470         StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
2471         OffsetLocation location;
2472         location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
2473         jumpTable.offsetTable.add(clause, location);
2474     }
2475 }
2476
2477 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2478 {
2479     SwitchInfo switchInfo = m_switchContextStack.last();
2480     m_switchContextStack.removeLast();
2481     if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
2482         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
2483         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2484
2485         SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
2486         prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2487     } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
2488         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
2489         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2490         
2491         SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
2492         prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2493     } else {
2494         ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
2495         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2496         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2497
2498         StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2499         prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2500     }
2501 }
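/*
    Rough mapping from source to the three jump table kinds handled above
    (the SwitchType itself is chosen by the caller):

        switch (n) { case 1: ... case 2: ... }         // SwitchImmediate: op_switch_imm
        switch (c) { case "a": ... case "b": ... }     // SwitchCharacter: op_switch_char
        switch (s) { case "foo": ... case "bar": ... } // SwitchString:    op_switch_string
*/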
2502
2503 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2504 {
2505     // It would be nice to do an even better job of identifying exactly where the expression is.
2506     // And we could make the caller pass the node pointer in, if there was some way of getting
2507     // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2508     // is still good enough to get us an accurate line number.
2509     m_expressionTooDeep = true;
2510     return newTemporary();
2511 }
2512
2513 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2514 {
2515     m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2516 }
2517
2518 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2519 {
2520     RegisterID* registerID = resolve(ident).local();
2521     if (!registerID || registerID->index() >= 0)
2522         return false;
2523     return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2524 }
2525
2526 } // namespace JSC