c4f7faf424c38b8219357f64bd5d6227754bed40
[WebKit-https.git] / Source / JavaScriptCore / bytecompiler / BytecodeGenerator.cpp
1 /*
2  * Copyright (C) 2008, 2009, 2012 Apple Inc. All rights reserved.
3  * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4  * Copyright (C) 2012 Igalia, S.L.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  *
10  * 1.  Redistributions of source code must retain the above copyright
11  *     notice, this list of conditions and the following disclaimer.
12  * 2.  Redistributions in binary form must reproduce the above copyright
13  *     notice, this list of conditions and the following disclaimer in the
14  *     documentation and/or other materials provided with the distribution.
15  * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16  *     its contributors may be used to endorse or promote products derived
17  *     from this software without specific prior written permission.
18  *
19  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29  */
30
31 #include "config.h"
32 #include "BytecodeGenerator.h"
33
34 #include "BatchedTransitionOptimizer.h"
35 #include "JSActivation.h"
36 #include "JSFunction.h"
37 #include "Interpreter.h"
38 #include "LowLevelInterpreter.h"
39 #include "ScopeChain.h"
40 #include "StrongInlines.h"
41 #include "UString.h"
42
43 using namespace std;
44
45 namespace JSC {
46
47 /*
48     The layout of a register frame looks like this:
49
50     For
51
52     function f(x, y) {
53         var v1;
54         function g() { }
55         var v2;
56         return (x) * (y);
57     }
58
59     assuming (x) and (y) generated temporaries t1 and t2, you would have
60
61     ------------------------------------
62     |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
63     ------------------------------------
64     | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
65     ------------------------------------
66     | params->|<-locals      | temps->
67
68     Because temporary registers are allocated in a stack-like fashion, we
69     can reclaim them with a simple popping algorithm. The same goes for labels.
70     (We never reclaim parameter or local registers, because parameters and
71     locals are DontDelete.)
72
73     The register layout before a function call looks like this:
74
75     For
76
77     function f(x, y)
78     {
79     }
80
81     f(1);
82
83     >                        <------------------------------
84     <                        >  reserved: call frame  |  1 | <-- value held
85     >         >snip<         <------------------------------
86     <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
87     >                        <------------------------------
88     | params->|<-locals      | temps->
89
90     The call instruction fills in the "call frame" registers. It also pads
91     missing arguments at the end of the call:
92
93     >                        <-----------------------------------
94     <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
95     >         >snip<         <-----------------------------------
96     <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
97     >                        <-----------------------------------
98     | params->|<-locals      | temps->
99
100     After filling in missing arguments, the call instruction sets up the new
101     stack frame to overlap the end of the old stack frame:
102
103                              |---------------------------------->                        <
104                              |  reserved: call frame  |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
105                              |---------------------------------->         >snip<         <
106                              | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
107                              |---------------------------------->                        <
108                              |                        | params->|<-locals       | temps->
109
110     That way, arguments are "copied" into the callee's stack frame for free.
111
112     If the caller supplies too many arguments, this trick doesn't work. The
113     extra arguments protrude into space reserved for locals and temporaries.
114     In that case, the call instruction makes a real copy of the call frame header,
115     along with just the arguments expected by the callee, leaving the original
116     call frame header and arguments behind. (The call instruction can't just discard
117     extra arguments, because the "arguments" object may access them later.)
118     This copying strategy ensures that all named values will be at the indices
119     expected by the callee.
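
    For example, if f expects (x, y) but is called as f(1, 2, 3), the extra
    argument 3 would protrude into the space reserved for the callee's locals.
    The call therefore copies the call frame header plus just the two expected
    arguments into a fresh frame, leaving 1, 2, 3 behind in the old frame where
    a later "arguments" object can still read them.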
120 */
121
122 void Label::setLocation(unsigned location)
123 {
124     m_location = location;
125     
126     unsigned size = m_unresolvedJumps.size();
127     for (unsigned i = 0; i < size; ++i)
128         m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
129 }
130
131 #ifndef NDEBUG
132 void ResolveResult::checkValidity()
133 {
134     switch (m_type) {
135     case Register:
136     case ReadOnlyRegister:
137         ASSERT(m_local);
138         return;
139     case Lexical:
140     case ReadOnlyLexical:
141     case DynamicLexical:
142     case DynamicReadOnlyLexical:
143         ASSERT(m_index != missingSymbolMarker());
144         return;
145     case Global:
146     case DynamicGlobal:
147         ASSERT(m_globalObject);
148         return;
149     case IndexedGlobal:
150     case ReadOnlyIndexedGlobal:
151     case DynamicIndexedGlobal:
152     case DynamicReadOnlyIndexedGlobal:
153         ASSERT(m_index != missingSymbolMarker());
154         ASSERT(m_globalObject);
155         return;
156     case Dynamic:
157         return;
158     default:
159         ASSERT_NOT_REACHED();
160     }
161 }
162 #endif
163
164 WriteBarrier<Unknown>* ResolveResult::registerPointer() const
165 {
166     return &jsCast<JSGlobalObject*>(globalObject())->registerAt(index());
167 }
168
169 static bool s_dumpsGeneratedCode = false;
170
171 void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
172 {
173     s_dumpsGeneratedCode = dumpsGeneratedCode;
174 }
175
176 bool BytecodeGenerator::dumpsGeneratedCode()
177 {
178     return s_dumpsGeneratedCode;
179 }
180
181 JSObject* BytecodeGenerator::generate()
182 {
183     SamplingRegion samplingRegion("Bytecode Generation");
184     
185     m_codeBlock->setThisRegister(m_thisRegister.index());
186
187     m_scopeNode->emitBytecode(*this);
188     
189     m_codeBlock->instructions() = RefCountedArray<Instruction>(m_instructions);
190
191     if (s_dumpsGeneratedCode)
192         m_codeBlock->dump(m_scopeChain->globalObject->globalExec());
193
194     if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
195         symbolTable().clear();
196
197     m_codeBlock->shrinkToFit(CodeBlock::EarlyShrink);
198
199     if (m_expressionTooDeep)
200         return createOutOfMemoryError(m_scopeChain->globalObject.get());
201     return 0;
202 }
203
204 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
205 {
206     int index = m_calleeRegisters.size();
207     SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
208     SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
209
210     if (!result.isNewEntry) {
211         r0 = &registerFor(result.iterator->second.getIndex());
212         return false;
213     }
214
215     r0 = addVar();
216     return true;
217 }
218
219 int BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant)
220 {
221     int index = symbolTable().size();
222     SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
223     SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
224     if (!result.isNewEntry)
225         index = result.iterator->second.getIndex();
226     return index;
227 }
228
229 void BytecodeGenerator::preserveLastVar()
230 {
231     if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
232         m_lastVar = &m_calleeRegisters.last();
233 }
234
235 BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock, CompilationKind compilationKind)
236     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
237     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
238     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
239     , m_scopeChain(*scopeChain->globalData, scopeChain)
240     , m_symbolTable(symbolTable)
241     , m_scopeNode(programNode)
242     , m_codeBlock(codeBlock)
243     , m_thisRegister(CallFrame::thisArgumentOffset())
244     , m_finallyDepth(0)
245     , m_dynamicScopeDepth(0)
246     , m_baseScopeDepth(0)
247     , m_codeType(GlobalCode)
248     , m_nextConstantOffset(0)
249     , m_globalConstantIndex(0)
250     , m_hasCreatedActivation(true)
251     , m_firstLazyFunction(0)
252     , m_lastLazyFunction(0)
253     , m_globalData(scopeChain->globalData)
254     , m_lastOpcodeID(op_end)
255 #ifndef NDEBUG
256     , m_lastOpcodePosition(0)
257 #endif
258     , m_stack(wtfThreadData().stack())
259     , m_usesExceptions(false)
260     , m_expressionTooDeep(false)
261 {
262     m_globalData->startedCompiling(m_codeBlock);
263     if (m_shouldEmitDebugHooks)
264         m_codeBlock->setNeedsFullScopeChain(true);
265
266     emitOpcode(op_enter);
267     codeBlock->setGlobalData(m_globalData);
268
269     // FIXME: Move code that modifies the global object to Interpreter::execute.
270     
271     m_codeBlock->setNumParameters(1); // Allocate space for "this"
272     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
273     
274     if (compilationKind == OptimizingCompilation)
275         return;
276
277     JSGlobalObject* globalObject = scopeChain->globalObject.get();
278     ExecState* exec = globalObject->globalExec();
279     
280     BatchedTransitionOptimizer optimizer(*m_globalData, globalObject);
281
282     const VarStack& varStack = programNode->varStack();
283     const FunctionStack& functionStack = programNode->functionStack();
284
285     size_t newGlobals = varStack.size() + functionStack.size();
286     if (!newGlobals)
287         return;
288     globalObject->addRegisters(newGlobals);
289
290     for (size_t i = 0; i < functionStack.size(); ++i) {
291         FunctionBodyNode* function = functionStack[i];
292         globalObject->removeDirect(*m_globalData, function->ident()); // Newly declared functions overwrite existing properties.
293
294         JSValue value = JSFunction::create(exec, makeFunction(exec, function), scopeChain);
295         int index = addGlobalVar(function->ident(), false);
296         globalObject->registerAt(index).set(*m_globalData, globalObject, value);
297     }
298
299     for (size_t i = 0; i < varStack.size(); ++i) {
300         if (globalObject->hasProperty(exec, *varStack[i].first))
301             continue;
302         addGlobalVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);
303     }
304 }
305
306 BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, ScopeChainNode* scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock, CompilationKind)
307     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
308     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
309     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
310     , m_scopeChain(*scopeChain->globalData, scopeChain)
311     , m_symbolTable(symbolTable)
312     , m_scopeNode(functionBody)
313     , m_codeBlock(codeBlock)
314     , m_activationRegister(0)
315     , m_finallyDepth(0)
316     , m_dynamicScopeDepth(0)
317     , m_baseScopeDepth(0)
318     , m_codeType(FunctionCode)
319     , m_nextConstantOffset(0)
320     , m_globalConstantIndex(0)
321     , m_hasCreatedActivation(false)
322     , m_firstLazyFunction(0)
323     , m_lastLazyFunction(0)
324     , m_globalData(scopeChain->globalData)
325     , m_lastOpcodeID(op_end)
326 #ifndef NDEBUG
327     , m_lastOpcodePosition(0)
328 #endif
329     , m_stack(wtfThreadData().stack())
330     , m_usesExceptions(false)
331     , m_expressionTooDeep(false)
332 {
333     m_globalData->startedCompiling(m_codeBlock);
334     if (m_shouldEmitDebugHooks)
335         m_codeBlock->setNeedsFullScopeChain(true);
336
337     codeBlock->setGlobalData(m_globalData);
338     
339     emitOpcode(op_enter);
340     if (m_codeBlock->needsFullScopeChain()) {
341         m_activationRegister = addVar();
342         emitInitLazyRegister(m_activationRegister);
343         m_codeBlock->setActivationRegister(m_activationRegister->index());
344     }
345
346     // Both op_tear_off_activation and op_tear_off_arguments tear off the 'arguments'
347     // object, if created.
348     if (m_codeBlock->needsFullScopeChain() || functionBody->usesArguments()) {
349         RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
350         RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.
351
352         // We can save a little space by hard-coding the knowledge that the two
353         // 'arguments' values are stored in consecutive registers, and storing
354         // only the index of the assignable one.
355         codeBlock->setArgumentsRegister(argumentsRegister->index());
356         ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));
357
358         emitInitLazyRegister(argumentsRegister);
359         emitInitLazyRegister(unmodifiedArgumentsRegister);
360         
361         if (m_codeBlock->isStrictMode()) {
362             emitOpcode(op_create_arguments);
363             instructions().append(argumentsRegister->index());
364         }
365
366         // The debugger currently retrieves the arguments object from an activation rather than pulling
367         // it from a call frame.  In the long-term it should stop doing that (<rdar://problem/6911886>),
368         // but for now we force eager creation of the arguments object when debugging.
369         if (m_shouldEmitDebugHooks) {
370             emitOpcode(op_create_arguments);
371             instructions().append(argumentsRegister->index());
372         }
373     }
374
375     const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
376     const DeclarationStacks::VarStack& varStack = functionBody->varStack();
377
378     // Captured variables and functions go first so that activations don't have
379     // to step over the non-captured locals to mark them.
380     m_hasCreatedActivation = false;
381     if (functionBody->hasCapturedVariables()) {
382         for (size_t i = 0; i < functionStack.size(); ++i) {
383             FunctionBodyNode* function = functionStack[i];
384             const Identifier& ident = function->ident();
385             if (functionBody->captures(ident)) {
386                 if (!m_hasCreatedActivation) {
387                     m_hasCreatedActivation = true;
388                     emitOpcode(op_create_activation);
389                     instructions().append(m_activationRegister->index());
390                 }
391                 m_functions.add(ident.impl());
392                 emitNewFunction(addVar(ident, false), function);
393             }
394         }
395         for (size_t i = 0; i < varStack.size(); ++i) {
396             const Identifier& ident = *varStack[i].first;
397             if (functionBody->captures(ident))
398                 addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
399         }
400     }
401     bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
402     if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
403         m_hasCreatedActivation = true;
404         emitOpcode(op_create_activation);
405         instructions().append(m_activationRegister->index());
406     }
407
408     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
409     m_firstLazyFunction = codeBlock->m_numVars;
410     for (size_t i = 0; i < functionStack.size(); ++i) {
411         FunctionBodyNode* function = functionStack[i];
412         const Identifier& ident = function->ident();
413         if (!functionBody->captures(ident)) {
414             m_functions.add(ident.impl());
415             RefPtr<RegisterID> reg = addVar(ident, false);
416             // Don't lazily create functions that override the name 'arguments'
417             // as this would complicate lazy instantiation of actual arguments.
418             if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
419                 emitNewFunction(reg.get(), function);
420             else {
421                 emitInitLazyRegister(reg.get());
422                 m_lazyFunctions.set(reg->index(), function);
423             }
424         }
425     }
426     m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
427     for (size_t i = 0; i < varStack.size(); ++i) {
428         const Identifier& ident = *varStack[i].first;
429         if (!functionBody->captures(ident))
430             addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
431     }
432
433     if (m_shouldEmitDebugHooks)
434         codeBlock->m_numCapturedVars = codeBlock->m_numVars;
435
436     FunctionParameters& parameters = *functionBody->parameters();
437     m_parameters.grow(parameters.size() + 1); // reserve space for "this"
438
439     // Add "this" as a parameter
440     int nextParameterIndex = CallFrame::thisArgumentOffset();
441     m_thisRegister.setIndex(nextParameterIndex--);
442     m_codeBlock->addParameter();
443     
444     for (size_t i = 0; i < parameters.size(); ++i)
445         addParameter(parameters[i], nextParameterIndex--);
446
447     preserveLastVar();
448
449     if (isConstructor()) {
450         emitOpcode(op_create_this);
451         instructions().append(m_thisRegister.index());
452     } else if (!codeBlock->isStrictMode() && (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks)) {
453         ValueProfile* profile = emitProfiledOpcode(op_convert_this);
454         instructions().append(m_thisRegister.index());
455         instructions().append(profile);
456     }
457 }
458
459 BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock, CompilationKind)
460     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
461     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
462     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
463     , m_scopeChain(*scopeChain->globalData, scopeChain)
464     , m_symbolTable(symbolTable)
465     , m_scopeNode(evalNode)
466     , m_codeBlock(codeBlock)
467     , m_thisRegister(CallFrame::thisArgumentOffset())
468     , m_finallyDepth(0)
469     , m_dynamicScopeDepth(0)
470     , m_baseScopeDepth(codeBlock->baseScopeDepth())
471     , m_codeType(EvalCode)
472     , m_nextConstantOffset(0)
473     , m_globalConstantIndex(0)
474     , m_hasCreatedActivation(true)
475     , m_firstLazyFunction(0)
476     , m_lastLazyFunction(0)
477     , m_globalData(scopeChain->globalData)
478     , m_lastOpcodeID(op_end)
479 #ifndef NDEBUG
480     , m_lastOpcodePosition(0)
481 #endif
482     , m_stack(wtfThreadData().stack())
483     , m_usesExceptions(false)
484     , m_expressionTooDeep(false)
485 {
486     m_globalData->startedCompiling(m_codeBlock);
487     if (m_shouldEmitDebugHooks || m_baseScopeDepth)
488         m_codeBlock->setNeedsFullScopeChain(true);
489
490     emitOpcode(op_enter);
491     codeBlock->setGlobalData(m_globalData);
492     m_codeBlock->setNumParameters(1);
493
494     const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
495     for (size_t i = 0; i < functionStack.size(); ++i)
496         m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));
497
498     const DeclarationStacks::VarStack& varStack = evalNode->varStack();
499     unsigned numVariables = varStack.size();
500     Vector<Identifier> variables;
501     variables.reserveCapacity(numVariables);
502     for (size_t i = 0; i < numVariables; ++i)
503         variables.append(*varStack[i].first);
504     codeBlock->adoptVariables(variables);
505     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
506     preserveLastVar();
507 }
508
509 BytecodeGenerator::~BytecodeGenerator()
510 {
511     m_globalData->finishedCompiling(m_codeBlock);
512 }
513
514 RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
515 {
516     emitOpcode(op_init_lazy_reg);
517     instructions().append(reg->index());
518     return reg;
519 }
520
521 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
522 {
523     // Parameters overwrite var declarations, but not function declarations.
524     StringImpl* rep = ident.impl();
525     if (!m_functions.contains(rep)) {
526         symbolTable().set(rep, parameterIndex);
527         RegisterID& parameter = registerFor(parameterIndex);
528         parameter.setIndex(parameterIndex);
529     }
530
531     // To maintain the calling convention, we have to allocate unique space for
532     // each parameter, even if the parameter doesn't make it into the symbol table.
533     m_codeBlock->addParameter();
534 }
535
536 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
537 {
538     if (ident != propertyNames().arguments)
539         return false;
540     
541     if (!shouldOptimizeLocals())
542         return false;
543     
544     SymbolTableEntry entry = symbolTable().get(ident.impl());
545     if (entry.isNull())
546         return false;
547     
548     if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
549         return true;
550     
551     return false;
552 }
553
554 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
555 {
556     ASSERT(willResolveToArguments(propertyNames().arguments));
557
558     SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
559     ASSERT(!entry.isNull());
560     return &registerFor(entry.getIndex());
561 }
562
563 RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
564 {
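    // Registers in [m_firstLazyFunction, m_lastLazyFunction) hold function declarations
    // that are created lazily; before such a register is used, emit code that creates
    // the function if it has not been created yet.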
565     if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
566         return reg;
567     emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
568     return reg;
569 }
570
571 RegisterID* BytecodeGenerator::newRegister()
572 {
573     m_calleeRegisters.append(m_calleeRegisters.size());
574     m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
575     return &m_calleeRegisters.last();
576 }
577
578 RegisterID* BytecodeGenerator::newTemporary()
579 {
580     // Reclaim free register IDs.
581     while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
582         m_calleeRegisters.removeLast();
583         
584     RegisterID* result = newRegister();
585     result->setTemporary();
586     return result;
587 }
588
589 RegisterID* BytecodeGenerator::highestUsedRegister()
590 {
591     size_t count = m_codeBlock->m_numCalleeRegisters;
592     while (m_calleeRegisters.size() < count)
593         newRegister();
594     return &m_calleeRegisters.last();
595 }
596
597 PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
598 {
599     // Reclaim free label scopes.
600     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
601         m_labelScopes.removeLast();
602
603     // Allocate new label scope.
604     LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
605     m_labelScopes.append(scope);
606     return &m_labelScopes.last();
607 }
608
609 PassRefPtr<Label> BytecodeGenerator::newLabel()
610 {
611     // Reclaim free label IDs.
612     while (m_labels.size() && !m_labels.last().refCount())
613         m_labels.removeLast();
614
615     // Allocate new label ID.
616     m_labels.append(this);
617     return &m_labels.last();
618 }
619
620 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
621 {
622     unsigned newLabelIndex = instructions().size();
623     l0->setLocation(newLabelIndex);
624
625     if (m_codeBlock->numberOfJumpTargets()) {
626         unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
627         ASSERT(lastLabelIndex <= newLabelIndex);
628         if (newLabelIndex == lastLabelIndex) {
629             // Peephole optimizations have already been disabled by emitting the last label
630             return l0;
631         }
632     }
633
634     m_codeBlock->addJumpTarget(newLabelIndex);
635
636     // This disables peephole optimizations when an instruction is a jump target
637     m_lastOpcodeID = op_end;
638     return l0;
639 }
640
641 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
642 {
643 #ifndef NDEBUG
644     size_t opcodePosition = instructions().size();
645     ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
646     m_lastOpcodePosition = opcodePosition;
647 #endif
648     instructions().append(globalData()->interpreter->getOpcode(opcodeID));
649     m_lastOpcodeID = opcodeID;
650 }
651
652 ValueProfile* BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
653 {
654 #if ENABLE(VALUE_PROFILER)
655     ValueProfile* result = m_codeBlock->addValueProfile(instructions().size());
656 #else
657     ValueProfile* result = 0;
658 #endif
659     emitOpcode(opcodeID);
660     return result;
661 }
662
663 void BytecodeGenerator::emitLoopHint()
664 {
665 #if ENABLE(DFG_JIT)
666     emitOpcode(op_loop_hint);
667 #endif
668 }
669
670 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
671 {
672     ASSERT(instructions().size() >= 4);
673     size_t size = instructions().size();
674     dstIndex = instructions().at(size - 3).u.operand;
675     src1Index = instructions().at(size - 2).u.operand;
676     src2Index = instructions().at(size - 1).u.operand;
677 }
678
679 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
680 {
681     ASSERT(instructions().size() >= 3);
682     size_t size = instructions().size();
683     dstIndex = instructions().at(size - 2).u.operand;
684     srcIndex = instructions().at(size - 1).u.operand;
685 }
686
687 void BytecodeGenerator::retrieveLastUnaryOp(WriteBarrier<Unknown>*& dstPointer, int& srcIndex)
688 {
689     ASSERT(instructions().size() >= 3);
690     size_t size = instructions().size();
691     dstPointer = instructions().at(size - 2).u.registerPointer;
692     srcIndex = instructions().at(size - 1).u.operand;
693 }
694
695 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
696 {
697     ASSERT(instructions().size() >= 4);
698     instructions().shrink(instructions().size() - 4);
699     m_lastOpcodeID = op_end;
700 }
701
702 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
703 {
704     ASSERT(instructions().size() >= 3);
705     instructions().shrink(instructions().size() - 3);
706     m_lastOpcodeID = op_end;
707 }
708
709 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
710 {
711     size_t begin = instructions().size();
712     emitOpcode(target->isForward() ? op_jmp : op_loop);
713     instructions().append(target->bind(begin, instructions().size()));
714     return target;
715 }
716
717 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
718 {
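    // Peephole: if the condition was just computed by a comparison (or null test) into
    // a dead temporary, rewind that instruction and emit a fused compare-and-jump opcode
    // instead of a separate test and conditional jump.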
719     if (m_lastOpcodeID == op_less) {
720         int dstIndex;
721         int src1Index;
722         int src2Index;
723
724         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
725
726         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
727             rewindBinaryOp();
728
729             size_t begin = instructions().size();
730             emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
731             instructions().append(src1Index);
732             instructions().append(src2Index);
733             instructions().append(target->bind(begin, instructions().size()));
734             return target;
735         }
736     } else if (m_lastOpcodeID == op_lesseq) {
737         int dstIndex;
738         int src1Index;
739         int src2Index;
740
741         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
742
743         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
744             rewindBinaryOp();
745
746             size_t begin = instructions().size();
747             emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
748             instructions().append(src1Index);
749             instructions().append(src2Index);
750             instructions().append(target->bind(begin, instructions().size()));
751             return target;
752         }
753     } else if (m_lastOpcodeID == op_greater) {
754         int dstIndex;
755         int src1Index;
756         int src2Index;
757
758         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
759
760         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
761             rewindBinaryOp();
762
763             size_t begin = instructions().size();
764             emitOpcode(target->isForward() ? op_jgreater : op_loop_if_greater);
765             instructions().append(src1Index);
766             instructions().append(src2Index);
767             instructions().append(target->bind(begin, instructions().size()));
768             return target;
769         }
770     } else if (m_lastOpcodeID == op_greatereq) {
771         int dstIndex;
772         int src1Index;
773         int src2Index;
774
775         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
776
777         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
778             rewindBinaryOp();
779
780             size_t begin = instructions().size();
781             emitOpcode(target->isForward() ? op_jgreatereq : op_loop_if_greatereq);
782             instructions().append(src1Index);
783             instructions().append(src2Index);
784             instructions().append(target->bind(begin, instructions().size()));
785             return target;
786         }
787     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
788         int dstIndex;
789         int srcIndex;
790
791         retrieveLastUnaryOp(dstIndex, srcIndex);
792
793         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
794             rewindUnaryOp();
795
796             size_t begin = instructions().size();
797             emitOpcode(op_jeq_null);
798             instructions().append(srcIndex);
799             instructions().append(target->bind(begin, instructions().size()));
800             return target;
801         }
802     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
803         int dstIndex;
804         int srcIndex;
805
806         retrieveLastUnaryOp(dstIndex, srcIndex);
807
808         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
809             rewindUnaryOp();
810
811             size_t begin = instructions().size();
812             emitOpcode(op_jneq_null);
813             instructions().append(srcIndex);
814             instructions().append(target->bind(begin, instructions().size()));
815             return target;
816         }
817     }
818
819     size_t begin = instructions().size();
820
821     emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
822     instructions().append(cond->index());
823     instructions().append(target->bind(begin, instructions().size()));
824     return target;
825 }
826
827 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
828 {
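    // Same peephole as emitJumpIfTrue, but fusing into the inverted jump opcodes
    // (op_jnless, op_jnlesseq, etc.), and folding a preceding op_not into the jump sense.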
829     if (m_lastOpcodeID == op_less && target->isForward()) {
830         int dstIndex;
831         int src1Index;
832         int src2Index;
833
834         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
835
836         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
837             rewindBinaryOp();
838
839             size_t begin = instructions().size();
840             emitOpcode(op_jnless);
841             instructions().append(src1Index);
842             instructions().append(src2Index);
843             instructions().append(target->bind(begin, instructions().size()));
844             return target;
845         }
846     } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
847         int dstIndex;
848         int src1Index;
849         int src2Index;
850
851         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
852
853         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
854             rewindBinaryOp();
855
856             size_t begin = instructions().size();
857             emitOpcode(op_jnlesseq);
858             instructions().append(src1Index);
859             instructions().append(src2Index);
860             instructions().append(target->bind(begin, instructions().size()));
861             return target;
862         }
863     } else if (m_lastOpcodeID == op_greater && target->isForward()) {
864         int dstIndex;
865         int src1Index;
866         int src2Index;
867
868         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
869
870         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
871             rewindBinaryOp();
872
873             size_t begin = instructions().size();
874             emitOpcode(op_jngreater);
875             instructions().append(src1Index);
876             instructions().append(src2Index);
877             instructions().append(target->bind(begin, instructions().size()));
878             return target;
879         }
880     } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
881         int dstIndex;
882         int src1Index;
883         int src2Index;
884
885         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
886
887         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
888             rewindBinaryOp();
889
890             size_t begin = instructions().size();
891             emitOpcode(op_jngreatereq);
892             instructions().append(src1Index);
893             instructions().append(src2Index);
894             instructions().append(target->bind(begin, instructions().size()));
895             return target;
896         }
897     } else if (m_lastOpcodeID == op_not) {
898         int dstIndex;
899         int srcIndex;
900
901         retrieveLastUnaryOp(dstIndex, srcIndex);
902
903         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
904             rewindUnaryOp();
905
906             size_t begin = instructions().size();
907             emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
908             instructions().append(srcIndex);
909             instructions().append(target->bind(begin, instructions().size()));
910             return target;
911         }
912     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
913         int dstIndex;
914         int srcIndex;
915
916         retrieveLastUnaryOp(dstIndex, srcIndex);
917
918         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
919             rewindUnaryOp();
920
921             size_t begin = instructions().size();
922             emitOpcode(op_jneq_null);
923             instructions().append(srcIndex);
924             instructions().append(target->bind(begin, instructions().size()));
925             return target;
926         }
927     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
928         int dstIndex;
929         int srcIndex;
930
931         retrieveLastUnaryOp(dstIndex, srcIndex);
932
933         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
934             rewindUnaryOp();
935
936             size_t begin = instructions().size();
937             emitOpcode(op_jeq_null);
938             instructions().append(srcIndex);
939             instructions().append(target->bind(begin, instructions().size()));
940             return target;
941         }
942     }
943
944     size_t begin = instructions().size();
945     emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
946     instructions().append(cond->index());
947     instructions().append(target->bind(begin, instructions().size()));
948     return target;
949 }
950
951 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
952 {
953     size_t begin = instructions().size();
954
955     emitOpcode(op_jneq_ptr);
956     instructions().append(cond->index());
957     instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->callFunction()));
958     instructions().append(target->bind(begin, instructions().size()));
959     return target;
960 }
961
962 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
963 {
964     size_t begin = instructions().size();
965
966     emitOpcode(op_jneq_ptr);
967     instructions().append(cond->index());
968     instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->applyFunction()));
969     instructions().append(target->bind(begin, instructions().size()));
970     return target;
971 }
972
973 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
974 {
975     StringImpl* rep = ident.impl();
976     IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
977     if (result.isNewEntry)
978         m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
979
980     return result.iterator->second;
981 }
982
983 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
984 {
985     int index = m_nextConstantOffset;
986
987     JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
988     if (result.isNewEntry) {
989         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
990         ++m_nextConstantOffset;
991         m_codeBlock->addConstant(JSValue(v));
992     } else
993         index = result.iterator->second;
994
995     return &m_constantPoolRegisters[index];
996 }
997
998 unsigned BytecodeGenerator::addRegExp(RegExp* r)
999 {
1000     return m_codeBlock->addRegExp(r);
1001 }
1002
1003 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
1004 {
1005     emitOpcode(op_mov);
1006     instructions().append(dst->index());
1007     instructions().append(src->index());
1008     return dst;
1009 }
1010
1011 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
1012 {
1013     emitOpcode(opcodeID);
1014     instructions().append(dst->index());
1015     instructions().append(src->index());
1016     return dst;
1017 }
1018
1019 RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
1020 {
1021     emitOpcode(op_pre_inc);
1022     instructions().append(srcDst->index());
1023     return srcDst;
1024 }
1025
1026 RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
1027 {
1028     emitOpcode(op_pre_dec);
1029     instructions().append(srcDst->index());
1030     return srcDst;
1031 }
1032
1033 RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
1034 {
1035     emitOpcode(op_post_inc);
1036     instructions().append(dst->index());
1037     instructions().append(srcDst->index());
1038     return dst;
1039 }
1040
1041 RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
1042 {
1043     emitOpcode(op_post_dec);
1044     instructions().append(dst->index());
1045     instructions().append(srcDst->index());
1046     return dst;
1047 }
1048
1049 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1050 {
1051     emitOpcode(opcodeID);
1052     instructions().append(dst->index());
1053     instructions().append(src1->index());
1054     instructions().append(src2->index());
1055
1056     if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
1057         opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
1058         instructions().append(types.toInt());
1059
1060     return dst;
1061 }
1062
1063 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1064 {
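    // Peephole: fold a comparison of a just-computed typeof result against a constant
    // type-name string (e.g. typeof x == "number") into a dedicated op_is_* opcode.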
1065     if (m_lastOpcodeID == op_typeof) {
1066         int dstIndex;
1067         int srcIndex;
1068
1069         retrieveLastUnaryOp(dstIndex, srcIndex);
1070
1071         if (src1->index() == dstIndex
1072             && src1->isTemporary()
1073             && m_codeBlock->isConstantRegisterIndex(src2->index())
1074             && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1075             const UString& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1076             if (value == "undefined") {
1077                 rewindUnaryOp();
1078                 emitOpcode(op_is_undefined);
1079                 instructions().append(dst->index());
1080                 instructions().append(srcIndex);
1081                 return dst;
1082             }
1083             if (value == "boolean") {
1084                 rewindUnaryOp();
1085                 emitOpcode(op_is_boolean);
1086                 instructions().append(dst->index());
1087                 instructions().append(srcIndex);
1088                 return dst;
1089             }
1090             if (value == "number") {
1091                 rewindUnaryOp();
1092                 emitOpcode(op_is_number);
1093                 instructions().append(dst->index());
1094                 instructions().append(srcIndex);
1095                 return dst;
1096             }
1097             if (value == "string") {
1098                 rewindUnaryOp();
1099                 emitOpcode(op_is_string);
1100                 instructions().append(dst->index());
1101                 instructions().append(srcIndex);
1102                 return dst;
1103             }
1104             if (value == "object") {
1105                 rewindUnaryOp();
1106                 emitOpcode(op_is_object);
1107                 instructions().append(dst->index());
1108                 instructions().append(srcIndex);
1109                 return dst;
1110             }
1111             if (value == "function") {
1112                 rewindUnaryOp();
1113                 emitOpcode(op_is_function);
1114                 instructions().append(dst->index());
1115                 instructions().append(srcIndex);
1116                 return dst;
1117             }
1118         }
1119     }
1120
1121     emitOpcode(opcodeID);
1122     instructions().append(dst->index());
1123     instructions().append(src1->index());
1124     instructions().append(src2->index());
1125     return dst;
1126 }
1127
1128 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1129 {
1130     return emitLoad(dst, jsBoolean(b));
1131 }
1132
1133 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1134 {
1135     // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1136     // Later we can do the extra work to handle that like the other cases.  They also don't
1137     // work correctly with NaN as a key.
1138     if (isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1139         return emitLoad(dst, jsNumber(number));
1140     JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->second;
1141     if (!valueInMap)
1142         valueInMap = jsNumber(number);
1143     return emitLoad(dst, valueInMap);
1144 }
1145
1146 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1147 {
1148     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
1149     if (!stringInMap)
1150         stringInMap = jsOwnedString(globalData(), identifier.ustring());
1151     return emitLoad(dst, JSValue(stringInMap));
1152 }
1153
1154 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1155 {
1156     RegisterID* constantID = addConstantValue(v);
1157     if (dst)
1158         return emitMove(dst, constantID);
1159     return constantID;
1160 }
1161
1162 ResolveResult BytecodeGenerator::resolve(const Identifier& property)
1163 {
1164     if (property == propertyNames().thisIdentifier)
1165         return ResolveResult::registerResolve(thisRegister(), ResolveResult::ReadOnlyFlag);
1166
1167     // Check if the property should be allocated in a register.
1168     if (m_codeType != GlobalCode && shouldOptimizeLocals()) {
1169         SymbolTableEntry entry = symbolTable().get(property.impl());
1170         if (!entry.isNull()) {
1171             if (property == propertyNames().arguments)
1172                 createArgumentsIfNecessary();
1173             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1174             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1175             return ResolveResult::registerResolve(local, flags);
1176         }
1177     }
1178
1179     // Cases where we cannot statically optimize the lookup.
1180     if (property == propertyNames().arguments || !canOptimizeNonLocals())
1181         return ResolveResult::dynamicResolve(0);
1182
1183     ScopeChainIterator iter = m_scopeChain->begin();
1184     ScopeChainIterator end = m_scopeChain->end();
1185     size_t depth = 0;
1186     size_t depthOfFirstScopeWithDynamicChecks = 0;
1187     unsigned flags = 0;
1188     for (; iter != end; ++iter, ++depth) {
1189         JSObject* currentScope = iter->get();
1190         if (!currentScope->isVariableObject()) {
1191             flags |= ResolveResult::DynamicFlag;
1192             break;
1193         }        
1194         JSSymbolTableObject* currentVariableObject = jsCast<JSSymbolTableObject*>(currentScope);
1195         SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());
1196
1197         // Found the property
1198         if (!entry.isNull()) {
1199             if (entry.isReadOnly())
1200                 flags |= ResolveResult::ReadOnlyFlag;
1201             depth += m_codeBlock->needsFullScopeChain();
1202             if (++iter == end) {
1203                 if (flags & ResolveResult::DynamicFlag)
1204                     return ResolveResult::dynamicIndexedGlobalResolve(entry.getIndex(), depth, currentScope, flags);
1205                 return ResolveResult::indexedGlobalResolve(entry.getIndex(), currentScope, flags);
1206             }
1207 #if !ASSERT_DISABLED
1208             if (JSActivation* activation = jsDynamicCast<JSActivation*>(currentVariableObject))
1209                 ASSERT(activation->isValidScopedLookup(entry.getIndex()));
1210 #endif
1211             return ResolveResult::lexicalResolve(entry.getIndex(), depth, flags);
1212         }
1213         bool scopeRequiresDynamicChecks = false;
1214         if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
1215             break;
1216         if (!(flags & ResolveResult::DynamicFlag)) {
1217             if (scopeRequiresDynamicChecks)
1218                 flags |= ResolveResult::DynamicFlag;
1219             else
1220                 ++depthOfFirstScopeWithDynamicChecks;
1221         }
1222     }
1223
1224     // Can't locate the property but we're able to avoid a few lookups.
1225     JSObject* scope = iter->get();
1226     // Step over the function's activation, if it needs one. At this point we
1227     // know there is no dynamic scope in the function itself, so this is safe to
1228     // do.
1229     depth += m_codeBlock->needsFullScopeChain();
1230     depthOfFirstScopeWithDynamicChecks += m_codeBlock->needsFullScopeChain();
1231     if (++iter == end) {
1232         if ((flags & ResolveResult::DynamicFlag) && depth)
1233             return ResolveResult::dynamicGlobalResolve(depth, scope);
1234         return ResolveResult::globalResolve(scope);
1235     }
1236     return ResolveResult::dynamicResolve(depthOfFirstScopeWithDynamicChecks);
1237 }
1238
1239 ResolveResult BytecodeGenerator::resolveConstDecl(const Identifier& property)
1240 {
1241     // Register-allocated const declarations.
1242     if (m_codeType != EvalCode && m_codeType != GlobalCode) {
1243         SymbolTableEntry entry = symbolTable().get(property.impl());
1244         if (!entry.isNull()) {
1245             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1246             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1247             return ResolveResult::registerResolve(local, flags);
1248         }
1249     }
1250
1251     // Const declarations in eval code or global code.
1252     ScopeChainIterator iter = scopeChain()->begin();
1253     ScopeChainIterator end = scopeChain()->end();
1254     size_t depth = 0;
1255     for (; iter != end; ++iter, ++depth) {
1256         JSObject* currentScope = iter->get();
1257         if (!currentScope->isVariableObject())
1258             continue;
1259         JSSymbolTableObject* currentVariableObject = jsCast<JSSymbolTableObject*>(currentScope);
1260         SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());
1261         if (entry.isNull())
1262             continue;
1263         if (++iter == end)
1264             return ResolveResult::indexedGlobalResolve(entry.getIndex(), currentVariableObject, 0);
1265         return ResolveResult::lexicalResolve(entry.getIndex(), depth + scopeDepth(), 0);
1266     }
1267
1268     // FIXME: While this code should only be hit in an eval block, it will assign
1269     // to the wrong base if property exists in an intervening with scope.
1270     return ResolveResult::dynamicResolve(scopeDepth());
1271 }
1272
1273 void BytecodeGenerator::emitCheckHasInstance(RegisterID* base)
1274 {
1275     emitOpcode(op_check_has_instance);
1276     instructions().append(base->index());
1277 }
1278
1279 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
1280 {
1281     emitOpcode(op_instanceof);
1282     instructions().append(dst->index());
1283     instructions().append(value->index());
1284     instructions().append(base->index());
1285     instructions().append(basePrototype->index());
1286     return dst;
1287 }
1288
1289 static const unsigned maxGlobalResolves = 128;
1290
1291 bool BytecodeGenerator::shouldAvoidResolveGlobal()
1292 {
1293     return m_codeBlock->globalResolveInfoCount() > maxGlobalResolves && !m_labelScopes.size();
1294 }
1295
1296 RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1297 {
1298     if (resolveResult.isStatic())
1299         return emitGetStaticVar(dst, resolveResult);
1300     
1301     if (resolveResult.isGlobal() && !shouldAvoidResolveGlobal()) {
1302 #if ENABLE(JIT)
1303         m_codeBlock->addGlobalResolveInfo(instructions().size());
1304 #endif
1305         m_codeBlock->addGlobalResolveInstruction(instructions().size());
1306         bool dynamic = resolveResult.isDynamic() && resolveResult.depth();
1307         ValueProfile* profile = emitProfiledOpcode(dynamic ? op_resolve_global_dynamic : op_resolve_global);
1308         instructions().append(dst->index());
1309         instructions().append(addConstant(property));
1310         instructions().append(0);
1311         instructions().append(0);
1312         if (dynamic)
1313             instructions().append(resolveResult.depth());
1314         instructions().append(profile);
1315         return dst;
1316     }
1317         
1318     if (resolveResult.type() == ResolveResult::Dynamic && resolveResult.depth()) {
1319         // In this case we are at least able to drop a few scope chains from the
1320         // lookup chain, although we still need to hash from then on.
1321         ValueProfile* profile = emitProfiledOpcode(op_resolve_skip);
1322         instructions().append(dst->index());
1323         instructions().append(addConstant(property));
1324         instructions().append(resolveResult.depth());
1325         instructions().append(profile);
1326         return dst;
1327     }
1328
1329     ValueProfile* profile = emitProfiledOpcode(op_resolve);
1330     instructions().append(dst->index());
1331     instructions().append(addConstant(property));
1332     instructions().append(profile);
1333     return dst;
1334 }
1335
1336 RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1337 {
1338     if (resolveResult.isGlobal() && !resolveResult.isDynamic())
1339         // Global object is the base
1340         return emitLoad(dst, JSValue(resolveResult.globalObject()));
1341
1342     // We can't optimise at all :-(
1343     ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1344     instructions().append(dst->index());
1345     instructions().append(addConstant(property));
1346     instructions().append(false);
1347     instructions().append(profile);
1348     return dst;
1349 }
1350
1351 RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1352 {
1353     if (!m_codeBlock->isStrictMode())
1354         return emitResolveBase(dst, resolveResult, property);
1355
1356     if (resolveResult.isGlobal() && !resolveResult.isDynamic()) {
1357         // Global object is the base
1358         RefPtr<RegisterID> result = emitLoad(dst, JSValue(resolveResult.globalObject()));
1359         emitOpcode(op_ensure_property_exists);
1360         instructions().append(dst->index());
1361         instructions().append(addConstant(property));
1362         return result.get();
1363     }
1364
1365     // We can't optimise at all :-(
1366     ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1367     instructions().append(dst->index());
1368     instructions().append(addConstant(property));
1369     instructions().append(true);
1370     instructions().append(profile);
1371     return dst;
1372 }
1373
1374 RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1375 {
1376     if (resolveResult.isGlobal() && !resolveResult.isDynamic()) {
1377         // Global object is the base
1378         emitLoad(baseDst, JSValue(resolveResult.globalObject()));
1379
1380         if (resolveResult.isStatic()) {
1381             // The variable lives at a known index, so read it directly rather than by name lookup.
1382             emitGetStaticVar(propDst, resolveResult);
1383             return baseDst;
1384         }
1385
1386         if (shouldAvoidResolveGlobal()) {
1387             ValueProfile* profile = emitProfiledOpcode(op_resolve);
1388             instructions().append(propDst->index());
1389             instructions().append(addConstant(property));
1390             instructions().append(profile);
1391             return baseDst;
1392         }
1393
1394 #if ENABLE(JIT)
1395         m_codeBlock->addGlobalResolveInfo(instructions().size());
1396 #endif
1397 #if ENABLE(CLASSIC_INTERPRETER)
1398         m_codeBlock->addGlobalResolveInstruction(instructions().size());
1399 #endif
1400         ValueProfile* profile = emitProfiledOpcode(op_resolve_global);
1401         instructions().append(propDst->index());
1402         instructions().append(addConstant(property));
1403         instructions().append(0);
1404         instructions().append(0);
1405         instructions().append(profile);
1406         return baseDst;
1407     }
1408
1409     ValueProfile* profile = emitProfiledOpcode(op_resolve_with_base);
1410     instructions().append(baseDst->index());
1411     instructions().append(propDst->index());
1412     instructions().append(addConstant(property));
1413     instructions().append(profile);
1414     return baseDst;
1415 }
1416
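// Resolves 'property' and materializes the 'this' value for a call. In every
// case except a fully dynamic lookup, the base ('this') is simply undefined,
// so only the Dynamic case emits op_resolve_with_this, which must determine
// the base object at run time.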
1417 RegisterID* BytecodeGenerator::emitResolveWithThis(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1418 {
1419     if (resolveResult.isStatic()) {
1420         emitLoad(baseDst, jsUndefined());
1421         emitGetStaticVar(propDst, resolveResult);
1422         return baseDst;
1423     }
1424
1425     if (resolveResult.type() == ResolveResult::Dynamic) {
1426         // We can't optimise at all :-(
1427         ValueProfile* profile = emitProfiledOpcode(op_resolve_with_this);
1428         instructions().append(baseDst->index());
1429         instructions().append(propDst->index());
1430         instructions().append(addConstant(property));
1431         instructions().append(profile);
1432         return baseDst;
1433     }
1434
1435     emitLoad(baseDst, jsUndefined());
1436     return emitResolve(propDst, resolveResult, property);
1437 }
1438
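// Loads a statically resolved variable: locals are used (or moved) directly,
// lexically scoped variables go through op_get_scoped_var with an index and
// scope depth, and indexed globals go through op_get_global_var. As a small
// peephole, if the previous instruction was an op_put_global_var that stored
// dst into the same global register, the reload is skipped entirely.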
1439 RegisterID* BytecodeGenerator::emitGetStaticVar(RegisterID* dst, const ResolveResult& resolveResult)
1440 {
1441     ValueProfile* profile = 0;
1442
1443     switch (resolveResult.type()) {
1444     case ResolveResult::Register:
1445     case ResolveResult::ReadOnlyRegister:
1446         if (dst == ignoredResult())
1447             return 0;
1448         return moveToDestinationIfNeeded(dst, resolveResult.local());
1449
1450     case ResolveResult::Lexical:
1451     case ResolveResult::ReadOnlyLexical:
1452         profile = emitProfiledOpcode(op_get_scoped_var);
1453         instructions().append(dst->index());
1454         instructions().append(resolveResult.index());
1455         instructions().append(resolveResult.depth());
1456         instructions().append(profile);
1457         return dst;
1458
1459     case ResolveResult::IndexedGlobal:
1460     case ResolveResult::ReadOnlyIndexedGlobal:
1461         if (m_lastOpcodeID == op_put_global_var) {
1462             WriteBarrier<Unknown>* dstPointer;
1463             int srcIndex;
1464             retrieveLastUnaryOp(dstPointer, srcIndex);
1465             if (dstPointer == resolveResult.registerPointer() && srcIndex == dst->index())
1466                 return dst;
1467         }
1468
1469         profile = emitProfiledOpcode(op_get_global_var);
1470         instructions().append(dst->index());
1471         instructions().append(resolveResult.registerPointer());
1472         instructions().append(profile);
1473         return dst;
1474
1475     default:
1476         ASSERT_NOT_REACHED();
1477         return 0;
1478     }
1479 }
1480
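// Store counterpart of emitGetStaticVar: writes locals directly, lexically
// scoped variables via op_put_scoped_var, and indexed globals via
// op_put_global_var. The read-only variants are accepted here too; presumably
// the caller decides whether the write is actually legal.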
1481 RegisterID* BytecodeGenerator::emitPutStaticVar(const ResolveResult& resolveResult, RegisterID* value)
1482 {
1483     switch (resolveResult.type()) {
1484     case ResolveResult::Register:
1485     case ResolveResult::ReadOnlyRegister:
1486         return moveToDestinationIfNeeded(resolveResult.local(), value);
1487
1488     case ResolveResult::Lexical:
1489     case ResolveResult::ReadOnlyLexical:
1490         emitOpcode(op_put_scoped_var);
1491         instructions().append(resolveResult.index());
1492         instructions().append(resolveResult.depth());
1493         instructions().append(value->index());
1494         return value;
1495
1496     case ResolveResult::IndexedGlobal:
1497     case ResolveResult::ReadOnlyIndexedGlobal:
1498         emitOpcode(op_put_global_var);
1499         instructions().append(resolveResult.registerPointer());
1500         instructions().append(value->index());
1501         return value;
1502
1503     default:
1504         ASSERT_NOT_REACHED();
1505         return 0;
1506     }
1507 }
1508
1509 void BytecodeGenerator::emitMethodCheck()
1510 {
1511     emitOpcode(op_method_check);
1512 }
1513
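// Emits op_get_by_id. The four zero operands are just reserved slots as far
// as this generator is concerned; they are presumably filled in later by the
// property-access cache (hence the addPropertyAccessInstruction() call).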
1514 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1515 {
1516     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1517
1518     ValueProfile* profile = emitProfiledOpcode(op_get_by_id);
1519     instructions().append(dst->index());
1520     instructions().append(base->index());
1521     instructions().append(addConstant(property));
1522     instructions().append(0);
1523     instructions().append(0);
1524     instructions().append(0);
1525     instructions().append(0);
1526     instructions().append(profile);
1527     return dst;
1528 }
1529
1530 RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
1531 {
1532     emitOpcode(op_get_arguments_length);
1533     instructions().append(dst->index());
1534     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1535     instructions().append(base->index());
1536     instructions().append(addConstant(propertyNames().length));
1537     return dst;
1538 }
1539
1540 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1541 {
1542     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1543
1544     emitOpcode(op_put_by_id);
1545     instructions().append(base->index());
1546     instructions().append(addConstant(property));
1547     instructions().append(value->index());
1548     instructions().append(0);
1549     instructions().append(0);
1550     instructions().append(0);
1551     instructions().append(0);
1552     instructions().append(0);
1553     return value;
1554 }
1555
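// Same encoding as emitPutById, except that the final operand is non-zero,
// which appears to mark the store as a "direct" (own-property) put. The flag
// is deliberately left off for __proto__, presumably so that prototype
// changes still take the normal put path.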
1556 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1557 {
1558     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1559     
1560     emitOpcode(op_put_by_id);
1561     instructions().append(base->index());
1562     instructions().append(addConstant(property));
1563     instructions().append(value->index());
1564     instructions().append(0);
1565     instructions().append(0);
1566     instructions().append(0);
1567     instructions().append(0);
1568     instructions().append(property != m_globalData->propertyNames->underscoreProto);
1569     return value;
1570 }
1571
1572 void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1573 {
1574     emitOpcode(op_put_getter_setter);
1575     instructions().append(base->index());
1576     instructions().append(addConstant(property));
1577     instructions().append(getter->index());
1578     instructions().append(setter->index());
1579 }
1580
1581 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1582 {
1583     emitOpcode(op_del_by_id);
1584     instructions().append(dst->index());
1585     instructions().append(base->index());
1586     instructions().append(addConstant(property));
1587     return dst;
1588 }
1589
1590 RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1591 {
1592     ValueProfile* profile = emitProfiledOpcode(op_get_argument_by_val);
1593     instructions().append(dst->index());
1594     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1595     instructions().append(base->index());
1596     instructions().append(property->index());
1597     instructions().append(profile);
1598     return dst;
1599 }
1600
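// If the subscript register is the property register of an enclosing for-in
// loop, emit op_get_by_pname, which carries the loop's expected-subscript,
// iterator and index registers, presumably so the access can reuse the
// enumeration state; otherwise fall back to a plain op_get_by_val.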
1601 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1602 {
1603     for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1604         ForInContext& context = m_forInContextStack[i - 1];
1605         if (context.propertyRegister == property) {
1606             emitOpcode(op_get_by_pname);
1607             instructions().append(dst->index());
1608             instructions().append(base->index());
1609             instructions().append(property->index());
1610             instructions().append(context.expectedSubscriptRegister->index());
1611             instructions().append(context.iterRegister->index());
1612             instructions().append(context.indexRegister->index());
1613             return dst;
1614         }
1615     }
1616     ValueProfile* profile = emitProfiledOpcode(op_get_by_val);
1617     instructions().append(dst->index());
1618     instructions().append(base->index());
1619     instructions().append(property->index());
1620     instructions().append(profile);
1621     return dst;
1622 }
1623
1624 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1625 {
1626     emitOpcode(op_put_by_val);
1627     instructions().append(base->index());
1628     instructions().append(property->index());
1629     instructions().append(value->index());
1630     return value;
1631 }
1632
1633 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1634 {
1635     emitOpcode(op_del_by_val);
1636     instructions().append(dst->index());
1637     instructions().append(base->index());
1638     instructions().append(property->index());
1639     return dst;
1640 }
1641
1642 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1643 {
1644     emitOpcode(op_put_by_index);
1645     instructions().append(base->index());
1646     instructions().append(index);
1647     instructions().append(value->index());
1648     return value;
1649 }
1650
1651 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1652 {
1653     emitOpcode(op_new_object);
1654     instructions().append(dst->index());
1655     return dst;
1656 }
1657
1658 unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
1659 {
1660     return m_codeBlock->addConstantBuffer(length);
1661 }
1662
1663 JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1664 {
1665     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
1666     if (!stringInMap) {
1667         stringInMap = jsString(globalData(), identifier.ustring());
1668         addConstantValue(stringInMap);
1669     }
1670     return stringInMap;
1671 }
1672
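// Fast path for array literals: if the first 'length' elements are all number
// or string literals with no elisions, the values are copied into a constant
// buffer and op_new_array_buffer is emitted. Otherwise each element is
// evaluated into a fresh temporary (a contiguous run of registers, as
// op_new_array requires) and op_new_array is emitted.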
1673 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
1674 {
1675 #if !ASSERT_DISABLED
1676     unsigned checkLength = 0;
1677 #endif
1678     bool hadVariableExpression = false;
1679     if (length) {
1680         for (ElementNode* n = elements; n; n = n->next()) {
1681             if (!n->value()->isNumber() && !n->value()->isString()) {
1682                 hadVariableExpression = true;
1683                 break;
1684             }
1685             if (n->elision())
1686                 break;
1687 #if !ASSERT_DISABLED
1688             checkLength++;
1689 #endif
1690         }
1691         if (!hadVariableExpression) {
1692             ASSERT(length == checkLength);
1693             unsigned constantBufferIndex = addConstantBuffer(length);
1694             JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex);
1695             unsigned index = 0;
1696             for (ElementNode* n = elements; index < length; n = n->next()) {
1697                 if (n->value()->isNumber())
1698                     constantBuffer[index++] = jsNumber(static_cast<NumberNode*>(n->value())->value());
1699                 else {
1700                     ASSERT(n->value()->isString());
1701                     constantBuffer[index++] = addStringConstant(static_cast<StringNode*>(n->value())->value());
1702                 }
1703             }
1704             emitOpcode(op_new_array_buffer);
1705             instructions().append(dst->index());
1706             instructions().append(constantBufferIndex);
1707             instructions().append(length);
1708             return dst;
1709         }
1710     }
1711
1712     Vector<RefPtr<RegisterID>, 16> argv;
1713     for (ElementNode* n = elements; n; n = n->next()) {
1714         if (n->elision())
1715             break;
1716         argv.append(newTemporary());
1717         // op_new_array requires the initial values to be a sequential range of registers
1718         ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1719         emitNode(argv.last().get(), n->value());
1720     }
1721     emitOpcode(op_new_array);
1722     instructions().append(dst->index());
1723     instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1724     instructions().append(argv.size()); // argc
1725     return dst;
1726 }
1727
1728 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
1729 {
1730     return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function)), false);
1731 }
1732
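// Like emitNewFunction, but caches the function-decl index per body node in
// m_functionOffsets and sets op_new_func's null-check flag, presumably so the
// function object is only created the first time the declaration is reached.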
1733 RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
1734 {
1735     FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
1736     if (ptr.isNewEntry)
1737         ptr.iterator->second = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function));
1738     return emitNewFunctionInternal(dst, ptr.iterator->second, true);
1739 }
1740
1741 RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
1742 {
1743     createActivationIfNecessary();
1744     emitOpcode(op_new_func);
1745     instructions().append(dst->index());
1746     instructions().append(index);
1747     instructions().append(doNullCheck);
1748     return dst;
1749 }
1750
1751 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1752 {
1753     emitOpcode(op_new_regexp);
1754     instructions().append(dst->index());
1755     instructions().append(addRegExp(regExp));
1756     return dst;
1757 }
1758
1759 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1760 {
1761     FunctionBodyNode* function = n->body();
1762     unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function));
1763     
1764     createActivationIfNecessary();
1765     emitOpcode(op_new_func_exp);
1766     instructions().append(r0->index());
1767     instructions().append(index);
1768     return r0;
1769 }
1770
1771 RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1772 {
1773     return emitCall(op_call, dst, func, callArguments, divot, startOffset, endOffset);
1774 }
1775
1776 void BytecodeGenerator::createArgumentsIfNecessary()
1777 {
1778     if (m_codeType != FunctionCode)
1779         return;
1780     
1781     if (!m_codeBlock->usesArguments())
1782         return;
1783
1784     // If we're in strict mode, we tear off the arguments on function
1785     // entry, so there's no need to check whether we need to create
1786     // them now.
1787     if (m_codeBlock->isStrictMode())
1788         return;
1789
1790     emitOpcode(op_create_arguments);
1791     instructions().append(m_codeBlock->argumentsRegister());
1792 }
1793
1794 void BytecodeGenerator::createActivationIfNecessary()
1795 {
1796     if (m_hasCreatedActivation)
1797         return;
1798     if (!m_codeBlock->needsFullScopeChain())
1799         return;
1800     emitOpcode(op_create_activation);
1801     instructions().append(m_activationRegister->index());
1802 }
1803
1804 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1805 {
1806     return emitCall(op_call_eval, dst, func, callArguments, divot, startOffset, endOffset);
1807 }
1808
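// Shared implementation of op_call / op_call_eval: evaluates the arguments
// into the registers set aside by CallArguments, reserves temporaries for the
// call-frame header, emits the call itself (with an LLInt call link info slot
// when LLINT is enabled), and fetches the return value with op_call_put_result
// only if the caller actually wants a result. Profiling hooks, when enabled,
// bracket the call.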
1809 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1810 {
1811     ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1812     ASSERT(func->refCount());
1813
1814     if (m_shouldEmitProfileHooks)
1815         emitMove(callArguments.profileHookRegister(), func);
1816
1817     // Generate code for arguments.
1818     unsigned argument = 0;
1819     for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
1820         emitNode(callArguments.argumentRegister(argument++), n);
1821
1822     // Reserve space for call frame.
1823     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
1824     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
1825         callFrame.append(newTemporary());
1826
1827     if (m_shouldEmitProfileHooks) {
1828         emitOpcode(op_profile_will_call);
1829         instructions().append(callArguments.profileHookRegister()->index());
1830     }
1831
1832     emitExpressionInfo(divot, startOffset, endOffset);
1833
1834     // Emit call.
1835     emitOpcode(opcodeID);
1836     instructions().append(func->index()); // func
1837     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1838     instructions().append(callArguments.registerOffset()); // registerOffset
1839 #if ENABLE(LLINT)
1840     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1841 #else
1842     instructions().append(0);
1843 #endif
1844     instructions().append(0);
1845     if (dst != ignoredResult()) {
1846         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
1847         instructions().append(dst->index()); // dst
1848         instructions().append(profile);
1849     }
1850
1851     if (m_shouldEmitProfileHooks) {
1852         emitOpcode(op_profile_did_call);
1853         instructions().append(callArguments.profileHookRegister()->index());
1854     }
1855
1856     return dst;
1857 }
1858
1859 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
1860 {
1861     if (m_shouldEmitProfileHooks) {
1862         emitMove(profileHookRegister, func);
1863         emitOpcode(op_profile_will_call);
1864         instructions().append(profileHookRegister->index());
1865     }
1866     
1867     emitExpressionInfo(divot, startOffset, endOffset);
1868
1869     // Emit call.
1870     emitOpcode(op_call_varargs);
1871     instructions().append(func->index());
1872     instructions().append(thisRegister->index());
1873     instructions().append(arguments->index());
1874     instructions().append(firstFreeRegister->index());
1875     if (dst != ignoredResult()) {
1876         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
1877         instructions().append(dst->index());
1878         instructions().append(profile);
1879     }
1880     if (m_shouldEmitProfileHooks) {
1881         emitOpcode(op_profile_did_call);
1882         instructions().append(profileHookRegister->index());
1883     }
1884     return dst;
1885 }
1886
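// Before returning, tear off the activation (op_tear_off_activation also
// receives the arguments register) when the function needs a full scope chain;
// otherwise tear off just the non-strict arguments object if one is in use.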
1887 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1888 {
1889     if (m_codeBlock->needsFullScopeChain()) {
1890         emitOpcode(op_tear_off_activation);
1891         instructions().append(m_activationRegister->index());
1892         instructions().append(m_codeBlock->argumentsRegister());
1893     } else if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !m_codeBlock->isStrictMode()) {
1894         emitOpcode(op_tear_off_arguments);
1895         instructions().append(m_codeBlock->argumentsRegister());
1896     }
1897
1898     // Constructors use op_ret_object_or_this to check that the result is an
1899     // object, unless we can trivially determine that the check is unnecessary
1900     // (currently, when the return value is 'this').
1901     if (isConstructor() && (src->index() != m_thisRegister.index())) {
1902         emitOpcode(op_ret_object_or_this);
1903         instructions().append(src->index());
1904         instructions().append(m_thisRegister.index());
1905         return src;
1906     }
1907     return emitUnaryNoDstOp(op_ret, src);
1908 }
1909
1910 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
1911 {
1912     emitOpcode(opcodeID);
1913     instructions().append(src->index());
1914     return src;
1915 }
1916
1917 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1918 {
1919     ASSERT(func->refCount());
1920
1921     if (m_shouldEmitProfileHooks)
1922         emitMove(callArguments.profileHookRegister(), func);
1923
1924     // Generate code for arguments.
1925     unsigned argument = 0;
1926     if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
1927         for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
1928             emitNode(callArguments.argumentRegister(argument++), n);
1929     }
1930
1931     if (m_shouldEmitProfileHooks) {
1932         emitOpcode(op_profile_will_call);
1933         instructions().append(callArguments.profileHookRegister()->index());
1934     }
1935
1936     // Reserve space for call frame.
1937     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
1938     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
1939         callFrame.append(newTemporary());
1940
1941     emitExpressionInfo(divot, startOffset, endOffset);
1942
1943     emitOpcode(op_construct);
1944     instructions().append(func->index()); // func
1945     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1946     instructions().append(callArguments.registerOffset()); // registerOffset
1947 #if ENABLE(LLINT)
1948     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1949 #else
1950     instructions().append(0);
1951 #endif
1952     instructions().append(0);
1953     if (dst != ignoredResult()) {
1954         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
1955         instructions().append(dst->index()); // dst
1956         instructions().append(profile);
1957     }
1958
1959     if (m_shouldEmitProfileHooks) {
1960         emitOpcode(op_profile_did_call);
1961         instructions().append(callArguments.profileHookRegister()->index());
1962     }
1963
1964     return dst;
1965 }
1966
1967 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
1968 {
1969     emitOpcode(op_strcat);
1970     instructions().append(dst->index());
1971     instructions().append(src->index());
1972     instructions().append(count);
1973
1974     return dst;
1975 }
1976
1977 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
1978 {
1979     emitOpcode(op_to_primitive);
1980     instructions().append(dst->index());
1981     instructions().append(src->index());
1982 }
1983
1984 RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
1985 {
1986     ASSERT(scope->isTemporary());
1987     ControlFlowContext context;
1988     context.isFinallyBlock = false;
1989     m_scopeContextStack.append(context);
1990     m_dynamicScopeDepth++;
1991
1992     return emitUnaryNoDstOp(op_push_scope, scope);
1993 }
1994
1995 void BytecodeGenerator::emitPopScope()
1996 {
1997     ASSERT(m_scopeContextStack.size());
1998     ASSERT(!m_scopeContextStack.last().isFinallyBlock);
1999
2000     emitOpcode(op_pop_scope);
2001
2002     m_scopeContextStack.removeLast();
2003     m_dynamicScopeDepth--;
2004 }
2005
2006 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
2007 {
2008 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2009     if (debugHookID != DidReachBreakpoint)
2010         return;
2011 #else
2012     if (!m_shouldEmitDebugHooks)
2013         return;
2014 #endif
2015     emitOpcode(op_debug);
2016     instructions().append(debugHookID);
2017     instructions().append(firstLine);
2018     instructions().append(lastLine);
2019 }
2020
2021 void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
2022 {
2023     ControlFlowContext scope;
2024     scope.isFinallyBlock = true;
2025     FinallyContext context = {
2026         finallyBlock,
2027         m_scopeContextStack.size(),
2028         m_switchContextStack.size(),
2029         m_forInContextStack.size(),
2030         m_labelScopes.size(),
2031         m_finallyDepth,
2032         m_dynamicScopeDepth
2033     };
2034     scope.finallyContext = context;
2035     m_scopeContextStack.append(scope);
2036     m_finallyDepth++;
2037 }
2038
2039 void BytecodeGenerator::popFinallyContext()
2040 {
2041     ASSERT(m_scopeContextStack.size());
2042     ASSERT(m_scopeContextStack.last().isFinallyBlock);
2043     ASSERT(m_finallyDepth > 0);
2044     m_scopeContextStack.removeLast();
2045     m_finallyDepth--;
2046 }
2047
2048 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
2049 {
2050     // Reclaim free label scopes.
2051     //
2052     // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
2053     // but sometimes this appears to make GCC go a little haywire and enter the loop with
2054     // size 0, leading to segfaulty badness. We have yet to identify anything in our code that could
2055     // legitimately cause GCC's codegen to misbehave in this fashion, so the following refactoring of
2056     // the loop condition is a workaround.
2057     while (m_labelScopes.size()) {
2058         if (m_labelScopes.last().refCount())
2059             break;
2060         m_labelScopes.removeLast();
2061     }
2062
2063     if (!m_labelScopes.size())
2064         return 0;
2065
2066     // We special-case the following, which is a syntax error in Firefox:
2067     // label:
2068     //     break;
2069     if (name.isEmpty()) {
2070         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2071             LabelScope* scope = &m_labelScopes[i];
2072             if (scope->type() != LabelScope::NamedLabel) {
2073                 ASSERT(scope->breakTarget());
2074                 return scope;
2075             }
2076         }
2077         return 0;
2078     }
2079
2080     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2081         LabelScope* scope = &m_labelScopes[i];
2082         if (scope->name() && *scope->name() == name) {
2083             ASSERT(scope->breakTarget());
2084             return scope;
2085         }
2086     }
2087     return 0;
2088 }
2089
2090 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
2091 {
2092     // Reclaim free label scopes.
2093     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
2094         m_labelScopes.removeLast();
2095
2096     if (!m_labelScopes.size())
2097         return 0;
2098
2099     if (name.isEmpty()) {
2100         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2101             LabelScope* scope = &m_labelScopes[i];
2102             if (scope->type() == LabelScope::Loop) {
2103                 ASSERT(scope->continueTarget());
2104                 return scope;
2105             }
2106         }
2107         return 0;
2108     }
2109
2110     // Continue to the loop nested nearest to the label scope that matches
2111     // 'name'.
2112     LabelScope* result = 0;
2113     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2114         LabelScope* scope = &m_labelScopes[i];
2115         if (scope->type() == LabelScope::Loop) {
2116             ASSERT(scope->continueTarget());
2117             result = scope;
2118         }
2119         if (scope->name() && *scope->name() == name)
2120             return result; // may be 0
2121     }
2122     return 0;
2123 }
2124
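// Emits a jump out of nested dynamic scopes when finally blocks are involved:
// walks the control-flow context stack from topScope down to bottomScope,
// popping runs of plain scopes with op_jmp_scopes and emitting each
// intervening finally block inline. While a finally body is generated, the
// generator state captured when that finally context was pushed is temporarily
// reinstated, then restored afterwards.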
2125 PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
2126 {
2127     while (topScope > bottomScope) {
2128         // First we count the number of dynamic scopes we need to remove to get
2129         // to a finally block.
2130         int nNormalScopes = 0;
2131         while (topScope > bottomScope) {
2132             if (topScope->isFinallyBlock)
2133                 break;
2134             ++nNormalScopes;
2135             --topScope;
2136         }
2137
2138         if (nNormalScopes) {
2139             size_t begin = instructions().size();
2140
2141             // We need to remove a number of dynamic scopes to get to the next
2142             // finally block
2143             emitOpcode(op_jmp_scopes);
2144             instructions().append(nNormalScopes);
2145
2146             // If topScope == bottomScope then there isn't actually a finally block
2147             // left to emit, so make the jmp_scopes jump directly to the target label
2148             if (topScope == bottomScope) {
2149                 instructions().append(target->bind(begin, instructions().size()));
2150                 return target;
2151             }
2152
2153             // Otherwise we just use jmp_scopes to pop a group of scopes and go
2154             // to the next instruction
2155             RefPtr<Label> nextInsn = newLabel();
2156             instructions().append(nextInsn->bind(begin, instructions().size()));
2157             emitLabel(nextInsn.get());
2158         }
2159         
2160         Vector<ControlFlowContext> savedScopeContextStack;
2161         Vector<SwitchInfo> savedSwitchContextStack;
2162         Vector<ForInContext> savedForInContextStack;
2163         SegmentedVector<LabelScope, 8> savedLabelScopes;
2164         while (topScope > bottomScope && topScope->isFinallyBlock) {
2165             // Save the current state of the world while instating the state of the world
2166             // for the finally block.
2167             FinallyContext finallyContext = topScope->finallyContext;
2168             bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
2169             bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
2170             bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
2171             bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
2172             int topScopeIndex = -1;
2173             int bottomScopeIndex = -1;
2174             if (flipScopes) {
2175                 topScopeIndex = topScope - m_scopeContextStack.begin();
2176                 bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
2177                 savedScopeContextStack = m_scopeContextStack;
2178                 m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
2179             }
2180             if (flipSwitches) {
2181                 savedSwitchContextStack = m_switchContextStack;
2182                 m_switchContextStack.shrink(finallyContext.switchContextStackSize);
2183             }
2184             if (flipForIns) {
2185                 savedForInContextStack = m_forInContextStack;
2186                 m_forInContextStack.shrink(finallyContext.forInContextStackSize);
2187             }
2188             if (flipLabelScopes) {
2189                 savedLabelScopes = m_labelScopes;
2190                 while (m_labelScopes.size() > finallyContext.labelScopesSize)
2191                     m_labelScopes.removeLast();
2192             }
2193             int savedFinallyDepth = m_finallyDepth;
2194             m_finallyDepth = finallyContext.finallyDepth;
2195             int savedDynamicScopeDepth = m_dynamicScopeDepth;
2196             m_dynamicScopeDepth = finallyContext.dynamicScopeDepth;
2197             
2198             // Emit the finally block.
2199             emitNode(finallyContext.finallyBlock);
2200             
2201             // Restore the state of the world.
2202             if (flipScopes) {
2203                 m_scopeContextStack = savedScopeContextStack;
2204                 topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
2205             bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1.
2206             }
2207             if (flipSwitches)
2208                 m_switchContextStack = savedSwitchContextStack;
2209             if (flipForIns)
2210                 m_forInContextStack = savedForInContextStack;
2211             if (flipLabelScopes)
2212                 m_labelScopes = savedLabelScopes;
2213             m_finallyDepth = savedFinallyDepth;
2214             m_dynamicScopeDepth = savedDynamicScopeDepth;
2215             
2216             --topScope;
2217         }
2218     }
2219     return emitJump(target);
2220 }
2221
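// Jumps to 'target', popping (scopeDepth() - targetScopeDepth) dynamic scopes
// on the way. If any finally blocks are currently in scope, defer to
// emitComplexJumpScopes so they get emitted as part of the jump.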
2222 PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
2223 {
2224     ASSERT(scopeDepth() - targetScopeDepth >= 0);
2225     ASSERT(target->isForward());
2226
2227     size_t scopeDelta = scopeDepth() - targetScopeDepth;
2228     ASSERT(scopeDelta <= m_scopeContextStack.size());
2229     if (!scopeDelta)
2230         return emitJump(target);
2231
2232     if (m_finallyDepth)
2233         return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
2234
2235     size_t begin = instructions().size();
2236
2237     emitOpcode(op_jmp_scopes);
2238     instructions().append(scopeDelta);
2239     instructions().append(target->bind(begin, instructions().size()));
2240     return target;
2241 }
2242
2243 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
2244 {
2245     size_t begin = instructions().size();
2246
2247     emitOpcode(op_get_pnames);
2248     instructions().append(dst->index());
2249     instructions().append(base->index());
2250     instructions().append(i->index());
2251     instructions().append(size->index());
2252     instructions().append(breakTarget->bind(begin, instructions().size()));
2253     return dst;
2254 }
2255
2256 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
2257 {
2258     size_t begin = instructions().size();
2259
2260     emitOpcode(op_next_pname);
2261     instructions().append(dst->index());
2262     instructions().append(base->index());
2263     instructions().append(i->index());
2264     instructions().append(size->index());
2265     instructions().append(iter->index());
2266     instructions().append(target->bind(begin, instructions().size()));
2267     return dst;
2268 }
2269
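// Registers an exception handler covering [start, end) whose target is the
// op_catch emitted here. The handler records the scope depth at this point
// (presumably so the unwinder can restore it) and, under LLINT, the address
// of the LLInt catch routine.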
2270 RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
2271 {
2272     m_usesExceptions = true;
2273 #if ENABLE(JIT)
2274 #if ENABLE(LLINT)
2275     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(bitwise_cast<void*>(&llint_op_catch))) };
2276 #else
2277     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
2278 #endif
2279 #else
2280     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
2281 #endif
2282
2283     m_codeBlock->addExceptionHandler(info);
2284     emitOpcode(op_catch);
2285     instructions().append(targetRegister->index());
2286     return targetRegister;
2287 }
2288
2289 void BytecodeGenerator::emitThrowReferenceError(const UString& message)
2290 {
2291     emitOpcode(op_throw_reference_error);
2292     instructions().append(addConstantValue(jsString(globalData(), message))->index());
2293 }
2294
2295 void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value)
2296 {
2297     ControlFlowContext context;
2298     context.isFinallyBlock = false;
2299     m_scopeContextStack.append(context);
2300     m_dynamicScopeDepth++;
2301
2302     emitOpcode(op_push_new_scope);
2303     instructions().append(dst->index());
2304     instructions().append(addConstant(property));
2305     instructions().append(value->index());
2306 }
2307
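// Starts a switch: emits the appropriate switch opcode with placeholder
// operands for the jump-table index and the default target; endSwitch()
// patches both once the clause labels and default label are known.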
2308 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
2309 {
2310     SwitchInfo info = { instructions().size(), type };
2311     switch (type) {
2312         case SwitchInfo::SwitchImmediate:
2313             emitOpcode(op_switch_imm);
2314             break;
2315         case SwitchInfo::SwitchCharacter:
2316             emitOpcode(op_switch_char);
2317             break;
2318         case SwitchInfo::SwitchString:
2319             emitOpcode(op_switch_string);
2320             break;
2321         default:
2322             ASSERT_NOT_REACHED();
2323     }
2324
2325     instructions().append(0); // placeholder for table index
2326     instructions().append(0); // placeholder for default target
2327     instructions().append(scrutineeRegister->index());
2328     m_switchContextStack.append(info);
2329 }
2330
2331 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2332 {
2333     UNUSED_PARAM(max);
2334     ASSERT(node->isNumber());
2335     double value = static_cast<NumberNode*>(node)->value();
2336     int32_t key = static_cast<int32_t>(value);
2337     ASSERT(key == value);
2338     ASSERT(key >= min);
2339     ASSERT(key <= max);
2340     return key - min;
2341 }
2342
2343 static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2344 {
2345     jumpTable.min = min;
2346     jumpTable.branchOffsets.resize(max - min + 1);
2347     jumpTable.branchOffsets.fill(0);
2348     for (uint32_t i = 0; i < clauseCount; ++i) {
2349         // We're emitting this after the clause labels should have been fixed, so 
2350         // the labels should not be "forward" references
2351         ASSERT(!labels[i]->isForward());
2352         jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2353     }
2354 }
2355
2356 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2357 {
2358     UNUSED_PARAM(max);
2359     ASSERT(node->isString());
2360     StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2361     ASSERT(clause->length() == 1);
2362     
2363     int32_t key = (*clause)[0];
2364     ASSERT(key >= min);
2365     ASSERT(key <= max);
2366     return key - min;
2367 }
2368
2369 static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2370 {
2371     jumpTable.min = min;
2372     jumpTable.branchOffsets.resize(max - min + 1);
2373     jumpTable.branchOffsets.fill(0);
2374     for (uint32_t i = 0; i < clauseCount; ++i) {
2375         // We're emitting this after the clause labels should have been fixed, so 
2376         // the labels should not be "forward" references
2377         ASSERT(!labels[i]->isForward());
2378         jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2379     }
2380 }
2381
2382 static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2383 {
2384     for (uint32_t i = 0; i < clauseCount; ++i) {
2385         // We're emitting this after the clause labels should have been fixed, so 
2386         // the labels should not be "forward" references
2387         ASSERT(!labels[i]->isForward());
2388         
2389         ASSERT(nodes[i]->isString());
2390         StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
2391         OffsetLocation location;
2392         location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
2393         jumpTable.offsetTable.add(clause, location);
2394     }
2395 }
2396
2397 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2398 {
2399     SwitchInfo switchInfo = m_switchContextStack.last();
2400     m_switchContextStack.removeLast();
2401     if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
2402         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
2403         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2404
2405         SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
2406         prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2407     } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
2408         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
2409         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2410         
2411         SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
2412         prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2413     } else {
2414         ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
2415         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2416         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2417
2418         StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2419         prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2420     }
2421 }
2422
2423 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2424 {
2425     // It would be nice to do an even better job of identifying exactly where the expression is.
2426     // And we could make the caller pass the node pointer in, if there was some way of getting
2427     // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2428     // is still good enough to get us an accurate line number.
2429     m_expressionTooDeep = true;
2430     return newTemporary();
2431 }
2432
2433 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2434 {
2435     m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2436 }
2437
2438 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2439 {
2440     RegisterID* registerID = resolve(ident).local();
2441     if (!registerID || registerID->index() >= 0)
2442         return false;
2443     return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2444 }
2445
2446 } // namespace JSC