DFG should be able to set watchpoints on global variables
Source/JavaScriptCore/bytecompiler/BytecodeGenerator.cpp
1 /*
2  * Copyright (C) 2008, 2009, 2012 Apple Inc. All rights reserved.
3  * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4  * Copyright (C) 2012 Igalia, S.L.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  *
10  * 1.  Redistributions of source code must retain the above copyright
11  *     notice, this list of conditions and the following disclaimer.
12  * 2.  Redistributions in binary form must reproduce the above copyright
13  *     notice, this list of conditions and the following disclaimer in the
14  *     documentation and/or other materials provided with the distribution.
15  * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16  *     its contributors may be used to endorse or promote products derived
17  *     from this software without specific prior written permission.
18  *
19  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29  */
30
31 #include "config.h"
32 #include "BytecodeGenerator.h"
33
34 #include "BatchedTransitionOptimizer.h"
35 #include "JSActivation.h"
36 #include "JSFunction.h"
37 #include "Interpreter.h"
38 #include "LowLevelInterpreter.h"
39 #include "ScopeChain.h"
40 #include "StrongInlines.h"
41 #include "UString.h"
42
43 using namespace std;
44
45 namespace JSC {
46
47 /*
48     The layout of a register frame looks like this:
49
50     For
51
52     function f(x, y) {
53         var v1;
54         function g() { }
55         var v2;
56         return (x) * (y);
57     }
58
59     assuming (x) and (y) generated temporaries t1 and t2, you would have
60
61     ------------------------------------
62     |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
63     ------------------------------------
64     | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
65     ------------------------------------
66     | params->|<-locals      | temps->
67
68     Because temporary registers are allocated in a stack-like fashion, we
69     can reclaim them with a simple popping algorithm. The same goes for labels.
70     (We never reclaim parameter or local registers, because parameters and
71     locals are DontDelete.)
72
73     The register layout before a function call looks like this:
74
75     For
76
77     function f(x, y)
78     {
79     }
80
81     f(1);
82
83     >                        <------------------------------
84     <                        >  reserved: call frame  |  1 | <-- value held
85     >         >snip<         <------------------------------
86     <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
87     >                        <------------------------------
88     | params->|<-locals      | temps->
89
90     The call instruction fills in the "call frame" registers. It also pads
91     missing arguments at the end of the call:
92
93     >                        <-----------------------------------
94     <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
95     >         >snip<         <-----------------------------------
96     <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
97     >                        <-----------------------------------
98     | params->|<-locals      | temps->
99
100     After filling in missing arguments, the call instruction sets up the new
101     stack frame to overlap the end of the old stack frame:
102
103                              |---------------------------------->                        <
104                              |  reserved: call frame  |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
105                              |---------------------------------->         >snip<         <
106                              | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
107                              |---------------------------------->                        <
108                              |                        | params->|<-locals       | temps->
109
110     That way, arguments are "copied" into the callee's stack frame for free.
111
112     If the caller supplies too many arguments, this trick doesn't work. The
113     extra arguments protrude into space reserved for locals and temporaries.
114     In that case, the call instruction makes a real copy of the call frame header,
115     along with just the arguments expected by the callee, leaving the original
116     call frame header and arguments behind. (The call instruction can't just discard
117     extra arguments, because the "arguments" object may access them later.)
118     This copying strategy ensures that all named values will be at the indices
119     expected by the callee.
120 */
121
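// Binds this label to a bytecode offset and backpatches the jump operands of any
// instructions that targeted the label before its location was known.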
122 void Label::setLocation(unsigned location)
123 {
124     m_location = location;
125     
126     unsigned size = m_unresolvedJumps.size();
127     for (unsigned i = 0; i < size; ++i)
128         m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
129 }
130
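// Debug-only sanity check: each ResolveResult type must carry the data its kind
// implies (a local register, a symbol table index, and/or a global object).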
131 #ifndef NDEBUG
132 void ResolveResult::checkValidity()
133 {
134     switch (m_type) {
135     case Register:
136     case ReadOnlyRegister:
137         ASSERT(m_local);
138         return;
139     case Lexical:
140     case ReadOnlyLexical:
141     case DynamicLexical:
142     case DynamicReadOnlyLexical:
143         ASSERT(m_index != missingSymbolMarker());
144         return;
145     case Global:
146     case DynamicGlobal:
147         ASSERT(m_globalObject);
148         return;
149     case IndexedGlobal:
150     case ReadOnlyIndexedGlobal:
151     case WatchedIndexedGlobal:
152     case DynamicIndexedGlobal:
153     case DynamicReadOnlyIndexedGlobal:
154         ASSERT(m_index != missingSymbolMarker());
155         ASSERT(m_globalObject);
156         return;
157     case Dynamic:
158         return;
159     default:
160         ASSERT_NOT_REACHED();
161     }
162 }
163 #endif
164
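// For indexed global resolves, returns a pointer to the slot in the global
// object's register storage that backs this variable.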
165 WriteBarrier<Unknown>* ResolveResult::registerPointer() const
166 {
167     return &jsCast<JSGlobalObject*>(globalObject())->registerAt(index());
168 }
169
170 static bool s_dumpsGeneratedCode = false;
171
172 void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
173 {
174     s_dumpsGeneratedCode = dumpsGeneratedCode;
175 }
176
177 bool BytecodeGenerator::dumpsGeneratedCode()
178 {
179     return s_dumpsGeneratedCode;
180 }
181
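// Walks the scope node emitting bytecode, transfers the finished instruction
// stream to the CodeBlock, and returns an out-of-memory error object if
// expression nesting was too deep (0 on success).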
182 JSObject* BytecodeGenerator::generate()
183 {
184     SamplingRegion samplingRegion("Bytecode Generation");
185     
186     m_codeBlock->setThisRegister(m_thisRegister.index());
187
188     m_scopeNode->emitBytecode(*this);
189     
190     m_codeBlock->instructions() = RefCountedArray<Instruction>(m_instructions);
191
192     if (s_dumpsGeneratedCode)
193         m_codeBlock->dump(m_scopeChain->globalObject->globalExec());
194
195     if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
196         symbolTable().clear();
197
198     m_codeBlock->shrinkToFit(CodeBlock::EarlyShrink);
199
200     if (m_expressionTooDeep)
201         return createOutOfMemoryError(m_scopeChain->globalObject.get());
202     return 0;
203 }
204
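// Adds a named local to the symbol table and allocates its register. Returns
// false, handing back the existing register, if the name was already declared.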
205 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
206 {
207     int index = m_calleeRegisters.size();
208     SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
209     SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
210
211     if (!result.isNewEntry) {
212         r0 = &registerFor(result.iterator->second.getIndex());
213         return false;
214     }
215
216     r0 = addVar();
217     return true;
218 }
219
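// Adds a global variable to the program's symbol table. Function declarations
// that are candidates for specialization get a watchpoint (attemptToWatch);
// re-declaring an existing entry fires its write notification instead.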
220 int BytecodeGenerator::addGlobalVar(
221     const Identifier& ident, ConstantMode constantMode, FunctionMode functionMode)
222 {
223     UNUSED_PARAM(functionMode);
224     int index = symbolTable().size();
225     SymbolTableEntry newEntry(index, (constantMode == IsConstant) ? ReadOnly : 0);
226     if (functionMode == IsFunctionToSpecialize)
227         newEntry.attemptToWatch();
228     SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
229     if (!result.isNewEntry) {
230         result.iterator->second.notifyWrite();
231         index = result.iterator->second.getIndex();
232     }
233     return index;
234 }
235
236 void BytecodeGenerator::preserveLastVar()
237 {
238     if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
239         m_lastVar = &m_calleeRegisters.last();
240 }
241
242 BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock, CompilationKind compilationKind)
243     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
244     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
245     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
246     , m_scopeChain(*scopeChain->globalData, scopeChain)
247     , m_symbolTable(symbolTable)
248     , m_scopeNode(programNode)
249     , m_codeBlock(codeBlock)
250     , m_thisRegister(CallFrame::thisArgumentOffset())
251     , m_finallyDepth(0)
252     , m_dynamicScopeDepth(0)
253     , m_baseScopeDepth(0)
254     , m_codeType(GlobalCode)
255     , m_nextConstantOffset(0)
256     , m_globalConstantIndex(0)
257     , m_hasCreatedActivation(true)
258     , m_firstLazyFunction(0)
259     , m_lastLazyFunction(0)
260     , m_globalData(scopeChain->globalData)
261     , m_lastOpcodeID(op_end)
262 #ifndef NDEBUG
263     , m_lastOpcodePosition(0)
264 #endif
265     , m_stack(wtfThreadData().stack())
266     , m_usesExceptions(false)
267     , m_expressionTooDeep(false)
268 {
269     m_globalData->startedCompiling(m_codeBlock);
270     if (m_shouldEmitDebugHooks)
271         m_codeBlock->setNeedsFullScopeChain(true);
272
273     emitOpcode(op_enter);
274     codeBlock->setGlobalData(m_globalData);
275
276     // FIXME: Move code that modifies the global object to Interpreter::execute.
277     
278     m_codeBlock->setNumParameters(1); // Allocate space for "this"
279     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
280     
281     if (compilationKind == OptimizingCompilation)
282         return;
283
284     JSGlobalObject* globalObject = scopeChain->globalObject.get();
285     ExecState* exec = globalObject->globalExec();
286     
287     BatchedTransitionOptimizer optimizer(*m_globalData, globalObject);
288
289     const VarStack& varStack = programNode->varStack();
290     const FunctionStack& functionStack = programNode->functionStack();
291
292     size_t newGlobals = varStack.size() + functionStack.size();
293     if (!newGlobals)
294         return;
295     globalObject->addRegisters(newGlobals);
296
297     for (size_t i = 0; i < functionStack.size(); ++i) {
298         FunctionBodyNode* function = functionStack[i];
299         bool propertyDidExist = 
300             globalObject->removeDirect(*m_globalData, function->ident()); // Newly declared functions overwrite existing properties.
301         
302         JSValue value = JSFunction::create(exec, makeFunction(exec, function), scopeChain);
303         int index = addGlobalVar(
304             function->ident(), IsVariable,
305             !propertyDidExist ? IsFunctionToSpecialize : NotFunctionOrNotSpecializable);
306         globalObject->registerAt(index).set(*m_globalData, globalObject, value);
307     }
308
309     for (size_t i = 0; i < varStack.size(); ++i) {
310         if (globalObject->hasProperty(exec, *varStack[i].first))
311             continue;
312         addGlobalVar(
313             *varStack[i].first,
314             (varStack[i].second & DeclarationStacks::IsConstant) ? IsConstant : IsVariable,
315             NotFunctionOrNotSpecializable);
316     }
317 }
318
319 BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, ScopeChainNode* scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock, CompilationKind)
320     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
321     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
322     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
323     , m_scopeChain(*scopeChain->globalData, scopeChain)
324     , m_symbolTable(symbolTable)
325     , m_scopeNode(functionBody)
326     , m_codeBlock(codeBlock)
327     , m_activationRegister(0)
328     , m_finallyDepth(0)
329     , m_dynamicScopeDepth(0)
330     , m_baseScopeDepth(0)
331     , m_codeType(FunctionCode)
332     , m_nextConstantOffset(0)
333     , m_globalConstantIndex(0)
334     , m_hasCreatedActivation(false)
335     , m_firstLazyFunction(0)
336     , m_lastLazyFunction(0)
337     , m_globalData(scopeChain->globalData)
338     , m_lastOpcodeID(op_end)
339 #ifndef NDEBUG
340     , m_lastOpcodePosition(0)
341 #endif
342     , m_stack(wtfThreadData().stack())
343     , m_usesExceptions(false)
344     , m_expressionTooDeep(false)
345 {
346     m_globalData->startedCompiling(m_codeBlock);
347     if (m_shouldEmitDebugHooks)
348         m_codeBlock->setNeedsFullScopeChain(true);
349
350     codeBlock->setGlobalData(m_globalData);
351     
352     emitOpcode(op_enter);
353     if (m_codeBlock->needsFullScopeChain()) {
354         m_activationRegister = addVar();
355         emitInitLazyRegister(m_activationRegister);
356         m_codeBlock->setActivationRegister(m_activationRegister->index());
357     }
358
359     // Both op_tear_off_activation and op_tear_off_arguments tear off the 'arguments'
360     // object, if created.
361     if (m_codeBlock->needsFullScopeChain() || functionBody->usesArguments()) {
362         RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
363         RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.
364
365         // We can save a little space by hard-coding the knowledge that the two
366         // 'arguments' values are stored in consecutive registers, and storing
367         // only the index of the assignable one.
368         codeBlock->setArgumentsRegister(argumentsRegister->index());
369         ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));
370
371         emitInitLazyRegister(argumentsRegister);
372         emitInitLazyRegister(unmodifiedArgumentsRegister);
373         
374         if (m_codeBlock->isStrictMode()) {
375             emitOpcode(op_create_arguments);
376             instructions().append(argumentsRegister->index());
377         }
378
379         // The debugger currently retrieves the arguments object from an activation rather than pulling
380         // it from a call frame.  In the long-term it should stop doing that (<rdar://problem/6911886>),
381         // but for now we force eager creation of the arguments object when debugging.
382         if (m_shouldEmitDebugHooks) {
383             emitOpcode(op_create_arguments);
384             instructions().append(argumentsRegister->index());
385         }
386     }
387
388     const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
389     const DeclarationStacks::VarStack& varStack = functionBody->varStack();
390
391     // Captured variables and functions go first so that activations don't have
392     // to step over the non-captured locals to mark them.
393     m_hasCreatedActivation = false;
394     if (functionBody->hasCapturedVariables()) {
395         for (size_t i = 0; i < functionStack.size(); ++i) {
396             FunctionBodyNode* function = functionStack[i];
397             const Identifier& ident = function->ident();
398             if (functionBody->captures(ident)) {
399                 if (!m_hasCreatedActivation) {
400                     m_hasCreatedActivation = true;
401                     emitOpcode(op_create_activation);
402                     instructions().append(m_activationRegister->index());
403                 }
404                 m_functions.add(ident.impl());
405                 emitNewFunction(addVar(ident, false), function);
406             }
407         }
408         for (size_t i = 0; i < varStack.size(); ++i) {
409             const Identifier& ident = *varStack[i].first;
410             if (functionBody->captures(ident))
411                 addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
412         }
413     }
414     bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
415     if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
416         m_hasCreatedActivation = true;
417         emitOpcode(op_create_activation);
418         instructions().append(m_activationRegister->index());
419     }
420
421     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
422     m_firstLazyFunction = codeBlock->m_numVars;
423     for (size_t i = 0; i < functionStack.size(); ++i) {
424         FunctionBodyNode* function = functionStack[i];
425         const Identifier& ident = function->ident();
426         if (!functionBody->captures(ident)) {
427             m_functions.add(ident.impl());
428             RefPtr<RegisterID> reg = addVar(ident, false);
429             // Don't lazily create functions that override the name 'arguments'
430             // as this would complicate lazy instantiation of actual arguments.
431             if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
432                 emitNewFunction(reg.get(), function);
433             else {
434                 emitInitLazyRegister(reg.get());
435                 m_lazyFunctions.set(reg->index(), function);
436             }
437         }
438     }
439     m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
440     for (size_t i = 0; i < varStack.size(); ++i) {
441         const Identifier& ident = *varStack[i].first;
442         if (!functionBody->captures(ident))
443             addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
444     }
445
446     if (m_shouldEmitDebugHooks)
447         codeBlock->m_numCapturedVars = codeBlock->m_numVars;
448
449     FunctionParameters& parameters = *functionBody->parameters();
450     m_parameters.grow(parameters.size() + 1); // reserve space for "this"
451
452     // Add "this" as a parameter
453     int nextParameterIndex = CallFrame::thisArgumentOffset();
454     m_thisRegister.setIndex(nextParameterIndex--);
455     m_codeBlock->addParameter();
456     
457     for (size_t i = 0; i < parameters.size(); ++i)
458         addParameter(parameters[i], nextParameterIndex--);
459
460     preserveLastVar();
461
462     if (isConstructor()) {
463         emitOpcode(op_create_this);
464         instructions().append(m_thisRegister.index());
465     } else if (!codeBlock->isStrictMode() && (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks)) {
466         ValueProfile* profile = emitProfiledOpcode(op_convert_this);
467         instructions().append(m_thisRegister.index());
468         instructions().append(profile);
469     }
470 }
471
472 BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock, CompilationKind)
473     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
474     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
475     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
476     , m_scopeChain(*scopeChain->globalData, scopeChain)
477     , m_symbolTable(symbolTable)
478     , m_scopeNode(evalNode)
479     , m_codeBlock(codeBlock)
480     , m_thisRegister(CallFrame::thisArgumentOffset())
481     , m_finallyDepth(0)
482     , m_dynamicScopeDepth(0)
483     , m_baseScopeDepth(codeBlock->baseScopeDepth())
484     , m_codeType(EvalCode)
485     , m_nextConstantOffset(0)
486     , m_globalConstantIndex(0)
487     , m_hasCreatedActivation(true)
488     , m_firstLazyFunction(0)
489     , m_lastLazyFunction(0)
490     , m_globalData(scopeChain->globalData)
491     , m_lastOpcodeID(op_end)
492 #ifndef NDEBUG
493     , m_lastOpcodePosition(0)
494 #endif
495     , m_stack(wtfThreadData().stack())
496     , m_usesExceptions(false)
497     , m_expressionTooDeep(false)
498 {
499     m_globalData->startedCompiling(m_codeBlock);
500     if (m_shouldEmitDebugHooks || m_baseScopeDepth)
501         m_codeBlock->setNeedsFullScopeChain(true);
502
503     emitOpcode(op_enter);
504     codeBlock->setGlobalData(m_globalData);
505     m_codeBlock->setNumParameters(1);
506
507     const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
508     for (size_t i = 0; i < functionStack.size(); ++i)
509         m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));
510
511     const DeclarationStacks::VarStack& varStack = evalNode->varStack();
512     unsigned numVariables = varStack.size();
513     Vector<Identifier> variables;
514     variables.reserveCapacity(numVariables);
515     for (size_t i = 0; i < numVariables; ++i)
516         variables.append(*varStack[i].first);
517     codeBlock->adoptVariables(variables);
518     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
519     preserveLastVar();
520 }
521
522 BytecodeGenerator::~BytecodeGenerator()
523 {
524     m_globalData->finishedCompiling(m_codeBlock);
525 }
526
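// Emits op_init_lazy_reg for a register whose real value will be created on
// demand (the activation, the 'arguments' object, or a lazily created function).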
527 RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
528 {
529     emitOpcode(op_init_lazy_reg);
530     instructions().append(reg->index());
531     return reg;
532 }
533
534 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
535 {
536     // Parameters overwrite var declarations, but not function declarations.
537     StringImpl* rep = ident.impl();
538     if (!m_functions.contains(rep)) {
539         symbolTable().set(rep, parameterIndex);
540         RegisterID& parameter = registerFor(parameterIndex);
541         parameter.setIndex(parameterIndex);
542     }
543
544     // To maintain the calling convention, we have to allocate unique space for
545     // each parameter, even if the parameter doesn't make it into the symbol table.
546     m_codeBlock->addParameter();
547 }
548
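// True only when 'arguments' is known to live in this function's arguments
// register and local access can be optimized.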
549 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
550 {
551     if (ident != propertyNames().arguments)
552         return false;
553     
554     if (!shouldOptimizeLocals())
555         return false;
556     
557     SymbolTableEntry entry = symbolTable().get(ident.impl());
558     if (entry.isNull())
559         return false;
560     
561     if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
562         return true;
563     
564     return false;
565 }
566
567 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
568 {
569     ASSERT(willResolveToArguments(propertyNames().arguments));
570
571     SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
572     ASSERT(!entry.isNull());
573     return &registerFor(entry.getIndex());
574 }
575
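// If the register belongs to the lazily-created function range, emit the
// deferred function creation now before handing the register out.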
576 RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
577 {
578     if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
579         return reg;
580     emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
581     return reg;
582 }
583
584 RegisterID* BytecodeGenerator::newRegister()
585 {
586     m_calleeRegisters.append(m_calleeRegisters.size());
587     m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
588     return &m_calleeRegisters.last();
589 }
590
591 RegisterID* BytecodeGenerator::newTemporary()
592 {
593     // Reclaim free register IDs.
594     while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
595         m_calleeRegisters.removeLast();
596         
597     RegisterID* result = newRegister();
598     result->setTemporary();
599     return result;
600 }
601
602 RegisterID* BytecodeGenerator::highestUsedRegister()
603 {
604     size_t count = m_codeBlock->m_numCalleeRegisters;
605     while (m_calleeRegisters.size() < count)
606         newRegister();
607     return &m_calleeRegisters.last();
608 }
609
610 PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
611 {
612     // Reclaim free label scopes.
613     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
614         m_labelScopes.removeLast();
615
616     // Allocate new label scope.
617     LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
618     m_labelScopes.append(scope);
619     return &m_labelScopes.last();
620 }
621
622 PassRefPtr<Label> BytecodeGenerator::newLabel()
623 {
624     // Reclaim free label IDs.
625     while (m_labels.size() && !m_labels.last().refCount())
626         m_labels.removeLast();
627
628     // Allocate new label ID.
629     m_labels.append(this);
630     return &m_labels.last();
631 }
632
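// Binds a label to the current instruction offset and records it as a jump
// target, which also disables peephole optimization across the label.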
633 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
634 {
635     unsigned newLabelIndex = instructions().size();
636     l0->setLocation(newLabelIndex);
637
638     if (m_codeBlock->numberOfJumpTargets()) {
639         unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
640         ASSERT(lastLabelIndex <= newLabelIndex);
641         if (newLabelIndex == lastLabelIndex) {
642             // Peephole optimizations have already been disabled by emitting the last label
643             return l0;
644         }
645     }
646
647     m_codeBlock->addJumpTarget(newLabelIndex);
648
649     // This disables peephole optimizations when an instruction is a jump target
650     m_lastOpcodeID = op_end;
651     return l0;
652 }
653
654 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
655 {
656 #ifndef NDEBUG
657     size_t opcodePosition = instructions().size();
658     ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
659     m_lastOpcodePosition = opcodePosition;
660 #endif
661     instructions().append(globalData()->interpreter->getOpcode(opcodeID));
662     m_lastOpcodeID = opcodeID;
663 }
664
665 ValueProfile* BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
666 {
667 #if ENABLE(VALUE_PROFILER)
668     ValueProfile* result = m_codeBlock->addValueProfile(instructions().size());
669 #else
670     ValueProfile* result = 0;
671 #endif
672     emitOpcode(opcodeID);
673     return result;
674 }
675
676 void BytecodeGenerator::emitLoopHint()
677 {
678 #if ENABLE(DFG_JIT)
679     emitOpcode(op_loop_hint);
680 #endif
681 }
682
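// The retrieve*/rewind* helpers below read back the operands of the most
// recently emitted instruction and, when a peephole rewrite applies, shrink it
// out of the stream so a fused form can be emitted instead.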
683 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
684 {
685     ASSERT(instructions().size() >= 4);
686     size_t size = instructions().size();
687     dstIndex = instructions().at(size - 3).u.operand;
688     src1Index = instructions().at(size - 2).u.operand;
689     src2Index = instructions().at(size - 1).u.operand;
690 }
691
692 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
693 {
694     ASSERT(instructions().size() >= 3);
695     size_t size = instructions().size();
696     dstIndex = instructions().at(size - 2).u.operand;
697     srcIndex = instructions().at(size - 1).u.operand;
698 }
699
700 void BytecodeGenerator::retrieveLastUnaryOp(WriteBarrier<Unknown>*& dstPointer, int& srcIndex)
701 {
702     ASSERT(instructions().size() >= 3);
703     size_t size = instructions().size();
704     dstPointer = instructions().at(size - 2).u.registerPointer;
705     srcIndex = instructions().at(size - 1).u.operand;
706 }
707
708 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
709 {
710     ASSERT(instructions().size() >= 4);
711     instructions().shrink(instructions().size() - 4);
712     m_lastOpcodeID = op_end;
713 }
714
715 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
716 {
717     ASSERT(instructions().size() >= 3);
718     instructions().shrink(instructions().size() - 3);
719     m_lastOpcodeID = op_end;
720 }
721
722 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
723 {
724     size_t begin = instructions().size();
725     emitOpcode(target->isForward() ? op_jmp : op_loop);
726     instructions().append(target->bind(begin, instructions().size()));
727     return target;
728 }
729
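// Peephole: when the condition was just computed by a comparison or null test
// into a dead temporary, rewind that instruction and emit a fused
// compare-and-jump (op_jless, op_jeq_null, ...) instead of a plain op_jtrue.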
730 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
731 {
732     if (m_lastOpcodeID == op_less) {
733         int dstIndex;
734         int src1Index;
735         int src2Index;
736
737         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
738
739         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
740             rewindBinaryOp();
741
742             size_t begin = instructions().size();
743             emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
744             instructions().append(src1Index);
745             instructions().append(src2Index);
746             instructions().append(target->bind(begin, instructions().size()));
747             return target;
748         }
749     } else if (m_lastOpcodeID == op_lesseq) {
750         int dstIndex;
751         int src1Index;
752         int src2Index;
753
754         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
755
756         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
757             rewindBinaryOp();
758
759             size_t begin = instructions().size();
760             emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
761             instructions().append(src1Index);
762             instructions().append(src2Index);
763             instructions().append(target->bind(begin, instructions().size()));
764             return target;
765         }
766     } else if (m_lastOpcodeID == op_greater) {
767         int dstIndex;
768         int src1Index;
769         int src2Index;
770
771         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
772
773         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
774             rewindBinaryOp();
775
776             size_t begin = instructions().size();
777             emitOpcode(target->isForward() ? op_jgreater : op_loop_if_greater);
778             instructions().append(src1Index);
779             instructions().append(src2Index);
780             instructions().append(target->bind(begin, instructions().size()));
781             return target;
782         }
783     } else if (m_lastOpcodeID == op_greatereq) {
784         int dstIndex;
785         int src1Index;
786         int src2Index;
787
788         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
789
790         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
791             rewindBinaryOp();
792
793             size_t begin = instructions().size();
794             emitOpcode(target->isForward() ? op_jgreatereq : op_loop_if_greatereq);
795             instructions().append(src1Index);
796             instructions().append(src2Index);
797             instructions().append(target->bind(begin, instructions().size()));
798             return target;
799         }
800     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
801         int dstIndex;
802         int srcIndex;
803
804         retrieveLastUnaryOp(dstIndex, srcIndex);
805
806         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
807             rewindUnaryOp();
808
809             size_t begin = instructions().size();
810             emitOpcode(op_jeq_null);
811             instructions().append(srcIndex);
812             instructions().append(target->bind(begin, instructions().size()));
813             return target;
814         }
815     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
816         int dstIndex;
817         int srcIndex;
818
819         retrieveLastUnaryOp(dstIndex, srcIndex);
820
821         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
822             rewindUnaryOp();
823
824             size_t begin = instructions().size();
825             emitOpcode(op_jneq_null);
826             instructions().append(srcIndex);
827             instructions().append(target->bind(begin, instructions().size()));
828             return target;
829         }
830     }
831
832     size_t begin = instructions().size();
833
834     emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
835     instructions().append(cond->index());
836     instructions().append(target->bind(begin, instructions().size()));
837     return target;
838 }
839
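// Counterpart of emitJumpIfTrue with the branch sense inverted (op_less becomes
// op_jnless, op_not folds into op_jtrue on the original operand, and so on).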
840 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
841 {
842     if (m_lastOpcodeID == op_less && target->isForward()) {
843         int dstIndex;
844         int src1Index;
845         int src2Index;
846
847         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
848
849         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
850             rewindBinaryOp();
851
852             size_t begin = instructions().size();
853             emitOpcode(op_jnless);
854             instructions().append(src1Index);
855             instructions().append(src2Index);
856             instructions().append(target->bind(begin, instructions().size()));
857             return target;
858         }
859     } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
860         int dstIndex;
861         int src1Index;
862         int src2Index;
863
864         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
865
866         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
867             rewindBinaryOp();
868
869             size_t begin = instructions().size();
870             emitOpcode(op_jnlesseq);
871             instructions().append(src1Index);
872             instructions().append(src2Index);
873             instructions().append(target->bind(begin, instructions().size()));
874             return target;
875         }
876     } else if (m_lastOpcodeID == op_greater && target->isForward()) {
877         int dstIndex;
878         int src1Index;
879         int src2Index;
880
881         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
882
883         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
884             rewindBinaryOp();
885
886             size_t begin = instructions().size();
887             emitOpcode(op_jngreater);
888             instructions().append(src1Index);
889             instructions().append(src2Index);
890             instructions().append(target->bind(begin, instructions().size()));
891             return target;
892         }
893     } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
894         int dstIndex;
895         int src1Index;
896         int src2Index;
897
898         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
899
900         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
901             rewindBinaryOp();
902
903             size_t begin = instructions().size();
904             emitOpcode(op_jngreatereq);
905             instructions().append(src1Index);
906             instructions().append(src2Index);
907             instructions().append(target->bind(begin, instructions().size()));
908             return target;
909         }
910     } else if (m_lastOpcodeID == op_not) {
911         int dstIndex;
912         int srcIndex;
913
914         retrieveLastUnaryOp(dstIndex, srcIndex);
915
916         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
917             rewindUnaryOp();
918
919             size_t begin = instructions().size();
920             emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
921             instructions().append(srcIndex);
922             instructions().append(target->bind(begin, instructions().size()));
923             return target;
924         }
925     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
926         int dstIndex;
927         int srcIndex;
928
929         retrieveLastUnaryOp(dstIndex, srcIndex);
930
931         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
932             rewindUnaryOp();
933
934             size_t begin = instructions().size();
935             emitOpcode(op_jneq_null);
936             instructions().append(srcIndex);
937             instructions().append(target->bind(begin, instructions().size()));
938             return target;
939         }
940     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
941         int dstIndex;
942         int srcIndex;
943
944         retrieveLastUnaryOp(dstIndex, srcIndex);
945
946         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
947             rewindUnaryOp();
948
949             size_t begin = instructions().size();
950             emitOpcode(op_jeq_null);
951             instructions().append(srcIndex);
952             instructions().append(target->bind(begin, instructions().size()));
953             return target;
954         }
955     }
956
957     size_t begin = instructions().size();
958     emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
959     instructions().append(cond->index());
960     instructions().append(target->bind(begin, instructions().size()));
961     return target;
962 }
963
964 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
965 {
966     size_t begin = instructions().size();
967
968     emitOpcode(op_jneq_ptr);
969     instructions().append(cond->index());
970     instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->callFunction()));
971     instructions().append(target->bind(begin, instructions().size()));
972     return target;
973 }
974
975 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
976 {
977     size_t begin = instructions().size();
978
979     emitOpcode(op_jneq_ptr);
980     instructions().append(cond->index());
981     instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->applyFunction()));
982     instructions().append(target->bind(begin, instructions().size()));
983     return target;
984 }
985
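// Interns an identifier in the CodeBlock's identifier table and returns its
// index; duplicates map to the existing index.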
986 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
987 {
988     StringImpl* rep = ident.impl();
989     IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
990     if (result.isNewEntry)
991         m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
992
993     return result.iterator->second;
994 }
995
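// Adds a JSValue to the constant pool, deduplicating by encoded value, and
// returns the constant register that refers to it.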
996 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
997 {
998     int index = m_nextConstantOffset;
999
1000     JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
1001     if (result.isNewEntry) {
1002         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1003         ++m_nextConstantOffset;
1004         m_codeBlock->addConstant(JSValue(v));
1005     } else
1006         index = result.iterator->second;
1007
1008     return &m_constantPoolRegisters[index];
1009 }
1010
1011 unsigned BytecodeGenerator::addRegExp(RegExp* r)
1012 {
1013     return m_codeBlock->addRegExp(r);
1014 }
1015
1016 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
1017 {
1018     emitOpcode(op_mov);
1019     instructions().append(dst->index());
1020     instructions().append(src->index());
1021     return dst;
1022 }
1023
1024 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
1025 {
1026     emitOpcode(opcodeID);
1027     instructions().append(dst->index());
1028     instructions().append(src->index());
1029     return dst;
1030 }
1031
1032 RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
1033 {
1034     emitOpcode(op_pre_inc);
1035     instructions().append(srcDst->index());
1036     return srcDst;
1037 }
1038
1039 RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
1040 {
1041     emitOpcode(op_pre_dec);
1042     instructions().append(srcDst->index());
1043     return srcDst;
1044 }
1045
1046 RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
1047 {
1048     emitOpcode(op_post_inc);
1049     instructions().append(dst->index());
1050     instructions().append(srcDst->index());
1051     return dst;
1052 }
1053
1054 RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
1055 {
1056     emitOpcode(op_post_dec);
1057     instructions().append(dst->index());
1058     instructions().append(srcDst->index());
1059     return dst;
1060 }
1061
1062 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1063 {
1064     emitOpcode(opcodeID);
1065     instructions().append(dst->index());
1066     instructions().append(src1->index());
1067     instructions().append(src2->index());
1068
1069     if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
1070         opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
1071         instructions().append(types.toInt());
1072
1073     return dst;
1074 }
1075
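// Peephole: a comparison of a fresh 'typeof' result against a string literal is
// rewritten into a dedicated type-check opcode (op_is_undefined, op_is_string, ...).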
1076 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1077 {
1078     if (m_lastOpcodeID == op_typeof) {
1079         int dstIndex;
1080         int srcIndex;
1081
1082         retrieveLastUnaryOp(dstIndex, srcIndex);
1083
1084         if (src1->index() == dstIndex
1085             && src1->isTemporary()
1086             && m_codeBlock->isConstantRegisterIndex(src2->index())
1087             && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1088             const UString& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1089             if (value == "undefined") {
1090                 rewindUnaryOp();
1091                 emitOpcode(op_is_undefined);
1092                 instructions().append(dst->index());
1093                 instructions().append(srcIndex);
1094                 return dst;
1095             }
1096             if (value == "boolean") {
1097                 rewindUnaryOp();
1098                 emitOpcode(op_is_boolean);
1099                 instructions().append(dst->index());
1100                 instructions().append(srcIndex);
1101                 return dst;
1102             }
1103             if (value == "number") {
1104                 rewindUnaryOp();
1105                 emitOpcode(op_is_number);
1106                 instructions().append(dst->index());
1107                 instructions().append(srcIndex);
1108                 return dst;
1109             }
1110             if (value == "string") {
1111                 rewindUnaryOp();
1112                 emitOpcode(op_is_string);
1113                 instructions().append(dst->index());
1114                 instructions().append(srcIndex);
1115                 return dst;
1116             }
1117             if (value == "object") {
1118                 rewindUnaryOp();
1119                 emitOpcode(op_is_object);
1120                 instructions().append(dst->index());
1121                 instructions().append(srcIndex);
1122                 return dst;
1123             }
1124             if (value == "function") {
1125                 rewindUnaryOp();
1126                 emitOpcode(op_is_function);
1127                 instructions().append(dst->index());
1128                 instructions().append(srcIndex);
1129                 return dst;
1130             }
1131         }
1132     }
1133
1134     emitOpcode(opcodeID);
1135     instructions().append(dst->index());
1136     instructions().append(src1->index());
1137     instructions().append(src2->index());
1138     return dst;
1139 }
1140
1141 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1142 {
1143     return emitLoad(dst, jsBoolean(b));
1144 }
1145
1146 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1147 {
1148     // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1149     // Later we can do the extra work to handle that like the other cases.  They also don't
1150     // work correctly with NaN as a key.
1151     if (isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1152         return emitLoad(dst, jsNumber(number));
1153     JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->second;
1154     if (!valueInMap)
1155         valueInMap = jsNumber(number);
1156     return emitLoad(dst, valueInMap);
1157 }
1158
1159 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1160 {
1161     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
1162     if (!stringInMap)
1163         stringInMap = jsOwnedString(globalData(), identifier.ustring());
1164     return emitLoad(dst, JSValue(stringInMap));
1165 }
1166
1167 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1168 {
1169     RegisterID* constantID = addConstantValue(v);
1170     if (dst)
1171         return emitMove(dst, constantID);
1172     return constantID;
1173 }
1174
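// Classifies an identifier lookup: 'this', a local register, a lexical slot at a
// known scope depth, an (optionally watched) indexed global, or a dynamic lookup
// once a non-static scope intervenes.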
1175 ResolveResult BytecodeGenerator::resolve(const Identifier& property)
1176 {
1177     if (property == propertyNames().thisIdentifier)
1178         return ResolveResult::registerResolve(thisRegister(), ResolveResult::ReadOnlyFlag);
1179
1180     // Check if the property should be allocated in a register.
1181     if (m_codeType != GlobalCode && shouldOptimizeLocals()) {
1182         SymbolTableEntry entry = symbolTable().get(property.impl());
1183         if (!entry.isNull()) {
1184             if (property == propertyNames().arguments)
1185                 createArgumentsIfNecessary();
1186             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1187             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1188             return ResolveResult::registerResolve(local, flags);
1189         }
1190     }
1191
1192     // Cases where we cannot statically optimize the lookup.
1193     if (property == propertyNames().arguments || !canOptimizeNonLocals())
1194         return ResolveResult::dynamicResolve(0);
1195
1196     ScopeChainIterator iter = m_scopeChain->begin();
1197     ScopeChainIterator end = m_scopeChain->end();
1198     size_t depth = 0;
1199     size_t depthOfFirstScopeWithDynamicChecks = 0;
1200     unsigned flags = 0;
1201     for (; iter != end; ++iter, ++depth) {
1202         JSObject* currentScope = iter->get();
1203         if (!currentScope->isVariableObject()) {
1204             flags |= ResolveResult::DynamicFlag;
1205             break;
1206         }        
1207         JSSymbolTableObject* currentVariableObject = jsCast<JSSymbolTableObject*>(currentScope);
1208         SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());
1209
1210         // Found the property
1211         if (!entry.isNull()) {
1212             if (entry.isReadOnly())
1213                 flags |= ResolveResult::ReadOnlyFlag;
1214             depth += m_codeBlock->needsFullScopeChain();
1215             if (++iter == end) {
1216                 if (flags & ResolveResult::DynamicFlag)
1217                     return ResolveResult::dynamicIndexedGlobalResolve(entry.getIndex(), depth, currentScope, flags);
1218                 return ResolveResult::indexedGlobalResolve(
1219                     entry.getIndex(), currentScope,
1220                     flags | (entry.couldBeWatched() ? ResolveResult::WatchedFlag : 0));
1221             }
1222 #if !ASSERT_DISABLED
1223             if (JSActivation* activation = jsDynamicCast<JSActivation*>(currentVariableObject))
1224                 ASSERT(activation->isValidScopedLookup(entry.getIndex()));
1225 #endif
1226             return ResolveResult::lexicalResolve(entry.getIndex(), depth, flags);
1227         }
1228         bool scopeRequiresDynamicChecks = false;
1229         if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
1230             break;
1231         if (!(flags & ResolveResult::DynamicFlag)) {
1232             if (scopeRequiresDynamicChecks)
1233                 flags |= ResolveResult::DynamicFlag;
1234             else
1235                 ++depthOfFirstScopeWithDynamicChecks;
1236         }
1237     }
1238
1239     // Can't locate the property but we're able to avoid a few lookups.
1240     JSObject* scope = iter->get();
1241     // Step over the function's activation, if it needs one. At this point we
1242     // know there is no dynamic scope in the function itself, so this is safe to
1243     // do.
1244     depth += m_codeBlock->needsFullScopeChain();
1245     depthOfFirstScopeWithDynamicChecks += m_codeBlock->needsFullScopeChain();
1246     if (++iter == end) {
1247         if ((flags & ResolveResult::DynamicFlag) && depth)
1248             return ResolveResult::dynamicGlobalResolve(depth, scope);
1249         return ResolveResult::globalResolve(scope);
1250     }
1251     return ResolveResult::dynamicResolve(depthOfFirstScopeWithDynamicChecks);
1252 }
1253
1254 ResolveResult BytecodeGenerator::resolveConstDecl(const Identifier& property)
1255 {
1256     // Register-allocated const declarations.
1257     if (m_codeType != EvalCode && m_codeType != GlobalCode) {
1258         SymbolTableEntry entry = symbolTable().get(property.impl());
1259         if (!entry.isNull()) {
1260             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1261             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1262             return ResolveResult::registerResolve(local, flags);
1263         }
1264     }
1265
1266     // Const declarations in eval code or global code.
1267     ScopeChainIterator iter = scopeChain()->begin();
1268     ScopeChainIterator end = scopeChain()->end();
1269     size_t depth = 0;
1270     for (; iter != end; ++iter, ++depth) {
1271         JSObject* currentScope = iter->get();
1272         if (!currentScope->isVariableObject())
1273             continue;
1274         JSSymbolTableObject* currentVariableObject = jsCast<JSSymbolTableObject*>(currentScope);
1275         SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());
1276         if (entry.isNull())
1277             continue;
1278         if (++iter == end)
1279             return ResolveResult::indexedGlobalResolve(entry.getIndex(), currentVariableObject, 0);
1280         return ResolveResult::lexicalResolve(entry.getIndex(), depth + scopeDepth(), 0);
1281     }
1282
1283     // FIXME: While this code should only be hit in an eval block, it will assign
1284     // to the wrong base if property exists in an intervening with scope.
1285     return ResolveResult::dynamicResolve(scopeDepth());
1286 }
1287
1288 void BytecodeGenerator::emitCheckHasInstance(RegisterID* base)
1289 {
1290     emitOpcode(op_check_has_instance);
1291     instructions().append(base->index());
1292 }
1293
1294 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
1295 {
1296     emitOpcode(op_instanceof);
1297     instructions().append(dst->index());
1298     instructions().append(value->index());
1299     instructions().append(base->index());
1300     instructions().append(basePrototype->index());
1301     return dst;
1302 }
1303
1304 static const unsigned maxGlobalResolves = 128;
1305
1306 bool BytecodeGenerator::shouldAvoidResolveGlobal()
1307 {
1308     return m_codeBlock->globalResolveInfoCount() > maxGlobalResolves && !m_labelScopes.size();
1309 }
1310
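// Emits the cheapest resolve available for the classification: a static-var
// read, op_resolve_global(_dynamic), op_resolve_skip, or a plain op_resolve.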
1311 RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1312 {
1313     if (resolveResult.isStatic())
1314         return emitGetStaticVar(dst, resolveResult, property);
1315     
1316     if (resolveResult.isGlobal() && !shouldAvoidResolveGlobal()) {
1317 #if ENABLE(JIT)
1318         m_codeBlock->addGlobalResolveInfo(instructions().size());
1319 #endif
1320         m_codeBlock->addGlobalResolveInstruction(instructions().size());
1321         bool dynamic = resolveResult.isDynamic() && resolveResult.depth();
1322         ValueProfile* profile = emitProfiledOpcode(dynamic ? op_resolve_global_dynamic : op_resolve_global);
1323         instructions().append(dst->index());
1324         instructions().append(addConstant(property));
1325         instructions().append(0);
1326         instructions().append(0);
1327         if (dynamic)
1328             instructions().append(resolveResult.depth());
1329         instructions().append(profile);
1330         return dst;
1331     }
1332         
1333     if (resolveResult.type() == ResolveResult::Dynamic && resolveResult.depth()) {
1334         // In this case we are at least able to drop a few scope chains from the
1335         // lookup chain, although we still need to hash from then on.
1336         ValueProfile* profile = emitProfiledOpcode(op_resolve_skip);
1337         instructions().append(dst->index());
1338         instructions().append(addConstant(property));
1339         instructions().append(resolveResult.depth());
1340         instructions().append(profile);
1341         return dst;
1342     }
1343
1344     ValueProfile* profile = emitProfiledOpcode(op_resolve);
1345     instructions().append(dst->index());
1346     instructions().append(addConstant(property));
1347     instructions().append(profile);
1348     return dst;
1349 }
1350
1351 RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1352 {
1353     if (resolveResult.isGlobal() && !resolveResult.isDynamic())
1354         // Global object is the base
1355         return emitLoad(dst, JSValue(resolveResult.globalObject()));
1356
1357     // We can't optimise at all :-(
1358     ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1359     instructions().append(dst->index());
1360     instructions().append(addConstant(property));
1361     instructions().append(false);
1362     instructions().append(profile);
1363     return dst;
1364 }
1365
1366 RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1367 {
1368     if (!m_codeBlock->isStrictMode())
1369         return emitResolveBase(dst, resolveResult, property);
1370
1371     if (resolveResult.isGlobal() && !resolveResult.isDynamic()) {
1372         // Global object is the base
1373         RefPtr<RegisterID> result = emitLoad(dst, JSValue(resolveResult.globalObject()));
1374         emitOpcode(op_ensure_property_exists);
1375         instructions().append(dst->index());
1376         instructions().append(addConstant(property));
1377         return result.get();
1378     }
1379
1380     // We can't optimise at all :-(
1381     ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1382     instructions().append(dst->index());
1383     instructions().append(addConstant(property));
1384     instructions().append(true);
1385     instructions().append(profile);
1386     return dst;
1387 }
1388
1389 RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1390 {
1391     if (resolveResult.isGlobal() && !resolveResult.isDynamic()) {
1392         // Global object is the base
1393         emitLoad(baseDst, JSValue(resolveResult.globalObject()));
1394
1395         if (resolveResult.isStatic()) {
1396             // Directly index the property lookup across multiple scopes.
1397             emitGetStaticVar(propDst, resolveResult, property);
1398             return baseDst;
1399         }
1400
1401         if (shouldAvoidResolveGlobal()) {
1402             ValueProfile* profile = emitProfiledOpcode(op_resolve);
1403             instructions().append(propDst->index());
1404             instructions().append(addConstant(property));
1405             instructions().append(profile);
1406             return baseDst;
1407         }
1408
1409 #if ENABLE(JIT)
1410         m_codeBlock->addGlobalResolveInfo(instructions().size());
1411 #endif
1412 #if ENABLE(CLASSIC_INTERPRETER)
1413         m_codeBlock->addGlobalResolveInstruction(instructions().size());
1414 #endif
1415         ValueProfile* profile = emitProfiledOpcode(op_resolve_global);
1416         instructions().append(propDst->index());
1417         instructions().append(addConstant(property));
1418         instructions().append(0);
1419         instructions().append(0);
1420         instructions().append(profile);
1421         return baseDst;
1422     }
1423
1424     ValueProfile* profile = emitProfiledOpcode(op_resolve_with_base);
1425     instructions().append(baseDst->index());
1426     instructions().append(propDst->index());
1427     instructions().append(addConstant(property));
1428     instructions().append(profile);
1429     return baseDst;
1430 }
1431
1432 RegisterID* BytecodeGenerator::emitResolveWithThis(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1433 {
1434     if (resolveResult.isStatic()) {
1435         emitLoad(baseDst, jsUndefined());
1436         emitGetStaticVar(propDst, resolveResult, property);
1437         return baseDst;
1438     }
1439
1440     if (resolveResult.type() == ResolveResult::Dynamic) {
1441         // We can't optimise at all :-(
1442         ValueProfile* profile = emitProfiledOpcode(op_resolve_with_this);
1443         instructions().append(baseDst->index());
1444         instructions().append(propDst->index());
1445         instructions().append(addConstant(property));
1446         instructions().append(profile);
1447         return baseDst;
1448     }
1449
1450     emitLoad(baseDst, jsUndefined());
1451     return emitResolve(propDst, resolveResult, property);
1452 }
1453
1454 RegisterID* BytecodeGenerator::emitGetStaticVar(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& identifier)
1455 {
1456     ValueProfile* profile = 0;
1457
1458     switch (resolveResult.type()) {
1459     case ResolveResult::Register:
1460     case ResolveResult::ReadOnlyRegister:
1461         if (dst == ignoredResult())
1462             return 0;
1463         return moveToDestinationIfNeeded(dst, resolveResult.local());
1464
1465     case ResolveResult::Lexical:
1466     case ResolveResult::ReadOnlyLexical:
1467         profile = emitProfiledOpcode(op_get_scoped_var);
1468         instructions().append(dst->index());
1469         instructions().append(resolveResult.index());
1470         instructions().append(resolveResult.depth());
1471         instructions().append(profile);
1472         return dst;
1473
1474     case ResolveResult::IndexedGlobal:
1475     case ResolveResult::ReadOnlyIndexedGlobal:
1476         if (m_lastOpcodeID == op_put_global_var) {
1477             WriteBarrier<Unknown>* dstPointer;
1478             int srcIndex;
1479             retrieveLastUnaryOp(dstPointer, srcIndex);
1480             if (dstPointer == resolveResult.registerPointer() && srcIndex == dst->index())
1481                 return dst;
1482         }
1483
1484         profile = emitProfiledOpcode(op_get_global_var);
1485         instructions().append(dst->index());
1486         instructions().append(resolveResult.registerPointer());
1487         instructions().append(profile);
1488         return dst;
1489
1490     case ResolveResult::WatchedIndexedGlobal:
1491         // Skip the peephole for now. It's not clear that it's profitable given
1492         // the DFG's capabilities, and if the variable is watchable then we
1493         // don't expect to see any put_global_var's anyway.
1494         profile = emitProfiledOpcode(op_get_global_var_watchable);
1495         instructions().append(dst->index());
1496         instructions().append(resolveResult.registerPointer());
1497         instructions().append(addConstant(identifier)); // For the benefit of the DFG.
1498         instructions().append(profile);
1499         return dst;
1500
1501     default:
1502         ASSERT_NOT_REACHED();
1503         return 0;
1504     }
1505 }
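/*
    Sketch of the IndexedGlobal peephole above: if the previous opcode was

        put_global_var   <registerPointer>, <reg>

    and we are now asked to read the same global back into the same <reg>, the
    value is already where it needs to be, so no get_global_var is emitted.
    WatchedIndexedGlobal deliberately skips this, as noted, since watched
    globals are not expected to see put_global_var at all.
*/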
1506
1507 RegisterID* BytecodeGenerator::emitPutStaticVar(const ResolveResult& resolveResult, const Identifier& identifier, RegisterID* value)
1508 {
1509     switch (resolveResult.type()) {
1510     case ResolveResult::Register:
1511     case ResolveResult::ReadOnlyRegister:
1512         return moveToDestinationIfNeeded(resolveResult.local(), value);
1513
1514     case ResolveResult::Lexical:
1515     case ResolveResult::ReadOnlyLexical:
1516         emitOpcode(op_put_scoped_var);
1517         instructions().append(resolveResult.index());
1518         instructions().append(resolveResult.depth());
1519         instructions().append(value->index());
1520         return value;
1521
1522     case ResolveResult::IndexedGlobal:
1523     case ResolveResult::ReadOnlyIndexedGlobal:
1524         emitOpcode(op_put_global_var);
1525         instructions().append(resolveResult.registerPointer());
1526         instructions().append(value->index());
1527         return value;
1528         
1529     case ResolveResult::WatchedIndexedGlobal:
1530         emitOpcode(op_put_global_var_check);
1531         instructions().append(resolveResult.registerPointer());
1532         instructions().append(value->index());
1533         instructions().append(jsCast<JSGlobalObject*>(resolveResult.globalObject())->symbolTable().get(identifier.impl()).addressOfIsWatched());
1534         instructions().append(addConstant(identifier));
1535         return value;
1536
1537     default:
1538         ASSERT_NOT_REACHED();
1539         return 0;
1540     }
1541 }
1542
1543 void BytecodeGenerator::emitMethodCheck()
1544 {
1545     emitOpcode(op_method_check);
1546 }
1547
1548 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1549 {
1550     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1551
1552     ValueProfile* profile = emitProfiledOpcode(op_get_by_id);
1553     instructions().append(dst->index());
1554     instructions().append(base->index());
1555     instructions().append(addConstant(property));
1556     instructions().append(0);
1557     instructions().append(0);
1558     instructions().append(0);
1559     instructions().append(0);
1560     instructions().append(profile);
1561     return dst;
1562 }
1563
1564 RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
1565 {
1566     emitOpcode(op_get_arguments_length);
1567     instructions().append(dst->index());
1568     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1569     instructions().append(base->index());
1570     instructions().append(addConstant(propertyNames().length));
1571     return dst;
1572 }
1573
1574 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1575 {
1576     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1577
1578     emitOpcode(op_put_by_id);
1579     instructions().append(base->index());
1580     instructions().append(addConstant(property));
1581     instructions().append(value->index());
1582     instructions().append(0);
1583     instructions().append(0);
1584     instructions().append(0);
1585     instructions().append(0);
1586     instructions().append(0);
1587     return value;
1588 }
1589
1590 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1591 {
1592     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1593     
1594     emitOpcode(op_put_by_id);
1595     instructions().append(base->index());
1596     instructions().append(addConstant(property));
1597     instructions().append(value->index());
1598     instructions().append(0);
1599     instructions().append(0);
1600     instructions().append(0);
1601     instructions().append(0);
1602     instructions().append(property != m_globalData->propertyNames->underscoreProto);
1603     return value;
1604 }
1605
1606 void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1607 {
1608     emitOpcode(op_put_getter_setter);
1609     instructions().append(base->index());
1610     instructions().append(addConstant(property));
1611     instructions().append(getter->index());
1612     instructions().append(setter->index());
1613 }
1614
1615 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1616 {
1617     emitOpcode(op_del_by_id);
1618     instructions().append(dst->index());
1619     instructions().append(base->index());
1620     instructions().append(addConstant(property));
1621     return dst;
1622 }
1623
1624 RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1625 {
1626     ValueProfile* profile = emitProfiledOpcode(op_get_argument_by_val);
1627     instructions().append(dst->index());
1628     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1629     instructions().append(base->index());
1630     instructions().append(property->index());
1631     instructions().append(profile);
1632     return dst;
1633 }
1634
1635 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1636 {
1637     for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1638         ForInContext& context = m_forInContextStack[i - 1];
1639         if (context.propertyRegister == property) {
1640             emitOpcode(op_get_by_pname);
1641             instructions().append(dst->index());
1642             instructions().append(base->index());
1643             instructions().append(property->index());
1644             instructions().append(context.expectedSubscriptRegister->index());
1645             instructions().append(context.iterRegister->index());
1646             instructions().append(context.indexRegister->index());
1647             return dst;
1648         }
1649     }
1650     ValueProfile* profile = emitProfiledOpcode(op_get_by_val);
1651     instructions().append(dst->index());
1652     instructions().append(base->index());
1653     instructions().append(property->index());
1654     instructions().append(profile);
1655     return dst;
1656 }
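/*
    Illustrative example of the for-in fast path above: in

        for (var p in o)
            use(o[p]);

    the subscript register for 'o[p]' is the for-in context's property register,
    so get_by_pname is emitted with the cached expected-subscript, iterator and
    index registers; any other subscript falls back to the generic get_by_val.
*/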
1657
1658 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1659 {
1660     emitOpcode(op_put_by_val);
1661     instructions().append(base->index());
1662     instructions().append(property->index());
1663     instructions().append(value->index());
1664     return value;
1665 }
1666
1667 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1668 {
1669     emitOpcode(op_del_by_val);
1670     instructions().append(dst->index());
1671     instructions().append(base->index());
1672     instructions().append(property->index());
1673     return dst;
1674 }
1675
1676 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1677 {
1678     emitOpcode(op_put_by_index);
1679     instructions().append(base->index());
1680     instructions().append(index);
1681     instructions().append(value->index());
1682     return value;
1683 }
1684
1685 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1686 {
1687     emitOpcode(op_new_object);
1688     instructions().append(dst->index());
1689     return dst;
1690 }
1691
1692 unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
1693 {
1694     return m_codeBlock->addConstantBuffer(length);
1695 }
1696
1697 JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1698 {
1699     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
1700     if (!stringInMap) {
1701         stringInMap = jsString(globalData(), identifier.ustring());
1702         addConstantValue(stringInMap);
1703     }
1704     return stringInMap;
1705 }
1706
1707 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
1708 {
1709 #if !ASSERT_DISABLED
1710     unsigned checkLength = 0;
1711 #endif
1712     bool hadVariableExpression = false;
1713     if (length) {
1714         for (ElementNode* n = elements; n; n = n->next()) {
1715             if (!n->value()->isNumber() && !n->value()->isString()) {
1716                 hadVariableExpression = true;
1717                 break;
1718             }
1719             if (n->elision())
1720                 break;
1721 #if !ASSERT_DISABLED
1722             checkLength++;
1723 #endif
1724         }
1725         if (!hadVariableExpression) {
1726             ASSERT(length == checkLength);
1727             unsigned constantBufferIndex = addConstantBuffer(length);
1728             JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex);
1729             unsigned index = 0;
1730             for (ElementNode* n = elements; index < length; n = n->next()) {
1731                 if (n->value()->isNumber())
1732                     constantBuffer[index++] = jsNumber(static_cast<NumberNode*>(n->value())->value());
1733                 else {
1734                     ASSERT(n->value()->isString());
1735                     constantBuffer[index++] = addStringConstant(static_cast<StringNode*>(n->value())->value());
1736                 }
1737             }
1738             emitOpcode(op_new_array_buffer);
1739             instructions().append(dst->index());
1740             instructions().append(constantBufferIndex);
1741             instructions().append(length);
1742             return dst;
1743         }
1744     }
1745
1746     Vector<RefPtr<RegisterID>, 16> argv;
1747     for (ElementNode* n = elements; n; n = n->next()) {
1748         if (n->elision())
1749             break;
1750         argv.append(newTemporary());
1751         // op_new_array requires the initial values to be a sequential range of registers
1752         ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1753         emitNode(argv.last().get(), n->value());
1754     }
1755     emitOpcode(op_new_array);
1756     instructions().append(dst->index());
1757     instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1758     instructions().append(argv.size()); // argc
1759     return dst;
1760 }
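/*
    Sketch of the two array-literal paths above:

        [1, 2, "three"]     // all leading elements are number/string constants
                            // and there are no elisions, so the values go into
                            // a constant buffer and new_array_buffer is emitted

        [1, f(), 3]         // f() is a variable expression, so each element is
                            // evaluated into a fresh, sequential temporary and
                            // new_array is emitted over that register range
*/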
1761
1762 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
1763 {
1764     return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function)), false);
1765 }
1766
1767 RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
1768 {
1769     FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
1770     if (ptr.isNewEntry)
1771         ptr.iterator->second = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function));
1772     return emitNewFunctionInternal(dst, ptr.iterator->second, true);
1773 }
1774
1775 RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
1776 {
1777     createActivationIfNecessary();
1778     emitOpcode(op_new_func);
1779     instructions().append(dst->index());
1780     instructions().append(index);
1781     instructions().append(doNullCheck);
1782     return dst;
1783 }
1784
1785 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1786 {
1787     emitOpcode(op_new_regexp);
1788     instructions().append(dst->index());
1789     instructions().append(addRegExp(regExp));
1790     return dst;
1791 }
1792
1793 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1794 {
1795     FunctionBodyNode* function = n->body();
1796     unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function));
1797     
1798     createActivationIfNecessary();
1799     emitOpcode(op_new_func_exp);
1800     instructions().append(r0->index());
1801     instructions().append(index);
1802     return r0;
1803 }
1804
1805 RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1806 {
1807     return emitCall(op_call, dst, func, callArguments, divot, startOffset, endOffset);
1808 }
1809
1810 void BytecodeGenerator::createArgumentsIfNecessary()
1811 {
1812     if (m_codeType != FunctionCode)
1813         return;
1814     
1815     if (!m_codeBlock->usesArguments())
1816         return;
1817
1818     // If we're in strict mode we tear off the arguments on function
1819     // entry, so there's no need to check whether we need to create them
1820     // now.
1821     if (m_codeBlock->isStrictMode())
1822         return;
1823
1824     emitOpcode(op_create_arguments);
1825     instructions().append(m_codeBlock->argumentsRegister());
1826 }
1827
1828 void BytecodeGenerator::createActivationIfNecessary()
1829 {
1830     if (m_hasCreatedActivation)
1831         return;
1832     if (!m_codeBlock->needsFullScopeChain())
1833         return;
1834     emitOpcode(op_create_activation);
1835     instructions().append(m_activationRegister->index());
1836 }
1837
1838 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1839 {
1840     return emitCall(op_call_eval, dst, func, callArguments, divot, startOffset, endOffset);
1841 }
1842
1843 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1844 {
1845     ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1846     ASSERT(func->refCount());
1847
1848     if (m_shouldEmitProfileHooks)
1849         emitMove(callArguments.profileHookRegister(), func);
1850
1851     // Generate code for arguments.
1852     unsigned argument = 0;
1853     for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
1854         emitNode(callArguments.argumentRegister(argument++), n);
1855
1856     // Reserve space for call frame.
1857     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
1858     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
1859         callFrame.append(newTemporary());
1860
1861     if (m_shouldEmitProfileHooks) {
1862         emitOpcode(op_profile_will_call);
1863         instructions().append(callArguments.profileHookRegister()->index());
1864     }
1865
1866     emitExpressionInfo(divot, startOffset, endOffset);
1867
1868     // Emit call.
1869     emitOpcode(opcodeID);
1870     instructions().append(func->index()); // func
1871     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1872     instructions().append(callArguments.registerOffset()); // registerOffset
1873 #if ENABLE(LLINT)
1874     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1875 #else
1876     instructions().append(0);
1877 #endif
1878     instructions().append(0);
1879     if (dst != ignoredResult()) {
1880         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
1881         instructions().append(dst->index()); // dst
1882         instructions().append(profile);
1883     }
1884
1885     if (m_shouldEmitProfileHooks) {
1886         emitOpcode(op_profile_did_call);
1887         instructions().append(callArguments.profileHookRegister()->index());
1888     }
1889
1890     return dst;
1891 }
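/*
    Rough shape of the bytecode emitted by emitCall after the arguments have been
    evaluated and the call frame temporaries reserved (operand names here are
    descriptive, not authoritative; profiling opcodes appear only when
    m_shouldEmitProfileHooks is set):

        [profile_will_call  profileHookRegister]
        call / call_eval    func, argCountIncludingThis, registerOffset, linkInfo, 0
        [call_put_result    dst, profile]       // only if the result is used
        [profile_did_call   profileHookRegister]

    'linkInfo' is the LLInt call link info slot when LLINT is enabled, otherwise 0.
*/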
1892
1893 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
1894 {
1895     if (m_shouldEmitProfileHooks) {
1896         emitMove(profileHookRegister, func);
1897         emitOpcode(op_profile_will_call);
1898         instructions().append(profileHookRegister->index());
1899     }
1900     
1901     emitExpressionInfo(divot, startOffset, endOffset);
1902
1903     // Emit call.
1904     emitOpcode(op_call_varargs);
1905     instructions().append(func->index());
1906     instructions().append(thisRegister->index());
1907     instructions().append(arguments->index());
1908     instructions().append(firstFreeRegister->index());
1909     if (dst != ignoredResult()) {
1910         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
1911         instructions().append(dst->index());
1912         instructions().append(profile);
1913     }
1914     if (m_shouldEmitProfileHooks) {
1915         emitOpcode(op_profile_did_call);
1916         instructions().append(profileHookRegister->index());
1917     }
1918     return dst;
1919 }
1920
1921 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1922 {
1923     if (m_codeBlock->needsFullScopeChain()) {
1924         emitOpcode(op_tear_off_activation);
1925         instructions().append(m_activationRegister->index());
1926         instructions().append(m_codeBlock->argumentsRegister());
1927     } else if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !m_codeBlock->isStrictMode()) {
1928         emitOpcode(op_tear_off_arguments);
1929         instructions().append(m_codeBlock->argumentsRegister());
1930     }
1931
1932     // Constructors use op_ret_object_or_this to check the result is an
1933     // object, unless we can trivially determine the check is not
1934     // necessary (currently, if the return value is 'this').
1935     if (isConstructor() && (src->index() != m_thisRegister.index())) {
1936         emitOpcode(op_ret_object_or_this);
1937         instructions().append(src->index());
1938         instructions().append(m_thisRegister.index());
1939         return src;
1940     }
1941     return emitUnaryNoDstOp(op_ret, src);
1942 }
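/*
    Example (sketch) of the constructor check above: for

        function C() { this.x = 1; return 42; }
        new C();        // evaluates to the new object, not 42

    emitReturn emits ret_object_or_this, which substitutes 'this' at runtime
    when the returned value is not an object. A plain 'return this;' skips the
    check and uses an ordinary ret.
*/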
1943
1944 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
1945 {
1946     emitOpcode(opcodeID);
1947     instructions().append(src->index());
1948     return src;
1949 }
1950
1951 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1952 {
1953     ASSERT(func->refCount());
1954
1955     if (m_shouldEmitProfileHooks)
1956         emitMove(callArguments.profileHookRegister(), func);
1957
1958     // Generate code for arguments.
1959     unsigned argument = 0;
1960     if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
1961         for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
1962             emitNode(callArguments.argumentRegister(argument++), n);
1963     }
1964
1965     if (m_shouldEmitProfileHooks) {
1966         emitOpcode(op_profile_will_call);
1967         instructions().append(callArguments.profileHookRegister()->index());
1968     }
1969
1970     // Reserve space for call frame.
1971     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
1972     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
1973         callFrame.append(newTemporary());
1974
1975     emitExpressionInfo(divot, startOffset, endOffset);
1976
1977     emitOpcode(op_construct);
1978     instructions().append(func->index()); // func
1979     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1980     instructions().append(callArguments.registerOffset()); // registerOffset
1981 #if ENABLE(LLINT)
1982     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1983 #else
1984     instructions().append(0);
1985 #endif
1986     instructions().append(0);
1987     if (dst != ignoredResult()) {
1988         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
1989         instructions().append(dst->index()); // dst
1990         instructions().append(profile);
1991     }
1992
1993     if (m_shouldEmitProfileHooks) {
1994         emitOpcode(op_profile_did_call);
1995         instructions().append(callArguments.profileHookRegister()->index());
1996     }
1997
1998     return dst;
1999 }
2000
2001 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
2002 {
2003     emitOpcode(op_strcat);
2004     instructions().append(dst->index());
2005     instructions().append(src->index());
2006     instructions().append(count);
2007
2008     return dst;
2009 }
2010
2011 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
2012 {
2013     emitOpcode(op_to_primitive);
2014     instructions().append(dst->index());
2015     instructions().append(src->index());
2016 }
2017
2018 RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
2019 {
2020     ASSERT(scope->isTemporary());
2021     ControlFlowContext context;
2022     context.isFinallyBlock = false;
2023     m_scopeContextStack.append(context);
2024     m_dynamicScopeDepth++;
2025
2026     return emitUnaryNoDstOp(op_push_scope, scope);
2027 }
2028
2029 void BytecodeGenerator::emitPopScope()
2030 {
2031     ASSERT(m_scopeContextStack.size());
2032     ASSERT(!m_scopeContextStack.last().isFinallyBlock);
2033
2034     emitOpcode(op_pop_scope);
2035
2036     m_scopeContextStack.removeLast();
2037     m_dynamicScopeDepth--;
2038 }
2039
2040 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
2041 {
2042 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2043     if (debugHookID != DidReachBreakpoint)
2044         return;
2045 #else
2046     if (!m_shouldEmitDebugHooks)
2047         return;
2048 #endif
2049     emitOpcode(op_debug);
2050     instructions().append(debugHookID);
2051     instructions().append(firstLine);
2052     instructions().append(lastLine);
2053 }
2054
2055 void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
2056 {
2057     ControlFlowContext scope;
2058     scope.isFinallyBlock = true;
2059     FinallyContext context = {
2060         finallyBlock,
2061         m_scopeContextStack.size(),
2062         m_switchContextStack.size(),
2063         m_forInContextStack.size(),
2064         m_labelScopes.size(),
2065         m_finallyDepth,
2066         m_dynamicScopeDepth
2067     };
2068     scope.finallyContext = context;
2069     m_scopeContextStack.append(scope);
2070     m_finallyDepth++;
2071 }
2072
2073 void BytecodeGenerator::popFinallyContext()
2074 {
2075     ASSERT(m_scopeContextStack.size());
2076     ASSERT(m_scopeContextStack.last().isFinallyBlock);
2077     ASSERT(m_finallyDepth > 0);
2078     m_scopeContextStack.removeLast();
2079     m_finallyDepth--;
2080 }
2081
2082 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
2083 {
2084     // Reclaim free label scopes.
2085     //
2086     // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
2087     // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
2088     // size 0, leading to segfaulty badness.  We have yet to identify anything in our code that could
2089     // validly cause the GCC codegen to misbehave in this fashion, so the refactored loop condition below
2090     // is a workaround.
2091     while (m_labelScopes.size()) {
2092         if (m_labelScopes.last().refCount())
2093             break;
2094         m_labelScopes.removeLast();
2095     }
2096
2097     if (!m_labelScopes.size())
2098         return 0;
2099
2100     // We special-case the following, which is a syntax error in Firefox:
2101     // label:
2102     //     break;
2103     if (name.isEmpty()) {
2104         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2105             LabelScope* scope = &m_labelScopes[i];
2106             if (scope->type() != LabelScope::NamedLabel) {
2107                 ASSERT(scope->breakTarget());
2108                 return scope;
2109             }
2110         }
2111         return 0;
2112     }
2113
2114     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2115         LabelScope* scope = &m_labelScopes[i];
2116         if (scope->name() && *scope->name() == name) {
2117             ASSERT(scope->breakTarget());
2118             return scope;
2119         }
2120     }
2121     return 0;
2122 }
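/*
    Illustrative examples (sketch) for breakTarget:

        for (;;) { break; }             // empty name: nearest non-named label
                                        // scope, i.e. the enclosing loop (or switch)

        outer: for (;;) {
            for (;;) { break outer; }   // named: the scope labelled 'outer'
        }
*/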
2123
2124 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
2125 {
2126     // Reclaim free label scopes.
2127     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
2128         m_labelScopes.removeLast();
2129
2130     if (!m_labelScopes.size())
2131         return 0;
2132
2133     if (name.isEmpty()) {
2134         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2135             LabelScope* scope = &m_labelScopes[i];
2136             if (scope->type() == LabelScope::Loop) {
2137                 ASSERT(scope->continueTarget());
2138                 return scope;
2139             }
2140         }
2141         return 0;
2142     }
2143
2144     // Continue to the loop nested nearest to the label scope that matches
2145     // 'name'.
2146     LabelScope* result = 0;
2147     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2148         LabelScope* scope = &m_labelScopes[i];
2149         if (scope->type() == LabelScope::Loop) {
2150             ASSERT(scope->continueTarget());
2151             result = scope;
2152         }
2153         if (scope->name() && *scope->name() == name)
2154             return result; // may be 0
2155     }
2156     return 0;
2157 }
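/*
    Illustrative example (sketch) for continueTarget:

        a: while (c1) {
            while (c2) { continue a; }  // resolves to the outer, labelled loop
        }

    If no loop lies at or inside the matching label scope, the result is 0 and
    the caller is responsible for treating that as an error.
*/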
2158
2159 PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
2160 {
2161     while (topScope > bottomScope) {
2162         // First we count the number of dynamic scopes we need to remove to get
2163         // to a finally block.
2164         int nNormalScopes = 0;
2165         while (topScope > bottomScope) {
2166             if (topScope->isFinallyBlock)
2167                 break;
2168             ++nNormalScopes;
2169             --topScope;
2170         }
2171
2172         if (nNormalScopes) {
2173             size_t begin = instructions().size();
2174
2175             // We need to remove a number of dynamic scopes to get to the next
2176             // finally block
2177             emitOpcode(op_jmp_scopes);
2178             instructions().append(nNormalScopes);
2179
2180             // If topScope == bottomScope then there isn't actually a finally block
2181             // left to emit, so make the jmp_scopes jump directly to the target label
2182             if (topScope == bottomScope) {
2183                 instructions().append(target->bind(begin, instructions().size()));
2184                 return target;
2185             }
2186
2187             // Otherwise we just use jmp_scopes to pop a group of scopes and go
2188             // to the next instruction
2189             RefPtr<Label> nextInsn = newLabel();
2190             instructions().append(nextInsn->bind(begin, instructions().size()));
2191             emitLabel(nextInsn.get());
2192         }
2193         
2194         Vector<ControlFlowContext> savedScopeContextStack;
2195         Vector<SwitchInfo> savedSwitchContextStack;
2196         Vector<ForInContext> savedForInContextStack;
2197         SegmentedVector<LabelScope, 8> savedLabelScopes;
2198         while (topScope > bottomScope && topScope->isFinallyBlock) {
2199             // Save the current state of the world while instating the state of the world
2200             // for the finally block.
2201             FinallyContext finallyContext = topScope->finallyContext;
2202             bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
2203             bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
2204             bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
2205             bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
2206             int topScopeIndex = -1;
2207             int bottomScopeIndex = -1;
2208             if (flipScopes) {
2209                 topScopeIndex = topScope - m_scopeContextStack.begin();
2210                 bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
2211                 savedScopeContextStack = m_scopeContextStack;
2212                 m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
2213             }
2214             if (flipSwitches) {
2215                 savedSwitchContextStack = m_switchContextStack;
2216                 m_switchContextStack.shrink(finallyContext.switchContextStackSize);
2217             }
2218             if (flipForIns) {
2219                 savedForInContextStack = m_forInContextStack;
2220                 m_forInContextStack.shrink(finallyContext.forInContextStackSize);
2221             }
2222             if (flipLabelScopes) {
2223                 savedLabelScopes = m_labelScopes;
2224                 while (m_labelScopes.size() > finallyContext.labelScopesSize)
2225                     m_labelScopes.removeLast();
2226             }
2227             int savedFinallyDepth = m_finallyDepth;
2228             m_finallyDepth = finallyContext.finallyDepth;
2229             int savedDynamicScopeDepth = m_dynamicScopeDepth;
2230             m_dynamicScopeDepth = finallyContext.dynamicScopeDepth;
2231             
2232             // Emit the finally block.
2233             emitNode(finallyContext.finallyBlock);
2234             
2235             // Restore the state of the world.
2236             if (flipScopes) {
2237                 m_scopeContextStack = savedScopeContextStack;
2238                 topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
2239             bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1.
2240             }
2241             if (flipSwitches)
2242                 m_switchContextStack = savedSwitchContextStack;
2243             if (flipForIns)
2244                 m_forInContextStack = savedForInContextStack;
2245             if (flipLabelScopes)
2246                 m_labelScopes = savedLabelScopes;
2247             m_finallyDepth = savedFinallyDepth;
2248             m_dynamicScopeDepth = savedDynamicScopeDepth;
2249             
2250             --topScope;
2251         }
2252     }
2253     return emitJump(target);
2254 }
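/*
    Sketch of what the finally handling above means for a jump like

        for (;;) {
            try { break; } finally { f(); }
        }

    The 'break' cannot simply jump past the finally block; instead the finally
    body (the call to f()) is re-emitted inline along the jump path, with the
    generator's scope/switch/for-in/label stacks temporarily rolled back to the
    state they had when the finally context was pushed, then restored.
*/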
2255
2256 PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
2257 {
2258     ASSERT(scopeDepth() - targetScopeDepth >= 0);
2259     ASSERT(target->isForward());
2260
2261     size_t scopeDelta = scopeDepth() - targetScopeDepth;
2262     ASSERT(scopeDelta <= m_scopeContextStack.size());
2263     if (!scopeDelta)
2264         return emitJump(target);
2265
2266     if (m_finallyDepth)
2267         return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
2268
2269     size_t begin = instructions().size();
2270
2271     emitOpcode(op_jmp_scopes);
2272     instructions().append(scopeDelta);
2273     instructions().append(target->bind(begin, instructions().size()));
2274     return target;
2275 }
2276
2277 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
2278 {
2279     size_t begin = instructions().size();
2280
2281     emitOpcode(op_get_pnames);
2282     instructions().append(dst->index());
2283     instructions().append(base->index());
2284     instructions().append(i->index());
2285     instructions().append(size->index());
2286     instructions().append(breakTarget->bind(begin, instructions().size()));
2287     return dst;
2288 }
2289
2290 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
2291 {
2292     size_t begin = instructions().size();
2293
2294     emitOpcode(op_next_pname);
2295     instructions().append(dst->index());
2296     instructions().append(base->index());
2297     instructions().append(i->index());
2298     instructions().append(size->index());
2299     instructions().append(iter->index());
2300     instructions().append(target->bind(begin, instructions().size()));
2301     return dst;
2302 }
2303
2304 RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
2305 {
2306     m_usesExceptions = true;
2307 #if ENABLE(JIT)
2308 #if ENABLE(LLINT)
2309     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(bitwise_cast<void*>(&llint_op_catch))) };
2310 #else
2311     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
2312 #endif
2313 #else
2314     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
2315 #endif
2316
2317     m_codeBlock->addExceptionHandler(info);
2318     emitOpcode(op_catch);
2319     instructions().append(targetRegister->index());
2320     return targetRegister;
2321 }
2322
2323 void BytecodeGenerator::emitThrowReferenceError(const UString& message)
2324 {
2325     emitOpcode(op_throw_reference_error);
2326     instructions().append(addConstantValue(jsString(globalData(), message))->index());
2327 }
2328
2329 void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value)
2330 {
2331     ControlFlowContext context;
2332     context.isFinallyBlock = false;
2333     m_scopeContextStack.append(context);
2334     m_dynamicScopeDepth++;
2335
2336     emitOpcode(op_push_new_scope);
2337     instructions().append(dst->index());
2338     instructions().append(addConstant(property));
2339     instructions().append(value->index());
2340 }
2341
2342 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
2343 {
2344     SwitchInfo info = { instructions().size(), type };
2345     switch (type) {
2346         case SwitchInfo::SwitchImmediate:
2347             emitOpcode(op_switch_imm);
2348             break;
2349         case SwitchInfo::SwitchCharacter:
2350             emitOpcode(op_switch_char);
2351             break;
2352         case SwitchInfo::SwitchString:
2353             emitOpcode(op_switch_string);
2354             break;
2355         default:
2356             ASSERT_NOT_REACHED();
2357     }
2358
2359     instructions().append(0); // placeholder for table index
2360     instructions().append(0); // placeholder for default target
2361     instructions().append(scrutineeRegister->index());
2362     m_switchContextStack.append(info);
2363 }
2364
2365 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2366 {
2367     UNUSED_PARAM(max);
2368     ASSERT(node->isNumber());
2369     double value = static_cast<NumberNode*>(node)->value();
2370     int32_t key = static_cast<int32_t>(value);
2371     ASSERT(key == value);
2372     ASSERT(key >= min);
2373     ASSERT(key <= max);
2374     return key - min;
2375 }
2376
2377 static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2378 {
2379     jumpTable.min = min;
2380     jumpTable.branchOffsets.resize(max - min + 1);
2381     jumpTable.branchOffsets.fill(0);
2382     for (uint32_t i = 0; i < clauseCount; ++i) {
2383         // We're emitting this after the clause labels should have been fixed, so 
2384         // the labels should not be "forward" references
2385         ASSERT(!labels[i]->isForward());
2386         jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2387     }
2388 }
2389
2390 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2391 {
2392     UNUSED_PARAM(max);
2393     ASSERT(node->isString());
2394     StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2395     ASSERT(clause->length() == 1);
2396     
2397     int32_t key = (*clause)[0];
2398     ASSERT(key >= min);
2399     ASSERT(key <= max);
2400     return key - min;
2401 }
2402
2403 static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2404 {
2405     jumpTable.min = min;
2406     jumpTable.branchOffsets.resize(max - min + 1);
2407     jumpTable.branchOffsets.fill(0);
2408     for (uint32_t i = 0; i < clauseCount; ++i) {
2409         // We're emitting this after the clause labels should have been fixed, so 
2410         // the labels should not be "forward" references
2411         ASSERT(!labels[i]->isForward());
2412         jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2413     }
2414 }
2415
2416 static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2417 {
2418     for (uint32_t i = 0; i < clauseCount; ++i) {
2419         // We're emitting this after the clause labels should have been fixed, so 
2420         // the labels should not be "forward" references
2421         ASSERT(!labels[i]->isForward());
2422         
2423         ASSERT(nodes[i]->isString());
2424         StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
2425         OffsetLocation location;
2426         location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
2427         jumpTable.offsetTable.add(clause, location);
2428     }
2429 }
2430
2431 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2432 {
2433     SwitchInfo switchInfo = m_switchContextStack.last();
2434     m_switchContextStack.removeLast();
2435     if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
2436         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
2437         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2438
2439         SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
2440         prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2441     } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
2442         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
2443         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2444         
2445         SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
2446         prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2447     } else {
2448         ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
2449         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2450         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2451
2452         StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2453         prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2454     }
2455 }
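/*
    Worked example (sketch) for an immediate switch:

        switch (x) { case 3: ...; case 5: ...; default: ... }

    beginSwitch emits switch_imm with placeholder operands; endSwitch then
    patches in the jump table index and the default target. The table stores
    min = 3 and branchOffsets of size max - min + 1 = 3, with clause offsets at
    keys 3 - min = 0 and 5 - min = 2; the unused slot falls through to the
    default target.
*/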
2456
2457 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2458 {
2459     // It would be nice to do an even better job of identifying exactly where the expression is.
2460     // And we could make the caller pass the node pointer in, if there was some way of getting
2461     // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2462     // is still good enough to get us an accurate line number.
2463     m_expressionTooDeep = true;
2464     return newTemporary();
2465 }
2466
2467 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2468 {
2469     m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2470 }
2471
2472 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2473 {
2474     RegisterID* registerID = resolve(ident).local();
2475     if (!registerID || registerID->index() >= 0)
2476         return false;
2477     return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2478 }
2479
2480 } // namespace JSC