jneq_ptr shouldn't have a pointer
Source/JavaScriptCore/bytecompiler/BytecodeGenerator.cpp
1 /*
2  * Copyright (C) 2008, 2009, 2012 Apple Inc. All rights reserved.
3  * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4  * Copyright (C) 2012 Igalia, S.L.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  *
10  * 1.  Redistributions of source code must retain the above copyright
11  *     notice, this list of conditions and the following disclaimer.
12  * 2.  Redistributions in binary form must reproduce the above copyright
13  *     notice, this list of conditions and the following disclaimer in the
14  *     documentation and/or other materials provided with the distribution.
15  * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16  *     its contributors may be used to endorse or promote products derived
17  *     from this software without specific prior written permission.
18  *
19  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29  */
30
31 #include "config.h"
32 #include "BytecodeGenerator.h"
33
34 #include "BatchedTransitionOptimizer.h"
35 #include "Comment.h"
36 #include "Interpreter.h"
37 #include "JSActivation.h"
38 #include "JSFunction.h"
39 #include "JSNameScope.h"
40 #include "LowLevelInterpreter.h"
41 #include "StrongInlines.h"
42 #include <wtf/text/WTFString.h>
43
44 using namespace std;
45
46 namespace JSC {
47
48 /*
49     The layout of a register frame looks like this:
50
51     For
52
53     function f(x, y) {
54         var v1;
55         function g() { }
56         var v2;
57         return (x) * (y);
58     }
59
60     assuming (x) and (y) generated temporaries t1 and t2, you would have
61
62     ------------------------------------
63     |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
64     ------------------------------------
65     | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
66     ------------------------------------
67     | params->|<-locals      | temps->
68
69     Because temporary registers are allocated in a stack-like fashion, we
70     can reclaim them with a simple popping algorithm. The same goes for labels.
71     (We never reclaim parameter or local registers, because parameters and
72     locals are DontDelete.)
73
74     The register layout before a function call looks like this:
75
76     For
77
78     function f(x, y)
79     {
80     }
81
82     f(1);
83
84     >                        <------------------------------
85     <                        >  reserved: call frame  |  1 | <-- value held
86     >         >snip<         <------------------------------
87     <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
88     >                        <------------------------------
89     | params->|<-locals      | temps->
90
91     The call instruction fills in the "call frame" registers. It also pads
92     missing arguments at the end of the call:
93
94     >                        <-----------------------------------
95     <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
96     >         >snip<         <-----------------------------------
97     <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
98     >                        <-----------------------------------
99     | params->|<-locals      | temps->
100
101     After filling in missing arguments, the call instruction sets up the new
102     stack frame to overlap the end of the old stack frame:
103
104                              |---------------------------------->                        <
105                              |  reserved: call frame  |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
106                              |---------------------------------->         >snip<         <
107                              | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
108                              |---------------------------------->                        <
109                              |                        | params->|<-locals       | temps->
110
111     That way, arguments are "copied" into the callee's stack frame for free.
112
113     If the caller supplies too many arguments, this trick doesn't work. The
114     extra arguments protrude into space reserved for locals and temporaries.
115     In that case, the call instruction makes a real copy of the call frame header,
116     along with just the arguments expected by the callee, leaving the original
117     call frame header and arguments behind. (The call instruction can't just discard
118     extra arguments, because the "arguments" object may access them later.)
119     This copying strategy ensures that all named values will be at the indices
120     expected by the callee.
121 */
122
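/*
    For example, continuing the f(x, y) scenario above: a call f(1, 2, 3)
    supplies one argument more than f expects, so the extra argument would
    protrude into the space reserved for f's locals and temporaries. The call
    instruction therefore copies the call frame header and the two expected
    arguments to a fresh location, leaving the original header and all three
    arguments behind, where the "arguments" object can still reach them.
*/
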
123 void Label::setLocation(unsigned location)
124 {
125     m_location = location;
126     
127     unsigned size = m_unresolvedJumps.size();
128     for (unsigned i = 0; i < size; ++i)
129         m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
130 }
131
132 #ifndef NDEBUG
133 void ResolveResult::checkValidity()
134 {
135     switch (m_type) {
136     case Register:
137     case ReadOnlyRegister:
138         ASSERT(m_local);
139         return;
140     case Lexical:
141     case ReadOnlyLexical:
142     case DynamicLexical:
143     case DynamicReadOnlyLexical:
144         ASSERT(m_index != missingSymbolMarker());
145         return;
146     case Global:
147     case DynamicGlobal:
148         ASSERT(m_globalObject);
149         return;
150     case IndexedGlobal:
151     case ReadOnlyIndexedGlobal:
152     case WatchedIndexedGlobal:
153     case DynamicIndexedGlobal:
154     case DynamicReadOnlyIndexedGlobal:
155         ASSERT(m_index != missingSymbolMarker());
156         ASSERT(m_globalObject);
157         return;
158     case Dynamic:
159         return;
160     default:
161         ASSERT_NOT_REACHED();
162     }
163 }
164 #endif
165
166 WriteBarrier<Unknown>* ResolveResult::registerPointer() const
167 {
168     return &jsCast<JSGlobalObject*>(globalObject())->registerAt(index());
169 }
170
171 static bool s_dumpsGeneratedCode = false;
172
173 void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
174 {
175     s_dumpsGeneratedCode = dumpsGeneratedCode;
176 }
177
178 bool BytecodeGenerator::dumpsGeneratedCode()
179 {
180     return s_dumpsGeneratedCode;
181 }
182
183 JSObject* BytecodeGenerator::generate()
184 {
185     SamplingRegion samplingRegion("Bytecode Generation");
186     
187     m_codeBlock->setThisRegister(m_thisRegister.index());
188
189     m_scopeNode->emitBytecode(*this);
190     
191     for (unsigned i = 0; i < m_tryRanges.size(); ++i) {
192         TryRange& range = m_tryRanges[i];
193         ASSERT(range.tryData->targetScopeDepth != UINT_MAX);
194         HandlerInfo info = {
195             range.start->bind(0, 0), range.end->bind(0, 0),
196             range.tryData->target->bind(0, 0), range.tryData->targetScopeDepth
197 #if ENABLE(JIT)
198             ,
199 #if ENABLE(LLINT)
200             CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(LLInt::getCodePtr(llint_op_catch)))
201 #else
202             CodeLocationLabel()
203 #endif
204 #endif
205         };
206         m_codeBlock->addExceptionHandler(info);
207     }
208     
209     m_codeBlock->instructions() = RefCountedArray<Instruction>(m_instructions);
210
211     if (s_dumpsGeneratedCode)
212         m_codeBlock->dump(m_scope->globalObject()->globalExec());
213
214 #ifdef NDEBUG
215     if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
216         symbolTable().clear();
217 #endif
218
219     m_codeBlock->shrinkToFit(CodeBlock::EarlyShrink);
220
221     if (m_expressionTooDeep)
222         return createOutOfMemoryError(m_scope->globalObject());
223     return 0;
224 }
225
226 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
227 {
228     int index = m_calleeRegisters.size();
229     SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
230     SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
231
232     if (!result.isNewEntry) {
233         r0 = &registerFor(result.iterator->second.getIndex());
234         return false;
235     }
236
237     r0 = addVar();
238     return true;
239 }
240
241 int BytecodeGenerator::addGlobalVar(
242     const Identifier& ident, ConstantMode constantMode, FunctionMode functionMode)
243 {
244     UNUSED_PARAM(functionMode);
245     int index = symbolTable().size();
246     SymbolTableEntry newEntry(index, (constantMode == IsConstant) ? ReadOnly : 0);
247     if (functionMode == IsFunctionToSpecialize)
248         newEntry.attemptToWatch();
249     SymbolTable::AddResult result = symbolTable().add(ident.impl(), newEntry);
250     if (!result.isNewEntry) {
251         result.iterator->second.notifyWrite();
252         index = result.iterator->second.getIndex();
253     }
254     return index;
255 }
256
257 void BytecodeGenerator::preserveLastVar()
258 {
259     if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
260         m_lastVar = &m_calleeRegisters.last();
261 }
262
263 BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, JSScope* scope, SharedSymbolTable* symbolTable, ProgramCodeBlock* codeBlock, CompilationKind compilationKind)
264     : m_shouldEmitDebugHooks(scope->globalObject()->debugger())
265     , m_shouldEmitProfileHooks(scope->globalObject()->globalObjectMethodTable()->supportsProfiling(scope->globalObject()))
266     , m_shouldEmitRichSourceInfo(scope->globalObject()->globalObjectMethodTable()->supportsRichSourceInfo(scope->globalObject()))
267     , m_scope(*scope->globalData(), scope)
268     , m_symbolTable(symbolTable)
269 #if ENABLE(BYTECODE_COMMENTS)
270     , m_currentCommentString(0)
271 #endif
272     , m_scopeNode(programNode)
273     , m_codeBlock(codeBlock)
274     , m_thisRegister(CallFrame::thisArgumentOffset())
275     , m_emptyValueRegister(0)
276     , m_finallyDepth(0)
277     , m_dynamicScopeDepth(0)
278     , m_baseScopeDepth(0)
279     , m_codeType(GlobalCode)
280     , m_nextConstantOffset(0)
281     , m_globalConstantIndex(0)
282     , m_hasCreatedActivation(true)
283     , m_firstLazyFunction(0)
284     , m_lastLazyFunction(0)
285     , m_globalData(scope->globalData())
286     , m_lastOpcodeID(op_end)
287 #ifndef NDEBUG
288     , m_lastOpcodePosition(0)
289 #endif
290     , m_stack(wtfThreadData().stack())
291     , m_usesExceptions(false)
292     , m_expressionTooDeep(false)
293 {
294     m_globalData->startedCompiling(m_codeBlock);
295     if (m_shouldEmitDebugHooks)
296         m_codeBlock->setNeedsFullScopeChain(true);
297
298     codeBlock->setGlobalData(m_globalData);
299     symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());    
300     m_codeBlock->setNumParameters(1); // Allocate space for "this"
301
302     prependComment("entering Program block");
303     emitOpcode(op_enter);
304
305     // FIXME: Move code that modifies the global object to Interpreter::execute.
306     
307     if (compilationKind == OptimizingCompilation)
308         return;
309
310     JSGlobalObject* globalObject = scope->globalObject();
311     ExecState* exec = globalObject->globalExec();
312     
313     BatchedTransitionOptimizer optimizer(*m_globalData, globalObject);
314
315     const VarStack& varStack = programNode->varStack();
316     const FunctionStack& functionStack = programNode->functionStack();
317
318     size_t newGlobals = varStack.size() + functionStack.size();
319     if (!newGlobals)
320         return;
321     globalObject->addRegisters(newGlobals);
322
323     for (size_t i = 0; i < functionStack.size(); ++i) {
324         FunctionBodyNode* function = functionStack[i];
325         bool propertyDidExist = 
326             globalObject->removeDirect(*m_globalData, function->ident()); // Newly declared functions overwrite existing properties.
327         
328         JSValue value = JSFunction::create(exec, FunctionExecutable::create(*m_globalData, function), scope);
329         int index = addGlobalVar(
330             function->ident(), IsVariable,
331             !propertyDidExist ? IsFunctionToSpecialize : NotFunctionOrNotSpecializable);
332         globalObject->registerAt(index).set(*m_globalData, globalObject, value);
333     }
334
335     for (size_t i = 0; i < varStack.size(); ++i) {
336         if (globalObject->hasProperty(exec, *varStack[i].first))
337             continue;
338         addGlobalVar(
339             *varStack[i].first,
340             (varStack[i].second & DeclarationStacks::IsConstant) ? IsConstant : IsVariable,
341             NotFunctionOrNotSpecializable);
342     }
343 }
344
345 BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, JSScope* scope, SharedSymbolTable* symbolTable, CodeBlock* codeBlock, CompilationKind)
346     : m_shouldEmitDebugHooks(scope->globalObject()->debugger())
347     , m_shouldEmitProfileHooks(scope->globalObject()->globalObjectMethodTable()->supportsProfiling(scope->globalObject()))
348     , m_shouldEmitRichSourceInfo(scope->globalObject()->globalObjectMethodTable()->supportsRichSourceInfo(scope->globalObject()))
349     , m_scope(*scope->globalData(), scope)
350     , m_symbolTable(symbolTable)
351 #if ENABLE(BYTECODE_COMMENTS)
352     , m_currentCommentString(0)
353 #endif
354     , m_scopeNode(functionBody)
355     , m_codeBlock(codeBlock)
356     , m_activationRegister(0)
357     , m_emptyValueRegister(0)
358     , m_finallyDepth(0)
359     , m_dynamicScopeDepth(0)
360     , m_baseScopeDepth(0)
361     , m_codeType(FunctionCode)
362     , m_nextConstantOffset(0)
363     , m_globalConstantIndex(0)
364     , m_hasCreatedActivation(false)
365     , m_firstLazyFunction(0)
366     , m_lastLazyFunction(0)
367     , m_globalData(scope->globalData())
368     , m_lastOpcodeID(op_end)
369 #ifndef NDEBUG
370     , m_lastOpcodePosition(0)
371 #endif
372     , m_stack(wtfThreadData().stack())
373     , m_usesExceptions(false)
374     , m_expressionTooDeep(false)
375 {
376     m_globalData->startedCompiling(m_codeBlock);
377     if (m_shouldEmitDebugHooks)
378         m_codeBlock->setNeedsFullScopeChain(true);
379
380     codeBlock->setGlobalData(m_globalData);
381     symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
382     symbolTable->setParameterCountIncludingThis(functionBody->parameters()->size() + 1);
383
384     prependComment("entering Function block");
385     emitOpcode(op_enter);
386     if (m_codeBlock->needsFullScopeChain()) {
387         m_activationRegister = addVar();
388         prependComment("activation for Full Scope Chain");
389         emitInitLazyRegister(m_activationRegister);
390         m_codeBlock->setActivationRegister(m_activationRegister->index());
391     }
392
393     symbolTable->setCaptureStart(m_codeBlock->m_numVars);
394
395     if (functionBody->usesArguments() || codeBlock->usesEval() || m_shouldEmitDebugHooks) { // May reify arguments object.
396         RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
397         RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.
398
399         // We can save a little space by hard-coding the knowledge that the two
400         // 'arguments' values are stored in consecutive registers, and storing
401         // only the index of the assignable one.
402         codeBlock->setArgumentsRegister(argumentsRegister->index());
403         ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));
404
405         prependComment("arguments for Full Scope Chain");
406         emitInitLazyRegister(argumentsRegister);
407         prependComment("unmodified arguments for Full Scope Chain");
408         emitInitLazyRegister(unmodifiedArgumentsRegister);
409         
410         if (m_codeBlock->isStrictMode()) {
411             prependComment("create arguments for strict mode");
412             emitOpcode(op_create_arguments);
413             instructions().append(argumentsRegister->index());
414         }
415
416         // The debugger currently retrieves the arguments object from an activation rather than pulling
417         // it from a call frame.  In the long-term it should stop doing that (<rdar://problem/6911886>),
418         // but for now we force eager creation of the arguments object when debugging.
419         if (m_shouldEmitDebugHooks) {
420             prependComment("create arguments for debug hooks");
421             emitOpcode(op_create_arguments);
422             instructions().append(argumentsRegister->index());
423         }
424     }
425
426     bool shouldCaptureAllTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();
427
428     bool capturesAnyArgumentByName = false;
429     Vector<RegisterID*> capturedArguments;
430     if (functionBody->hasCapturedVariables() || shouldCaptureAllTheThings) {
431         FunctionParameters& parameters = *functionBody->parameters();
432         capturedArguments.resize(parameters.size());
433         for (size_t i = 0; i < parameters.size(); ++i) {
434             capturedArguments[i] = 0;
435             if (!functionBody->captures(parameters[i]) && !shouldCaptureAllTheThings)
436                 continue;
437             capturesAnyArgumentByName = true;
438             capturedArguments[i] = addVar();
439         }
440     }
441
442     if (capturesAnyArgumentByName && !codeBlock->isStrictMode()) {
443         size_t parameterCount = symbolTable->parameterCount();
444         OwnArrayPtr<SlowArgument> slowArguments = adoptArrayPtr(new SlowArgument[parameterCount]);
445         for (size_t i = 0; i < parameterCount; ++i) {
446             if (!capturedArguments[i]) {
447                 ASSERT(slowArguments[i].status == SlowArgument::Normal);
448                 slowArguments[i].index = CallFrame::argumentOffset(i);
449                 continue;
450             }
451             slowArguments[i].status = SlowArgument::Captured;
452             slowArguments[i].index = capturedArguments[i]->index();
453         }
454         symbolTable->setSlowArguments(slowArguments.release());
455     }
456
457     RegisterID* calleeRegister = resolveCallee(functionBody); // May push to the scope chain and/or add a captured var.
458
459     const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
460     const DeclarationStacks::VarStack& varStack = functionBody->varStack();
461
462     // Captured variables and functions go first so that activations don't have
463     // to step over the non-captured locals to mark them.
464     m_hasCreatedActivation = false;
465     if (functionBody->hasCapturedVariables()) {
466         for (size_t i = 0; i < functionStack.size(); ++i) {
467             FunctionBodyNode* function = functionStack[i];
468             const Identifier& ident = function->ident();
469             if (functionBody->captures(ident)) {
470                 if (!m_hasCreatedActivation) {
471                     m_hasCreatedActivation = true;
472                     prependComment("activation for captured vars");
473                     emitOpcode(op_create_activation);
474                     instructions().append(m_activationRegister->index());
475                 }
476                 m_functions.add(ident.impl());
477                 prependComment("captured function var");
478                 emitNewFunction(addVar(ident, false), function);
479             }
480         }
481         for (size_t i = 0; i < varStack.size(); ++i) {
482             const Identifier& ident = *varStack[i].first;
483             if (functionBody->captures(ident))
484                 addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
485         }
486     }
487     bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
488     if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
489         m_hasCreatedActivation = true;
490         prependComment("cannot lazily create functions");
491         emitOpcode(op_create_activation);
492         instructions().append(m_activationRegister->index());
493     }
494
495     symbolTable->setCaptureEnd(codeBlock->m_numVars);
496
497     m_firstLazyFunction = codeBlock->m_numVars;
498     for (size_t i = 0; i < functionStack.size(); ++i) {
499         FunctionBodyNode* function = functionStack[i];
500         const Identifier& ident = function->ident();
501         if (!functionBody->captures(ident)) {
502             m_functions.add(ident.impl());
503             RefPtr<RegisterID> reg = addVar(ident, false);
504             // Don't lazily create functions that override the name 'arguments'
505             // as this would complicate lazy instantiation of actual arguments.
506             prependComment("a function that overrides 'arguments'");
507             if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
508                 emitNewFunction(reg.get(), function);
509             else {
510                 emitInitLazyRegister(reg.get());
511                 m_lazyFunctions.set(reg->index(), function);
512             }
513         }
514     }
515     m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
516     for (size_t i = 0; i < varStack.size(); ++i) {
517         const Identifier& ident = *varStack[i].first;
518         if (!functionBody->captures(ident))
519             addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
520     }
521
522     if (shouldCaptureAllTheThings)
523         symbolTable->setCaptureEnd(codeBlock->m_numVars);
524
525     FunctionParameters& parameters = *functionBody->parameters();
526     m_parameters.grow(parameters.size() + 1); // reserve space for "this"
527
528     // Add "this" as a parameter
529     int nextParameterIndex = CallFrame::thisArgumentOffset();
530     m_thisRegister.setIndex(nextParameterIndex--);
531     m_codeBlock->addParameter();
532     
533     for (size_t i = 0; i < parameters.size(); ++i, --nextParameterIndex) {
534         int index = nextParameterIndex;
535         if (capturedArguments.size() && capturedArguments[i]) {
536             ASSERT((functionBody->hasCapturedVariables() && functionBody->captures(parameters[i])) || shouldCaptureAllTheThings);
537             index = capturedArguments[i]->index();
538             RegisterID original(nextParameterIndex);
539             emitMove(capturedArguments[i], &original);
540         }
541         addParameter(parameters[i], index);
542     }
543     preserveLastVar();
544
545     // We declare the callee's name last because it should lose to a var, function, and/or parameter declaration.
546     addCallee(functionBody, calleeRegister);
547
548     if (isConstructor()) {
549         prependComment("'this' because we are a Constructor function");
550         emitOpcode(op_create_this);
551         instructions().append(m_thisRegister.index());
552     } else if (!codeBlock->isStrictMode() && (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks)) {
553         ValueProfile* profile = emitProfiledOpcode(op_convert_this);
554         instructions().append(m_thisRegister.index());
555         instructions().append(profile);
556     }
557 }
558
559 BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, JSScope* scope, SharedSymbolTable* symbolTable, EvalCodeBlock* codeBlock, CompilationKind)
560     : m_shouldEmitDebugHooks(scope->globalObject()->debugger())
561     , m_shouldEmitProfileHooks(scope->globalObject()->globalObjectMethodTable()->supportsProfiling(scope->globalObject()))
562     , m_shouldEmitRichSourceInfo(scope->globalObject()->globalObjectMethodTable()->supportsRichSourceInfo(scope->globalObject()))
563     , m_scope(*scope->globalData(), scope)
564     , m_symbolTable(symbolTable)
565 #if ENABLE(BYTECODE_COMMENTS)
566     , m_currentCommentString(0)
567 #endif
568     , m_scopeNode(evalNode)
569     , m_codeBlock(codeBlock)
570     , m_thisRegister(CallFrame::thisArgumentOffset())
571     , m_emptyValueRegister(0)
572     , m_finallyDepth(0)
573     , m_dynamicScopeDepth(0)
574     , m_baseScopeDepth(codeBlock->baseScopeDepth())
575     , m_codeType(EvalCode)
576     , m_nextConstantOffset(0)
577     , m_globalConstantIndex(0)
578     , m_hasCreatedActivation(true)
579     , m_firstLazyFunction(0)
580     , m_lastLazyFunction(0)
581     , m_globalData(scope->globalData())
582     , m_lastOpcodeID(op_end)
583 #ifndef NDEBUG
584     , m_lastOpcodePosition(0)
585 #endif
586     , m_stack(wtfThreadData().stack())
587     , m_usesExceptions(false)
588     , m_expressionTooDeep(false)
589 {
590     m_globalData->startedCompiling(m_codeBlock);
591     if (m_shouldEmitDebugHooks || m_baseScopeDepth)
592         m_codeBlock->setNeedsFullScopeChain(true);
593
594     codeBlock->setGlobalData(m_globalData);
595     symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());    
596     m_codeBlock->setNumParameters(1);
597
598     prependComment("entering Eval block");
599     emitOpcode(op_enter);
600
601     const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
602     for (size_t i = 0; i < functionStack.size(); ++i)
603         m_codeBlock->addFunctionDecl(FunctionExecutable::create(*m_globalData, functionStack[i]));
604
605     const DeclarationStacks::VarStack& varStack = evalNode->varStack();
606     unsigned numVariables = varStack.size();
607     Vector<Identifier> variables;
608     variables.reserveCapacity(numVariables);
609     for (size_t i = 0; i < numVariables; ++i)
610         variables.append(*varStack[i].first);
611     codeBlock->adoptVariables(variables);
612     preserveLastVar();
613 }
614
615 BytecodeGenerator::~BytecodeGenerator()
616 {
617     m_globalData->finishedCompiling(m_codeBlock);
618 }
619
620 RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
621 {
622     emitOpcode(op_init_lazy_reg);
623     instructions().append(reg->index());
624     return reg;
625 }
626
627 RegisterID* BytecodeGenerator::resolveCallee(FunctionBodyNode* functionBodyNode)
628 {
629     if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
630         return 0;
631
632     m_calleeRegister.setIndex(RegisterFile::Callee);
633
634     // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
635     if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks) {
636         emitOpcode(op_push_name_scope);
637         instructions().append(addConstant(functionBodyNode->ident()));
638         instructions().append(m_calleeRegister.index());
639         instructions().append(ReadOnly | DontDelete);
640
641         // Put a mirror object in compilation scope, so compile-time variable resolution sees the property name we'll see at runtime.
642         m_scope.set(*globalData(),
643             JSNameScope::create(
644                 m_scope->globalObject()->globalExec(),
645                 functionBodyNode->ident(),
646                 jsUndefined(),
647                 ReadOnly | DontDelete,
648                 m_scope.get()
649             )
650         );
651         return 0;
652     }
653
654     if (!functionBodyNode->captures(functionBodyNode->ident()))
655         return &m_calleeRegister;
656
657     // Move the callee into the captured section of the stack.
658     return emitMove(addVar(), &m_calleeRegister);
659 }
660
661 void BytecodeGenerator::addCallee(FunctionBodyNode* functionBodyNode, RegisterID* calleeRegister)
662 {
663     if (functionBodyNode->ident().isNull() || !functionBodyNode->functionNameIsInScope())
664         return;
665
666     // If non-strict eval is in play, we use a separate object in the scope chain for the callee's name.
667     if ((m_codeBlock->usesEval() && !m_codeBlock->isStrictMode()) || m_shouldEmitDebugHooks)
668         return;
669
670     ASSERT(calleeRegister);
671     symbolTable().add(functionBodyNode->ident().impl(), SymbolTableEntry(calleeRegister->index(), ReadOnly));
672 }
673
674 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
675 {
676     // Parameters overwrite var declarations, but not function declarations.
677     StringImpl* rep = ident.impl();
678     if (!m_functions.contains(rep)) {
679         symbolTable().set(rep, parameterIndex);
680         RegisterID& parameter = registerFor(parameterIndex);
681         parameter.setIndex(parameterIndex);
682     }
683
684     // To maintain the calling convention, we have to allocate unique space for
685     // each parameter, even if the parameter doesn't make it into the symbol table.
686     m_codeBlock->addParameter();
687 }
688
689 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
690 {
691     if (ident != propertyNames().arguments)
692         return false;
693     
694     if (!shouldOptimizeLocals())
695         return false;
696     
697     SymbolTableEntry entry = symbolTable().get(ident.impl());
698     if (entry.isNull())
699         return false;
700
701     if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
702         return true;
703     
704     return false;
705 }
706
707 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
708 {
709     ASSERT(willResolveToArguments(propertyNames().arguments));
710
711     SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
712     ASSERT(!entry.isNull());
713     return &registerFor(entry.getIndex());
714 }
715
716 RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
717 {
718     if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
719         return reg;
720     emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
721     return reg;
722 }
723
724 RegisterID* BytecodeGenerator::newRegister()
725 {
726     m_calleeRegisters.append(m_calleeRegisters.size());
727     m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
728     return &m_calleeRegisters.last();
729 }
730
731 RegisterID* BytecodeGenerator::newTemporary()
732 {
733     // Reclaim free register IDs.
734     while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
735         m_calleeRegisters.removeLast();
736         
737     RegisterID* result = newRegister();
738     result->setTemporary();
739     return result;
740 }
741
742 PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
743 {
744     // Reclaim free label scopes.
745     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
746         m_labelScopes.removeLast();
747
748     // Allocate new label scope.
749     LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
750     m_labelScopes.append(scope);
751     return &m_labelScopes.last();
752 }
753
754 PassRefPtr<Label> BytecodeGenerator::newLabel()
755 {
756     // Reclaim free label IDs.
757     while (m_labels.size() && !m_labels.last().refCount())
758         m_labels.removeLast();
759
760     // Allocate new label ID.
761     m_labels.append(this);
762     return &m_labels.last();
763 }
764
765 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
766 {
767     unsigned newLabelIndex = instructions().size();
768     l0->setLocation(newLabelIndex);
769
770     if (m_codeBlock->numberOfJumpTargets()) {
771         unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
772         ASSERT(lastLabelIndex <= newLabelIndex);
773         if (newLabelIndex == lastLabelIndex) {
774             // Peephole optimizations have already been disabled by emitting the last label
775             return l0;
776         }
777     }
778
779     m_codeBlock->addJumpTarget(newLabelIndex);
780
781     // This disables peephole optimizations when an instruction is a jump target
782     m_lastOpcodeID = op_end;
783     return l0;
784 }
785
786 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
787 {
788 #ifndef NDEBUG
789     size_t opcodePosition = instructions().size();
790     ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
791     m_lastOpcodePosition = opcodePosition;
792 #endif
793     emitComment();
794     instructions().append(globalData()->interpreter->getOpcode(opcodeID));
795     m_lastOpcodeID = opcodeID;
796 }
797
798 #if ENABLE(BYTECODE_COMMENTS)
799 // Record a comment in the CodeBlock's comments list for the current opcode
800 // that is about to be emitted.
801 void BytecodeGenerator::emitComment()
802 {
803     if (m_currentCommentString) {
804         size_t opcodePosition = instructions().size();
805         Comment comment = { opcodePosition, m_currentCommentString };
806         m_codeBlock->bytecodeComments().append(comment);
807         m_currentCommentString = 0;
808     }
809 }
810
811 // Register a comment to be associated with the next opcode that will be emitted.
812 void BytecodeGenerator::prependComment(const char* string)
813 {
814     m_currentCommentString = string;
815 }
816 #endif
817
818 ArrayProfile* BytecodeGenerator::newArrayProfile()
819 {
820 #if ENABLE(VALUE_PROFILER)
821     return m_codeBlock->addArrayProfile(instructions().size());
822 #else
823     return 0;
824 #endif
825 }
826
827 ValueProfile* BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
828 {
829 #if ENABLE(VALUE_PROFILER)
830     ValueProfile* result = m_codeBlock->addValueProfile(instructions().size());
831 #else
832     ValueProfile* result = 0;
833 #endif
834     emitOpcode(opcodeID);
835     return result;
836 }
837
838 void BytecodeGenerator::emitLoopHint()
839 {
840 #if ENABLE(DFG_JIT)
841     emitOpcode(op_loop_hint);
842 #endif
843 }
844
845 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
846 {
847     ASSERT(instructions().size() >= 4);
848     size_t size = instructions().size();
849     dstIndex = instructions().at(size - 3).u.operand;
850     src1Index = instructions().at(size - 2).u.operand;
851     src2Index = instructions().at(size - 1).u.operand;
852 }
853
854 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
855 {
856     ASSERT(instructions().size() >= 3);
857     size_t size = instructions().size();
858     dstIndex = instructions().at(size - 2).u.operand;
859     srcIndex = instructions().at(size - 1).u.operand;
860 }
861
862 void BytecodeGenerator::retrieveLastUnaryOp(WriteBarrier<Unknown>*& dstPointer, int& srcIndex)
863 {
864     ASSERT(instructions().size() >= 3);
865     size_t size = instructions().size();
866     dstPointer = instructions().at(size - 2).u.registerPointer;
867     srcIndex = instructions().at(size - 1).u.operand;
868 }
869
870 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
871 {
872     ASSERT(instructions().size() >= 4);
873     instructions().shrink(instructions().size() - 4);
874     m_lastOpcodeID = op_end;
875 }
876
877 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
878 {
879     ASSERT(instructions().size() >= 3);
880     instructions().shrink(instructions().size() - 3);
881     m_lastOpcodeID = op_end;
882 }
883
884 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
885 {
886     size_t begin = instructions().size();
887     emitOpcode(target->isForward() ? op_jmp : op_loop);
888     instructions().append(target->bind(begin, instructions().size()));
889     return target;
890 }
891
892 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
893 {
894     if (m_lastOpcodeID == op_less) {
895         int dstIndex;
896         int src1Index;
897         int src2Index;
898
899         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
900
901         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
902             rewindBinaryOp();
903
904             size_t begin = instructions().size();
905             emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
906             instructions().append(src1Index);
907             instructions().append(src2Index);
908             instructions().append(target->bind(begin, instructions().size()));
909             return target;
910         }
911     } else if (m_lastOpcodeID == op_lesseq) {
912         int dstIndex;
913         int src1Index;
914         int src2Index;
915
916         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
917
918         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
919             rewindBinaryOp();
920
921             size_t begin = instructions().size();
922             emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
923             instructions().append(src1Index);
924             instructions().append(src2Index);
925             instructions().append(target->bind(begin, instructions().size()));
926             return target;
927         }
928     } else if (m_lastOpcodeID == op_greater) {
929         int dstIndex;
930         int src1Index;
931         int src2Index;
932
933         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
934
935         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
936             rewindBinaryOp();
937
938             size_t begin = instructions().size();
939             emitOpcode(target->isForward() ? op_jgreater : op_loop_if_greater);
940             instructions().append(src1Index);
941             instructions().append(src2Index);
942             instructions().append(target->bind(begin, instructions().size()));
943             return target;
944         }
945     } else if (m_lastOpcodeID == op_greatereq) {
946         int dstIndex;
947         int src1Index;
948         int src2Index;
949
950         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
951
952         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
953             rewindBinaryOp();
954
955             size_t begin = instructions().size();
956             emitOpcode(target->isForward() ? op_jgreatereq : op_loop_if_greatereq);
957             instructions().append(src1Index);
958             instructions().append(src2Index);
959             instructions().append(target->bind(begin, instructions().size()));
960             return target;
961         }
962     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
963         int dstIndex;
964         int srcIndex;
965
966         retrieveLastUnaryOp(dstIndex, srcIndex);
967
968         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
969             rewindUnaryOp();
970
971             size_t begin = instructions().size();
972             emitOpcode(op_jeq_null);
973             instructions().append(srcIndex);
974             instructions().append(target->bind(begin, instructions().size()));
975             return target;
976         }
977     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
978         int dstIndex;
979         int srcIndex;
980
981         retrieveLastUnaryOp(dstIndex, srcIndex);
982
983         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
984             rewindUnaryOp();
985
986             size_t begin = instructions().size();
987             emitOpcode(op_jneq_null);
988             instructions().append(srcIndex);
989             instructions().append(target->bind(begin, instructions().size()));
990             return target;
991         }
992     }
993
994     size_t begin = instructions().size();
995
996     emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
997     instructions().append(cond->index());
998     instructions().append(target->bind(begin, instructions().size()));
999     return target;
1000 }
1001
1002 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
1003 {
1004     if (m_lastOpcodeID == op_less && target->isForward()) {
1005         int dstIndex;
1006         int src1Index;
1007         int src2Index;
1008
1009         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
1010
1011         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
1012             rewindBinaryOp();
1013
1014             size_t begin = instructions().size();
1015             emitOpcode(op_jnless);
1016             instructions().append(src1Index);
1017             instructions().append(src2Index);
1018             instructions().append(target->bind(begin, instructions().size()));
1019             return target;
1020         }
1021     } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
1022         int dstIndex;
1023         int src1Index;
1024         int src2Index;
1025
1026         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
1027
1028         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
1029             rewindBinaryOp();
1030
1031             size_t begin = instructions().size();
1032             emitOpcode(op_jnlesseq);
1033             instructions().append(src1Index);
1034             instructions().append(src2Index);
1035             instructions().append(target->bind(begin, instructions().size()));
1036             return target;
1037         }
1038     } else if (m_lastOpcodeID == op_greater && target->isForward()) {
1039         int dstIndex;
1040         int src1Index;
1041         int src2Index;
1042
1043         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
1044
1045         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
1046             rewindBinaryOp();
1047
1048             size_t begin = instructions().size();
1049             emitOpcode(op_jngreater);
1050             instructions().append(src1Index);
1051             instructions().append(src2Index);
1052             instructions().append(target->bind(begin, instructions().size()));
1053             return target;
1054         }
1055     } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
1056         int dstIndex;
1057         int src1Index;
1058         int src2Index;
1059
1060         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
1061
1062         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
1063             rewindBinaryOp();
1064
1065             size_t begin = instructions().size();
1066             emitOpcode(op_jngreatereq);
1067             instructions().append(src1Index);
1068             instructions().append(src2Index);
1069             instructions().append(target->bind(begin, instructions().size()));
1070             return target;
1071         }
1072     } else if (m_lastOpcodeID == op_not) {
1073         int dstIndex;
1074         int srcIndex;
1075
1076         retrieveLastUnaryOp(dstIndex, srcIndex);
1077
1078         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
1079             rewindUnaryOp();
1080
1081             size_t begin = instructions().size();
1082             emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
1083             instructions().append(srcIndex);
1084             instructions().append(target->bind(begin, instructions().size()));
1085             return target;
1086         }
1087     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
1088         int dstIndex;
1089         int srcIndex;
1090
1091         retrieveLastUnaryOp(dstIndex, srcIndex);
1092
1093         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
1094             rewindUnaryOp();
1095
1096             size_t begin = instructions().size();
1097             emitOpcode(op_jneq_null);
1098             instructions().append(srcIndex);
1099             instructions().append(target->bind(begin, instructions().size()));
1100             return target;
1101         }
1102     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
1103         int dstIndex;
1104         int srcIndex;
1105
1106         retrieveLastUnaryOp(dstIndex, srcIndex);
1107
1108         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
1109             rewindUnaryOp();
1110
1111             size_t begin = instructions().size();
1112             emitOpcode(op_jeq_null);
1113             instructions().append(srcIndex);
1114             instructions().append(target->bind(begin, instructions().size()));
1115             return target;
1116         }
1117     }
1118
1119     size_t begin = instructions().size();
1120     emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
1121     instructions().append(cond->index());
1122     instructions().append(target->bind(begin, instructions().size()));
1123     return target;
1124 }
1125
1126 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
1127 {
1128     size_t begin = instructions().size();
1129
1130     emitOpcode(op_jneq_ptr);
1131     instructions().append(cond->index());
1132     instructions().append(Special::CallFunction);
1133     instructions().append(target->bind(begin, instructions().size()));
1134     return target;
1135 }
1136
1137 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
1138 {
1139     size_t begin = instructions().size();
1140
1141     emitOpcode(op_jneq_ptr);
1142     instructions().append(cond->index());
1143     instructions().append(Special::ApplyFunction);
1144     instructions().append(target->bind(begin, instructions().size()));
1145     return target;
1146 }
1147
1148 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
1149 {
1150     StringImpl* rep = ident.impl();
1151     IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
1152     if (result.isNewEntry)
1153         m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
1154
1155     return result.iterator->second;
1156 }
1157
1158 // We can't hash JSValue(), so we use a dedicated data member to cache it.
1159 RegisterID* BytecodeGenerator::addConstantEmptyValue()
1160 {
1161     if (!m_emptyValueRegister) {
1162         int index = m_nextConstantOffset;
1163         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1164         ++m_nextConstantOffset;
1165         m_codeBlock->addConstant(JSValue());
1166         m_emptyValueRegister = &m_constantPoolRegisters[index];
1167     }
1168
1169     return m_emptyValueRegister;
1170 }
1171
1172 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
1173 {
1174     if (!v)
1175         return addConstantEmptyValue();
1176
1177     int index = m_nextConstantOffset;
1178     JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
1179     if (result.isNewEntry) {
1180         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
1181         ++m_nextConstantOffset;
1182         m_codeBlock->addConstant(v);
1183     } else
1184         index = result.iterator->second;
1185     return &m_constantPoolRegisters[index];
1186 }
1187
1188 unsigned BytecodeGenerator::addRegExp(RegExp* r)
1189 {
1190     return m_codeBlock->addRegExp(r);
1191 }
1192
1193 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
1194 {
1195     emitOpcode(op_mov);
1196     instructions().append(dst->index());
1197     instructions().append(src->index());
1198     return dst;
1199 }
1200
1201 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
1202 {
1203     emitOpcode(opcodeID);
1204     instructions().append(dst->index());
1205     instructions().append(src->index());
1206     return dst;
1207 }
1208
1209 RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
1210 {
1211     emitOpcode(op_pre_inc);
1212     instructions().append(srcDst->index());
1213     return srcDst;
1214 }
1215
1216 RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
1217 {
1218     emitOpcode(op_pre_dec);
1219     instructions().append(srcDst->index());
1220     return srcDst;
1221 }
1222
1223 RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
1224 {
1225     emitOpcode(op_post_inc);
1226     instructions().append(dst->index());
1227     instructions().append(srcDst->index());
1228     return dst;
1229 }
1230
1231 RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
1232 {
1233     emitOpcode(op_post_dec);
1234     instructions().append(dst->index());
1235     instructions().append(srcDst->index());
1236     return dst;
1237 }
1238
1239 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1240 {
1241     emitOpcode(opcodeID);
1242     instructions().append(dst->index());
1243     instructions().append(src1->index());
1244     instructions().append(src2->index());
1245
1246     if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
1247         opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
1248         instructions().append(types.toInt());
1249
1250     return dst;
1251 }
1252
1253 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1254 {
1255     if (m_lastOpcodeID == op_typeof) {
1256         int dstIndex;
1257         int srcIndex;
1258
1259         retrieveLastUnaryOp(dstIndex, srcIndex);
1260
1261         if (src1->index() == dstIndex
1262             && src1->isTemporary()
1263             && m_codeBlock->isConstantRegisterIndex(src2->index())
1264             && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1265             const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1266             if (value == "undefined") {
1267                 rewindUnaryOp();
1268                 emitOpcode(op_is_undefined);
1269                 instructions().append(dst->index());
1270                 instructions().append(srcIndex);
1271                 return dst;
1272             }
1273             if (value == "boolean") {
1274                 rewindUnaryOp();
1275                 emitOpcode(op_is_boolean);
1276                 instructions().append(dst->index());
1277                 instructions().append(srcIndex);
1278                 return dst;
1279             }
1280             if (value == "number") {
1281                 rewindUnaryOp();
1282                 emitOpcode(op_is_number);
1283                 instructions().append(dst->index());
1284                 instructions().append(srcIndex);
1285                 return dst;
1286             }
1287             if (value == "string") {
1288                 rewindUnaryOp();
1289                 emitOpcode(op_is_string);
1290                 instructions().append(dst->index());
1291                 instructions().append(srcIndex);
1292                 return dst;
1293             }
1294             if (value == "object") {
1295                 rewindUnaryOp();
1296                 emitOpcode(op_is_object);
1297                 instructions().append(dst->index());
1298                 instructions().append(srcIndex);
1299                 return dst;
1300             }
1301             if (value == "function") {
1302                 rewindUnaryOp();
1303                 emitOpcode(op_is_function);
1304                 instructions().append(dst->index());
1305                 instructions().append(srcIndex);
1306                 return dst;
1307             }
1308         }
1309     }
1310
1311     emitOpcode(opcodeID);
1312     instructions().append(dst->index());
1313     instructions().append(src1->index());
1314     instructions().append(src2->index());
1315     return dst;
1316 }
1317
1318 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1319 {
1320     return emitLoad(dst, jsBoolean(b));
1321 }
1322
1323 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1324 {
1325     // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1326     // Later we can do the extra work to handle that like the other cases.  They also don't
1327     // work correctly with NaN as a key.
1328     if (isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1329         return emitLoad(dst, jsNumber(number));
1330     JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->second;
1331     if (!valueInMap)
1332         valueInMap = jsNumber(number);
1333     return emitLoad(dst, valueInMap);
1334 }
1335
1336 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1337 {
1338     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
1339     if (!stringInMap)
1340         stringInMap = jsOwnedString(globalData(), identifier.string());
1341     return emitLoad(dst, JSValue(stringInMap));
1342 }
1343
1344 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1345 {
1346     RegisterID* constantID = addConstantValue(v);
1347     if (dst)
1348         return emitMove(dst, constantID);
1349     return constantID;
1350 }
1351
1352 ResolveResult BytecodeGenerator::resolve(const Identifier& property)
1353 {
1354     if (property == propertyNames().thisIdentifier)
1355         return ResolveResult::registerResolve(thisRegister(), ResolveResult::ReadOnlyFlag);
1356
1357     // Check if the property should be allocated in a register.
1358     if (m_codeType != GlobalCode && shouldOptimizeLocals()) {
1359         SymbolTableEntry entry = symbolTable().get(property.impl());
1360         if (!entry.isNull()) {
1361             if (property == propertyNames().arguments)
1362                 createArgumentsIfNecessary();
1363             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1364             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1365             return ResolveResult::registerResolve(local, flags);
1366         }
1367     }
1368
1369     // Cases where we cannot statically optimize the lookup.
1370     if (property == propertyNames().arguments || !canOptimizeNonLocals())
1371         return ResolveResult::dynamicResolve(0);
1372
1373     ScopeChainIterator iter = m_scope->begin();
1374     ScopeChainIterator end = m_scope->end();
1375     size_t depth = 0;
1376     size_t depthOfFirstScopeWithDynamicChecks = 0;
1377     unsigned flags = 0;
1378     for (; iter != end; ++iter, ++depth) {
1379         JSObject* currentScope = iter.get();
1380         if (!currentScope->isVariableObject()) {
1381             flags |= ResolveResult::DynamicFlag;
1382             break;
1383         }
1384         JSSymbolTableObject* currentVariableObject = jsCast<JSSymbolTableObject*>(currentScope);
1385         SymbolTableEntry entry = currentVariableObject->symbolTable()->get(property.impl());
1386
1387         // Found the property
1388         if (!entry.isNull()) {
1389             if (entry.isReadOnly())
1390                 flags |= ResolveResult::ReadOnlyFlag;
1391             depth += m_codeBlock->needsFullScopeChain();
1392             if (++iter == end) {
1393                 if (flags & ResolveResult::DynamicFlag)
1394                     return ResolveResult::dynamicIndexedGlobalResolve(entry.getIndex(), depth, currentScope, flags);
1395                 return ResolveResult::indexedGlobalResolve(
1396                     entry.getIndex(), currentScope,
1397                     flags | (entry.couldBeWatched() ? ResolveResult::WatchedFlag : 0));
1398             }
1399 #if !ASSERT_DISABLED
1400             if (JSActivation* activation = jsDynamicCast<JSActivation*>(currentVariableObject))
1401                 ASSERT(activation->isValid(entry));
1402 #endif
1403             return ResolveResult::lexicalResolve(entry.getIndex(), depth, flags);
1404         }
1405         bool scopeRequiresDynamicChecks = false;
1406         if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
1407             break;
1408         if (!(flags & ResolveResult::DynamicFlag)) {
1409             if (scopeRequiresDynamicChecks)
1410                 flags |= ResolveResult::DynamicFlag;
1411             else
1412                 ++depthOfFirstScopeWithDynamicChecks;
1413         }
1414     }
1415
1416     // Can't locate the property but we're able to avoid a few lookups.
1417     JSObject* scope = iter.get();
1418     // Step over the function's activation, if it needs one. At this point we
1419     // know there is no dynamic scope in the function itself, so this is safe to
1420     // do.
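    // (needsFullScopeChain() is a bool, so the additions below advance the depths by
    // exactly one slot when the function has an activation.)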
1421     depth += m_codeBlock->needsFullScopeChain();
1422     depthOfFirstScopeWithDynamicChecks += m_codeBlock->needsFullScopeChain();
1423     if (++iter == end) {
1424         if ((flags & ResolveResult::DynamicFlag) && depth)
1425             return ResolveResult::dynamicGlobalResolve(depth, scope);
1426         return ResolveResult::globalResolve(scope);
1427     }
1428     return ResolveResult::dynamicResolve(depthOfFirstScopeWithDynamicChecks);
1429 }
1430
1431 ResolveResult BytecodeGenerator::resolveConstDecl(const Identifier& property)
1432 {
1433     // Register-allocated const declarations.
1434     if (m_codeType != EvalCode && m_codeType != GlobalCode) {
1435         SymbolTableEntry entry = symbolTable().get(property.impl());
1436         if (!entry.isNull()) {
1437             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1438             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1439             return ResolveResult::registerResolve(local, flags);
1440         }
1441     }
1442
1443     // Const declarations in eval code or global code.
1444     ScopeChainIterator iter = scope()->begin();
1445     ScopeChainIterator end = scope()->end();
1446     size_t depth = 0;
1447     for (; iter != end; ++iter, ++depth) {
1448         JSObject* currentScope = iter.get();
1449         if (!currentScope->isVariableObject())
1450             continue;
1451         JSSymbolTableObject* currentVariableObject = jsCast<JSSymbolTableObject*>(currentScope);
1452         SymbolTableEntry entry = currentVariableObject->symbolTable()->get(property.impl());
1453         if (entry.isNull())
1454             continue;
1455         if (++iter == end)
1456             return ResolveResult::indexedGlobalResolve(entry.getIndex(), currentVariableObject, 0);
1457         return ResolveResult::lexicalResolve(entry.getIndex(), depth + scopeDepth(), 0);
1458     }
1459
1460     // FIXME: While this code should only be hit in an eval block, it will assign
1461     // to the wrong base if the property exists in an intervening "with" scope.
1462     return ResolveResult::dynamicResolve(scopeDepth());
1463 }
1464
1465 void BytecodeGenerator::emitCheckHasInstance(RegisterID* dst, RegisterID* value, RegisterID* base, Label* target)
1466 {
1467     size_t begin = instructions().size();
1468     emitOpcode(op_check_has_instance);
1469     instructions().append(dst->index());
1470     instructions().append(value->index());
1471     instructions().append(base->index());
1472     instructions().append(target->bind(begin, instructions().size()));
1473 }
1474
1475 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
1476 {
1477     emitOpcode(op_instanceof);
1478     instructions().append(dst->index());
1479     instructions().append(value->index());
1480     instructions().append(basePrototype->index());
1481     return dst;
1482 }
1483
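// Beyond this many global resolve sites in one code block, shouldAvoidResolveGlobal()
// below tells emitResolve() to fall back to a plain op_resolve rather than accumulating
// more per-site resolve info.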
1484 static const unsigned maxGlobalResolves = 128;
1485
1486 bool BytecodeGenerator::shouldAvoidResolveGlobal()
1487 {
1488     return m_codeBlock->globalResolveInfoCount() > maxGlobalResolves && !m_labelScopes.size();
1489 }
1490
1491 RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1492 {
1493     if (resolveResult.isStatic())
1494         return emitGetStaticVar(dst, resolveResult, property);
1495     
1496     if (resolveResult.isGlobal() && !shouldAvoidResolveGlobal()) {
1497 #if ENABLE(JIT)
1498         m_codeBlock->addGlobalResolveInfo(instructions().size());
1499 #endif
1500         m_codeBlock->addGlobalResolveInstruction(instructions().size());
1501         bool dynamic = resolveResult.isDynamic() && resolveResult.depth();
1502         ValueProfile* profile = emitProfiledOpcode(dynamic ? op_resolve_global_dynamic : op_resolve_global);
1503         instructions().append(dst->index());
1504         instructions().append(addConstant(property));
1505         instructions().append(0);
1506         instructions().append(0);
1507         if (dynamic)
1508             instructions().append(resolveResult.depth());
1509         instructions().append(profile);
1510         return dst;
1511     }
1512         
1513     if (resolveResult.type() == ResolveResult::Dynamic && resolveResult.depth()) {
1514         // In this case we are at least able to skip a few scopes at the head of the
1515         // lookup chain, although the lookup still has to hash from then on.
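        // e.g. a name used inside a function that is itself nested in a "with" block:
        // the inner activations are known not to contain it, so op_resolve_skip hops
        // over them before the dynamic lookup begins.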
1516         ValueProfile* profile = emitProfiledOpcode(op_resolve_skip);
1517         instructions().append(dst->index());
1518         instructions().append(addConstant(property));
1519         instructions().append(resolveResult.depth());
1520         instructions().append(profile);
1521         return dst;
1522     }
1523
1524     ValueProfile* profile = emitProfiledOpcode(op_resolve);
1525     instructions().append(dst->index());
1526     instructions().append(addConstant(property));
1527     instructions().append(profile);
1528     return dst;
1529 }
1530
1531 RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1532 {
1533     if (resolveResult.isGlobal() && !resolveResult.isDynamic())
1534         // Global object is the base
1535         return emitLoad(dst, JSValue(resolveResult.globalObject()));
1536
1537     // We can't optimise at all :-(
1538     ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1539     instructions().append(dst->index());
1540     instructions().append(addConstant(property));
1541     instructions().append(false);
1542     instructions().append(profile);
1543     return dst;
1544 }
1545
1546 RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1547 {
1548     if (!m_codeBlock->isStrictMode())
1549         return emitResolveBase(dst, resolveResult, property);
1550
1551     if (resolveResult.isGlobal() && !resolveResult.isDynamic()) {
1552         // Global object is the base
1553         RefPtr<RegisterID> result = emitLoad(dst, JSValue(resolveResult.globalObject()));
1554         emitOpcode(op_ensure_property_exists);
1555         instructions().append(dst->index());
1556         instructions().append(addConstant(property));
1557         return result.get();
1558     }
1559
1560     // We can't optimise at all :-(
1561     ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1562     instructions().append(dst->index());
1563     instructions().append(addConstant(property));
1564     instructions().append(true);
1565     instructions().append(profile);
1566     return dst;
1567 }
1568
1569 RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1570 {
1571     if (resolveResult.isGlobal() && !resolveResult.isDynamic()) {
1572         // Global object is the base
1573         emitLoad(baseDst, JSValue(resolveResult.globalObject()));
1574
1575         if (resolveResult.isStatic()) {
1576             // Directly index the property lookup across multiple scopes.
1577             emitGetStaticVar(propDst, resolveResult, property);
1578             return baseDst;
1579         }
1580
1581         if (shouldAvoidResolveGlobal()) {
1582             ValueProfile* profile = emitProfiledOpcode(op_resolve);
1583             instructions().append(propDst->index());
1584             instructions().append(addConstant(property));
1585             instructions().append(profile);
1586             return baseDst;
1587         }
1588
1589 #if ENABLE(JIT)
1590         m_codeBlock->addGlobalResolveInfo(instructions().size());
1591 #endif
1592         m_codeBlock->addGlobalResolveInstruction(instructions().size());
1593         ValueProfile* profile = emitProfiledOpcode(op_resolve_global);
1594         instructions().append(propDst->index());
1595         instructions().append(addConstant(property));
1596         instructions().append(0);
1597         instructions().append(0);
1598         instructions().append(profile);
1599         return baseDst;
1600     }
1601
1602     ValueProfile* profile = emitProfiledOpcode(op_resolve_with_base);
1603     instructions().append(baseDst->index());
1604     instructions().append(propDst->index());
1605     instructions().append(addConstant(property));
1606     instructions().append(profile);
1607     return baseDst;
1608 }
1609
1610 RegisterID* BytecodeGenerator::emitResolveWithThis(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1611 {
1612     if (resolveResult.isStatic()) {
1613         emitLoad(baseDst, jsUndefined());
1614         emitGetStaticVar(propDst, resolveResult, property);
1615         return baseDst;
1616     }
1617
1618     if (resolveResult.type() == ResolveResult::Dynamic) {
1619         // We can't optimise at all :-(
1620         ValueProfile* profile = emitProfiledOpcode(op_resolve_with_this);
1621         instructions().append(baseDst->index());
1622         instructions().append(propDst->index());
1623         instructions().append(addConstant(property));
1624         instructions().append(profile);
1625         return baseDst;
1626     }
1627
1628     emitLoad(baseDst, jsUndefined());
1629     return emitResolve(propDst, resolveResult, property);
1630 }
1631
1632 RegisterID* BytecodeGenerator::emitGetStaticVar(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& identifier)
1633 {
1634     ValueProfile* profile = 0;
1635
1636     switch (resolveResult.type()) {
1637     case ResolveResult::Register:
1638     case ResolveResult::ReadOnlyRegister:
1639         if (dst == ignoredResult())
1640             return 0;
1641         return moveToDestinationIfNeeded(dst, resolveResult.local());
1642
1643     case ResolveResult::Lexical:
1644     case ResolveResult::ReadOnlyLexical:
1645         profile = emitProfiledOpcode(op_get_scoped_var);
1646         instructions().append(dst->index());
1647         instructions().append(resolveResult.index());
1648         instructions().append(resolveResult.depth());
1649         instructions().append(profile);
1650         return dst;
1651
1652     case ResolveResult::IndexedGlobal:
1653     case ResolveResult::ReadOnlyIndexedGlobal:
1654         if (m_lastOpcodeID == op_put_global_var) {
1655             WriteBarrier<Unknown>* dstPointer;
1656             int srcIndex;
1657             retrieveLastUnaryOp(dstPointer, srcIndex);
1658             if (dstPointer == resolveResult.registerPointer() && srcIndex == dst->index())
1659                 return dst;
1660         }
1661
1662         profile = emitProfiledOpcode(op_get_global_var);
1663         instructions().append(dst->index());
1664         instructions().append(resolveResult.registerPointer());
1665         instructions().append(profile);
1666         return dst;
1667
1668     case ResolveResult::WatchedIndexedGlobal:
1669         // Skip the peephole for now. It's not clear that it's profitable given
1670         // the DFG's capabilities, and the fact that if it's watchable then we
1671         // don't expect to see any put_global_var's anyway.
1672         profile = emitProfiledOpcode(op_get_global_var_watchable);
1673         instructions().append(dst->index());
1674         instructions().append(resolveResult.registerPointer());
1675         instructions().append(addConstant(identifier)); // For the benefit of the DFG.
1676         instructions().append(profile);
1677         return dst;
1678
1679     default:
1680         ASSERT_NOT_REACHED();
1681         return 0;
1682     }
1683 }
1684
1685 RegisterID* BytecodeGenerator::emitInitGlobalConst(const ResolveResult& resolveResult, const Identifier& identifier, RegisterID* value)
1686 {
1687     ASSERT(m_codeType == GlobalCode);
1688     switch (resolveResult.type()) {
1689     case ResolveResult::IndexedGlobal:
1690     case ResolveResult::ReadOnlyIndexedGlobal:
1691         emitOpcode(op_init_global_const);
1692         instructions().append(resolveResult.registerPointer());
1693         instructions().append(value->index());
1694         return value;
1695
1696     case ResolveResult::WatchedIndexedGlobal:
1697         emitOpcode(op_init_global_const_check);
1698         instructions().append(resolveResult.registerPointer());
1699         instructions().append(value->index());
1700         instructions().append(jsCast<JSGlobalObject*>(resolveResult.globalObject())->symbolTable()->get(identifier.impl()).addressOfIsWatched());
1701         instructions().append(addConstant(identifier));
1702         return value;
1703         
1704     default:
1705         ASSERT_NOT_REACHED();
1706         return 0;
1707     }
1708 }
1709
1710 RegisterID* BytecodeGenerator::emitPutStaticVar(const ResolveResult& resolveResult, const Identifier& identifier, RegisterID* value)
1711 {
1712     switch (resolveResult.type()) {
1713     case ResolveResult::Register:
1714     case ResolveResult::ReadOnlyRegister:
1715         return moveToDestinationIfNeeded(resolveResult.local(), value);
1716
1717     case ResolveResult::Lexical:
1718     case ResolveResult::ReadOnlyLexical:
1719         emitOpcode(op_put_scoped_var);
1720         instructions().append(resolveResult.index());
1721         instructions().append(resolveResult.depth());
1722         instructions().append(value->index());
1723         return value;
1724
1725     case ResolveResult::IndexedGlobal:
1726     case ResolveResult::ReadOnlyIndexedGlobal:
1727         emitOpcode(op_put_global_var);
1728         instructions().append(resolveResult.registerPointer());
1729         instructions().append(value->index());
1730         return value;
1731         
1732     case ResolveResult::WatchedIndexedGlobal:
1733         emitOpcode(op_put_global_var_check);
1734         instructions().append(resolveResult.registerPointer());
1735         instructions().append(value->index());
1736         instructions().append(jsCast<JSGlobalObject*>(resolveResult.globalObject())->symbolTable()->get(identifier.impl()).addressOfIsWatched());
1737         instructions().append(addConstant(identifier));
1738         return value;
1739
1740     default:
1741         ASSERT_NOT_REACHED();
1742         return 0;
1743     }
1744 }
1745
1746 void BytecodeGenerator::emitMethodCheck()
1747 {
1748     emitOpcode(op_method_check);
1749 }
1750
1751 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1752 {
1753     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1754
1755     ValueProfile* profile = emitProfiledOpcode(op_get_by_id);
1756     instructions().append(dst->index());
1757     instructions().append(base->index());
1758     instructions().append(addConstant(property));
1759     instructions().append(0);
1760     instructions().append(0);
1761     instructions().append(0);
1762     instructions().append(0);
1763     instructions().append(profile);
1764     return dst;
1765 }
1766
1767 RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
1768 {
1769     emitOpcode(op_get_arguments_length);
1770     instructions().append(dst->index());
1771     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1772     instructions().append(base->index());
1773     instructions().append(addConstant(propertyNames().length));
1774     return dst;
1775 }
1776
1777 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1778 {
1779     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1780
1781     emitOpcode(op_put_by_id);
1782     instructions().append(base->index());
1783     instructions().append(addConstant(property));
1784     instructions().append(value->index());
1785     instructions().append(0);
1786     instructions().append(0);
1787     instructions().append(0);
1788     instructions().append(0);
1789     instructions().append(0);
1790     return value;
1791 }
1792
1793 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1794 {
1795     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1796     
1797     emitOpcode(op_put_by_id);
1798     instructions().append(base->index());
1799     instructions().append(addConstant(property));
1800     instructions().append(value->index());
1801     instructions().append(0);
1802     instructions().append(0);
1803     instructions().append(0);
1804     instructions().append(0);
1805     instructions().append(
1806         property != m_globalData->propertyNames->underscoreProto
1807         && PropertyName(property).asIndex() == PropertyName::NotAnIndex);
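    // (The final operand marks the put as a direct own-property store when that is
    // safe; "__proto__" and index-like property names fall back to a normal put.)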
1808     return value;
1809 }
1810
1811 void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1812 {
1813     emitOpcode(op_put_getter_setter);
1814     instructions().append(base->index());
1815     instructions().append(addConstant(property));
1816     instructions().append(getter->index());
1817     instructions().append(setter->index());
1818 }
1819
1820 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1821 {
1822     emitOpcode(op_del_by_id);
1823     instructions().append(dst->index());
1824     instructions().append(base->index());
1825     instructions().append(addConstant(property));
1826     return dst;
1827 }
1828
1829 RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1830 {
1831     ArrayProfile* arrayProfile = newArrayProfile();
1832     ValueProfile* profile = emitProfiledOpcode(op_get_argument_by_val);
1833     instructions().append(dst->index());
1834     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1835     instructions().append(base->index());
1836     instructions().append(property->index());
1837     instructions().append(arrayProfile);
1838     instructions().append(profile);
1839     return dst;
1840 }
1841
1842 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1843 {
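    // If "property" holds the current property name of an enclosing for-in loop,
    // emit op_get_by_pname so the read can reuse the enumeration's cached state
    // instead of doing a generic by-value lookup.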
1844     for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1845         ForInContext& context = m_forInContextStack[i - 1];
1846         if (context.propertyRegister == property) {
1847             emitOpcode(op_get_by_pname);
1848             instructions().append(dst->index());
1849             instructions().append(base->index());
1850             instructions().append(property->index());
1851             instructions().append(context.expectedSubscriptRegister->index());
1852             instructions().append(context.iterRegister->index());
1853             instructions().append(context.indexRegister->index());
1854             return dst;
1855         }
1856     }
1857     ArrayProfile* arrayProfile = newArrayProfile();
1858     ValueProfile* profile = emitProfiledOpcode(op_get_by_val);
1859     instructions().append(dst->index());
1860     instructions().append(base->index());
1861     instructions().append(property->index());
1862     instructions().append(arrayProfile);
1863     instructions().append(profile);
1864     return dst;
1865 }
1866
1867 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1868 {
1869     ArrayProfile* arrayProfile = newArrayProfile();
1870     emitOpcode(op_put_by_val);
1871     instructions().append(base->index());
1872     instructions().append(property->index());
1873     instructions().append(value->index());
1874     instructions().append(arrayProfile);
1875     return value;
1876 }
1877
1878 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1879 {
1880     emitOpcode(op_del_by_val);
1881     instructions().append(dst->index());
1882     instructions().append(base->index());
1883     instructions().append(property->index());
1884     return dst;
1885 }
1886
1887 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1888 {
1889     emitOpcode(op_put_by_index);
1890     instructions().append(base->index());
1891     instructions().append(index);
1892     instructions().append(value->index());
1893     return value;
1894 }
1895
1896 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1897 {
1898     emitOpcode(op_new_object);
1899     instructions().append(dst->index());
1900     return dst;
1901 }
1902
1903 unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
1904 {
1905     return m_codeBlock->addConstantBuffer(length);
1906 }
1907
1908 JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1909 {
1910     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).iterator->second;
1911     if (!stringInMap) {
1912         stringInMap = jsString(globalData(), identifier.string());
1913         addConstantValue(stringInMap);
1914     }
1915     return stringInMap;
1916 }
1917
1918 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
1919 {
1920 #if !ASSERT_DISABLED
1921     unsigned checkLength = 0;
1922 #endif
1923     bool hadVariableExpression = false;
1924     if (length) {
1925         for (ElementNode* n = elements; n; n = n->next()) {
1926             if (!n->value()->isNumber() && !n->value()->isString()) {
1927                 hadVariableExpression = true;
1928                 break;
1929             }
1930             if (n->elision())
1931                 break;
1932 #if !ASSERT_DISABLED
1933             checkLength++;
1934 #endif
1935         }
1936         if (!hadVariableExpression) {
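            // All "length" leading elements are number or string literals, so pack them
            // into a constant buffer and let op_new_array_buffer build the array in one step.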
1937             ASSERT(length == checkLength);
1938             unsigned constantBufferIndex = addConstantBuffer(length);
1939             JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex);
1940             unsigned index = 0;
1941             for (ElementNode* n = elements; index < length; n = n->next()) {
1942                 if (n->value()->isNumber())
1943                     constantBuffer[index++] = jsNumber(static_cast<NumberNode*>(n->value())->value());
1944                 else {
1945                     ASSERT(n->value()->isString());
1946                     constantBuffer[index++] = addStringConstant(static_cast<StringNode*>(n->value())->value());
1947                 }
1948             }
1949             emitOpcode(op_new_array_buffer);
1950             instructions().append(dst->index());
1951             instructions().append(constantBufferIndex);
1952             instructions().append(length);
1953             return dst;
1954         }
1955     }
1956
1957     Vector<RefPtr<RegisterID>, 16> argv;
1958     for (ElementNode* n = elements; n; n = n->next()) {
1959         if (n->elision())
1960             break;
1961         argv.append(newTemporary());
1962         // op_new_array requires the initial values to be a sequential range of registers
1963         ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1964         emitNode(argv.last().get(), n->value());
1965     }
1966     emitOpcode(op_new_array);
1967     instructions().append(dst->index());
1968     instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1969     instructions().append(argv.size()); // argc
1970     return dst;
1971 }
1972
1973 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
1974 {
1975     return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(FunctionExecutable::create(*m_globalData, function)), false);
1976 }
1977
1978 RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
1979 {
1980     FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
1981     if (ptr.isNewEntry)
1982         ptr.iterator->second = m_codeBlock->addFunctionDecl(FunctionExecutable::create(*m_globalData, function));
1983     return emitNewFunctionInternal(dst, ptr.iterator->second, true);
1984 }
1985
1986 RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
1987 {
1988     createActivationIfNecessary();
1989     emitOpcode(op_new_func);
1990     instructions().append(dst->index());
1991     instructions().append(index);
1992     instructions().append(doNullCheck);
1993     return dst;
1994 }
1995
1996 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1997 {
1998     emitOpcode(op_new_regexp);
1999     instructions().append(dst->index());
2000     instructions().append(addRegExp(regExp));
2001     return dst;
2002 }
2003
2004 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
2005 {
2006     FunctionBodyNode* function = n->body();
2007     unsigned index = m_codeBlock->addFunctionExpr(FunctionExecutable::create(*m_globalData, function));
2008     
2009     createActivationIfNecessary();
2010     emitOpcode(op_new_func_exp);
2011     instructions().append(r0->index());
2012     instructions().append(index);
2013     return r0;
2014 }
2015
2016 RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
2017 {
2018     return emitCall(op_call, dst, func, callArguments, divot, startOffset, endOffset);
2019 }
2020
2021 void BytecodeGenerator::createArgumentsIfNecessary()
2022 {
2023     if (m_codeType != FunctionCode)
2024         return;
2025     
2026     if (!m_codeBlock->usesArguments())
2027         return;
2028
2029     // If we're in strict mode we tear off the arguments on function
2030     // entry, so there's no need to check whether we need to create them
2031     // now.
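    // (For example, a sloppy-mode "function f() { return arguments[0]; }" takes this
    // path, and op_create_arguments materializes the arguments object lazily.)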
2032     if (m_codeBlock->isStrictMode())
2033         return;
2034
2035     emitOpcode(op_create_arguments);
2036     instructions().append(m_codeBlock->argumentsRegister());
2037 }
2038
2039 void BytecodeGenerator::createActivationIfNecessary()
2040 {
2041     if (m_hasCreatedActivation)
2042         return;
2043     if (!m_codeBlock->needsFullScopeChain())
2044         return;
2045     emitOpcode(op_create_activation);
2046     instructions().append(m_activationRegister->index());
2047 }
2048
2049 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
2050 {
2051     return emitCall(op_call_eval, dst, func, callArguments, divot, startOffset, endOffset);
2052 }
2053
2054 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
2055 {
2056     ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
2057     ASSERT(func->refCount());
2058
2059     if (m_shouldEmitProfileHooks)
2060         emitMove(callArguments.profileHookRegister(), func);
2061
2062     // Generate code for arguments.
2063     unsigned argument = 0;
2064     for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
2065         emitNode(callArguments.argumentRegister(argument++), n);
2066
2067     // Reserve space for call frame.
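    // (Reserving RegisterFile::CallFrameHeaderSize temporaries keeps this register
    // range free for the callee's call frame header; nothing is written to them here.)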
2068     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
2069     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
2070         callFrame.append(newTemporary());
2071
2072     if (m_shouldEmitProfileHooks) {
2073         emitOpcode(op_profile_will_call);
2074         instructions().append(callArguments.profileHookRegister()->index());
2075     }
2076
2077     emitExpressionInfo(divot, startOffset, endOffset);
2078
2079     // Emit call.
2080     ArrayProfile* arrayProfile = newArrayProfile();
2081     emitOpcode(opcodeID);
2082     instructions().append(func->index()); // func
2083     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
2084     instructions().append(callArguments.registerOffset()); // registerOffset
2085 #if ENABLE(LLINT)
2086     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
2087 #else
2088     instructions().append(0);
2089 #endif
2090     instructions().append(arrayProfile);
2091     if (dst != ignoredResult()) {
2092         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
2093         instructions().append(dst->index()); // dst
2094         instructions().append(profile);
2095     }
2096
2097     if (m_shouldEmitProfileHooks) {
2098         emitOpcode(op_profile_did_call);
2099         instructions().append(callArguments.profileHookRegister()->index());
2100     }
2101
2102     return dst;
2103 }
2104
2105 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
2106 {
2107     if (m_shouldEmitProfileHooks) {
2108         emitMove(profileHookRegister, func);
2109         emitOpcode(op_profile_will_call);
2110         instructions().append(profileHookRegister->index());
2111     }
2112     
2113     emitExpressionInfo(divot, startOffset, endOffset);
2114
2115     // Emit call.
2116     emitOpcode(op_call_varargs);
2117     instructions().append(func->index());
2118     instructions().append(thisRegister->index());
2119     instructions().append(arguments->index());
2120     instructions().append(firstFreeRegister->index());
2121     if (dst != ignoredResult()) {
2122         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
2123         instructions().append(dst->index());
2124         instructions().append(profile);
2125     }
2126     if (m_shouldEmitProfileHooks) {
2127         emitOpcode(op_profile_did_call);
2128         instructions().append(profileHookRegister->index());
2129     }
2130     return dst;
2131 }
2132
2133 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
2134 {
2135     if (m_codeBlock->needsFullScopeChain()) {
2136         emitOpcode(op_tear_off_activation);
2137         instructions().append(m_activationRegister->index());
2138     }
2139
2140     if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !m_codeBlock->isStrictMode()) {
2141         emitOpcode(op_tear_off_arguments);
2142         instructions().append(m_codeBlock->argumentsRegister());
2143         instructions().append(m_activationRegister ? m_activationRegister->index() : emitLoad(0, JSValue())->index());
2144     }
2145
2146     // Constructors use op_ret_object_or_this to check the result is an
2147     // object, unless we can trivially determine the check is not
2148     // necessary (currently, if the return value is 'this').
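    // e.g. for "function C() { return 42; }", "new C()" must still yield the newly
    // constructed object, so the returned value is re-checked at runtime.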
2149     if (isConstructor() && (src->index() != m_thisRegister.index())) {
2150         emitOpcode(op_ret_object_or_this);
2151         instructions().append(src->index());
2152         instructions().append(m_thisRegister.index());
2153         return src;
2154     }
2155     return emitUnaryNoDstOp(op_ret, src);
2156 }
2157
2158 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
2159 {
2160     emitOpcode(opcodeID);
2161     instructions().append(src->index());
2162     return src;
2163 }
2164
2165 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
2166 {
2167     ASSERT(func->refCount());
2168
2169     if (m_shouldEmitProfileHooks)
2170         emitMove(callArguments.profileHookRegister(), func);
2171
2172     // Generate code for arguments.
2173     unsigned argument = 0;
2174     if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
2175         for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
2176             emitNode(callArguments.argumentRegister(argument++), n);
2177     }
2178
2179     if (m_shouldEmitProfileHooks) {
2180         emitOpcode(op_profile_will_call);
2181         instructions().append(callArguments.profileHookRegister()->index());
2182     }
2183
2184     // Reserve space for call frame.
2185     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
2186     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
2187         callFrame.append(newTemporary());
2188
2189     emitExpressionInfo(divot, startOffset, endOffset);
2190
2191     emitOpcode(op_construct);
2192     instructions().append(func->index()); // func
2193     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
2194     instructions().append(callArguments.registerOffset()); // registerOffset
2195 #if ENABLE(LLINT)
2196     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
2197 #else
2198     instructions().append(0);
2199 #endif
2200     instructions().append(0);
2201     if (dst != ignoredResult()) {
2202         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
2203         instructions().append(dst->index()); // dst
2204         instructions().append(profile);
2205     }
2206
2207     if (m_shouldEmitProfileHooks) {
2208         emitOpcode(op_profile_did_call);
2209         instructions().append(callArguments.profileHookRegister()->index());
2210     }
2211
2212     return dst;
2213 }
2214
2215 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
2216 {
2217     emitOpcode(op_strcat);
2218     instructions().append(dst->index());
2219     instructions().append(src->index());
2220     instructions().append(count);
2221
2222     return dst;
2223 }
2224
2225 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
2226 {
2227     emitOpcode(op_to_primitive);
2228     instructions().append(dst->index());
2229     instructions().append(src->index());
2230 }
2231
2232 RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* scope)
2233 {
2234     ControlFlowContext context;
2235     context.isFinallyBlock = false;
2236     m_scopeContextStack.append(context);
2237     m_dynamicScopeDepth++;
2238
2239     return emitUnaryNoDstOp(op_push_with_scope, scope);
2240 }
2241
2242 void BytecodeGenerator::emitPopScope()
2243 {
2244     ASSERT(m_scopeContextStack.size());
2245     ASSERT(!m_scopeContextStack.last().isFinallyBlock);
2246
2247     emitOpcode(op_pop_scope);
2248
2249     m_scopeContextStack.removeLast();
2250     m_dynamicScopeDepth--;
2251 }
2252
2253 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine, int column)
2254 {
2255 #if ENABLE(DEBUG_WITH_BREAKPOINT)
2256     if (debugHookID != DidReachBreakpoint)
2257         return;
2258 #else
2259     if (!m_shouldEmitDebugHooks)
2260         return;
2261 #endif
2262     emitOpcode(op_debug);
2263     instructions().append(debugHookID);
2264     instructions().append(firstLine);
2265     instructions().append(lastLine);
2266     instructions().append(column);
2267 }
2268
2269 void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
2270 {
2271     ControlFlowContext scope;
2272     scope.isFinallyBlock = true;
2273     FinallyContext context = {
2274         finallyBlock,
2275         m_scopeContextStack.size(),
2276         m_switchContextStack.size(),
2277         m_forInContextStack.size(),
2278         m_tryContextStack.size(),
2279         m_labelScopes.size(),
2280         m_finallyDepth,
2281         m_dynamicScopeDepth
2282     };
2283     scope.finallyContext = context;
2284     m_scopeContextStack.append(scope);
2285     m_finallyDepth++;
2286 }
2287
2288 void BytecodeGenerator::popFinallyContext()
2289 {
2290     ASSERT(m_scopeContextStack.size());
2291     ASSERT(m_scopeContextStack.last().isFinallyBlock);
2292     ASSERT(m_finallyDepth > 0);
2293     m_scopeContextStack.removeLast();
2294     m_finallyDepth--;
2295 }
2296
2297 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
2298 {
2299     // Reclaim free label scopes.
2300     //
2301     // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
2302     // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
2303     // size 0, leading to segfaulty badness.  We have yet to identify anything in our code that would
2304     // cause the GCC codegen to misbehave in this fashion, and as such the following refactoring of the
2305     // loop condition is a workaround.
2306     while (m_labelScopes.size()) {
2307         if (m_labelScopes.last().refCount())
2308             break;
2309         m_labelScopes.removeLast();
2310     }
2311
2312     if (!m_labelScopes.size())
2313         return 0;
2314
2315     // We special-case the following, which is a syntax error in Firefox:
2316     // label:
2317     //     break;
2318     if (name.isEmpty()) {
2319         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2320             LabelScope* scope = &m_labelScopes[i];
2321             if (scope->type() != LabelScope::NamedLabel) {
2322                 ASSERT(scope->breakTarget());
2323                 return scope;
2324             }
2325         }
2326         return 0;
2327     }
2328
2329     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2330         LabelScope* scope = &m_labelScopes[i];
2331         if (scope->name() && *scope->name() == name) {
2332             ASSERT(scope->breakTarget());
2333             return scope;
2334         }
2335     }
2336     return 0;
2337 }
2338
2339 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
2340 {
2341     // Reclaim free label scopes.
2342     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
2343         m_labelScopes.removeLast();
2344
2345     if (!m_labelScopes.size())
2346         return 0;
2347
2348     if (name.isEmpty()) {
2349         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2350             LabelScope* scope = &m_labelScopes[i];
2351             if (scope->type() == LabelScope::Loop) {
2352                 ASSERT(scope->continueTarget());
2353                 return scope;
2354             }
2355         }
2356         return 0;
2357     }
2358
2359     // Continue to the loop nested nearest to the label scope that matches
2360     // 'name'.
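    // e.g. in "outer: for (;;) { for (;;) { continue outer; } }" this returns the
    // scope of the loop labeled "outer".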
2361     LabelScope* result = 0;
2362     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2363         LabelScope* scope = &m_labelScopes[i];
2364         if (scope->type() == LabelScope::Loop) {
2365             ASSERT(scope->continueTarget());
2366             result = scope;
2367         }
2368         if (scope->name() && *scope->name() == name)
2369             return result; // may be 0
2370     }
2371     return 0;
2372 }
2373
2374 PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
2375 {
2376     while (topScope > bottomScope) {
2377         // First we count the number of dynamic scopes we need to remove to get
2378         // to a finally block.
2379         int nNormalScopes = 0;
2380         while (topScope > bottomScope) {
2381             if (topScope->isFinallyBlock)
2382                 break;
2383             ++nNormalScopes;
2384             --topScope;
2385         }
2386
2387         if (nNormalScopes) {
2388             size_t begin = instructions().size();
2389
2390             // We need to remove a number of dynamic scopes to get to the next
2391             // finally block
2392             emitOpcode(op_jmp_scopes);
2393             instructions().append(nNormalScopes);
2394
2395             // If topScope == bottomScope then there isn't actually a finally block
2396             // left to emit, so make the jmp_scopes jump directly to the target label
2397             if (topScope == bottomScope) {
2398                 instructions().append(target->bind(begin, instructions().size()));
2399                 return target;
2400             }
2401
2402             // Otherwise we just use jmp_scopes to pop a group of scopes and go
2403             // to the next instruction
2404             RefPtr<Label> nextInsn = newLabel();
2405             instructions().append(nextInsn->bind(begin, instructions().size()));
2406             emitLabel(nextInsn.get());
2407         }
2408         
2409         Vector<ControlFlowContext> savedScopeContextStack;
2410         Vector<SwitchInfo> savedSwitchContextStack;
2411         Vector<ForInContext> savedForInContextStack;
2412         Vector<TryContext> poppedTryContexts;
2413         SegmentedVector<LabelScope, 8> savedLabelScopes;
2414         while (topScope > bottomScope && topScope->isFinallyBlock) {
2415             RefPtr<Label> beforeFinally = emitLabel(newLabel().get());
2416             
2417             // Save the current state of the world while instating the state of the world
2418             // for the finally block.
2419             FinallyContext finallyContext = topScope->finallyContext;
2420             bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
2421             bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
2422             bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
2423             bool flipTries = finallyContext.tryContextStackSize != m_tryContextStack.size();
2424             bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
2425             int topScopeIndex = -1;
2426             int bottomScopeIndex = -1;
2427             if (flipScopes) {
2428                 topScopeIndex = topScope - m_scopeContextStack.begin();
2429                 bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
2430                 savedScopeContextStack = m_scopeContextStack;
2431                 m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
2432             }
2433             if (flipSwitches) {
2434                 savedSwitchContextStack = m_switchContextStack;
2435                 m_switchContextStack.shrink(finallyContext.switchContextStackSize);
2436             }
2437             if (flipForIns) {
2438                 savedForInContextStack = m_forInContextStack;
2439                 m_forInContextStack.shrink(finallyContext.forInContextStackSize);
2440             }
2441             if (flipTries) {
2442                 while (m_tryContextStack.size() != finallyContext.tryContextStackSize) {
2443                     ASSERT(m_tryContextStack.size() > finallyContext.tryContextStackSize);
2444                     TryContext context = m_tryContextStack.last();
2445                     m_tryContextStack.removeLast();
2446                     TryRange range;
2447                     range.start = context.start;
2448                     range.end = beforeFinally;
2449                     range.tryData = context.tryData;
2450                     m_tryRanges.append(range);
2451                     poppedTryContexts.append(context);
2452                 }
2453             }
2454             if (flipLabelScopes) {
2455                 savedLabelScopes = m_labelScopes;
2456                 while (m_labelScopes.size() > finallyContext.labelScopesSize)
2457                     m_labelScopes.removeLast();
2458             }
2459             int savedFinallyDepth = m_finallyDepth;
2460             m_finallyDepth = finallyContext.finallyDepth;
2461             int savedDynamicScopeDepth = m_dynamicScopeDepth;
2462             m_dynamicScopeDepth = finallyContext.dynamicScopeDepth;
2463             
2464             // Emit the finally block.
2465             emitNode(finallyContext.finallyBlock);
2466             
2467             RefPtr<Label> afterFinally = emitLabel(newLabel().get());
2468             
2469             // Restore the state of the world.
2470             if (flipScopes) {
2471                 m_scopeContextStack = savedScopeContextStack;
2472                 topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
2473                 bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1.
2474             }
2475             if (flipSwitches)
2476                 m_switchContextStack = savedSwitchContextStack;
2477             if (flipForIns)
2478                 m_forInContextStack = savedForInContextStack;
2479             if (flipTries) {
2480                 ASSERT(m_tryContextStack.size() == finallyContext.tryContextStackSize);
2481                 for (unsigned i = poppedTryContexts.size(); i--;) {
2482                     TryContext context = poppedTryContexts[i];
2483                     context.start = afterFinally;
2484                     m_tryContextStack.append(context);
2485                 }
2486                 poppedTryContexts.clear();
2487             }
2488             if (flipLabelScopes)
2489                 m_labelScopes = savedLabelScopes;
2490             m_finallyDepth = savedFinallyDepth;
2491             m_dynamicScopeDepth = savedDynamicScopeDepth;
2492             
2493             --topScope;
2494         }
2495     }
2496     return emitJump(target);
2497 }
2498
2499 PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
2500 {
2501     ASSERT(scopeDepth() - targetScopeDepth >= 0);
2502     ASSERT(target->isForward());
2503
2504     size_t scopeDelta = scopeDepth() - targetScopeDepth;
2505     ASSERT(scopeDelta <= m_scopeContextStack.size());
2506     if (!scopeDelta)
2507         return emitJump(target);
2508
2509     if (m_finallyDepth)
2510         return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
2511
2512     size_t begin = instructions().size();
2513
2514     emitOpcode(op_jmp_scopes);
2515     instructions().append(scopeDelta);
2516     instructions().append(target->bind(begin, instructions().size()));
2517     return target;
2518 }
2519
2520 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
2521 {
2522     size_t begin = instructions().size();
2523
2524     emitOpcode(op_get_pnames);
2525     instructions().append(dst->index());
2526     instructions().append(base->index());
2527     instructions().append(i->index());
2528     instructions().append(size->index());
2529     instructions().append(breakTarget->bind(begin, instructions().size()));
2530     return dst;
2531 }
2532
2533 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
2534 {
2535     size_t begin = instructions().size();
2536
2537     emitOpcode(op_next_pname);
2538     instructions().append(dst->index());
2539     instructions().append(base->index());
2540     instructions().append(i->index());
2541     instructions().append(size->index());
2542     instructions().append(iter->index());
2543     instructions().append(target->bind(begin, instructions().size()));
2544     return dst;
2545 }
2546
2547 TryData* BytecodeGenerator::pushTry(Label* start)
2548 {
2549     TryData tryData;
2550     tryData.target = newLabel();
2551     tryData.targetScopeDepth = UINT_MAX;
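    // targetScopeDepth is filled in by popTryAndEmitCatch() once the handler's scope
    // depth is known.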
2552     m_tryData.append(tryData);
2553     TryData* result = &m_tryData.last();
2554     
2555     TryContext tryContext;
2556     tryContext.start = start;
2557     tryContext.tryData = result;
2558     
2559     m_tryContextStack.append(tryContext);
2560     
2561     return result;
2562 }
2563
2564 RegisterID* BytecodeGenerator::popTryAndEmitCatch(TryData* tryData, RegisterID* targetRegister, Label* end)
2565 {
2566     m_usesExceptions = true;
2567     
2568     ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);
2569     
2570     TryRange tryRange;
2571     tryRange.start = m_tryContextStack.last().start;
2572     tryRange.end = end;
2573     tryRange.tryData = m_tryContextStack.last().tryData;
2574     m_tryRanges.append(tryRange);
2575     m_tryContextStack.removeLast();
2576     
2577     emitLabel(tryRange.tryData->target.get());
2578     tryRange.tryData->targetScopeDepth = m_dynamicScopeDepth + m_baseScopeDepth;
2579
2580     emitOpcode(op_catch);
2581     instructions().append(targetRegister->index());
2582     return targetRegister;
2583 }
2584
2585 void BytecodeGenerator::emitThrowReferenceError(const String& message)
2586 {
2587     emitOpcode(op_throw_reference_error);
2588     instructions().append(addConstantValue(jsString(globalData(), message))->index());
2589 }
2590
2591 void BytecodeGenerator::emitPushNameScope(const Identifier& property, RegisterID* value, unsigned attributes)
2592 {
2593     ControlFlowContext context;
2594     context.isFinallyBlock = false;
2595     m_scopeContextStack.append(context);
2596     m_dynamicScopeDepth++;
2597
2598     emitOpcode(op_push_name_scope);
2599     instructions().append(addConstant(property));
2600     instructions().append(value->index());
2601     instructions().append(attributes);
2602 }
2603
2604 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
2605 {
2606     SwitchInfo info = { instructions().size(), type };
2607     switch (type) {
2608         case SwitchInfo::SwitchImmediate:
2609             emitOpcode(op_switch_imm);
2610             break;
2611         case SwitchInfo::SwitchCharacter:
2612             emitOpcode(op_switch_char);
2613             break;
2614         case SwitchInfo::SwitchString:
2615             emitOpcode(op_switch_string);
2616             break;
2617         default:
2618             ASSERT_NOT_REACHED();
2619     }
2620
2621     instructions().append(0); // placeholder for table index
2622     instructions().append(0); // placeholder for default target
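    // (endSwitch() patches these two slots, at bytecodeOffset + 1 and bytecodeOffset + 2,
    // once the jump table index and the default target are known.)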
2623     instructions().append(scrutineeRegister->index());
2624     m_switchContextStack.append(info);
2625 }
2626
2627 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2628 {
2629     UNUSED_PARAM(max);
2630     ASSERT(node->isNumber());
2631     double value = static_cast<NumberNode*>(node)->value();
2632     int32_t key = static_cast<int32_t>(value);
2633     ASSERT(key == value);
2634     ASSERT(key >= min);
2635     ASSERT(key <= max);
2636     return key - min;
2637 }
2638
2639 static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2640 {
2641     jumpTable.min = min;
2642     jumpTable.branchOffsets.resize(max - min + 1);
2643     jumpTable.branchOffsets.fill(0);
2644     for (uint32_t i = 0; i < clauseCount; ++i) {
2645         // We're emitting this after the clause labels should have been fixed, so 
2646         // the labels should not be "forward" references
2647         ASSERT(!labels[i]->isForward());
2648         jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2649     }
2650 }
2651
2652 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2653 {
2654     UNUSED_PARAM(max);
2655     ASSERT(node->isString());
2656     StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2657     ASSERT(clause->length() == 1);
2658     
2659     int32_t key = (*clause)[0];
2660     ASSERT(key >= min);
2661     ASSERT(key <= max);
2662     return key - min;
2663 }
2664
2665 static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2666 {
2667     jumpTable.min = min;
2668     jumpTable.branchOffsets.resize(max - min + 1);
2669     jumpTable.branchOffsets.fill(0);
2670     for (uint32_t i = 0; i < clauseCount; ++i) {
2671         // We're emitting this after the clause labels should have been fixed, so 
2672         // the labels should not be "forward" references
2673         ASSERT(!labels[i]->isForward());
2674         jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2675     }
2676 }
2677
2678 static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2679 {
2680     for (uint32_t i = 0; i < clauseCount; ++i) {
2681         // We're emitting this after the clause labels should have been fixed, so 
2682         // the labels should not be "forward" references
2683         ASSERT(!labels[i]->isForward());
2684         
2685         ASSERT(nodes[i]->isString());
2686         StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
2687         OffsetLocation location;
2688         location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
2689         jumpTable.offsetTable.add(clause, location);
2690     }
2691 }
2692
2693 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2694 {
2695     SwitchInfo switchInfo = m_switchContextStack.last();
2696     m_switchContextStack.removeLast();
2697     if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
2698         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
2699         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2700
2701         SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
2702         prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2703     } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
2704         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
2705         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2706         
2707         SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
2708         prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2709     } else {
2710         ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
2711         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2712         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2713
2714         StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2715         prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2716     }
2717 }
2718
2719 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2720 {
2721     // It would be nice to do an even better job of identifying exactly where the expression is.
2722     // And we could make the caller pass the node pointer in, if there was some way of getting
2723     // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2724     // is still good enough to get us an accurate line number.
2725     m_expressionTooDeep = true;
2726     return newTemporary();
2727 }
2728
2729 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2730 {
2731     m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2732 }
2733
2734 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2735 {
2736     RegisterID* registerID = resolve(ident).local();
2737     if (!registerID || registerID->index() >= 0)
2738         return false;
2739     return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2740 }
2741
2742 void BytecodeGenerator::emitReadOnlyExceptionIfNeeded()
2743 {
2744     if (!isStrictMode())
2745         return;
2746
2747     RefPtr<RegisterID> error = emitLoad(newTemporary(), JSValue(createTypeError(scope()->globalObject()->globalExec(), StrictModeReadonlyPropertyWriteError)));
2748     emitThrow(error.get());
2749 }
2750
2751 } // namespace JSC