Source/JavaScriptCore/bytecompiler/BytecodeGenerator.cpp (WebKit, revision 6fa0ce96b91303084e7067d75882aa6d188c876e)
1 /*
2  * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
3  * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4  * Copyright (C) 2012 Igalia, S.L.
5  *
6  * Redistribution and use in source and binary forms, with or without
7  * modification, are permitted provided that the following conditions
8  * are met:
9  *
10  * 1.  Redistributions of source code must retain the above copyright
11  *     notice, this list of conditions and the following disclaimer.
12  * 2.  Redistributions in binary form must reproduce the above copyright
13  *     notice, this list of conditions and the following disclaimer in the
14  *     documentation and/or other materials provided with the distribution.
15  * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16  *     its contributors may be used to endorse or promote products derived
17  *     from this software without specific prior written permission.
18  *
19  * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21  * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22  * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23  * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26  * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29  */
30
31 #include "config.h"
32 #include "BytecodeGenerator.h"
33
34 #include "BatchedTransitionOptimizer.h"
35 #include "JSActivation.h"
36 #include "JSFunction.h"
37 #include "Interpreter.h"
38 #include "LowLevelInterpreter.h"
39 #include "ScopeChain.h"
40 #include "StrongInlines.h"
41 #include "UString.h"
42
43 using namespace std;
44
45 namespace JSC {
46
47 /*
48     The layout of a register frame looks like this:
49
50     For
51
52     function f(x, y) {
53         var v1;
54         function g() { }
55         var v2;
56         return (x) * (y);
57     }
58
59     assuming (x) and (y) generated temporaries t1 and t2, you would have
60
61     ------------------------------------
62     |  x |  y |  g | v2 | v1 | t1 | t2 | <-- value held
63     ------------------------------------
64     | -5 | -4 | -3 | -2 | -1 | +0 | +1 | <-- register index
65     ------------------------------------
66     | params->|<-locals      | temps->
67
68     Because temporary registers are allocated in a stack-like fashion, we
69     can reclaim them with a simple popping algorithm. The same goes for labels.
70     (We never reclaim parameter or local registers, because parameters and
71     locals are DontDelete.)
72
73     The register layout before a function call looks like this:
74
75     For
76
77     function f(x, y)
78     {
79     }
80
81     f(1);
82
83     >                        <------------------------------
84     <                        >  reserved: call frame  |  1 | <-- value held
85     >         >snip<         <------------------------------
86     <                        > +0 | +1 | +2 | +3 | +4 | +5 | <-- register index
87     >                        <------------------------------
88     | params->|<-locals      | temps->
89
90     The call instruction fills in the "call frame" registers. It also pads
91     missing arguments at the end of the call:
92
93     >                        <-----------------------------------
94     <                        >  reserved: call frame  |  1 |  ? | <-- value held ("?" stands for "undefined")
95     >         >snip<         <-----------------------------------
96     <                        > +0 | +1 | +2 | +3 | +4 | +5 | +6 | <-- register index
97     >                        <-----------------------------------
98     | params->|<-locals      | temps->
99
100     After filling in missing arguments, the call instruction sets up the new
101     stack frame to overlap the end of the old stack frame:
102
103                              |---------------------------------->                        <
104                              |  reserved: call frame  |  1 |  ? <                        > <-- value held ("?" stands for "undefined")
105                              |---------------------------------->         >snip<         <
106                              | -7 | -6 | -5 | -4 | -3 | -2 | -1 <                        > <-- register index
107                              |---------------------------------->                        <
108                              |                        | params->|<-locals       | temps->
109
110     That way, arguments are "copied" into the callee's stack frame for free.
111
112     If the caller supplies too many arguments, this trick doesn't work. The
113     extra arguments protrude into space reserved for locals and temporaries.
114     In that case, the call instruction makes a real copy of the call frame header,
115     along with just the arguments expected by the callee, leaving the original
116     call frame header and arguments behind. (The call instruction can't just discard
117     extra arguments, because the "arguments" object may access them later.)
118     This copying strategy ensures that all named values will be at the indices
119     expected by the callee.
120 */
121
122 #ifndef NDEBUG
123 void ResolveResult::checkValidity()
124 {
125     switch (m_type) {
126     case Register:
127     case ReadOnlyRegister:
128         ASSERT(m_local);
129         return;
130     case Lexical:
131     case ReadOnlyLexical:
132     case DynamicLexical:
133     case DynamicReadOnlyLexical:
134         ASSERT(m_index != missingSymbolMarker());
135         return;
136     case Global:
137     case DynamicGlobal:
138         ASSERT(m_globalObject);
139         return;
140     case IndexedGlobal:
141     case ReadOnlyIndexedGlobal:
142     case DynamicIndexedGlobal:
143     case DynamicReadOnlyIndexedGlobal:
144         ASSERT(m_index != missingSymbolMarker());
145         ASSERT(m_globalObject);
146         return;
147     case Dynamic:
148         return;
149     default:
150         ASSERT_NOT_REACHED();
151     }
152 }
153 #endif
154
155 static bool s_dumpsGeneratedCode = false;
156
157 void BytecodeGenerator::setDumpsGeneratedCode(bool dumpsGeneratedCode)
158 {
159     s_dumpsGeneratedCode = dumpsGeneratedCode;
160 }
161
162 bool BytecodeGenerator::dumpsGeneratedCode()
163 {
164     return s_dumpsGeneratedCode;
165 }
166
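// generate() drives bytecode emission for the whole scope node: it records the
// 'this' register, walks the AST via emitBytecode(), optionally dumps the
// result, and clears the symbol table when it will not be needed at run time.
// It returns 0 on success, or an out-of-memory error object if expression
// nesting exceeded the generator's limit (m_expressionTooDeep).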
167 JSObject* BytecodeGenerator::generate()
168 {
169     SamplingRegion samplingRegion("Bytecode Generation");
170     
171     m_codeBlock->setThisRegister(m_thisRegister.index());
172
173     m_scopeNode->emitBytecode(*this);
174
175     m_codeBlock->setInstructionCount(m_codeBlock->instructions().size());
176
177     if (s_dumpsGeneratedCode)
178         m_codeBlock->dump(m_scopeChain->globalObject->globalExec());
179
180     if ((m_codeType == FunctionCode && !m_codeBlock->needsFullScopeChain() && !m_codeBlock->usesArguments()) || m_codeType == EvalCode)
181         symbolTable().clear();
182
183     m_codeBlock->shrinkToFit();
184
185     if (m_expressionTooDeep)
186         return createOutOfMemoryError(m_scopeChain->globalObject.get());
187     return 0;
188 }
189
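// addVar() assigns 'ident' the next callee register (marking it ReadOnly for
// const declarations). If the identifier is already in the symbol table, r0 is
// pointed at the existing register and false is returned; otherwise a fresh
// register is allocated and true is returned.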
190 bool BytecodeGenerator::addVar(const Identifier& ident, bool isConstant, RegisterID*& r0)
191 {
192     int index = m_calleeRegisters.size();
193     SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
194     pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.impl(), newEntry);
195
196     if (!result.second) {
197         r0 = &registerFor(result.first->second.getIndex());
198         return false;
199     }
200
201     r0 = addVar();
202     return true;
203 }
204
205 int BytecodeGenerator::addGlobalVar(const Identifier& ident, bool isConstant)
206 {
207     int index = symbolTable().size();
208     SymbolTableEntry newEntry(index, isConstant ? ReadOnly : 0);
209     pair<SymbolTable::iterator, bool> result = symbolTable().add(ident.impl(), newEntry);
210     if (!result.second)
211         index = result.first->second.getIndex();
212     return index;
213 }
214
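// preserveLastVar() records the boundary between variable registers and the
// constants/temporaries that follow (m_firstConstantIndex) and remembers the
// last variable register, if any, in m_lastVar.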
215 void BytecodeGenerator::preserveLastVar()
216 {
217     if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
218         m_lastVar = &m_calleeRegisters.last();
219 }
220
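// The program-code constructor declares global functions and vars directly on
// the global object (skipped for an optimizing recompilation), using
// BatchedTransitionOptimizer to batch the resulting property transitions.
// Newly declared functions replace existing properties of the same name;
// vars are only added if no property with that name exists yet.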
221 BytecodeGenerator::BytecodeGenerator(ProgramNode* programNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, ProgramCodeBlock* codeBlock, CompilationKind compilationKind)
222     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
223     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
224     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
225     , m_scopeChain(*scopeChain->globalData, scopeChain)
226     , m_symbolTable(symbolTable)
227     , m_scopeNode(programNode)
228     , m_codeBlock(codeBlock)
229     , m_thisRegister(CallFrame::thisArgumentOffset())
230     , m_finallyDepth(0)
231     , m_dynamicScopeDepth(0)
232     , m_baseScopeDepth(0)
233     , m_codeType(GlobalCode)
234     , m_nextConstantOffset(0)
235     , m_globalConstantIndex(0)
236     , m_hasCreatedActivation(true)
237     , m_firstLazyFunction(0)
238     , m_lastLazyFunction(0)
239     , m_globalData(scopeChain->globalData)
240     , m_lastOpcodeID(op_end)
241 #ifndef NDEBUG
242     , m_lastOpcodePosition(0)
243 #endif
244     , m_stack(wtfThreadData().stack())
245     , m_usesExceptions(false)
246     , m_expressionTooDeep(false)
247 {
248     m_globalData->startedCompiling(m_codeBlock);
249     if (m_shouldEmitDebugHooks)
250         m_codeBlock->setNeedsFullScopeChain(true);
251
252     emitOpcode(op_enter);
253     codeBlock->setGlobalData(m_globalData);
254
255     // FIXME: Move code that modifies the global object to Interpreter::execute.
256     
257     m_codeBlock->setNumParameters(1); // Allocate space for "this"
258     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
259     
260     if (compilationKind == OptimizingCompilation)
261         return;
262
263     JSGlobalObject* globalObject = scopeChain->globalObject.get();
264     ExecState* exec = globalObject->globalExec();
265     
266     BatchedTransitionOptimizer optimizer(*m_globalData, globalObject);
267
268     const VarStack& varStack = programNode->varStack();
269     const FunctionStack& functionStack = programNode->functionStack();
270
271     size_t newGlobals = varStack.size() + functionStack.size();
272     if (!newGlobals)
273         return;
274     globalObject->resizeRegisters(symbolTable->size() + newGlobals);
275
276     for (size_t i = 0; i < functionStack.size(); ++i) {
277         FunctionBodyNode* function = functionStack[i];
278         globalObject->removeDirect(*m_globalData, function->ident()); // Newly declared functions overwrite existing properties.
279
280         JSValue value = JSFunction::create(exec, makeFunction(exec, function), scopeChain);
281         int index = addGlobalVar(function->ident(), false);
282         globalObject->registerAt(index).set(*m_globalData, globalObject, value);
283     }
284
285     for (size_t i = 0; i < varStack.size(); ++i) {
286         if (globalObject->hasProperty(exec, *varStack[i].first))
287             continue;
288         addGlobalVar(*varStack[i].first, varStack[i].second & DeclarationStacks::IsConstant);
289     }
290 }
291
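// The function-code constructor lays out the callee frame: an anonymous
// activation register if a full scope chain is needed, a register pair for the
// 'arguments' object when it may be used, captured variables and functions
// first (so the activation does not have to step over other locals to mark
// them), then lazily created functions and the remaining vars, and finally the
// parameters counting down from the 'this' slot. Constructors emit
// op_create_this; non-strict functions that use 'this' (or eval, or are being
// debugged) emit op_convert_this.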
292 BytecodeGenerator::BytecodeGenerator(FunctionBodyNode* functionBody, ScopeChainNode* scopeChain, SymbolTable* symbolTable, CodeBlock* codeBlock, CompilationKind)
293     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
294     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
295     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
296     , m_scopeChain(*scopeChain->globalData, scopeChain)
297     , m_symbolTable(symbolTable)
298     , m_scopeNode(functionBody)
299     , m_codeBlock(codeBlock)
300     , m_activationRegister(0)
301     , m_finallyDepth(0)
302     , m_dynamicScopeDepth(0)
303     , m_baseScopeDepth(0)
304     , m_codeType(FunctionCode)
305     , m_nextConstantOffset(0)
306     , m_globalConstantIndex(0)
307     , m_hasCreatedActivation(false)
308     , m_firstLazyFunction(0)
309     , m_lastLazyFunction(0)
310     , m_globalData(scopeChain->globalData)
311     , m_lastOpcodeID(op_end)
312 #ifndef NDEBUG
313     , m_lastOpcodePosition(0)
314 #endif
315     , m_stack(wtfThreadData().stack())
316     , m_usesExceptions(false)
317     , m_expressionTooDeep(false)
318 {
319     m_globalData->startedCompiling(m_codeBlock);
320     if (m_shouldEmitDebugHooks)
321         m_codeBlock->setNeedsFullScopeChain(true);
322
323     codeBlock->setGlobalData(m_globalData);
324     
325     emitOpcode(op_enter);
326     if (m_codeBlock->needsFullScopeChain()) {
327         m_activationRegister = addVar();
328         emitInitLazyRegister(m_activationRegister);
329         m_codeBlock->setActivationRegister(m_activationRegister->index());
330     }
331
332     // Both op_tear_off_activation and op_tear_off_arguments tear off the 'arguments'
333     // object, if created.
334     if (m_codeBlock->needsFullScopeChain() || functionBody->usesArguments()) {
335         RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
336         RegisterID* argumentsRegister = addVar(propertyNames().arguments, false); // Can be changed by assigning to 'arguments'.
337
338         // We can save a little space by hard-coding the knowledge that the two
339         // 'arguments' values are stored in consecutive registers, and storing
340         // only the index of the assignable one.
341         codeBlock->setArgumentsRegister(argumentsRegister->index());
342         ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->index() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));
343
344         emitInitLazyRegister(argumentsRegister);
345         emitInitLazyRegister(unmodifiedArgumentsRegister);
346         
347         if (m_codeBlock->isStrictMode()) {
348             emitOpcode(op_create_arguments);
349             instructions().append(argumentsRegister->index());
350         }
351
352         // The debugger currently retrieves the arguments object from an activation rather than pulling
353         // it from a call frame.  In the long-term it should stop doing that (<rdar://problem/6911886>),
354         // but for now we force eager creation of the arguments object when debugging.
355         if (m_shouldEmitDebugHooks) {
356             emitOpcode(op_create_arguments);
357             instructions().append(argumentsRegister->index());
358         }
359     }
360
361     const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
362     const DeclarationStacks::VarStack& varStack = functionBody->varStack();
363
364     // Captured variables and functions go first so that activations don't have
365     // to step over the non-captured locals to mark them.
366     m_hasCreatedActivation = false;
367     if (functionBody->hasCapturedVariables()) {
368         for (size_t i = 0; i < functionStack.size(); ++i) {
369             FunctionBodyNode* function = functionStack[i];
370             const Identifier& ident = function->ident();
371             if (functionBody->captures(ident)) {
372                 if (!m_hasCreatedActivation) {
373                     m_hasCreatedActivation = true;
374                     emitOpcode(op_create_activation);
375                     instructions().append(m_activationRegister->index());
376                 }
377                 m_functions.add(ident.impl());
378                 emitNewFunction(addVar(ident, false), function);
379             }
380         }
381         for (size_t i = 0; i < varStack.size(); ++i) {
382             const Identifier& ident = *varStack[i].first;
383             if (functionBody->captures(ident))
384                 addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
385         }
386     }
387     bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
388     if (!canLazilyCreateFunctions && !m_hasCreatedActivation) {
389         m_hasCreatedActivation = true;
390         emitOpcode(op_create_activation);
391         instructions().append(m_activationRegister->index());
392     }
393
394     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
395     m_firstLazyFunction = codeBlock->m_numVars;
396     for (size_t i = 0; i < functionStack.size(); ++i) {
397         FunctionBodyNode* function = functionStack[i];
398         const Identifier& ident = function->ident();
399         if (!functionBody->captures(ident)) {
400             m_functions.add(ident.impl());
401             RefPtr<RegisterID> reg = addVar(ident, false);
402             // Don't lazily create functions that override the name 'arguments'
403             // as this would complicate lazy instantiation of actual arguments.
404             if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
405                 emitNewFunction(reg.get(), function);
406             else {
407                 emitInitLazyRegister(reg.get());
408                 m_lazyFunctions.set(reg->index(), function);
409             }
410         }
411     }
412     m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
413     for (size_t i = 0; i < varStack.size(); ++i) {
414         const Identifier& ident = *varStack[i].first;
415         if (!functionBody->captures(ident))
416             addVar(ident, varStack[i].second & DeclarationStacks::IsConstant);
417     }
418
419     if (m_shouldEmitDebugHooks)
420         codeBlock->m_numCapturedVars = codeBlock->m_numVars;
421
422     FunctionParameters& parameters = *functionBody->parameters();
423     m_parameters.grow(parameters.size() + 1); // reserve space for "this"
424
425     // Add "this" as a parameter
426     int nextParameterIndex = CallFrame::thisArgumentOffset();
427     m_thisRegister.setIndex(nextParameterIndex--);
428     m_codeBlock->addParameter();
429     
430     for (size_t i = 0; i < parameters.size(); ++i)
431         addParameter(parameters[i], nextParameterIndex--);
432
433     preserveLastVar();
434
435     if (isConstructor()) {
436         RefPtr<RegisterID> func = newTemporary();
437         RefPtr<RegisterID> funcProto = newTemporary();
438
439         emitOpcode(op_get_callee);
440         instructions().append(func->index());
441         // Load prototype.
442         emitGetById(funcProto.get(), func.get(), globalData()->propertyNames->prototype);
443
444         emitOpcode(op_create_this);
445         instructions().append(m_thisRegister.index());
446         instructions().append(funcProto->index());
447     } else if (!codeBlock->isStrictMode() && (functionBody->usesThis() || codeBlock->usesEval() || m_shouldEmitDebugHooks)) {
448         emitOpcode(op_convert_this);
449         instructions().append(m_thisRegister.index());
450     }
451 }
452
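// The eval-code constructor does not allocate registers for declarations:
// function declarations are recorded on the code block, and the variable names
// are handed to the code block so they can be declared in the correct scope
// when the eval actually runs.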
453 BytecodeGenerator::BytecodeGenerator(EvalNode* evalNode, ScopeChainNode* scopeChain, SymbolTable* symbolTable, EvalCodeBlock* codeBlock, CompilationKind)
454     : m_shouldEmitDebugHooks(scopeChain->globalObject->debugger())
455     , m_shouldEmitProfileHooks(scopeChain->globalObject->globalObjectMethodTable()->supportsProfiling(scopeChain->globalObject.get()))
456     , m_shouldEmitRichSourceInfo(scopeChain->globalObject->globalObjectMethodTable()->supportsRichSourceInfo(scopeChain->globalObject.get()))
457     , m_scopeChain(*scopeChain->globalData, scopeChain)
458     , m_symbolTable(symbolTable)
459     , m_scopeNode(evalNode)
460     , m_codeBlock(codeBlock)
461     , m_thisRegister(CallFrame::thisArgumentOffset())
462     , m_finallyDepth(0)
463     , m_dynamicScopeDepth(0)
464     , m_baseScopeDepth(codeBlock->baseScopeDepth())
465     , m_codeType(EvalCode)
466     , m_nextConstantOffset(0)
467     , m_globalConstantIndex(0)
468     , m_hasCreatedActivation(true)
469     , m_firstLazyFunction(0)
470     , m_lastLazyFunction(0)
471     , m_globalData(scopeChain->globalData)
472     , m_lastOpcodeID(op_end)
473 #ifndef NDEBUG
474     , m_lastOpcodePosition(0)
475 #endif
476     , m_stack(wtfThreadData().stack())
477     , m_usesExceptions(false)
478     , m_expressionTooDeep(false)
479 {
480     m_globalData->startedCompiling(m_codeBlock);
481     if (m_shouldEmitDebugHooks || m_baseScopeDepth)
482         m_codeBlock->setNeedsFullScopeChain(true);
483
484     emitOpcode(op_enter);
485     codeBlock->setGlobalData(m_globalData);
486     m_codeBlock->setNumParameters(1);
487
488     const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
489     for (size_t i = 0; i < functionStack.size(); ++i)
490         m_codeBlock->addFunctionDecl(makeFunction(m_globalData, functionStack[i]));
491
492     const DeclarationStacks::VarStack& varStack = evalNode->varStack();
493     unsigned numVariables = varStack.size();
494     Vector<Identifier> variables;
495     variables.reserveCapacity(numVariables);
496     for (size_t i = 0; i < numVariables; ++i)
497         variables.append(*varStack[i].first);
498     codeBlock->adoptVariables(variables);
499     codeBlock->m_numCapturedVars = codeBlock->m_numVars;
500     preserveLastVar();
501 }
502
503 BytecodeGenerator::~BytecodeGenerator()
504 {
505     m_globalData->finishedCompiling(m_codeBlock);
506 }
507
508 RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
509 {
510     emitOpcode(op_init_lazy_reg);
511     instructions().append(reg->index());
512     return reg;
513 }
514
515 void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
516 {
517     // Parameters overwrite var declarations, but not function declarations.
518     StringImpl* rep = ident.impl();
519     if (!m_functions.contains(rep)) {
520         symbolTable().set(rep, parameterIndex);
521         RegisterID& parameter = registerFor(parameterIndex);
522         parameter.setIndex(parameterIndex);
523     }
524
525     // To maintain the calling convention, we have to allocate unique space for
526     // each parameter, even if the parameter doesn't make it into the symbol table.
527     m_codeBlock->addParameter();
528 }
529
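// willResolveToArguments() is true only when 'ident' is 'arguments', local
// optimization is allowed, the name is in the symbol table, and this is
// function code that uses an arguments object -- exactly the cases where
// uncheckedRegisterForArguments() below may be used.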
530 bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
531 {
532     if (ident != propertyNames().arguments)
533         return false;
534     
535     if (!shouldOptimizeLocals())
536         return false;
537     
538     SymbolTableEntry entry = symbolTable().get(ident.impl());
539     if (entry.isNull())
540         return false;
541     
542     if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
543         return true;
544     
545     return false;
546 }
547
548 RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
549 {
550     ASSERT(willResolveToArguments(propertyNames().arguments));
551
552     SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
553     ASSERT(!entry.isNull());
554     return &registerFor(entry.getIndex());
555 }
556
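// Registers in the half-open range [m_firstLazyFunction, m_lastLazyFunction)
// hold function declarations that were not emitted eagerly; on first use,
// createLazyRegisterIfNecessary() materializes the function into its register
// via emitLazyNewFunction().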
557 RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
558 {
559     if (m_lastLazyFunction <= reg->index() || reg->index() < m_firstLazyFunction)
560         return reg;
561     emitLazyNewFunction(reg, m_lazyFunctions.get(reg->index()));
562     return reg;
563 }
564
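// newRegister() appends a register to the callee frame and keeps
// m_numCalleeRegisters at the high-water mark seen so far.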
565 RegisterID* BytecodeGenerator::newRegister()
566 {
567     m_calleeRegisters.append(m_calleeRegisters.size());
568     m_codeBlock->m_numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
569     return &m_calleeRegisters.last();
570 }
571
572 RegisterID* BytecodeGenerator::newTemporary()
573 {
574     // Reclaim free register IDs.
575     while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
576         m_calleeRegisters.removeLast();
577         
578     RegisterID* result = newRegister();
579     result->setTemporary();
580     return result;
581 }
582
583 RegisterID* BytecodeGenerator::highestUsedRegister()
584 {
585     size_t count = m_codeBlock->m_numCalleeRegisters;
586     while (m_calleeRegisters.size() < count)
587         newRegister();
588     return &m_calleeRegisters.last();
589 }
590
591 PassRefPtr<LabelScope> BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
592 {
593     // Reclaim free label scopes.
594     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
595         m_labelScopes.removeLast();
596
597     // Allocate new label scope.
598     LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
599     m_labelScopes.append(scope);
600     return &m_labelScopes.last();
601 }
602
603 PassRefPtr<Label> BytecodeGenerator::newLabel()
604 {
605     // Reclaim free label IDs.
606     while (m_labels.size() && !m_labels.last().refCount())
607         m_labels.removeLast();
608
609     // Allocate new label ID.
610     m_labels.append(m_codeBlock);
611     return &m_labels.last();
612 }
613
614 PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
615 {
616     unsigned newLabelIndex = instructions().size();
617     l0->setLocation(newLabelIndex);
618
619     if (m_codeBlock->numberOfJumpTargets()) {
620         unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
621         ASSERT(lastLabelIndex <= newLabelIndex);
622         if (newLabelIndex == lastLabelIndex) {
623             // Peephole optimizations have already been disabled by emitting the last label
624             return l0;
625         }
626     }
627
628     m_codeBlock->addJumpTarget(newLabelIndex);
629
630     // This disables peephole optimizations when an instruction is a jump target
631     m_lastOpcodeID = op_end;
632     return l0;
633 }
634
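// emitOpcode() appends the opcode word and remembers it in m_lastOpcodeID so
// the peephole optimizations below (emitJumpIfTrue, emitEqualityOp, ...) can
// inspect and, when profitable, rewind the previous instruction. The
// debug-only check verifies that the previous opcode was followed by the
// expected number of operands.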
635 void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
636 {
637 #ifndef NDEBUG
638     size_t opcodePosition = instructions().size();
639     ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
640     m_lastOpcodePosition = opcodePosition;
641 #endif
642     instructions().append(globalData()->interpreter->getOpcode(opcodeID));
643     m_lastOpcodeID = opcodeID;
644 }
645
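// emitProfiledOpcode() allocates a ValueProfile for the instruction when value
// profiling is compiled in (callers append the returned profile as the
// instruction's final operand); otherwise it returns 0.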
646 ValueProfile* BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
647 {
648 #if ENABLE(VALUE_PROFILER)
649     ValueProfile* result = m_codeBlock->addValueProfile(instructions().size());
650 #else
651     ValueProfile* result = 0;
652 #endif
653     emitOpcode(opcodeID);
654     return result;
655 }
656
657 void BytecodeGenerator::emitLoopHint()
658 {
659 #if ENABLE(DFG_JIT)
660     emitOpcode(op_loop_hint);
661 #endif
662 }
663
664 void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
665 {
666     ASSERT(instructions().size() >= 4);
667     size_t size = instructions().size();
668     dstIndex = instructions().at(size - 3).u.operand;
669     src1Index = instructions().at(size - 2).u.operand;
670     src2Index = instructions().at(size - 1).u.operand;
671 }
672
673 void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
674 {
675     ASSERT(instructions().size() >= 3);
676     size_t size = instructions().size();
677     dstIndex = instructions().at(size - 2).u.operand;
678     srcIndex = instructions().at(size - 1).u.operand;
679 }
680
681 void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
682 {
683     ASSERT(instructions().size() >= 4);
684     instructions().shrink(instructions().size() - 4);
685     m_lastOpcodeID = op_end;
686 }
687
688 void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
689 {
690     ASSERT(instructions().size() >= 3);
691     instructions().shrink(instructions().size() - 3);
692     m_lastOpcodeID = op_end;
693 }
694
695 PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
696 {
697     size_t begin = instructions().size();
698     emitOpcode(target->isForward() ? op_jmp : op_loop);
699     instructions().append(target->bind(begin, instructions().size()));
700     return target;
701 }
702
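// emitJumpIfTrue() fuses a preceding comparison with the branch when the
// comparison's result register is this condition, is a temporary, and has no
// remaining references. For example:
//
//     op_less t0, a, b
//     op_jtrue t0, L        ==>   op_jless a, b, L
//
// Backward (loop) jumps use the op_loop_if_* variants, which mark loop
// back-edges; op_eq_null and op_neq_null fold into op_jeq_null / op_jneq_null
// for forward jumps only.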
703 PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
704 {
705     if (m_lastOpcodeID == op_less) {
706         int dstIndex;
707         int src1Index;
708         int src2Index;
709
710         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
711
712         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
713             rewindBinaryOp();
714
715             size_t begin = instructions().size();
716             emitOpcode(target->isForward() ? op_jless : op_loop_if_less);
717             instructions().append(src1Index);
718             instructions().append(src2Index);
719             instructions().append(target->bind(begin, instructions().size()));
720             return target;
721         }
722     } else if (m_lastOpcodeID == op_lesseq) {
723         int dstIndex;
724         int src1Index;
725         int src2Index;
726
727         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
728
729         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
730             rewindBinaryOp();
731
732             size_t begin = instructions().size();
733             emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
734             instructions().append(src1Index);
735             instructions().append(src2Index);
736             instructions().append(target->bind(begin, instructions().size()));
737             return target;
738         }
739     } else if (m_lastOpcodeID == op_greater) {
740         int dstIndex;
741         int src1Index;
742         int src2Index;
743
744         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
745
746         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
747             rewindBinaryOp();
748
749             size_t begin = instructions().size();
750             emitOpcode(target->isForward() ? op_jgreater : op_loop_if_greater);
751             instructions().append(src1Index);
752             instructions().append(src2Index);
753             instructions().append(target->bind(begin, instructions().size()));
754             return target;
755         }
756     } else if (m_lastOpcodeID == op_greatereq) {
757         int dstIndex;
758         int src1Index;
759         int src2Index;
760
761         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
762
763         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
764             rewindBinaryOp();
765
766             size_t begin = instructions().size();
767             emitOpcode(target->isForward() ? op_jgreatereq : op_loop_if_greatereq);
768             instructions().append(src1Index);
769             instructions().append(src2Index);
770             instructions().append(target->bind(begin, instructions().size()));
771             return target;
772         }
773     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
774         int dstIndex;
775         int srcIndex;
776
777         retrieveLastUnaryOp(dstIndex, srcIndex);
778
779         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
780             rewindUnaryOp();
781
782             size_t begin = instructions().size();
783             emitOpcode(op_jeq_null);
784             instructions().append(srcIndex);
785             instructions().append(target->bind(begin, instructions().size()));
786             return target;
787         }
788     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
789         int dstIndex;
790         int srcIndex;
791
792         retrieveLastUnaryOp(dstIndex, srcIndex);
793
794         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
795             rewindUnaryOp();
796
797             size_t begin = instructions().size();
798             emitOpcode(op_jneq_null);
799             instructions().append(srcIndex);
800             instructions().append(target->bind(begin, instructions().size()));
801             return target;
802         }
803     }
804
805     size_t begin = instructions().size();
806
807     emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
808     instructions().append(cond->index());
809     instructions().append(target->bind(begin, instructions().size()));
810     return target;
811 }
812
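// emitJumpIfFalse() mirrors emitJumpIfTrue() with the sense inverted: fused
// comparisons become op_jnless / op_jnlesseq / op_jngreater / op_jngreatereq
// (forward jumps only), op_not is folded away by branching on the operand's
// truth value, and eq_null / neq_null swap their jump opcodes.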
813 PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
814 {
815     if (m_lastOpcodeID == op_less && target->isForward()) {
816         int dstIndex;
817         int src1Index;
818         int src2Index;
819
820         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
821
822         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
823             rewindBinaryOp();
824
825             size_t begin = instructions().size();
826             emitOpcode(op_jnless);
827             instructions().append(src1Index);
828             instructions().append(src2Index);
829             instructions().append(target->bind(begin, instructions().size()));
830             return target;
831         }
832     } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
833         int dstIndex;
834         int src1Index;
835         int src2Index;
836
837         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
838
839         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
840             rewindBinaryOp();
841
842             size_t begin = instructions().size();
843             emitOpcode(op_jnlesseq);
844             instructions().append(src1Index);
845             instructions().append(src2Index);
846             instructions().append(target->bind(begin, instructions().size()));
847             return target;
848         }
849     } else if (m_lastOpcodeID == op_greater && target->isForward()) {
850         int dstIndex;
851         int src1Index;
852         int src2Index;
853
854         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
855
856         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
857             rewindBinaryOp();
858
859             size_t begin = instructions().size();
860             emitOpcode(op_jngreater);
861             instructions().append(src1Index);
862             instructions().append(src2Index);
863             instructions().append(target->bind(begin, instructions().size()));
864             return target;
865         }
866     } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
867         int dstIndex;
868         int src1Index;
869         int src2Index;
870
871         retrieveLastBinaryOp(dstIndex, src1Index, src2Index);
872
873         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
874             rewindBinaryOp();
875
876             size_t begin = instructions().size();
877             emitOpcode(op_jngreatereq);
878             instructions().append(src1Index);
879             instructions().append(src2Index);
880             instructions().append(target->bind(begin, instructions().size()));
881             return target;
882         }
883     } else if (m_lastOpcodeID == op_not) {
884         int dstIndex;
885         int srcIndex;
886
887         retrieveLastUnaryOp(dstIndex, srcIndex);
888
889         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
890             rewindUnaryOp();
891
892             size_t begin = instructions().size();
893             emitOpcode(target->isForward() ? op_jtrue : op_loop_if_true);
894             instructions().append(srcIndex);
895             instructions().append(target->bind(begin, instructions().size()));
896             return target;
897         }
898     } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
899         int dstIndex;
900         int srcIndex;
901
902         retrieveLastUnaryOp(dstIndex, srcIndex);
903
904         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
905             rewindUnaryOp();
906
907             size_t begin = instructions().size();
908             emitOpcode(op_jneq_null);
909             instructions().append(srcIndex);
910             instructions().append(target->bind(begin, instructions().size()));
911             return target;
912         }
913     } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
914         int dstIndex;
915         int srcIndex;
916
917         retrieveLastUnaryOp(dstIndex, srcIndex);
918
919         if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
920             rewindUnaryOp();
921
922             size_t begin = instructions().size();
923             emitOpcode(op_jeq_null);
924             instructions().append(srcIndex);
925             instructions().append(target->bind(begin, instructions().size()));
926             return target;
927         }
928     }
929
930     size_t begin = instructions().size();
931     emitOpcode(target->isForward() ? op_jfalse : op_loop_if_false);
932     instructions().append(cond->index());
933     instructions().append(target->bind(begin, instructions().size()));
934     return target;
935 }
936
937 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
938 {
939     size_t begin = instructions().size();
940
941     emitOpcode(op_jneq_ptr);
942     instructions().append(cond->index());
943     instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->callFunction()));
944     instructions().append(target->bind(begin, instructions().size()));
945     return target;
946 }
947
948 PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
949 {
950     size_t begin = instructions().size();
951
952     emitOpcode(op_jneq_ptr);
953     instructions().append(cond->index());
954     instructions().append(Instruction(*m_globalData, m_codeBlock->ownerExecutable(), m_scopeChain->globalObject->applyFunction()));
955     instructions().append(target->bind(begin, instructions().size()));
956     return target;
957 }
958
959 unsigned BytecodeGenerator::addConstant(const Identifier& ident)
960 {
961     StringImpl* rep = ident.impl();
962     pair<IdentifierMap::iterator, bool> result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
963     if (result.second) // new entry
964         m_codeBlock->addIdentifier(Identifier(m_globalData, rep));
965
966     return result.first->second;
967 }
968
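// addConstantValue() deduplicates constants through m_jsValueMap: each distinct
// value gets one slot in the constant pool, addressed by a register index at or
// above FirstConstantRegisterIndex.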
969 RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
970 {
971     int index = m_nextConstantOffset;
972
973     pair<JSValueMap::iterator, bool> result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
974     if (result.second) {
975         m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
976         ++m_nextConstantOffset;
977         m_codeBlock->addConstant(JSValue(v));
978     } else
979         index = result.first->second;
980
981     return &m_constantPoolRegisters[index];
982 }
983
984 unsigned BytecodeGenerator::addRegExp(RegExp* r)
985 {
986     return m_codeBlock->addRegExp(r);
987 }
988
989 RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
990 {
991     emitOpcode(op_mov);
992     instructions().append(dst->index());
993     instructions().append(src->index());
994     return dst;
995 }
996
997 RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
998 {
999     emitOpcode(opcodeID);
1000     instructions().append(dst->index());
1001     instructions().append(src->index());
1002     return dst;
1003 }
1004
1005 RegisterID* BytecodeGenerator::emitPreInc(RegisterID* srcDst)
1006 {
1007     emitOpcode(op_pre_inc);
1008     instructions().append(srcDst->index());
1009     return srcDst;
1010 }
1011
1012 RegisterID* BytecodeGenerator::emitPreDec(RegisterID* srcDst)
1013 {
1014     emitOpcode(op_pre_dec);
1015     instructions().append(srcDst->index());
1016     return srcDst;
1017 }
1018
1019 RegisterID* BytecodeGenerator::emitPostInc(RegisterID* dst, RegisterID* srcDst)
1020 {
1021     emitOpcode(op_post_inc);
1022     instructions().append(dst->index());
1023     instructions().append(srcDst->index());
1024     return dst;
1025 }
1026
1027 RegisterID* BytecodeGenerator::emitPostDec(RegisterID* dst, RegisterID* srcDst)
1028 {
1029     emitOpcode(op_post_dec);
1030     instructions().append(dst->index());
1031     instructions().append(srcDst->index());
1032     return dst;
1033 }
1034
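// emitBinaryOp() appends dst, src1 and src2, plus an extra OperandTypes word
// for the arithmetic and bitwise opcodes listed below, recording what is
// statically known about the operand types.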
1035 RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
1036 {
1037     emitOpcode(opcodeID);
1038     instructions().append(dst->index());
1039     instructions().append(src1->index());
1040     instructions().append(src2->index());
1041
1042     if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
1043         opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
1044         instructions().append(types.toInt());
1045
1046     return dst;
1047 }
1048
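// emitEqualityOp() recognizes "typeof expr == <type name literal>" when the
// typeof result is a temporary and the right operand is a constant string, and
// collapses the pair into a single op_is_undefined / op_is_boolean /
// op_is_number / op_is_string / op_is_object / op_is_function. For example,
// typeof x == "number" becomes op_is_number instead of a typeof followed by a
// string comparison.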
1049 RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
1050 {
1051     if (m_lastOpcodeID == op_typeof) {
1052         int dstIndex;
1053         int srcIndex;
1054
1055         retrieveLastUnaryOp(dstIndex, srcIndex);
1056
1057         if (src1->index() == dstIndex
1058             && src1->isTemporary()
1059             && m_codeBlock->isConstantRegisterIndex(src2->index())
1060             && m_codeBlock->constantRegister(src2->index()).get().isString()) {
1061             const UString& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
1062             if (value == "undefined") {
1063                 rewindUnaryOp();
1064                 emitOpcode(op_is_undefined);
1065                 instructions().append(dst->index());
1066                 instructions().append(srcIndex);
1067                 return dst;
1068             }
1069             if (value == "boolean") {
1070                 rewindUnaryOp();
1071                 emitOpcode(op_is_boolean);
1072                 instructions().append(dst->index());
1073                 instructions().append(srcIndex);
1074                 return dst;
1075             }
1076             if (value == "number") {
1077                 rewindUnaryOp();
1078                 emitOpcode(op_is_number);
1079                 instructions().append(dst->index());
1080                 instructions().append(srcIndex);
1081                 return dst;
1082             }
1083             if (value == "string") {
1084                 rewindUnaryOp();
1085                 emitOpcode(op_is_string);
1086                 instructions().append(dst->index());
1087                 instructions().append(srcIndex);
1088                 return dst;
1089             }
1090             if (value == "object") {
1091                 rewindUnaryOp();
1092                 emitOpcode(op_is_object);
1093                 instructions().append(dst->index());
1094                 instructions().append(srcIndex);
1095                 return dst;
1096             }
1097             if (value == "function") {
1098                 rewindUnaryOp();
1099                 emitOpcode(op_is_function);
1100                 instructions().append(dst->index());
1101                 instructions().append(srcIndex);
1102                 return dst;
1103             }
1104         }
1105     }
1106
1107     emitOpcode(opcodeID);
1108     instructions().append(dst->index());
1109     instructions().append(src1->index());
1110     instructions().append(src2->index());
1111     return dst;
1112 }
1113
1114 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
1115 {
1116     return emitLoad(dst, jsBoolean(b));
1117 }
1118
1119 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
1120 {
1121     // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
1122     // Later we can do the extra work to handle that like the other cases.  They also don't
1123     // work correctly with NaN as a key.
1124     if (isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
1125         return emitLoad(dst, jsNumber(number));
1126     JSValue& valueInMap = m_numberMap.add(number, JSValue()).first->second;
1127     if (!valueInMap)
1128         valueInMap = jsNumber(number);
1129     return emitLoad(dst, valueInMap);
1130 }
1131
1132 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
1133 {
1134     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).first->second;
1135     if (!stringInMap)
1136         stringInMap = jsOwnedString(globalData(), identifier.ustring());
1137     return emitLoad(dst, JSValue(stringInMap));
1138 }
1139
1140 RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
1141 {
1142     RegisterID* constantID = addConstantValue(v);
1143     if (dst)
1144         return emitMove(dst, constantID);
1145     return constantID;
1146 }
1147
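// resolve() classifies an identifier lookup at compile time:
//   - 'this' and optimizable locals resolve straight to a register;
//   - otherwise the scope chain is walked: a hit in a variable object's symbol
//     table gives a lexical (index + depth) resolve, or an indexed-global
//     resolve if it is the outermost scope;
//   - 'arguments' and anything under a dynamic scope (e.g. 'with') fall back
//     to a dynamic resolve performed at run time.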
1148 ResolveResult BytecodeGenerator::resolve(const Identifier& property)
1149 {
1150     if (property == propertyNames().thisIdentifier)
1151         return ResolveResult::registerResolve(thisRegister(), ResolveResult::ReadOnlyFlag);
1152
1153     // Check if the property should be allocated in a register.
1154     if (m_codeType != GlobalCode && shouldOptimizeLocals()) {
1155         SymbolTableEntry entry = symbolTable().get(property.impl());
1156         if (!entry.isNull()) {
1157             if (property == propertyNames().arguments)
1158                 createArgumentsIfNecessary();
1159             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1160             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1161             return ResolveResult::registerResolve(local, flags);
1162         }
1163     }
1164
1165     // Cases where we cannot statically optimize the lookup.
1166     if (property == propertyNames().arguments || !canOptimizeNonLocals())
1167         return ResolveResult::dynamicResolve(0);
1168
1169     ScopeChainIterator iter = m_scopeChain->begin();
1170     ScopeChainIterator end = m_scopeChain->end();
1171     size_t depth = 0;
1172     unsigned flags = 0;
1173     for (; iter != end; ++iter, ++depth) {
1174         JSObject* currentScope = iter->get();
1175         if (!currentScope->isVariableObject()) {
1176             flags |= ResolveResult::DynamicFlag;
1177             break;
1178         }        
1179         JSVariableObject* currentVariableObject = static_cast<JSVariableObject*>(currentScope);
1180         SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());
1181
1182         // Found the property
1183         if (!entry.isNull()) {
1184             if (entry.isReadOnly())
1185                 flags |= ResolveResult::ReadOnlyFlag;
1186             depth += m_codeBlock->needsFullScopeChain();
1187             if (++iter == end) {
1188                 if (flags & ResolveResult::DynamicFlag)
1189                     return ResolveResult::dynamicIndexedGlobalResolve(entry.getIndex(), depth, currentScope, flags);
1190                 return ResolveResult::indexedGlobalResolve(entry.getIndex(), currentScope, flags);
1191             }
1192 #if !ASSERT_DISABLED
1193             if (JSActivation* activation = jsDynamicCast<JSActivation*>(currentVariableObject))
1194                 ASSERT(activation->isValidScopedLookup(entry.getIndex()));
1195 #endif
1196             return ResolveResult::lexicalResolve(entry.getIndex(), depth, flags);
1197         }
1198         bool scopeRequiresDynamicChecks = false;
1199         if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
1200             break;
1201         if (scopeRequiresDynamicChecks)
1202             flags |= ResolveResult::DynamicFlag;
1203     }
1204
1205     // Can't locate the property but we're able to avoid a few lookups.
1206     JSObject* scope = iter->get();
1207     depth += m_codeBlock->needsFullScopeChain();
1208     if (++iter == end) {
1209         if ((flags & ResolveResult::DynamicFlag) && depth)
1210             return ResolveResult::dynamicGlobalResolve(depth, scope);
1211         return ResolveResult::globalResolve(scope);
1212     }
1213     return ResolveResult::dynamicResolve(depth);
1214 }
1215
1216 ResolveResult BytecodeGenerator::resolveConstDecl(const Identifier& property)
1217 {
1218     // Register-allocated const declarations.
1219     if (m_codeType != EvalCode && m_codeType != GlobalCode) {
1220         SymbolTableEntry entry = symbolTable().get(property.impl());
1221         if (!entry.isNull()) {
1222             unsigned flags = entry.isReadOnly() ? ResolveResult::ReadOnlyFlag : 0;
1223             RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
1224             return ResolveResult::registerResolve(local, flags);
1225         }
1226     }
1227
1228     // Const declarations in eval code or global code.
1229     ScopeChainIterator iter = scopeChain()->begin();
1230     ScopeChainIterator end = scopeChain()->end();
1231     size_t depth = 0;
1232     for (; iter != end; ++iter, ++depth) {
1233         JSObject* currentScope = iter->get();
1234         if (!currentScope->isVariableObject())
1235             continue;
1236         JSVariableObject* currentVariableObject = static_cast<JSVariableObject*>(currentScope);
1237         SymbolTableEntry entry = currentVariableObject->symbolTable().get(property.impl());
1238         if (entry.isNull())
1239             continue;
1240         if (++iter == end)
1241             return ResolveResult::indexedGlobalResolve(entry.getIndex(), currentVariableObject, 0);
1242         return ResolveResult::lexicalResolve(entry.getIndex(), depth + scopeDepth(), 0);
1243     }
1244
1245     // FIXME: While this code should only be hit in an eval block, it will assign
1246     // to the wrong base if property exists in an intervening with scope.
1247     return ResolveResult::dynamicResolve(scopeDepth());
1248 }
1249
1250 void BytecodeGenerator::emitCheckHasInstance(RegisterID* base)
1251 {
1252     emitOpcode(op_check_has_instance);
1253     instructions().append(base->index());
1254 }
1255
1256 RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* base, RegisterID* basePrototype)
1257 {
1258     emitOpcode(op_instanceof);
1259     instructions().append(dst->index());
1260     instructions().append(value->index());
1261     instructions().append(base->index());
1262     instructions().append(basePrototype->index());
1263     return dst;
1264 }
1265
1266 static const unsigned maxGlobalResolves = 128;
1267
1268 bool BytecodeGenerator::shouldAvoidResolveGlobal()
1269 {
1270     return m_codeBlock->globalResolveInfoCount() > maxGlobalResolves && !m_labelScopes.size();
1271 }
1272
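// emitResolve() picks the cheapest lookup the ResolveResult allows: a static
// variable read, op_resolve_global (or op_resolve_global_dynamic) for globals,
// op_resolve_skip when some scopes can be skipped but the rest must still be
// searched, and plain op_resolve as the fully dynamic fallback.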
1273 RegisterID* BytecodeGenerator::emitResolve(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1274 {
1275     if (resolveResult.isStatic())
1276         return emitGetStaticVar(dst, resolveResult);
1277     
1278     if (resolveResult.isGlobal() && !shouldAvoidResolveGlobal()) {
1279 #if ENABLE(JIT)
1280         m_codeBlock->addGlobalResolveInfo(instructions().size());
1281 #endif
1282         m_codeBlock->addGlobalResolveInstruction(instructions().size());
1283         bool dynamic = resolveResult.isDynamic() && resolveResult.depth();
1284         ValueProfile* profile = emitProfiledOpcode(dynamic ? op_resolve_global_dynamic : op_resolve_global);
1285         instructions().append(dst->index());
1286         instructions().append(addConstant(property));
1287         instructions().append(0);
1288         instructions().append(0);
1289         if (dynamic)
1290             instructions().append(resolveResult.depth());
1291         instructions().append(profile);
1292         return dst;
1293     }
1294         
1295     if (resolveResult.type() == ResolveResult::Dynamic && resolveResult.depth()) {
1296         // In this case we are at least able to drop a few scope chains from the
1297         // lookup chain, although we still need to hash from then on.
1298         ValueProfile* profile = emitProfiledOpcode(op_resolve_skip);
1299         instructions().append(dst->index());
1300         instructions().append(addConstant(property));
1301         instructions().append(resolveResult.depth());
1302         instructions().append(profile);
1303         return dst;
1304     }
1305
1306     ValueProfile* profile = emitProfiledOpcode(op_resolve);
1307     instructions().append(dst->index());
1308     instructions().append(addConstant(property));
1309     instructions().append(profile);
1310     return dst;
1311 }
1312
1313 RegisterID* BytecodeGenerator::emitResolveBase(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1314 {
1315     if (resolveResult.isGlobal() && !resolveResult.isDynamic())
1316         // Global object is the base
1317         return emitLoad(dst, JSValue(resolveResult.globalObject()));
1318
1319     // We can't optimise at all :-(
1320     ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1321     instructions().append(dst->index());
1322     instructions().append(addConstant(property));
1323     instructions().append(false);
1324     instructions().append(profile);
1325     return dst;
1326 }
1327
1328 RegisterID* BytecodeGenerator::emitResolveBaseForPut(RegisterID* dst, const ResolveResult& resolveResult, const Identifier& property)
1329 {
1330     if (!m_codeBlock->isStrictMode())
1331         return emitResolveBase(dst, resolveResult, property);
1332
1333     if (resolveResult.isGlobal() && !resolveResult.isDynamic()) {
1334         // Global object is the base
1335         RefPtr<RegisterID> result = emitLoad(dst, JSValue(resolveResult.globalObject()));
1336         emitOpcode(op_ensure_property_exists);
1337         instructions().append(dst->index());
1338         instructions().append(addConstant(property));
1339         return result.get();
1340     }
1341
1342     // We can't optimise at all :-(
1343     ValueProfile* profile = emitProfiledOpcode(op_resolve_base);
1344     instructions().append(dst->index());
1345     instructions().append(addConstant(property));
1346     instructions().append(true);
1347     instructions().append(profile);
1348     return dst;
1349 }
1350
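// emitResolveWithBase() produces both the base object and the property value.
// For non-dynamic globals the base is just the global object and the value is
// fetched by the cheapest available lookup; otherwise a single
// op_resolve_with_base does both at run time.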
1351 RegisterID* BytecodeGenerator::emitResolveWithBase(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1352 {
1353     if (resolveResult.isGlobal() && !resolveResult.isDynamic()) {
1354         // Global object is the base
1355         emitLoad(baseDst, JSValue(resolveResult.globalObject()));
1356
1357         if (resolveResult.isStatic()) {
1358             // Directly index the property lookup across multiple scopes.
1359             emitGetStaticVar(propDst, resolveResult);
1360             return baseDst;
1361         }
1362
1363         if (shouldAvoidResolveGlobal()) {
1364             ValueProfile* profile = emitProfiledOpcode(op_resolve);
1365             instructions().append(propDst->index());
1366             instructions().append(addConstant(property));
1367             instructions().append(profile);
1368             return baseDst;
1369         }
1370
1371 #if ENABLE(JIT)
1372         m_codeBlock->addGlobalResolveInfo(instructions().size());
1373 #endif
1374 #if ENABLE(CLASSIC_INTERPRETER)
1375         m_codeBlock->addGlobalResolveInstruction(instructions().size());
1376 #endif
1377         ValueProfile* profile = emitProfiledOpcode(op_resolve_global);
1378         instructions().append(propDst->index());
1379         instructions().append(addConstant(property));
1380         instructions().append(0);
1381         instructions().append(0);
1382         instructions().append(profile);
1383         return baseDst;
1384     }
1385
1386     ValueProfile* profile = emitProfiledOpcode(op_resolve_with_base);
1387     instructions().append(baseDst->index());
1388     instructions().append(propDst->index());
1389     instructions().append(addConstant(property));
1390     instructions().append(profile);
1391     return baseDst;
1392 }
1393
1394 RegisterID* BytecodeGenerator::emitResolveWithThis(RegisterID* baseDst, RegisterID* propDst, const ResolveResult& resolveResult, const Identifier& property)
1395 {
1396     if (resolveResult.isStatic()) {
1397         emitLoad(baseDst, jsUndefined());
1398         emitGetStaticVar(propDst, resolveResult);
1399         return baseDst;
1400     }
1401
1402     if (resolveResult.type() == ResolveResult::Dynamic) {
1403         // We can't optimise at all :-(
1404         ValueProfile* profile = emitProfiledOpcode(op_resolve_with_this);
1405         instructions().append(baseDst->index());
1406         instructions().append(propDst->index());
1407         instructions().append(addConstant(property));
1408         instructions().append(profile);
1409         return baseDst;
1410     }
1411
1412     emitLoad(baseDst, jsUndefined());
1413     return emitResolve(propDst, resolveResult, property);
1414 }
1415
1416 RegisterID* BytecodeGenerator::emitGetStaticVar(RegisterID* dst, const ResolveResult& resolveResult)
1417 {
1418     ValueProfile* profile = 0;
1419
1420     switch (resolveResult.type()) {
1421     case ResolveResult::Register:
1422     case ResolveResult::ReadOnlyRegister:
1423         if (dst == ignoredResult())
1424             return 0;
1425         return moveToDestinationIfNeeded(dst, resolveResult.local());
1426
1427     case ResolveResult::Lexical:
1428     case ResolveResult::ReadOnlyLexical:
1429         profile = emitProfiledOpcode(op_get_scoped_var);
1430         instructions().append(dst->index());
1431         instructions().append(resolveResult.index());
1432         instructions().append(resolveResult.depth());
1433         instructions().append(profile);
1434         return dst;
1435
1436     case ResolveResult::IndexedGlobal:
1437     case ResolveResult::ReadOnlyIndexedGlobal:
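        // Peephole: if the previous instruction was an op_put_global_var writing this same
        // global slot from dst, the value is already in dst, so the redundant
        // op_get_global_var can be skipped.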
1438         if (m_lastOpcodeID == op_put_global_var) {
1439             int dstIndex;
1440             int srcIndex;
1441             retrieveLastUnaryOp(dstIndex, srcIndex);
1442             if (dstIndex == resolveResult.index() && srcIndex == dst->index())
1443                 return dst;
1444         }
1445
1446         profile = emitProfiledOpcode(op_get_global_var);
1447         instructions().append(dst->index());
1448         instructions().append(resolveResult.index());
1449         instructions().append(profile);
1450         return dst;
1451
1452     default:
1453         ASSERT_NOT_REACHED();
1454         return 0;
1455     }
1456 }
1457
1458 RegisterID* BytecodeGenerator::emitPutStaticVar(const ResolveResult& resolveResult, RegisterID* value)
1459 {
1460     switch (resolveResult.type()) {
1461     case ResolveResult::Register:
1462     case ResolveResult::ReadOnlyRegister:
1463         return moveToDestinationIfNeeded(resolveResult.local(), value);
1464
1465     case ResolveResult::Lexical:
1466     case ResolveResult::ReadOnlyLexical:
1467         emitOpcode(op_put_scoped_var);
1468         instructions().append(resolveResult.index());
1469         instructions().append(resolveResult.depth());
1470         instructions().append(value->index());
1471         return value;
1472
1473     case ResolveResult::IndexedGlobal:
1474     case ResolveResult::ReadOnlyIndexedGlobal:
1475         emitOpcode(op_put_global_var);
1476         instructions().append(resolveResult.index());
1477         instructions().append(value->index());
1478         return value;
1479
1480     default:
1481         ASSERT_NOT_REACHED();
1482         return 0;
1483     }
1484 }
1485
1486 void BytecodeGenerator::emitMethodCheck()
1487 {
1488     emitOpcode(op_method_check);
1489 }
1490
1491 RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
1492 {
1493     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1494
1495     ValueProfile* profile = emitProfiledOpcode(op_get_by_id);
1496     instructions().append(dst->index());
1497     instructions().append(base->index());
1498     instructions().append(addConstant(property));
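    // The zero operands are placeholders used by the get_by_id caching machinery at run
    // time; addPropertyAccessInstruction above records this instruction's offset so it can
    // be found later.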
1499     instructions().append(0);
1500     instructions().append(0);
1501     instructions().append(0);
1502     instructions().append(0);
1503     instructions().append(profile);
1504     return dst;
1505 }
1506
1507 RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
1508 {
1509     emitOpcode(op_get_arguments_length);
1510     instructions().append(dst->index());
1511     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1512     instructions().append(base->index());
1513     instructions().append(addConstant(propertyNames().length));
1514     return dst;
1515 }
1516
1517 RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1518 {
1519     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1520
1521     emitOpcode(op_put_by_id);
1522     instructions().append(base->index());
1523     instructions().append(addConstant(property));
1524     instructions().append(value->index());
1525     instructions().append(0);
1526     instructions().append(0);
1527     instructions().append(0);
1528     instructions().append(0);
1529     instructions().append(0);
1530     return value;
1531 }
1532
1533 RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
1534 {
1535     m_codeBlock->addPropertyAccessInstruction(instructions().size());
1536     
1537     emitOpcode(op_put_by_id);
1538     instructions().append(base->index());
1539     instructions().append(addConstant(property));
1540     instructions().append(value->index());
1541     instructions().append(0);
1542     instructions().append(0);
1543     instructions().append(0);
1544     instructions().append(0);
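    // The final operand distinguishes this as a direct put (it is 0 in emitPutById above);
    // __proto__ is excluded so that it still takes the ordinary put path.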
1545     instructions().append(property != m_globalData->propertyNames->underscoreProto);
1546     return value;
1547 }
1548
1549 void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
1550 {
1551     emitOpcode(op_put_getter_setter);
1552     instructions().append(base->index());
1553     instructions().append(addConstant(property));
1554     instructions().append(getter->index());
1555     instructions().append(setter->index());
1556 }
1557
1558 RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
1559 {
1560     emitOpcode(op_del_by_id);
1561     instructions().append(dst->index());
1562     instructions().append(base->index());
1563     instructions().append(addConstant(property));
1564     return dst;
1565 }
1566
1567 RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1568 {
1569     ValueProfile* profile = emitProfiledOpcode(op_get_argument_by_val);
1570     instructions().append(dst->index());
1571     ASSERT(base->index() == m_codeBlock->argumentsRegister());
1572     instructions().append(base->index());
1573     instructions().append(property->index());
1574     instructions().append(profile);
1575     return dst;
1576 }
1577
1578 RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1579 {
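    // If 'property' is the name register of an enclosing for-in loop, emit op_get_by_pname
    // so the cached enumeration state (expected subscript, iterator, index) can be reused;
    // otherwise fall back to a generic op_get_by_val.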
1580     for (size_t i = m_forInContextStack.size(); i > 0; i--) {
1581         ForInContext& context = m_forInContextStack[i - 1];
1582         if (context.propertyRegister == property) {
1583             emitOpcode(op_get_by_pname);
1584             instructions().append(dst->index());
1585             instructions().append(base->index());
1586             instructions().append(property->index());
1587             instructions().append(context.expectedSubscriptRegister->index());
1588             instructions().append(context.iterRegister->index());
1589             instructions().append(context.indexRegister->index());
1590             return dst;
1591         }
1592     }
1593     ValueProfile* profile = emitProfiledOpcode(op_get_by_val);
1594     instructions().append(dst->index());
1595     instructions().append(base->index());
1596     instructions().append(property->index());
1597     instructions().append(profile);
1598     return dst;
1599 }
1600
1601 RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
1602 {
1603     emitOpcode(op_put_by_val);
1604     instructions().append(base->index());
1605     instructions().append(property->index());
1606     instructions().append(value->index());
1607     return value;
1608 }
1609
1610 RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
1611 {
1612     emitOpcode(op_del_by_val);
1613     instructions().append(dst->index());
1614     instructions().append(base->index());
1615     instructions().append(property->index());
1616     return dst;
1617 }
1618
1619 RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
1620 {
1621     emitOpcode(op_put_by_index);
1622     instructions().append(base->index());
1623     instructions().append(index);
1624     instructions().append(value->index());
1625     return value;
1626 }
1627
1628 RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
1629 {
1630     emitOpcode(op_new_object);
1631     instructions().append(dst->index());
1632     return dst;
1633 }
1634
1635 unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
1636 {
1637     return m_codeBlock->addConstantBuffer(length);
1638 }
1639
1640 JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
1641 {
1642     JSString*& stringInMap = m_stringMap.add(identifier.impl(), 0).first->second;
1643     if (!stringInMap) {
1644         stringInMap = jsString(globalData(), identifier.ustring());
1645         addConstantValue(stringInMap);
1646     }
1647     return stringInMap;
1648 }
1649
1650 RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
1651 {
1652 #if !ASSERT_DISABLED
1653     unsigned checkLength = 0;
1654 #endif
1655     bool hadVariableExpression = false;
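    // Fast path: when all 'length' leading elements are numeric or string literals, pack
    // them into a constant buffer and emit op_new_array_buffer instead of evaluating each
    // element into its own register.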
1656     if (length) {
1657         for (ElementNode* n = elements; n; n = n->next()) {
1658             if (!n->value()->isNumber() && !n->value()->isString()) {
1659                 hadVariableExpression = true;
1660                 break;
1661             }
1662             if (n->elision())
1663                 break;
1664 #if !ASSERT_DISABLED
1665             checkLength++;
1666 #endif
1667         }
1668         if (!hadVariableExpression) {
1669             ASSERT(length == checkLength);
1670             unsigned constantBufferIndex = addConstantBuffer(length);
1671             JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex);
1672             unsigned index = 0;
1673             for (ElementNode* n = elements; index < length; n = n->next()) {
1674                 if (n->value()->isNumber())
1675                     constantBuffer[index++] = jsNumber(static_cast<NumberNode*>(n->value())->value());
1676                 else {
1677                     ASSERT(n->value()->isString());
1678                     constantBuffer[index++] = addStringConstant(static_cast<StringNode*>(n->value())->value());
1679                 }
1680             }
1681             emitOpcode(op_new_array_buffer);
1682             instructions().append(dst->index());
1683             instructions().append(constantBufferIndex);
1684             instructions().append(length);
1685             return dst;
1686         }
1687     }
1688
1689     Vector<RefPtr<RegisterID>, 16> argv;
1690     for (ElementNode* n = elements; n; n = n->next()) {
1691         if (n->elision())
1692             break;
1693         argv.append(newTemporary());
1694         // op_new_array requires the initial values to be a sequential range of registers
1695         ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() + 1);
1696         emitNode(argv.last().get(), n->value());
1697     }
1698     emitOpcode(op_new_array);
1699     instructions().append(dst->index());
1700     instructions().append(argv.size() ? argv[0]->index() : 0); // argv
1701     instructions().append(argv.size()); // argc
1702     return dst;
1703 }
1704
1705 RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, FunctionBodyNode* function)
1706 {
1707     return emitNewFunctionInternal(dst, m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function)), false);
1708 }
1709
1710 RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
1711 {
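    // Add the function declaration only once per body, caching its index in
    // m_functionOffsets, and emit op_new_func with the null-check flag set so op_new_func
    // can skip creation when dst already holds the function.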
1712     std::pair<FunctionOffsetMap::iterator, bool> ptr = m_functionOffsets.add(function, 0);
1713     if (ptr.second)
1714         ptr.first->second = m_codeBlock->addFunctionDecl(makeFunction(m_globalData, function));
1715     return emitNewFunctionInternal(dst, ptr.first->second, true);
1716 }
1717
1718 RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, unsigned index, bool doNullCheck)
1719 {
1720     createActivationIfNecessary();
1721     emitOpcode(op_new_func);
1722     instructions().append(dst->index());
1723     instructions().append(index);
1724     instructions().append(doNullCheck);
1725     return dst;
1726 }
1727
1728 RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
1729 {
1730     emitOpcode(op_new_regexp);
1731     instructions().append(dst->index());
1732     instructions().append(addRegExp(regExp));
1733     return dst;
1734 }
1735
1736 RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
1737 {
1738     FunctionBodyNode* function = n->body();
1739     unsigned index = m_codeBlock->addFunctionExpr(makeFunction(m_globalData, function));
1740     
1741     createActivationIfNecessary();
1742     emitOpcode(op_new_func_exp);
1743     instructions().append(r0->index());
1744     instructions().append(index);
1745     return r0;
1746 }
1747
1748 RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1749 {
1750     return emitCall(op_call, dst, func, callArguments, divot, startOffset, endOffset);
1751 }
1752
1753 void BytecodeGenerator::createArgumentsIfNecessary()
1754 {
1755     if (m_codeType != FunctionCode)
1756         return;
1757     
1758     if (!m_codeBlock->usesArguments())
1759         return;
1760
1761     // If we're in strict mode the arguments object is torn off on
1762     // function entry, so there's no need to check whether we have to
1763     // create it here.
1764     if (m_codeBlock->isStrictMode())
1765         return;
1766
1767     emitOpcode(op_create_arguments);
1768     instructions().append(m_codeBlock->argumentsRegister());
1769 }
1770
1771 void BytecodeGenerator::createActivationIfNecessary()
1772 {
1773     if (m_hasCreatedActivation)
1774         return;
1775     if (!m_codeBlock->needsFullScopeChain())
1776         return;
1777     emitOpcode(op_create_activation);
1778     instructions().append(m_activationRegister->index());
1779 }
1780
1781 RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1782 {
1783     return emitCall(op_call_eval, dst, func, callArguments, divot, startOffset, endOffset);
1784 }
1785
1786 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1787 {
1788     ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1789     ASSERT(func->refCount());
1790
1791     if (m_shouldEmitProfileHooks)
1792         emitMove(callArguments.profileHookRegister(), func);
1793
1794     // Generate code for arguments.
1795     unsigned argument = 0;
1796     for (ArgumentListNode* n = callArguments.argumentsNode()->m_listNode; n; n = n->m_next)
1797         emitNode(callArguments.argumentRegister(argument++), n);
1798
1799     // Reserve space for call frame.
1800     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
1801     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
1802         callFrame.append(newTemporary());
1803
1804     if (m_shouldEmitProfileHooks) {
1805         emitOpcode(op_profile_will_call);
1806         instructions().append(callArguments.profileHookRegister()->index());
1807     }
1808
1809     emitExpressionInfo(divot, startOffset, endOffset);
1810
1811     // Emit call.
1812     emitOpcode(opcodeID);
1813     instructions().append(func->index()); // func
1814     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1815     instructions().append(callArguments.registerOffset()); // registerOffset
1816 #if ENABLE(LLINT)
1817     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1818 #else
1819     instructions().append(0);
1820 #endif
1821     instructions().append(0);
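    // The call result is written by a separate op_call_put_result, which is skipped
    // entirely when the caller ignores the result.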
1822     if (dst != ignoredResult()) {
1823         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
1824         instructions().append(dst->index()); // dst
1825         instructions().append(profile);
1826     }
1827
1828     if (m_shouldEmitProfileHooks) {
1829         emitOpcode(op_profile_did_call);
1830         instructions().append(callArguments.profileHookRegister()->index());
1831     }
1832
1833     return dst;
1834 }
1835
1836 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, unsigned divot, unsigned startOffset, unsigned endOffset)
1837 {
1838     if (m_shouldEmitProfileHooks) {
1839         emitMove(profileHookRegister, func);
1840         emitOpcode(op_profile_will_call);
1841         instructions().append(profileHookRegister->index());
1842     }
1843     
1844     emitExpressionInfo(divot, startOffset, endOffset);
1845
1846     // Emit call.
1847     emitOpcode(op_call_varargs);
1848     instructions().append(func->index());
1849     instructions().append(thisRegister->index());
1850     instructions().append(arguments->index());
1851     instructions().append(firstFreeRegister->index());
1852     if (dst != ignoredResult()) {
1853         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
1854         instructions().append(dst->index());
1855         instructions().append(profile);
1856     }
1857     if (m_shouldEmitProfileHooks) {
1858         emitOpcode(op_profile_did_call);
1859         instructions().append(profileHookRegister->index());
1860     }
1861     return dst;
1862 }
1863
1864 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1865 {
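    // Tear off the activation and/or arguments object before returning so they remain
    // usable after this register frame is reclaimed.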
1866     if (m_codeBlock->needsFullScopeChain()) {
1867         emitOpcode(op_tear_off_activation);
1868         instructions().append(m_activationRegister->index());
1869         instructions().append(m_codeBlock->argumentsRegister());
1870     } else if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !m_codeBlock->isStrictMode()) {
1871         emitOpcode(op_tear_off_arguments);
1872         instructions().append(m_codeBlock->argumentsRegister());
1873     }
1874
1875     // Constructors use op_ret_object_or_this to check the result is an
1876     // object, unless we can trivially determine the check is not
1877     // necessary (currently, if the return value is 'this').
1878     if (isConstructor() && (src->index() != m_thisRegister.index())) {
1879         emitOpcode(op_ret_object_or_this);
1880         instructions().append(src->index());
1881         instructions().append(m_thisRegister.index());
1882         return src;
1883     }
1884     return emitUnaryNoDstOp(op_ret, src);
1885 }
1886
1887 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
1888 {
1889     emitOpcode(opcodeID);
1890     instructions().append(src->index());
1891     return src;
1892 }
1893
1894 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, CallArguments& callArguments, unsigned divot, unsigned startOffset, unsigned endOffset)
1895 {
1896     ASSERT(func->refCount());
1897
1898     if (m_shouldEmitProfileHooks)
1899         emitMove(callArguments.profileHookRegister(), func);
1900
1901     // Generate code for arguments.
1902     unsigned argument = 0;
1903     if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
1904         for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
1905             emitNode(callArguments.argumentRegister(argument++), n);
1906     }
1907
1908     if (m_shouldEmitProfileHooks) {
1909         emitOpcode(op_profile_will_call);
1910         instructions().append(callArguments.profileHookRegister()->index());
1911     }
1912
1913     // Reserve space for call frame.
1914     Vector<RefPtr<RegisterID>, RegisterFile::CallFrameHeaderSize> callFrame;
1915     for (int i = 0; i < RegisterFile::CallFrameHeaderSize; ++i)
1916         callFrame.append(newTemporary());
1917
1918     emitExpressionInfo(divot, startOffset, endOffset);
1919
1920     emitOpcode(op_construct);
1921     instructions().append(func->index()); // func
1922     instructions().append(callArguments.argumentCountIncludingThis()); // argCount
1923     instructions().append(callArguments.registerOffset()); // registerOffset
1924 #if ENABLE(LLINT)
1925     instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1926 #else
1927     instructions().append(0);
1928 #endif
1929     instructions().append(0);
1930     if (dst != ignoredResult()) {
1931         ValueProfile* profile = emitProfiledOpcode(op_call_put_result);
1932         instructions().append(dst->index()); // dst
1933         instructions().append(profile);
1934     }
1935
1936     if (m_shouldEmitProfileHooks) {
1937         emitOpcode(op_profile_did_call);
1938         instructions().append(callArguments.profileHookRegister()->index());
1939     }
1940
1941     return dst;
1942 }
1943
1944 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
1945 {
1946     emitOpcode(op_strcat);
1947     instructions().append(dst->index());
1948     instructions().append(src->index());
1949     instructions().append(count);
1950
1951     return dst;
1952 }
1953
1954 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
1955 {
1956     emitOpcode(op_to_primitive);
1957     instructions().append(dst->index());
1958     instructions().append(src->index());
1959 }
1960
1961 RegisterID* BytecodeGenerator::emitPushScope(RegisterID* scope)
1962 {
1963     ASSERT(scope->isTemporary());
1964     ControlFlowContext context;
1965     context.isFinallyBlock = false;
1966     m_scopeContextStack.append(context);
1967     m_dynamicScopeDepth++;
1968
1969     return emitUnaryNoDstOp(op_push_scope, scope);
1970 }
1971
1972 void BytecodeGenerator::emitPopScope()
1973 {
1974     ASSERT(m_scopeContextStack.size());
1975     ASSERT(!m_scopeContextStack.last().isFinallyBlock);
1976
1977     emitOpcode(op_pop_scope);
1978
1979     m_scopeContextStack.removeLast();
1980     m_dynamicScopeDepth--;
1981 }
1982
1983 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
1984 {
1985 #if ENABLE(DEBUG_WITH_BREAKPOINT)
1986     if (debugHookID != DidReachBreakpoint)
1987         return;
1988 #else
1989     if (!m_shouldEmitDebugHooks)
1990         return;
1991 #endif
1992     emitOpcode(op_debug);
1993     instructions().append(debugHookID);
1994     instructions().append(firstLine);
1995     instructions().append(lastLine);
1996 }
1997
1998 void BytecodeGenerator::pushFinallyContext(Label* target, RegisterID* retAddrDst)
1999 {
2000     ControlFlowContext scope;
2001     scope.isFinallyBlock = true;
2002     FinallyContext context = { target, retAddrDst };
2003     scope.finallyContext = context;
2004     m_scopeContextStack.append(scope);
2005     m_finallyDepth++;
2006 }
2007
2008 void BytecodeGenerator::popFinallyContext()
2009 {
2010     ASSERT(m_scopeContextStack.size());
2011     ASSERT(m_scopeContextStack.last().isFinallyBlock);
2012     ASSERT(m_finallyDepth > 0);
2013     m_scopeContextStack.removeLast();
2014     m_finallyDepth--;
2015 }
2016
2017 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
2018 {
2019     // Reclaim free label scopes.
2020     //
2021     // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
2022     // however this sometimes appeared to make GCC miscompile the loop and enter it with
2023     // size 0, leading to a segfault. We have yet to identify anything in our code that
2024     // would cause the GCC codegen to misbehave in this fashion, so the following
2025     // refactoring of the loop condition is used as a workaround.
2026     while (m_labelScopes.size()) {
2027         if (m_labelScopes.last().refCount())
2028             break;
2029         m_labelScopes.removeLast();
2030     }
2031
2032     if (!m_labelScopes.size())
2033         return 0;
2034
2035     // We special-case the following, which is a syntax error in Firefox:
2036     // label:
2037     //     break;
2038     if (name.isEmpty()) {
2039         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2040             LabelScope* scope = &m_labelScopes[i];
2041             if (scope->type() != LabelScope::NamedLabel) {
2042                 ASSERT(scope->breakTarget());
2043                 return scope;
2044             }
2045         }
2046         return 0;
2047     }
2048
2049     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2050         LabelScope* scope = &m_labelScopes[i];
2051         if (scope->name() && *scope->name() == name) {
2052             ASSERT(scope->breakTarget());
2053             return scope;
2054         }
2055     }
2056     return 0;
2057 }
2058
2059 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
2060 {
2061     // Reclaim free label scopes.
2062     while (m_labelScopes.size() && !m_labelScopes.last().refCount())
2063         m_labelScopes.removeLast();
2064
2065     if (!m_labelScopes.size())
2066         return 0;
2067
2068     if (name.isEmpty()) {
2069         for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2070             LabelScope* scope = &m_labelScopes[i];
2071             if (scope->type() == LabelScope::Loop) {
2072                 ASSERT(scope->continueTarget());
2073                 return scope;
2074             }
2075         }
2076         return 0;
2077     }
2078
2079     // Continue to the loop nested nearest to the label scope that matches
2080     // 'name'.
2081     LabelScope* result = 0;
2082     for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2083         LabelScope* scope = &m_labelScopes[i];
2084         if (scope->type() == LabelScope::Loop) {
2085             ASSERT(scope->continueTarget());
2086             result = scope;
2087         }
2088         if (scope->name() && *scope->name() == name)
2089             return result; // may be 0
2090     }
2091     return 0;
2092 }
2093
2094 PassRefPtr<Label> BytecodeGenerator::emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope)
2095 {
2096     while (topScope > bottomScope) {
2097         // First we count the number of dynamic scopes we need to remove to get
2098         // to a finally block.
2099         int nNormalScopes = 0;
2100         while (topScope > bottomScope) {
2101             if (topScope->isFinallyBlock)
2102                 break;
2103             ++nNormalScopes;
2104             --topScope;
2105         }
2106
2107         if (nNormalScopes) {
2108             size_t begin = instructions().size();
2109
2110             // We need to remove a number of dynamic scopes to get to the next
2111             // finally block
2112             emitOpcode(op_jmp_scopes);
2113             instructions().append(nNormalScopes);
2114
2115             // If topScope == bottomScope then there isn't actually a finally block
2116             // left to emit, so make the jmp_scopes jump directly to the target label
2117             if (topScope == bottomScope) {
2118                 instructions().append(target->bind(begin, instructions().size()));
2119                 return target;
2120             }
2121
2122             // Otherwise we just use jmp_scopes to pop a group of scopes and go
2123             // to the next instruction
2124             RefPtr<Label> nextInsn = newLabel();
2125             instructions().append(nextInsn->bind(begin, instructions().size()));
2126             emitLabel(nextInsn.get());
2127         }
2128
2129         while (topScope > bottomScope && topScope->isFinallyBlock) {
2130             emitJumpSubroutine(topScope->finallyContext.retAddrDst, topScope->finallyContext.finallyAddr);
2131             --topScope;
2132         }
2133     }
2134     return emitJump(target);
2135 }
2136
2137 PassRefPtr<Label> BytecodeGenerator::emitJumpScopes(Label* target, int targetScopeDepth)
2138 {
2139     ASSERT(scopeDepth() - targetScopeDepth >= 0);
2140     ASSERT(target->isForward());
2141
2142     size_t scopeDelta = scopeDepth() - targetScopeDepth;
2143     ASSERT(scopeDelta <= m_scopeContextStack.size());
2144     if (!scopeDelta)
2145         return emitJump(target);
2146
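    // If the jump crosses any finally blocks, each of them must be executed on the way
    // out, which requires the subroutine-based path in emitComplexJumpScopes; otherwise a
    // single op_jmp_scopes can pop the scopes and jump.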
2147     if (m_finallyDepth)
2148         return emitComplexJumpScopes(target, &m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
2149
2150     size_t begin = instructions().size();
2151
2152     emitOpcode(op_jmp_scopes);
2153     instructions().append(scopeDelta);
2154     instructions().append(target->bind(begin, instructions().size()));
2155     return target;
2156 }
2157
2158 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
2159 {
2160     size_t begin = instructions().size();
2161
2162     emitOpcode(op_get_pnames);
2163     instructions().append(dst->index());
2164     instructions().append(base->index());
2165     instructions().append(i->index());
2166     instructions().append(size->index());
2167     instructions().append(breakTarget->bind(begin, instructions().size()));
2168     return dst;
2169 }
2170
2171 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
2172 {
2173     size_t begin = instructions().size();
2174
2175     emitOpcode(op_next_pname);
2176     instructions().append(dst->index());
2177     instructions().append(base->index());
2178     instructions().append(i->index());
2179     instructions().append(size->index());
2180     instructions().append(iter->index());
2181     instructions().append(target->bind(begin, instructions().size()));
2182     return dst;
2183 }
2184
2185 RegisterID* BytecodeGenerator::emitCatch(RegisterID* targetRegister, Label* start, Label* end)
2186 {
2187     m_usesExceptions = true;
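    // Register an exception handler covering the range from 'start' to 'end' at the
    // current scope depth; op_catch stores the caught exception into targetRegister when
    // the handler is entered.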
2188 #if ENABLE(JIT)
2189 #if ENABLE(LLINT)
2190     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(bitwise_cast<void*>(&llint_op_catch))) };
2191 #else
2192     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth, CodeLocationLabel() };
2193 #endif
2194 #else
2195     HandlerInfo info = { start->bind(0, 0), end->bind(0, 0), instructions().size(), m_dynamicScopeDepth + m_baseScopeDepth };
2196 #endif
2197
2198     m_codeBlock->addExceptionHandler(info);
2199     emitOpcode(op_catch);
2200     instructions().append(targetRegister->index());
2201     return targetRegister;
2202 }
2203
2204 void BytecodeGenerator::emitThrowReferenceError(const UString& message)
2205 {
2206     emitOpcode(op_throw_reference_error);
2207     instructions().append(addConstantValue(jsString(globalData(), message))->index());
2208 }
2209
2210 PassRefPtr<Label> BytecodeGenerator::emitJumpSubroutine(RegisterID* retAddrDst, Label* finally)
2211 {
2212     size_t begin = instructions().size();
2213
2214     emitOpcode(op_jsr);
2215     instructions().append(retAddrDst->index());
2216     instructions().append(finally->bind(begin, instructions().size()));
2217     emitLabel(newLabel().get()); // Record the fact that the next instruction is implicitly labeled, because op_sret will return to it.
2218     return finally;
2219 }
2220
2221 void BytecodeGenerator::emitSubroutineReturn(RegisterID* retAddrSrc)
2222 {
2223     emitOpcode(op_sret);
2224     instructions().append(retAddrSrc->index());
2225 }
2226
2227 void BytecodeGenerator::emitPushNewScope(RegisterID* dst, const Identifier& property, RegisterID* value)
2228 {
2229     ControlFlowContext context;
2230     context.isFinallyBlock = false;
2231     m_scopeContextStack.append(context);
2232     m_dynamicScopeDepth++;
2233
2234     emitOpcode(op_push_new_scope);
2235     instructions().append(dst->index());
2236     instructions().append(addConstant(property));
2237     instructions().append(value->index());
2238 }
2239
2240 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
2241 {
2242     SwitchInfo info = { instructions().size(), type };
2243     switch (type) {
2244         case SwitchInfo::SwitchImmediate:
2245             emitOpcode(op_switch_imm);
2246             break;
2247         case SwitchInfo::SwitchCharacter:
2248             emitOpcode(op_switch_char);
2249             break;
2250         case SwitchInfo::SwitchString:
2251             emitOpcode(op_switch_string);
2252             break;
2253         default:
2254             ASSERT_NOT_REACHED();
2255     }
2256
2257     instructions().append(0); // placeholder for table index
2258     instructions().append(0); // placeholder for default target
2259     instructions().append(scrutineeRegister->index());
2260     m_switchContextStack.append(info);
2261 }
2262
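// Immediate and character switches use dense jump tables indexed by (key - min), so the
// helpers below normalize each clause key against the minimum value.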
2263 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2264 {
2265     UNUSED_PARAM(max);
2266     ASSERT(node->isNumber());
2267     double value = static_cast<NumberNode*>(node)->value();
2268     int32_t key = static_cast<int32_t>(value);
2269     ASSERT(key == value);
2270     ASSERT(key >= min);
2271     ASSERT(key <= max);
2272     return key - min;
2273 }
2274
2275 static void prepareJumpTableForImmediateSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2276 {
2277     jumpTable.min = min;
2278     jumpTable.branchOffsets.resize(max - min + 1);
2279     jumpTable.branchOffsets.fill(0);
2280     for (uint32_t i = 0; i < clauseCount; ++i) {
2281         // We're emitting this after the clause labels have been fixed, so
2282         // the labels should not be "forward" references.
2283         ASSERT(!labels[i]->isForward());
2284         jumpTable.add(keyForImmediateSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2285     }
2286 }
2287
2288 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2289 {
2290     UNUSED_PARAM(max);
2291     ASSERT(node->isString());
2292     StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2293     ASSERT(clause->length() == 1);
2294     
2295     int32_t key = (*clause)[0];
2296     ASSERT(key >= min);
2297     ASSERT(key <= max);
2298     return key - min;
2299 }
2300
2301 static void prepareJumpTableForCharacterSwitch(SimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max)
2302 {
2303     jumpTable.min = min;
2304     jumpTable.branchOffsets.resize(max - min + 1);
2305     jumpTable.branchOffsets.fill(0);
2306     for (uint32_t i = 0; i < clauseCount; ++i) {
2307         // We're emitting this after the clause labels have been fixed, so
2308         // the labels should not be "forward" references.
2309         ASSERT(!labels[i]->isForward());
2310         jumpTable.add(keyForCharacterSwitch(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3)); 
2311     }
2312 }
2313
2314 static void prepareJumpTableForStringSwitch(StringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2315 {
2316     for (uint32_t i = 0; i < clauseCount; ++i) {
2317         // We're emitting this after the clause labels have been fixed, so
2318         // the labels should not be "forward" references.
2319         ASSERT(!labels[i]->isForward());
2320         
2321         ASSERT(nodes[i]->isString());
2322         StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
2323         OffsetLocation location;
2324         location.branchOffset = labels[i]->bind(switchAddress, switchAddress + 3);
2325         jumpTable.offsetTable.add(clause, location);
2326     }
2327 }
2328
2329 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2330 {
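    // Patch the two placeholder operands emitted by beginSwitch: the index of the jump
    // table and the offset of the default target.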
2331     SwitchInfo switchInfo = m_switchContextStack.last();
2332     m_switchContextStack.removeLast();
2333     if (switchInfo.switchType == SwitchInfo::SwitchImmediate) {
2334         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfImmediateSwitchJumpTables();
2335         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2336
2337         SimpleJumpTable& jumpTable = m_codeBlock->addImmediateSwitchJumpTable();
2338         prepareJumpTableForImmediateSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2339     } else if (switchInfo.switchType == SwitchInfo::SwitchCharacter) {
2340         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfCharacterSwitchJumpTables();
2341         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2342         
2343         SimpleJumpTable& jumpTable = m_codeBlock->addCharacterSwitchJumpTable();
2344         prepareJumpTableForCharacterSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max);
2345     } else {
2346         ASSERT(switchInfo.switchType == SwitchInfo::SwitchString);
2347         instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2348         instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2349
2350         StringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2351         prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2352     }
2353 }
2354
2355 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2356 {
2357     // It would be nice to do an even better job of identifying exactly where the expression is.
2358     // And we could make the caller pass the node pointer in, if there was some way of getting
2359     // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2360     // is still good enough to get us an accurate line number.
2361     m_expressionTooDeep = true;
2362     return newTemporary();
2363 }
2364
2365 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2366 {
2367     m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2368 }
2369
2370 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2371 {
2372     RegisterID* registerID = resolve(ident).local();
2373     if (!registerID || registerID->index() >= 0)
2374         return false;
2375     return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2376 }
2377
2378 } // namespace JSC