/*
 * Copyright (C) 2008, 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 * Copyright (C) 2012 Igalia, S.L.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
32 #include "BytecodeGenerator.h"
34 #include "Interpreter.h"
35 #include "JSActivation.h"
36 #include "JSFunction.h"
37 #include "JSNameScope.h"
38 #include "LowLevelInterpreter.h"
39 #include "Operations.h"
41 #include "StackAlignment.h"
42 #include "StrongInlines.h"
43 #include "UnlinkedCodeBlock.h"
44 #include "UnlinkedInstructionStream.h"
45 #include <wtf/StdLibExtras.h>
46 #include <wtf/text/WTFString.h>
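// Forward jumps are emitted before their target offset is known. Label records
// each such jump in m_unresolvedJumps as a (jump origin, operand index) pair;
// once the label's final bytecode offset is set below, every recorded operand
// is backpatched with the relative distance from its jump origin.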
void Label::setLocation(unsigned location)
{
    m_location = location;

    unsigned size = m_unresolvedJumps.size();
    for (unsigned i = 0; i < size; ++i)
        m_generator->m_instructions[m_unresolvedJumps[i].second].u.operand = m_location - m_unresolvedJumps[i].first;
}
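// generate() drives the whole pass: it emits bytecode for the parsed scope node,
// converts the collected try ranges into unlinked exception handler entries, and
// then freezes the instruction vector into the UnlinkedCodeBlock. The ParserError
// it returns reports only out-of-memory caused by overly deep expressions.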
ParserError BytecodeGenerator::generate()
{
    SamplingRegion samplingRegion("Bytecode Generation");

    m_codeBlock->setThisRegister(m_thisRegister.virtualRegister());
    for (size_t i = 0; i < m_deconstructedParameters.size(); i++) {
        auto& entry = m_deconstructedParameters[i];
        entry.second->bindValue(*this, entry.first.get());
    }

    m_scopeNode->emitBytecode(*this);

    m_staticPropertyAnalyzer.kill();

    for (unsigned i = 0; i < m_tryRanges.size(); ++i) {
        TryRange& range = m_tryRanges[i];
        int start = range.start->bind();
        int end = range.end->bind();

        // This will happen for empty try blocks and for some cases of finally blocks:
        //
        // try {
        //     try {
        //     } finally {
        //         return 42;
        //         // *HERE*
        //     }
        // } finally {
        //     print("things");
        // }
        //
        // The return will pop scopes to execute the outer finally block. But this includes
        // popping the try context for the inner try. The try context is live in the fall-through
        // part of the finally block not because we will emit a handler that overlaps the finally,
        // but because we haven't yet had a chance to plant the catch target. Then when we finish
        // emitting code for the outer finally block, we repush the try context, this time with a
        // new start index. But that means that the start index for the try range corresponding
        // to the inner-finally-following-the-return (marked as "*HERE*" above) will be greater
        // than the end index of the try block. This is harmless since end < start handlers will
        // never get matched in our logic, but we do the runtime a favor and choose to not emit
        // such handlers at all.
        if (end < start)
            continue;

        ASSERT(range.tryData->targetScopeDepth != UINT_MAX);
        UnlinkedHandlerInfo info = {
            static_cast<uint32_t>(start), static_cast<uint32_t>(end),
            static_cast<uint32_t>(range.tryData->target->bind()),
            range.tryData->targetScopeDepth
        };
        m_codeBlock->addExceptionHandler(info);
    }

    m_codeBlock->setInstructions(std::make_unique<UnlinkedInstructionStream>(m_instructions));

    m_codeBlock->shrinkToFit();

    if (m_codeBlock->symbolTable())
        m_codeBlock->setSymbolTable(m_codeBlock->symbolTable()->cloneCapturedNames(*m_codeBlock->vm()));

    if (m_expressionTooDeep)
        return ParserError(ParserError::OutOfMemory);
    return ParserError(ParserError::ErrorNone);
}
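// addVar() registers a named local in the symbol table. If the name was already
// present, r0 is pointed at the existing register and the function reports that
// no new variable was created; otherwise a fresh local register is allocated and,
// for watchable variables, the name is recorded for variable watchpoints.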
bool BytecodeGenerator::addVar(
    const Identifier& ident, ConstantMode constantMode, WatchMode watchMode, RegisterID*& r0)
{
    ASSERT(static_cast<size_t>(m_codeBlock->m_numVars) == m_calleeRegisters.size());

    ConcurrentJITLocker locker(symbolTable().m_lock);
    int index = virtualRegisterForLocal(m_calleeRegisters.size()).offset();
    SymbolTableEntry newEntry(index, constantMode == IsConstant ? ReadOnly : 0);
    SymbolTable::Map::AddResult result = symbolTable().add(locker, ident.impl(), newEntry);

    if (!result.isNewEntry) {
        r0 = &registerFor(result.iterator->value.getIndex());
        return false;
    }

    if (watchMode == IsWatchable) {
        while (m_watchableVariables.size() < static_cast<size_t>(m_codeBlock->m_numVars))
            m_watchableVariables.append(Identifier());
        m_watchableVariables.append(ident);
    }

    r0 = addVar();

    ASSERT(watchMode == NotWatchable || static_cast<size_t>(m_codeBlock->m_numVars) == m_watchableVariables.size());

    return true;
}

void BytecodeGenerator::preserveLastVar()
{
    if ((m_firstConstantIndex = m_calleeRegisters.size()) != 0)
        m_lastVar = &m_calleeRegisters.last();
}
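// There is one BytecodeGenerator constructor per code type: program (global)
// code, function code, and eval code. Each one reserves the implicit "this"
// parameter, emits op_enter, and records the declarations that the linker or
// the runtime will need for that kind of code block.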
BytecodeGenerator::BytecodeGenerator(VM& vm, ProgramNode* programNode, UnlinkedProgramCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(Options::forceProfilerBytecodeGeneration() || profilerMode == ProfilerOn)
    , m_scopeNode(programNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_activationRegister(0)
    , m_emptyValueRegister(0)
    , m_globalObjectRegister(0)
    , m_localScopeDepth(0)
    , m_codeType(GlobalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_staticPropertyAnalyzer(&m_instructions)
    , m_lastOpcodeID(op_end)
    , m_lastOpcodePosition(0)
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    m_codeBlock->setNumParameters(1); // Allocate space for "this"

    emitOpcode(op_enter);

    const VarStack& varStack = programNode->varStack();
    const FunctionStack& functionStack = programNode->functionStack();

    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        UnlinkedFunctionExecutable* unlinkedFunction = makeFunction(function);
        codeBlock->addFunctionDeclaration(*m_vm, function->ident(), unlinkedFunction);
    }

    for (size_t i = 0; i < varStack.size(); ++i)
        codeBlock->addVariableDeclaration(varStack[i].first, !!(varStack[i].second & DeclarationStacks::IsConstant));
}
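// Function code has the most involved register layout. Roughly in order: the
// activation and the two 'arguments' registers (when needed), then captured
// parameters and captured locals (bracketed by setCaptureStart/setCaptureEnd so
// the activation can mark them without scanning other locals), then
// lazily-created function declarations, and finally the remaining uncaptured
// vars. Parameters normally live in the caller-provided argument slots; captured
// parameters are instead copied into captured locals here.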
BytecodeGenerator::BytecodeGenerator(VM& vm, FunctionBodyNode* functionBody, UnlinkedFunctionCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(Options::forceProfilerBytecodeGeneration() || profilerMode == ProfilerOn)
    , m_symbolTable(codeBlock->symbolTable())
    , m_scopeNode(functionBody)
    , m_codeBlock(vm, codeBlock)
    , m_activationRegister(0)
    , m_emptyValueRegister(0)
    , m_globalObjectRegister(0)
    , m_localScopeDepth(0)
    , m_codeType(FunctionCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_staticPropertyAnalyzer(&m_instructions)
    , m_lastOpcodeID(op_end)
    , m_lastOpcodePosition(0)
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
    Vector<Identifier> boundParameterProperties;
    FunctionParameters& parameters = *functionBody->parameters();
    for (size_t i = 0; i < parameters.size(); i++) {
        auto pattern = parameters.at(i);
        if (pattern->isBindingNode())
            continue;
        pattern->collectBoundIdentifiers(boundParameterProperties);
    }

    m_symbolTable->setParameterCountIncludingThis(functionBody->parameters()->size() + 1);

    emitOpcode(op_enter);
    if (m_codeBlock->needsFullScopeChain() || m_shouldEmitDebugHooks) {
        m_activationRegister = addVar();
        emitInitLazyRegister(m_activationRegister);
        m_codeBlock->setActivationRegister(m_activationRegister->virtualRegister());
    }

    m_symbolTable->setCaptureStart(virtualRegisterForLocal(m_codeBlock->m_numVars).offset());

    if (functionBody->usesArguments() || codeBlock->usesEval()) { // May reify arguments object.
        RegisterID* unmodifiedArgumentsRegister = addVar(); // Anonymous, so it can't be modified by user code.
        RegisterID* argumentsRegister = addVar(propertyNames().arguments, IsVariable, NotWatchable); // Can be changed by assigning to 'arguments'.

        // We can save a little space by hard-coding the knowledge that the two
        // 'arguments' values are stored in consecutive registers, and storing
        // only the index of the assignable one.
        codeBlock->setArgumentsRegister(argumentsRegister->virtualRegister());
        ASSERT_UNUSED(unmodifiedArgumentsRegister, unmodifiedArgumentsRegister->virtualRegister() == JSC::unmodifiedArgumentsRegister(codeBlock->argumentsRegister()));

        emitInitLazyRegister(argumentsRegister);
        emitInitLazyRegister(unmodifiedArgumentsRegister);

        if (shouldTearOffArgumentsEagerly()) {
            emitOpcode(op_create_arguments);
            instructions().append(argumentsRegister->index());
        }
    }

    bool shouldCaptureAllTheThings = m_shouldEmitDebugHooks || codeBlock->usesEval();

    bool capturesAnyArgumentByName = false;
    Vector<RegisterID*, 0, UnsafeVectorOverflow> capturedArguments;
    if (functionBody->hasCapturedVariables() || shouldCaptureAllTheThings) {
        FunctionParameters& parameters = *functionBody->parameters();
        capturedArguments.resize(parameters.size());
        for (size_t i = 0; i < parameters.size(); ++i) {
            capturedArguments[i] = 0;
            auto pattern = parameters.at(i);
            if (!pattern->isBindingNode())
                continue;
            const Identifier& ident = static_cast<const BindingNode*>(pattern)->boundProperty();
            if (!functionBody->captures(ident) && !shouldCaptureAllTheThings)
                continue;
            capturesAnyArgumentByName = true;
            capturedArguments[i] = addVar();
        }
    }

    if (capturesAnyArgumentByName && !shouldTearOffArgumentsEagerly()) {
        size_t parameterCount = m_symbolTable->parameterCount();
        auto slowArguments = std::make_unique<SlowArgument[]>(parameterCount);
        for (size_t i = 0; i < parameterCount; ++i) {
            if (!capturedArguments[i]) {
                ASSERT(slowArguments[i].status == SlowArgument::Normal);
                slowArguments[i].index = CallFrame::argumentOffset(i);
                continue;
            }
            slowArguments[i].status = SlowArgument::Captured;
            slowArguments[i].index = capturedArguments[i]->index();
        }
        m_symbolTable->setSlowArguments(std::move(slowArguments));
    }

    RegisterID* calleeRegister = resolveCallee(functionBody); // May push to the scope chain and/or add a captured var.

    const DeclarationStacks::FunctionStack& functionStack = functionBody->functionStack();
    const DeclarationStacks::VarStack& varStack = functionBody->varStack();

    // Captured variables and functions go first so that activations don't have
    // to step over the non-captured locals to mark them.
    if (functionBody->hasCapturedVariables()) {
        for (size_t i = 0; i < boundParameterProperties.size(); i++) {
            const Identifier& ident = boundParameterProperties[i];
            if (functionBody->captures(ident))
                addVar(ident, IsVariable, IsWatchable);
        }
        for (size_t i = 0; i < functionStack.size(); ++i) {
            FunctionBodyNode* function = functionStack[i];
            const Identifier& ident = function->ident();
            if (functionBody->captures(ident)) {
                m_functions.add(ident.impl());
                emitNewFunction(addVar(ident, IsVariable, IsWatchable), IsCaptured, function);
            }
        }
        for (size_t i = 0; i < varStack.size(); ++i) {
            const Identifier& ident = varStack[i].first;
            if (functionBody->captures(ident))
                addVar(ident, (varStack[i].second & DeclarationStacks::IsConstant) ? IsConstant : IsVariable, IsWatchable);
        }
    }

    m_symbolTable->setCaptureEnd(virtualRegisterForLocal(codeBlock->m_numVars).offset());

    bool canLazilyCreateFunctions = !functionBody->needsActivationForMoreThanVariables() && !m_shouldEmitDebugHooks;
    m_firstLazyFunction = codeBlock->m_numVars;
    for (size_t i = 0; i < functionStack.size(); ++i) {
        FunctionBodyNode* function = functionStack[i];
        const Identifier& ident = function->ident();
        if (!functionBody->captures(ident)) {
            m_functions.add(ident.impl());
            RefPtr<RegisterID> reg = addVar(ident, IsVariable, NotWatchable);
            // Don't lazily create functions that override the name 'arguments'
            // as this would complicate lazy instantiation of actual arguments.
            if (!canLazilyCreateFunctions || ident == propertyNames().arguments)
                emitNewFunction(reg.get(), NotCaptured, function);
            else {
                emitInitLazyRegister(reg.get());
                m_lazyFunctions.set(reg->virtualRegister().toLocal(), function);
            }
        }
    }
    m_lastLazyFunction = canLazilyCreateFunctions ? codeBlock->m_numVars : m_firstLazyFunction;
    for (size_t i = 0; i < boundParameterProperties.size(); i++) {
        const Identifier& ident = boundParameterProperties[i];
        if (!functionBody->captures(ident))
            addVar(ident, IsVariable, IsWatchable);
    }
    for (size_t i = 0; i < varStack.size(); ++i) {
        const Identifier& ident = varStack[i].first;
        if (!functionBody->captures(ident))
            addVar(ident, (varStack[i].second & DeclarationStacks::IsConstant) ? IsConstant : IsVariable, NotWatchable);
    }

    if (shouldCaptureAllTheThings)
        m_symbolTable->setCaptureEnd(virtualRegisterForLocal(codeBlock->m_numVars).offset());

    if (m_symbolTable->captureCount())
        emitOpcode(op_touch_entry);

    m_parameters.grow(parameters.size() + 1); // reserve space for "this"

    // Add "this" as a parameter
    int nextParameterIndex = CallFrame::thisArgumentOffset();
    m_thisRegister.setIndex(nextParameterIndex++);
    m_codeBlock->addParameter();
    for (size_t i = 0; i < parameters.size(); ++i, ++nextParameterIndex) {
        int index = nextParameterIndex;
        auto pattern = parameters.at(i);
        if (!pattern->isBindingNode()) {
            m_codeBlock->addParameter();
            RegisterID& parameter = registerFor(index);
            parameter.setIndex(index);
            m_deconstructedParameters.append(std::make_pair(&parameter, pattern));
            continue;
        }
        auto simpleParameter = static_cast<const BindingNode*>(pattern);
        if (capturedArguments.size() && capturedArguments[i]) {
            ASSERT((functionBody->hasCapturedVariables() && functionBody->captures(simpleParameter->boundProperty())) || shouldCaptureAllTheThings);
            index = capturedArguments[i]->index();
            RegisterID original(nextParameterIndex);
            emitMove(capturedArguments[i], &original);
        }
        addParameter(simpleParameter->boundProperty(), index);
    }

    // We declare the callee's name last because it should lose to a var, function, and/or parameter declaration.
    addCallee(functionBody, calleeRegister);

    if (isConstructor()) {
        emitCreateThis(&m_thisRegister);
    } else if (functionBody->usesThis() || codeBlock->usesEval()) {
        m_codeBlock->addPropertyAccessInstruction(instructions().size());
        emitOpcode(op_to_this);
        instructions().append(kill(&m_thisRegister));
        instructions().append(0);
    }
}
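// Eval code declares nothing into a symbol table up front: it simply hands the
// function declarations and the variable names over to the unlinked code block,
// so they can be hoisted into the appropriate scope when the eval executes.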
BytecodeGenerator::BytecodeGenerator(VM& vm, EvalNode* evalNode, UnlinkedEvalCodeBlock* codeBlock, DebuggerMode debuggerMode, ProfilerMode profilerMode)
    : m_shouldEmitDebugHooks(Options::forceDebuggerBytecodeGeneration() || debuggerMode == DebuggerOn)
    , m_shouldEmitProfileHooks(Options::forceProfilerBytecodeGeneration() || profilerMode == ProfilerOn)
    , m_symbolTable(codeBlock->symbolTable())
    , m_scopeNode(evalNode)
    , m_codeBlock(vm, codeBlock)
    , m_thisRegister(CallFrame::thisArgumentOffset())
    , m_activationRegister(0)
    , m_emptyValueRegister(0)
    , m_globalObjectRegister(0)
    , m_localScopeDepth(0)
    , m_codeType(EvalCode)
    , m_nextConstantOffset(0)
    , m_globalConstantIndex(0)
    , m_firstLazyFunction(0)
    , m_lastLazyFunction(0)
    , m_staticPropertyAnalyzer(&m_instructions)
    , m_lastOpcodeID(op_end)
    , m_lastOpcodePosition(0)
    , m_usesExceptions(false)
    , m_expressionTooDeep(false)
{
    m_symbolTable->setUsesNonStrictEval(codeBlock->usesEval() && !codeBlock->isStrictMode());
    m_codeBlock->setNumParameters(1);

    emitOpcode(op_enter);

    const DeclarationStacks::FunctionStack& functionStack = evalNode->functionStack();
    for (size_t i = 0; i < functionStack.size(); ++i)
        m_codeBlock->addFunctionDecl(makeFunction(functionStack[i]));

    const DeclarationStacks::VarStack& varStack = evalNode->varStack();
    unsigned numVariables = varStack.size();
    Vector<Identifier, 0, UnsafeVectorOverflow> variables;
    variables.reserveCapacity(numVariables);
    for (size_t i = 0; i < numVariables; ++i) {
        ASSERT(varStack[i].first.impl()->isIdentifier());
        variables.append(varStack[i].first);
    }
    codeBlock->adoptVariables(variables);
}

BytecodeGenerator::~BytecodeGenerator()
{
}

RegisterID* BytecodeGenerator::emitInitLazyRegister(RegisterID* reg)
{
    emitOpcode(op_init_lazy_reg);
    instructions().append(reg->index());
    ASSERT(!hasWatchableVariable(reg->index()));
    return reg;
}

RegisterID* BytecodeGenerator::resolveCallee(FunctionBodyNode* functionBodyNode)
{
    if (!functionNameIsInScope(functionBodyNode->ident(), functionBodyNode->functionMode()))
        return 0;

    if (functionNameScopeIsDynamic(m_codeBlock->usesEval(), m_codeBlock->isStrictMode()))
        return 0;

    m_calleeRegister.setIndex(JSStack::Callee);
    if (functionBodyNode->captures(functionBodyNode->ident()))
        return emitMove(addVar(), IsCaptured, &m_calleeRegister);

    return &m_calleeRegister;
}

void BytecodeGenerator::addCallee(FunctionBodyNode* functionBodyNode, RegisterID* calleeRegister)
{
    if (!calleeRegister)
        return;

    symbolTable().add(functionBodyNode->ident().impl(), SymbolTableEntry(calleeRegister->index(), ReadOnly));
}

void BytecodeGenerator::addParameter(const Identifier& ident, int parameterIndex)
{
    // Parameters overwrite var declarations, but not function declarations.
    StringImpl* rep = ident.impl();
    if (!m_functions.contains(rep)) {
        symbolTable().set(rep, parameterIndex);
        RegisterID& parameter = registerFor(parameterIndex);
        parameter.setIndex(parameterIndex);
    }

    // To maintain the calling convention, we have to allocate unique space for
    // each parameter, even if the parameter doesn't make it into the symbol table.
    m_codeBlock->addParameter();
}

bool BytecodeGenerator::willResolveToArguments(const Identifier& ident)
{
    if (ident != propertyNames().arguments)
        return false;

    if (!shouldOptimizeLocals())
        return false;

    SymbolTableEntry entry = symbolTable().get(ident.impl());
    if (entry.isNull())
        return false;

    if (m_codeBlock->usesArguments() && m_codeType == FunctionCode)
        return true;

    return false;
}

RegisterID* BytecodeGenerator::uncheckedRegisterForArguments()
{
    ASSERT(willResolveToArguments(propertyNames().arguments));

    SymbolTableEntry entry = symbolTable().get(propertyNames().arguments.impl());
    ASSERT(!entry.isNull());
    return &registerFor(entry.getIndex());
}

RegisterID* BytecodeGenerator::createLazyRegisterIfNecessary(RegisterID* reg)
{
    if (!reg->virtualRegister().isLocal())
        return reg;

    int localVariableNumber = reg->virtualRegister().toLocal();

    if (m_lastLazyFunction <= localVariableNumber || localVariableNumber < m_firstLazyFunction)
        return reg;
    emitLazyNewFunction(reg, m_lazyFunctions.get(localVariableNumber));
    return reg;
}
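// newRegister() grows the callee-local portion of the frame. m_numCalleeRegisters
// is kept rounded up to stackAlignmentRegisters() so that the frame size the code
// block reports always respects the platform's stack alignment.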
RegisterID* BytecodeGenerator::newRegister()
{
    m_calleeRegisters.append(virtualRegisterForLocal(m_calleeRegisters.size()));
    int numCalleeRegisters = max<int>(m_codeBlock->m_numCalleeRegisters, m_calleeRegisters.size());
    numCalleeRegisters = WTF::roundUpToMultipleOf(stackAlignmentRegisters(), numCalleeRegisters);
    m_codeBlock->m_numCalleeRegisters = numCalleeRegisters;
    return &m_calleeRegisters.last();
}

RegisterID* BytecodeGenerator::newTemporary()
{
    // Reclaim free register IDs.
    while (m_calleeRegisters.size() && !m_calleeRegisters.last().refCount())
        m_calleeRegisters.removeLast();

    RegisterID* result = newRegister();
    result->setTemporary();
    return result;
}

LabelScopePtr BytecodeGenerator::newLabelScope(LabelScope::Type type, const Identifier* name)
{
    // Reclaim free label scopes.
    while (m_labelScopes.size() && !m_labelScopes.last().refCount())
        m_labelScopes.removeLast();

    // Allocate new label scope.
    LabelScope scope(type, name, scopeDepth(), newLabel(), type == LabelScope::Loop ? newLabel() : PassRefPtr<Label>()); // Only loops have continue targets.
    m_labelScopes.append(scope);
    return LabelScopePtr(&m_labelScopes, m_labelScopes.size() - 1);
}

PassRefPtr<Label> BytecodeGenerator::newLabel()
{
    // Reclaim free label IDs.
    while (m_labels.size() && !m_labels.last().refCount())
        m_labels.removeLast();

    // Allocate new label ID.
    m_labels.append(this);
    return &m_labels.last();
}

PassRefPtr<Label> BytecodeGenerator::emitLabel(Label* l0)
{
    unsigned newLabelIndex = instructions().size();
    l0->setLocation(newLabelIndex);

    if (m_codeBlock->numberOfJumpTargets()) {
        unsigned lastLabelIndex = m_codeBlock->lastJumpTarget();
        ASSERT(lastLabelIndex <= newLabelIndex);
        if (newLabelIndex == lastLabelIndex) {
            // Peephole optimizations have already been disabled by emitting the last label.
            return l0;
        }
    }

    m_codeBlock->addJumpTarget(newLabelIndex);

    // This disables peephole optimizations when an instruction is a jump target
    m_lastOpcodeID = op_end;
    return l0;
}
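// emitOpcode() records m_lastOpcodeID (and its position) so that later emitters
// can pattern-match on the previously emitted instruction; that is what enables
// the compare/branch and typeof fusions below. emitLabel() resets the tracking to
// op_end precisely to keep those peepholes from reaching across a jump target.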
void BytecodeGenerator::emitOpcode(OpcodeID opcodeID)
{
    size_t opcodePosition = instructions().size();
    ASSERT(opcodePosition - m_lastOpcodePosition == opcodeLength(m_lastOpcodeID) || m_lastOpcodeID == op_end);
    m_lastOpcodePosition = opcodePosition;
    instructions().append(opcodeID);
    m_lastOpcodeID = opcodeID;
}

UnlinkedArrayProfile BytecodeGenerator::newArrayProfile()
{
    return m_codeBlock->addArrayProfile();
}

UnlinkedArrayAllocationProfile BytecodeGenerator::newArrayAllocationProfile()
{
    return m_codeBlock->addArrayAllocationProfile();
}

UnlinkedObjectAllocationProfile BytecodeGenerator::newObjectAllocationProfile()
{
    return m_codeBlock->addObjectAllocationProfile();
}

UnlinkedValueProfile BytecodeGenerator::emitProfiledOpcode(OpcodeID opcodeID)
{
    UnlinkedValueProfile result = m_codeBlock->addValueProfile();
    emitOpcode(opcodeID);
    return result;
}

void BytecodeGenerator::emitLoopHint()
{
    emitOpcode(op_loop_hint);
}

void BytecodeGenerator::retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index)
{
    ASSERT(instructions().size() >= 4);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 3).u.operand;
    src1Index = instructions().at(size - 2).u.operand;
    src2Index = instructions().at(size - 1).u.operand;
}

void BytecodeGenerator::retrieveLastUnaryOp(int& dstIndex, int& srcIndex)
{
    ASSERT(instructions().size() >= 3);
    size_t size = instructions().size();
    dstIndex = instructions().at(size - 2).u.operand;
    srcIndex = instructions().at(size - 1).u.operand;
}

void ALWAYS_INLINE BytecodeGenerator::rewindBinaryOp()
{
    ASSERT(instructions().size() >= 4);
    instructions().shrink(instructions().size() - 4);
    m_lastOpcodeID = op_end;
}

void ALWAYS_INLINE BytecodeGenerator::rewindUnaryOp()
{
    ASSERT(instructions().size() >= 3);
    instructions().shrink(instructions().size() - 3);
    m_lastOpcodeID = op_end;
}

PassRefPtr<Label> BytecodeGenerator::emitJump(Label* target)
{
    size_t begin = instructions().size();
    emitOpcode(op_jmp);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
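// emitJumpIfTrue() peepholes a preceding comparison into a fused conditional
// jump. For example, "a < b" first emits op_less into a temporary; if that
// temporary is only consumed by this branch, the op_less is rewound and replaced
// with a single op_jless src1, src2, target. Null-equality compares are fused the
// same way, but only for forward jumps.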
PassRefPtr<Label> BytecodeGenerator::emitJumpIfTrue(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greater) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jgreater);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greatereq) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jgreatereq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();

    emitOpcode(op_jtrue);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
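// emitJumpIfFalse() is the mirror image: it emits the negated fused forms
// (op_jnless, op_jnlesseq, op_jngreater, op_jngreatereq), turns a preceding
// op_not into a plain op_jtrue on the original operand, and inverts the
// null-equality jumps. Note that here the relational fusions also require a
// forward target, unlike their emitJumpIfTrue() counterparts.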
PassRefPtr<Label> BytecodeGenerator::emitJumpIfFalse(RegisterID* cond, Label* target)
{
    if (m_lastOpcodeID == op_less && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnless);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_lesseq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jnlesseq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greater && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jngreater);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_greatereq && target->isForward()) {
        int dstIndex;
        int src1Index;
        int src2Index;

        retrieveLastBinaryOp(dstIndex, src1Index, src2Index);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindBinaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jngreatereq);
            instructions().append(src1Index);
            instructions().append(src2Index);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_not) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jtrue);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_eq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jneq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    } else if (m_lastOpcodeID == op_neq_null && target->isForward()) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (cond->index() == dstIndex && cond->isTemporary() && !cond->refCount()) {
            rewindUnaryOp();

            size_t begin = instructions().size();
            emitOpcode(op_jeq_null);
            instructions().append(srcIndex);
            instructions().append(target->bind(begin, instructions().size()));
            return target;
        }
    }

    size_t begin = instructions().size();
    emitOpcode(op_jfalse);
    instructions().append(cond->index());
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}
PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionCall(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Special::CallFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

PassRefPtr<Label> BytecodeGenerator::emitJumpIfNotFunctionApply(RegisterID* cond, Label* target)
{
    size_t begin = instructions().size();

    emitOpcode(op_jneq_ptr);
    instructions().append(cond->index());
    instructions().append(Special::ApplyFunction);
    instructions().append(target->bind(begin, instructions().size()));
    return target;
}

unsigned BytecodeGenerator::addConstant(const Identifier& ident)
{
    StringImpl* rep = ident.impl();
    IdentifierMap::AddResult result = m_identifierMap.add(rep, m_codeBlock->numberOfIdentifiers());
    if (result.isNewEntry)
        m_codeBlock->addIdentifier(ident);

    return result.iterator->value;
}
// We can't hash JSValue(), so we use a dedicated data member to cache it.
RegisterID* BytecodeGenerator::addConstantEmptyValue()
{
    if (!m_emptyValueRegister) {
        int index = m_nextConstantOffset;
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstant(JSValue());
        m_emptyValueRegister = &m_constantPoolRegisters[index];
    }

    return m_emptyValueRegister;
}

RegisterID* BytecodeGenerator::addConstantValue(JSValue v)
{
    if (!v)
        return addConstantEmptyValue();

    int index = m_nextConstantOffset;
    JSValueMap::AddResult result = m_jsValueMap.add(JSValue::encode(v), m_nextConstantOffset);
    if (result.isNewEntry) {
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstant(v);
    } else
        index = result.iterator->value;
    return &m_constantPoolRegisters[index];
}

unsigned BytecodeGenerator::addRegExp(RegExp* r)
{
    return m_codeBlock->addRegExp(r);
}

RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, CaptureMode captureMode, RegisterID* src)
{
    m_staticPropertyAnalyzer.mov(dst->index(), src->index());

    emitOpcode(captureMode == IsCaptured ? op_captured_mov : op_mov);
    instructions().append(dst->index());
    instructions().append(src->index());
    if (captureMode == IsCaptured)
        instructions().append(watchableVariable(dst->index()));
    return dst;
}

RegisterID* BytecodeGenerator::emitMove(RegisterID* dst, RegisterID* src)
{
    return emitMove(dst, captureMode(dst->index()), src);
}

RegisterID* BytecodeGenerator::emitUnaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitInc(RegisterID* srcDst)
{
    emitOpcode(op_inc);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitDec(RegisterID* srcDst)
{
    emitOpcode(op_dec);
    instructions().append(srcDst->index());
    return srcDst;
}

RegisterID* BytecodeGenerator::emitBinaryOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2, OperandTypes types)
{
    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());

    if (opcodeID == op_bitor || opcodeID == op_bitand || opcodeID == op_bitxor ||
        opcodeID == op_add || opcodeID == op_mul || opcodeID == op_sub || opcodeID == op_div)
        instructions().append(types.toInt());

    return dst;
}
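// emitEqualityOp() recognizes the common `typeof x == "..."` pattern: when the
// left operand is the dead temporary produced by an immediately preceding
// op_typeof and the right operand is one of the known type-name string
// constants, the typeof is rewound and a dedicated predicate opcode
// (op_is_undefined, op_is_number, ...) is emitted instead.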
RegisterID* BytecodeGenerator::emitEqualityOp(OpcodeID opcodeID, RegisterID* dst, RegisterID* src1, RegisterID* src2)
{
    if (m_lastOpcodeID == op_typeof) {
        int dstIndex;
        int srcIndex;

        retrieveLastUnaryOp(dstIndex, srcIndex);

        if (src1->index() == dstIndex
            && src1->isTemporary()
            && m_codeBlock->isConstantRegisterIndex(src2->index())
            && m_codeBlock->constantRegister(src2->index()).get().isString()) {
            const String& value = asString(m_codeBlock->constantRegister(src2->index()).get())->tryGetValue();
            if (value == "undefined") {
                rewindUnaryOp();
                emitOpcode(op_is_undefined);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "boolean") {
                rewindUnaryOp();
                emitOpcode(op_is_boolean);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "number") {
                rewindUnaryOp();
                emitOpcode(op_is_number);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "string") {
                rewindUnaryOp();
                emitOpcode(op_is_string);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "object") {
                rewindUnaryOp();
                emitOpcode(op_is_object);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
            if (value == "function") {
                rewindUnaryOp();
                emitOpcode(op_is_function);
                instructions().append(dst->index());
                instructions().append(srcIndex);
                return dst;
            }
        }
    }

    emitOpcode(opcodeID);
    instructions().append(dst->index());
    instructions().append(src1->index());
    instructions().append(src2->index());
    return dst;
}
RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, bool b)
{
    return emitLoad(dst, jsBoolean(b));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, double number)
{
    // FIXME: Our hash tables won't hold infinity, so we make a new JSValue each time.
    // Later we can do the extra work to handle that like the other cases. They also don't
    // work correctly with NaN as a key.
    if (std::isnan(number) || number == HashTraits<double>::emptyValue() || HashTraits<double>::isDeletedValue(number))
        return emitLoad(dst, jsNumber(number));
    JSValue& valueInMap = m_numberMap.add(number, JSValue()).iterator->value;
    if (!valueInMap)
        valueInMap = jsNumber(number);
    return emitLoad(dst, valueInMap);
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
    if (!stringInMap)
        stringInMap = jsOwnedString(vm(), identifier.string());
    return emitLoad(dst, JSValue(stringInMap));
}

RegisterID* BytecodeGenerator::emitLoad(RegisterID* dst, JSValue v)
{
    RegisterID* constantID = addConstantValue(v);
    if (dst)
        return emitMove(dst, constantID);
    return constantID;
}

RegisterID* BytecodeGenerator::emitLoadGlobalObject(RegisterID* dst)
{
    if (!m_globalObjectRegister) {
        int index = m_nextConstantOffset;
        m_constantPoolRegisters.append(FirstConstantRegisterIndex + m_nextConstantOffset);
        ++m_nextConstantOffset;
        m_codeBlock->addConstant(JSValue());
        m_globalObjectRegister = &m_constantPoolRegisters[index];
        m_codeBlock->setGlobalObjectRegister(VirtualRegister(index));
    }
    if (dst)
        emitMove(dst, m_globalObjectRegister);
    return m_globalObjectRegister;
}
bool BytecodeGenerator::isCaptured(int operand)
{
    return m_symbolTable && m_symbolTable->isCaptured(operand);
}

Local BytecodeGenerator::local(const Identifier& property)
{
    if (property == propertyNames().thisIdentifier)
        return Local(thisRegister(), ReadOnly, NotCaptured);

    if (property == propertyNames().arguments)
        createArgumentsIfNecessary();

    if (!shouldOptimizeLocals())
        return Local();

    SymbolTableEntry entry = symbolTable().get(property.impl());
    if (entry.isNull())
        return Local();

    RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
    return Local(local, entry.getAttributes(), captureMode(local->index()));
}

Local BytecodeGenerator::constLocal(const Identifier& property)
{
    if (m_codeType != FunctionCode)
        return Local();

    SymbolTableEntry entry = symbolTable().get(property.impl());
    if (entry.isNull())
        return Local();

    RegisterID* local = createLazyRegisterIfNecessary(&registerFor(entry.getIndex()));
    return Local(local, entry.getAttributes(), captureMode(local->index()));
}

void BytecodeGenerator::emitCheckHasInstance(RegisterID* dst, RegisterID* value, RegisterID* base, Label* target)
{
    size_t begin = instructions().size();
    emitOpcode(op_check_has_instance);
    instructions().append(dst->index());
    instructions().append(value->index());
    instructions().append(base->index());
    instructions().append(target->bind(begin, instructions().size()));
}
// Indicates the least upper bound of resolve type based on local scope. The bytecode linker
// will start with this ResolveType and compute the least upper bound including intercepting scopes.
ResolveType BytecodeGenerator::resolveType()
{
    if (m_localScopeDepth)
        return Dynamic;
    if (m_symbolTable && m_symbolTable->usesNonStrictEval())
        return GlobalPropertyWithVarInjectionChecks;
    return GlobalProperty;
}

RegisterID* BytecodeGenerator::emitResolveScope(RegisterID* dst, const Identifier& identifier)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    ASSERT(!m_symbolTable || !m_symbolTable->contains(identifier.impl()) || resolveType() == Dynamic);

    // resolve_scope dst, id, ResolveType, depth
    emitOpcode(op_resolve_scope);
    instructions().append(kill(dst));
    instructions().append(addConstant(identifier));
    instructions().append(resolveType());
    instructions().append(0);
    instructions().append(0);
    return dst;
}
RegisterID* BytecodeGenerator::emitGetFromScope(RegisterID* dst, RegisterID* scope, const Identifier& identifier, ResolveMode resolveMode)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    // get_from_scope dst, scope, id, ResolveModeAndType, Structure, Operand
    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_from_scope);
    instructions().append(kill(dst));
    instructions().append(scope->index());
    instructions().append(addConstant(identifier));
    instructions().append(ResolveModeAndType(resolveMode, resolveType()).operand());
    instructions().append(0);
    instructions().append(0);
    instructions().append(profile);
    return dst;
}

RegisterID* BytecodeGenerator::emitPutToScope(RegisterID* scope, const Identifier& identifier, RegisterID* value, ResolveMode resolveMode)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    // put_to_scope scope, id, value, ResolveModeAndType, Structure, Operand
    emitOpcode(op_put_to_scope);
    instructions().append(scope->index());
    instructions().append(addConstant(identifier));
    instructions().append(value->index());
    instructions().append(ResolveModeAndType(resolveMode, resolveType()).operand());
    instructions().append(0);
    instructions().append(0);
    return value;
}

RegisterID* BytecodeGenerator::emitInstanceOf(RegisterID* dst, RegisterID* value, RegisterID* basePrototype)
{
    emitOpcode(op_instanceof);
    instructions().append(dst->index());
    instructions().append(value->index());
    instructions().append(basePrototype->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitInitGlobalConst(const Identifier& identifier, RegisterID* value)
{
    ASSERT(m_codeType == GlobalCode);
    emitOpcode(op_init_global_const_nop);
    instructions().append(0);
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(addConstant(identifier));
    return value;
}
RegisterID* BytecodeGenerator::emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_id);
    instructions().append(kill(dst));
    instructions().append(base->index());
    instructions().append(addConstant(property));
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(profile);
    return dst;
}

RegisterID* BytecodeGenerator::emitGetArgumentsLength(RegisterID* dst, RegisterID* base)
{
    emitOpcode(op_get_arguments_length);
    instructions().append(dst->index());
    ASSERT(base->virtualRegister() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(addConstant(propertyNames().length));
    return dst;
}

RegisterID* BytecodeGenerator::emitPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);

    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(propertyIndex);
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    return value;
}

RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
{
    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);

    m_codeBlock->addPropertyAccessInstruction(instructions().size());

    emitOpcode(op_put_by_id);
    instructions().append(base->index());
    instructions().append(propertyIndex);
    instructions().append(value->index());
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(0);
    instructions().append(
        property != m_vm->propertyNames->underscoreProto
        && PropertyName(property).asIndex() == PropertyName::NotAnIndex);
    return value;
}

void BytecodeGenerator::emitPutGetterSetter(RegisterID* base, const Identifier& property, RegisterID* getter, RegisterID* setter)
{
    unsigned propertyIndex = addConstant(property);

    m_staticPropertyAnalyzer.putById(base->index(), propertyIndex);

    emitOpcode(op_put_getter_setter);
    instructions().append(base->index());
    instructions().append(propertyIndex);
    instructions().append(getter->index());
    instructions().append(setter->index());
}

RegisterID* BytecodeGenerator::emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier& property)
{
    emitOpcode(op_del_by_id);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(addConstant(property));
    return dst;
}

RegisterID* BytecodeGenerator::emitGetArgumentByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_argument_by_val);
    instructions().append(kill(dst));
    ASSERT(base->virtualRegister() == m_codeBlock->argumentsRegister());
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(arrayProfile);
    instructions().append(profile);
    return dst;
}
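// emitGetByVal() checks the enclosing for-in contexts first: when the subscript
// is the current enumeration property, it can emit op_get_by_pname, which reuses
// the iterator's cached state (expected subscript, iterator and index registers)
// instead of a fully generic, profiled op_get_by_val.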
RegisterID* BytecodeGenerator::emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    for (size_t i = m_forInContextStack.size(); i > 0; i--) {
        ForInContext& context = m_forInContextStack[i - 1];
        if (context.propertyRegister == property) {
            emitOpcode(op_get_by_pname);
            instructions().append(dst->index());
            instructions().append(base->index());
            instructions().append(property->index());
            instructions().append(context.expectedSubscriptRegister->index());
            instructions().append(context.iterRegister->index());
            instructions().append(context.indexRegister->index());
            return dst;
        }
    }
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    UnlinkedValueProfile profile = emitProfiledOpcode(op_get_by_val);
    instructions().append(kill(dst));
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(arrayProfile);
    instructions().append(profile);
    return dst;
}

RegisterID* BytecodeGenerator::emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    emitOpcode(op_put_by_val);
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(value->index());
    instructions().append(arrayProfile);
    return value;
}

RegisterID* BytecodeGenerator::emitDirectPutByVal(RegisterID* base, RegisterID* property, RegisterID* value)
{
    UnlinkedArrayProfile arrayProfile = newArrayProfile();
    emitOpcode(op_put_by_val_direct);
    instructions().append(base->index());
    instructions().append(property->index());
    instructions().append(value->index());
    instructions().append(arrayProfile);
    return value;
}

RegisterID* BytecodeGenerator::emitDeleteByVal(RegisterID* dst, RegisterID* base, RegisterID* property)
{
    emitOpcode(op_del_by_val);
    instructions().append(dst->index());
    instructions().append(base->index());
    instructions().append(property->index());
    return dst;
}

RegisterID* BytecodeGenerator::emitPutByIndex(RegisterID* base, unsigned index, RegisterID* value)
{
    emitOpcode(op_put_by_index);
    instructions().append(base->index());
    instructions().append(index);
    instructions().append(value->index());
    return value;
}

RegisterID* BytecodeGenerator::emitCreateThis(RegisterID* dst)
{
    RefPtr<RegisterID> func = newTemporary();

    m_codeBlock->addPropertyAccessInstruction(instructions().size());
    emitOpcode(op_get_callee);
    instructions().append(func->index());
    instructions().append(0);

    size_t begin = instructions().size();
    m_staticPropertyAnalyzer.createThis(m_thisRegister.index(), begin + 3);

    emitOpcode(op_create_this);
    instructions().append(m_thisRegister.index());
    instructions().append(func->index());
    instructions().append(0);
    return dst;
}

RegisterID* BytecodeGenerator::emitNewObject(RegisterID* dst)
{
    size_t begin = instructions().size();
    m_staticPropertyAnalyzer.newObject(dst->index(), begin + 2);

    emitOpcode(op_new_object);
    instructions().append(dst->index());
    instructions().append(0);
    instructions().append(newObjectAllocationProfile());
    return dst;
}

unsigned BytecodeGenerator::addConstantBuffer(unsigned length)
{
    return m_codeBlock->addConstantBuffer(length);
}

JSString* BytecodeGenerator::addStringConstant(const Identifier& identifier)
{
    JSString*& stringInMap = m_stringMap.add(identifier.impl(), nullptr).iterator->value;
    if (!stringInMap) {
        stringInMap = jsString(vm(), identifier.string());
        addConstantValue(stringInMap);
    }
    return stringInMap;
}
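// Array literals whose elements are all constants (e.g. [1, 2, 3]) are emitted
// as op_new_array_buffer over a constant buffer stored on the code block. If any
// element needs to be evaluated at runtime, the elements are instead written
// into a sequential run of temporaries and handed to op_new_array.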
RegisterID* BytecodeGenerator::emitNewArray(RegisterID* dst, ElementNode* elements, unsigned length)
{
#if !ASSERT_DISABLED
    unsigned checkLength = 0;
#endif
    bool hadVariableExpression = false;

    for (ElementNode* n = elements; n; n = n->next()) {
        if (!n->value()->isConstant()) {
            hadVariableExpression = true;
            break;
        }
#if !ASSERT_DISABLED
        checkLength++;
#endif
    }

    if (!hadVariableExpression) {
        ASSERT(length == checkLength);
        unsigned constantBufferIndex = addConstantBuffer(length);
        JSValue* constantBuffer = m_codeBlock->constantBuffer(constantBufferIndex).data();
        unsigned index = 0;
        for (ElementNode* n = elements; index < length; n = n->next()) {
            ASSERT(n->value()->isConstant());
            constantBuffer[index++] = static_cast<ConstantNode*>(n->value())->jsValue(*this);
        }
        emitOpcode(op_new_array_buffer);
        instructions().append(dst->index());
        instructions().append(constantBufferIndex);
        instructions().append(length);
        instructions().append(newArrayAllocationProfile());
        return dst;
    }

    Vector<RefPtr<RegisterID>, 16, UnsafeVectorOverflow> argv;
    for (ElementNode* n = elements; n; n = n->next()) {
        if (!length)
            break;
        length--;
        ASSERT(!n->value()->isSpreadExpression());
        argv.append(newTemporary());
        // op_new_array requires the initial values to be a sequential range of registers
        ASSERT(argv.size() == 1 || argv[argv.size() - 1]->index() == argv[argv.size() - 2]->index() - 1);
        emitNode(argv.last().get(), n->value());
    }

    emitOpcode(op_new_array);
    instructions().append(dst->index());
    instructions().append(argv.size() ? argv[0]->index() : 0); // argv
    instructions().append(argv.size()); // argc
    instructions().append(newArrayAllocationProfile());
    return dst;
}
RegisterID* BytecodeGenerator::emitNewFunction(RegisterID* dst, CaptureMode captureMode, FunctionBodyNode* function)
{
    return emitNewFunctionInternal(dst, captureMode, m_codeBlock->addFunctionDecl(makeFunction(function)), false);
}

RegisterID* BytecodeGenerator::emitLazyNewFunction(RegisterID* dst, FunctionBodyNode* function)
{
    FunctionOffsetMap::AddResult ptr = m_functionOffsets.add(function, 0);
    if (ptr.isNewEntry)
        ptr.iterator->value = m_codeBlock->addFunctionDecl(makeFunction(function));
    return emitNewFunctionInternal(dst, NotCaptured, ptr.iterator->value, true);
}

RegisterID* BytecodeGenerator::emitNewFunctionInternal(RegisterID* dst, CaptureMode captureMode, unsigned index, bool doNullCheck)
{
    createActivationIfNecessary();
    emitOpcode(captureMode == IsCaptured ? op_new_captured_func : op_new_func);
    instructions().append(dst->index());
    instructions().append(index);
    if (captureMode == IsCaptured) {
        ASSERT(!doNullCheck);
        instructions().append(watchableVariable(dst->index()));
    } else
        instructions().append(doNullCheck);
    return dst;
}

RegisterID* BytecodeGenerator::emitNewRegExp(RegisterID* dst, RegExp* regExp)
{
    emitOpcode(op_new_regexp);
    instructions().append(dst->index());
    instructions().append(addRegExp(regExp));
    return dst;
}

RegisterID* BytecodeGenerator::emitNewFunctionExpression(RegisterID* r0, FuncExprNode* n)
{
    FunctionBodyNode* function = n->body();
    unsigned index = m_codeBlock->addFunctionExpr(makeFunction(function));

    createActivationIfNecessary();
    emitOpcode(op_new_func_exp);
    instructions().append(r0->index());
    instructions().append(index);
    return r0;
}

RegisterID* BytecodeGenerator::emitCall(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
{
    return emitCall(op_call, dst, func, expectedFunction, callArguments, divot, divotStart, divotEnd);
}

void BytecodeGenerator::createArgumentsIfNecessary()
{
    if (m_codeType != FunctionCode)
        return;

    if (!m_codeBlock->usesArguments())
        return;

    if (shouldTearOffArgumentsEagerly())
        return;

    emitOpcode(op_create_arguments);
    instructions().append(m_codeBlock->argumentsRegister().offset());
    ASSERT(!hasWatchableVariable(m_codeBlock->argumentsRegister().offset()));
}

void BytecodeGenerator::createActivationIfNecessary()
{
    if (!m_activationRegister)
        return;
    emitOpcode(op_create_activation);
    instructions().append(m_activationRegister->index());
}

RegisterID* BytecodeGenerator::emitCallEval(RegisterID* dst, RegisterID* func, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
{
    createActivationIfNecessary();
    return emitCall(op_call_eval, dst, func, NoExpectedFunction, callArguments, divot, divotStart, divotEnd);
}

ExpectedFunction BytecodeGenerator::expectedFunctionForIdentifier(const Identifier& identifier)
{
    if (identifier == m_vm->propertyNames->Object)
        return ExpectObjectConstructor;
    if (identifier == m_vm->propertyNames->Array)
        return ExpectArrayConstructor;
    return NoExpectedFunction;
}
ExpectedFunction BytecodeGenerator::emitExpectedFunctionSnippet(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, Label* done)
{
    RefPtr<Label> realCall = newLabel();
    switch (expectedFunction) {
    case ExpectObjectConstructor: {
        // If the number of arguments is non-zero, then we can't do anything interesting.
        if (callArguments.argumentCountIncludingThis() >= 2)
            return NoExpectedFunction;

        size_t begin = instructions().size();
        emitOpcode(op_jneq_ptr);
        instructions().append(func->index());
        instructions().append(Special::ObjectConstructor);
        instructions().append(realCall->bind(begin, instructions().size()));

        if (dst != ignoredResult())
            emitNewObject(dst);
        break;
    }

    case ExpectArrayConstructor: {
        // If you're doing anything other than "new Array()" or "new Array(foo)" then we
        // don't inline it, for now. The only reason is that call arguments are in
        // the opposite order of what op_new_array expects, so we'd either need to change
        // how op_new_array works or we'd need an op_new_array_reverse. Neither of these
        // things sounds like it's worth it.
        if (callArguments.argumentCountIncludingThis() > 2)
            return NoExpectedFunction;

        size_t begin = instructions().size();
        emitOpcode(op_jneq_ptr);
        instructions().append(func->index());
        instructions().append(Special::ArrayConstructor);
        instructions().append(realCall->bind(begin, instructions().size()));

        if (dst != ignoredResult()) {
            if (callArguments.argumentCountIncludingThis() == 2) {
                emitOpcode(op_new_array_with_size);
                instructions().append(dst->index());
                instructions().append(callArguments.argumentRegister(0)->index());
                instructions().append(newArrayAllocationProfile());
            } else {
                ASSERT(callArguments.argumentCountIncludingThis() == 1);
                emitOpcode(op_new_array);
                instructions().append(dst->index());
                instructions().append(0);
                instructions().append(0);
                instructions().append(newArrayAllocationProfile());
            }
        }
        break;
    }

    default:
        ASSERT(expectedFunction == NoExpectedFunction);
        return NoExpectedFunction;
    }

    size_t begin = instructions().size();
    emitOpcode(op_jmp);
    instructions().append(done->bind(begin, instructions().size()));
    emitLabel(realCall.get());

    return expectedFunction;
}
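// The main emitCall() path: profiling hooks bracket the call when enabled, a
// single spread argument is routed through emitCallVarargs(), ordinary arguments
// are evaluated into consecutive argument registers, space for the call frame
// header is reserved with temporaries, and an optional expected-function snippet
// lets calls to the built-in Object/Array constructors be replaced with a direct
// allocation, guarded by op_jneq_ptr.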
1709 RegisterID* BytecodeGenerator::emitCall(OpcodeID opcodeID, RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1711 ASSERT(opcodeID == op_call || opcodeID == op_call_eval);
1712 ASSERT(func->refCount());
1714 if (m_shouldEmitProfileHooks)
1715 emitMove(callArguments.profileHookRegister(), func);
1717 // Generate code for arguments.
1718 unsigned argument = 0;
1719 if (callArguments.argumentsNode()) {
1720 ArgumentListNode* n = callArguments.argumentsNode()->m_listNode;
1721 if (n && n->m_expr->isSpreadExpression()) {
1722 RELEASE_ASSERT(!n->m_next);
1723 auto expression = static_cast<SpreadExpressionNode*>(n->m_expr)->expression();
1724 expression->emitBytecode(*this, callArguments.argumentRegister(0));
1725 return emitCallVarargs(dst, func, callArguments.thisRegister(), callArguments.argumentRegister(0), newTemporary(), callArguments.profileHookRegister(), divot, divotStart, divotEnd);
1727 for (; n; n = n->m_next)
1728 emitNode(callArguments.argumentRegister(argument++), n);
1731 // Reserve space for call frame.
1732 Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
1733 for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1734 callFrame.append(newTemporary());
1736 if (m_shouldEmitProfileHooks) {
1737 emitOpcode(op_profile_will_call);
1738 instructions().append(callArguments.profileHookRegister()->index());
1741 emitExpressionInfo(divot, divotStart, divotEnd);
1743 RefPtr<Label> done = newLabel();
1744 expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1747 UnlinkedArrayProfile arrayProfile = newArrayProfile();
1748 UnlinkedValueProfile profile = emitProfiledOpcode(opcodeID);
1750 ASSERT(dst != ignoredResult());
1751 instructions().append(dst->index());
1752 instructions().append(func->index());
1753 instructions().append(callArguments.argumentCountIncludingThis());
1754 instructions().append(callArguments.stackOffset());
1755 #if ENABLE(LLINT)
1756 instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1757 #else
1758 instructions().append(0);
1759 #endif
1760 instructions().append(arrayProfile);
1761 instructions().append(profile);
1763 if (expectedFunction != NoExpectedFunction)
1764 emitLabel(done.get());
1766 if (m_shouldEmitProfileHooks) {
1767 emitOpcode(op_profile_did_call);
1768 instructions().append(callArguments.profileHookRegister()->index());
1769 }
1771 return dst;
1772 }
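// Editorial note (summarizing the code below): op_call_varargs carries the
// callee, an explicit |this|, the register holding the arguments, and the
// first free register the interpreter may use to build the frame, plus the
// usual array and value profiles; the argument count is only known at run time.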
1774 RegisterID* BytecodeGenerator::emitCallVarargs(RegisterID* dst, RegisterID* func, RegisterID* thisRegister, RegisterID* arguments, RegisterID* firstFreeRegister, RegisterID* profileHookRegister, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1776 if (m_shouldEmitProfileHooks) {
1777 emitMove(profileHookRegister, func);
1778 emitOpcode(op_profile_will_call);
1779 instructions().append(profileHookRegister->index());
1780 }
1782 emitExpressionInfo(divot, divotStart, divotEnd);
1785 UnlinkedArrayProfile arrayProfile = newArrayProfile();
1786 UnlinkedValueProfile profile = emitProfiledOpcode(op_call_varargs);
1787 ASSERT(dst != ignoredResult());
1788 instructions().append(dst->index());
1789 instructions().append(func->index());
1790 instructions().append(thisRegister->index());
1791 instructions().append(arguments->index());
1792 instructions().append(firstFreeRegister->index());
1793 instructions().append(arrayProfile);
1794 instructions().append(profile);
1795 if (m_shouldEmitProfileHooks) {
1796 emitOpcode(op_profile_did_call);
1797 instructions().append(profileHookRegister->index());
1798 }
1800 return dst;
1801 }
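// Editorial note (summarizing the code below): emitReturn tears off the
// activation and, in sloppy mode when there are named parameters, the
// arguments object before leaving the frame. Constructors additionally return
// through op_ret_object_or_this, which falls back to |this| when the returned
// value is not an object; returning |this| itself skips that check.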
1802 RegisterID* BytecodeGenerator::emitReturn(RegisterID* src)
1804 if (m_activationRegister) {
1805 emitOpcode(op_tear_off_activation);
1806 instructions().append(m_activationRegister->index());
1807 }
1809 if (m_codeBlock->usesArguments() && m_codeBlock->numParameters() != 1 && !isStrictMode()) {
1810 emitOpcode(op_tear_off_arguments);
1811 instructions().append(m_codeBlock->argumentsRegister().offset());
1812 instructions().append(m_activationRegister ? m_activationRegister->index() : emitLoad(0, JSValue())->index());
1813 }
1815 // Constructors use op_ret_object_or_this to check the result is an
1816 // object, unless we can trivially determine the check is not
1817 // necessary (currently, if the return value is 'this').
1818 if (isConstructor() && (src->index() != m_thisRegister.index())) {
1819 emitOpcode(op_ret_object_or_this);
1820 instructions().append(src->index());
1821 instructions().append(m_thisRegister.index());
1822 return src;
1823 }
1824 return emitUnaryNoDstOp(op_ret, src);
1825 }
1827 RegisterID* BytecodeGenerator::emitUnaryNoDstOp(OpcodeID opcodeID, RegisterID* src)
1829 emitOpcode(opcodeID);
1830 instructions().append(src->index());
1831 return src;
1832 }
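// Editorial note (summarizing the code below): emitConstruct mirrors emitCall
// but emits op_construct, has no spread fast path, and appends no array
// profile; after dst / callee / argument count / stack offset it appends the
// call link info slot, a spare operand and a value profile.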
1834 RegisterID* BytecodeGenerator::emitConstruct(RegisterID* dst, RegisterID* func, ExpectedFunction expectedFunction, CallArguments& callArguments, const JSTextPosition& divot, const JSTextPosition& divotStart, const JSTextPosition& divotEnd)
1836 ASSERT(func->refCount());
1838 if (m_shouldEmitProfileHooks)
1839 emitMove(callArguments.profileHookRegister(), func);
1841 // Generate code for arguments.
1842 unsigned argument = 0;
1843 if (ArgumentsNode* argumentsNode = callArguments.argumentsNode()) {
1844 for (ArgumentListNode* n = argumentsNode->m_listNode; n; n = n->m_next)
1845 emitNode(callArguments.argumentRegister(argument++), n);
1846 }
1848 if (m_shouldEmitProfileHooks) {
1849 emitOpcode(op_profile_will_call);
1850 instructions().append(callArguments.profileHookRegister()->index());
1851 }
1853 // Reserve space for call frame.
1854 Vector<RefPtr<RegisterID>, JSStack::CallFrameHeaderSize, UnsafeVectorOverflow> callFrame;
1855 for (int i = 0; i < JSStack::CallFrameHeaderSize; ++i)
1856 callFrame.append(newTemporary());
1858 emitExpressionInfo(divot, divotStart, divotEnd);
1860 RefPtr<Label> done = newLabel();
1861 expectedFunction = emitExpectedFunctionSnippet(dst, func, expectedFunction, callArguments, done.get());
1863 UnlinkedValueProfile profile = emitProfiledOpcode(op_construct);
1864 ASSERT(dst != ignoredResult());
1865 instructions().append(dst->index());
1866 instructions().append(func->index());
1867 instructions().append(callArguments.argumentCountIncludingThis());
1868 instructions().append(callArguments.stackOffset());
1869 #if ENABLE(LLINT)
1870 instructions().append(m_codeBlock->addLLIntCallLinkInfo());
1871 #else
1872 instructions().append(0);
1873 #endif
1874 instructions().append(0);
1875 instructions().append(profile);
1877 if (expectedFunction != NoExpectedFunction)
1878 emitLabel(done.get());
1880 if (m_shouldEmitProfileHooks) {
1881 emitOpcode(op_profile_did_call);
1882 instructions().append(callArguments.profileHookRegister()->index());
1883 }
1885 return dst;
1886 }
1888 RegisterID* BytecodeGenerator::emitStrcat(RegisterID* dst, RegisterID* src, int count)
1890 emitOpcode(op_strcat);
1891 instructions().append(dst->index());
1892 instructions().append(src->index());
1893 instructions().append(count);
1895 return dst;
1896 }
1898 void BytecodeGenerator::emitToPrimitive(RegisterID* dst, RegisterID* src)
1900 emitOpcode(op_to_primitive);
1901 instructions().append(dst->index());
1902 instructions().append(src->index());
1903 }
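// Editorial note (summarizing the code below): emitPushWithScope and
// emitPopScope keep m_scopeContextStack and m_localScopeDepth in sync with the
// op_push_with_scope / op_pop_scope they emit, so break, continue and return
// paths (see emitPopScopes) know how many dynamic scopes to unwind.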
1905 RegisterID* BytecodeGenerator::emitPushWithScope(RegisterID* scope)
1907 ControlFlowContext context;
1908 context.isFinallyBlock = false;
1909 m_scopeContextStack.append(context);
1910 m_localScopeDepth++;
1912 createActivationIfNecessary();
1913 return emitUnaryNoDstOp(op_push_with_scope, scope);
1914 }
1916 void BytecodeGenerator::emitPopScope()
1918 ASSERT(m_scopeContextStack.size());
1919 ASSERT(!m_scopeContextStack.last().isFinallyBlock);
1921 emitOpcode(op_pop_scope);
1923 m_scopeContextStack.removeLast();
1924 m_localScopeDepth--;
1925 }
1927 void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, unsigned line, unsigned charOffset, unsigned lineStart)
1929 #if ENABLE(DEBUG_WITH_BREAKPOINT)
1930 if (debugHookID != DidReachBreakpoint)
1931 return;
1932 #else
1933 if (!m_shouldEmitDebugHooks)
1934 return;
1935 #endif
1936 JSTextPosition divot(line, charOffset, lineStart);
1937 emitExpressionInfo(divot, divot, divot);
1938 emitOpcode(op_debug);
1939 instructions().append(debugHookID);
1940 instructions().append(false);
1941 }
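// Editorial note (summarizing the code below): a finally context snapshots the
// current sizes of the scope, switch, for-in, try and label-scope stacks along
// with the finally block's AST node; emitComplexPopScopes later shrinks the
// stacks back to those sizes while re-emitting the finally body inline, then
// restores them.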
1943 void BytecodeGenerator::pushFinallyContext(StatementNode* finallyBlock)
1945 // Reclaim free label scopes.
1946 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
1947 m_labelScopes.removeLast();
1949 ControlFlowContext scope;
1950 scope.isFinallyBlock = true;
1951 FinallyContext context = {
1952 finallyBlock,
1953 static_cast<unsigned>(m_scopeContextStack.size()),
1954 static_cast<unsigned>(m_switchContextStack.size()),
1955 static_cast<unsigned>(m_forInContextStack.size()),
1956 static_cast<unsigned>(m_tryContextStack.size()),
1957 static_cast<unsigned>(m_labelScopes.size()),
1958 m_finallyDepth,
1959 m_localScopeDepth
1960 };
1961 scope.finallyContext = context;
1962 m_scopeContextStack.append(scope);
1963 m_finallyDepth++;
1964 }
1966 void BytecodeGenerator::popFinallyContext()
1968 ASSERT(m_scopeContextStack.size());
1969 ASSERT(m_scopeContextStack.last().isFinallyBlock);
1970 ASSERT(m_finallyDepth > 0);
1971 m_scopeContextStack.removeLast();
1972 m_finallyDepth--;
1973 }
1975 LabelScope* BytecodeGenerator::breakTarget(const Identifier& name)
1977 // Reclaim free label scopes.
1979 // The condition was previously coded as 'm_labelScopes.size() && !m_labelScopes.last().refCount()',
1980 // however sometimes this appears to lead to GCC going a little haywire and entering the loop with
1981 // size 0, leading to segfaulty badness. We have yet to identify a valid cause within our code that
1982 // would make the GCC codegen misbehave in this fashion, and as such the following refactoring of the
1983 // loop condition is a workaround.
1984 while (m_labelScopes.size()) {
1985 if (m_labelScopes.last().refCount())
1986 break;
1987 m_labelScopes.removeLast();
1988 }
1990 if (!m_labelScopes.size())
1991 return 0;
1993 // We special-case the following, which is a syntax error in Firefox:
1994 // label:
1995 //     break;
1996 if (name.isEmpty()) {
1997 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
1998 LabelScope* scope = &m_labelScopes[i];
1999 if (scope->type() != LabelScope::NamedLabel) {
2000 ASSERT(scope->breakTarget());
2001 return scope;
2002 }
2003 }
2004 return 0;
2005 }
2007 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2008 LabelScope* scope = &m_labelScopes[i];
2009 if (scope->name() && *scope->name() == name) {
2010 ASSERT(scope->breakTarget());
2011 return scope;
2012 }
2013 }
2014 return 0;
2015 }
2017 LabelScope* BytecodeGenerator::continueTarget(const Identifier& name)
2019 // Reclaim free label scopes.
2020 while (m_labelScopes.size() && !m_labelScopes.last().refCount())
2021 m_labelScopes.removeLast();
2023 if (!m_labelScopes.size())
2024 return 0;
2026 if (name.isEmpty()) {
2027 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2028 LabelScope* scope = &m_labelScopes[i];
2029 if (scope->type() == LabelScope::Loop) {
2030 ASSERT(scope->continueTarget());
2031 return scope;
2032 }
2033 }
2034 return 0;
2035 }
2037 // Continue to the loop nested nearest to the label scope that matches
2038 // the name.
2039 LabelScope* result = 0;
2040 for (int i = m_labelScopes.size() - 1; i >= 0; --i) {
2041 LabelScope* scope = &m_labelScopes[i];
2042 if (scope->type() == LabelScope::Loop) {
2043 ASSERT(scope->continueTarget());
2044 result = scope;
2045 }
2046 if (scope->name() && *scope->name() == name)
2047 return result; // may be 0
2048 }
2049 return 0;
2050 }
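// Editorial illustration (not in the original source): for something like
//     try { with (o) { return f(); } } finally { cleanup(); }
// the return inside the with block cannot simply jump out. emitComplexPopScopes
// first pops the dynamic scopes above the nearest finally context, then emits
// the finally body inline with the generator's bookkeeping temporarily rolled
// back to the state recorded by pushFinallyContext, and repeats until the
// target scope is reached.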
2052 void BytecodeGenerator::emitComplexPopScopes(ControlFlowContext* topScope, ControlFlowContext* bottomScope)
2054 while (topScope > bottomScope) {
2055 // First we count the number of dynamic scopes we need to remove to get
2056 // to a finally block.
2057 int nNormalScopes = 0;
2058 while (topScope > bottomScope) {
2059 if (topScope->isFinallyBlock)
2060 break;
2061 ++nNormalScopes;
2062 --topScope;
2063 }
2065 if (nNormalScopes) {
2066 // We need to remove a number of dynamic scopes to get to the next
2067 // finally block.
2068 while (nNormalScopes--)
2069 emitOpcode(op_pop_scope);
2071 // If topScope == bottomScope then there isn't a finally block left to emit.
2072 if (topScope == bottomScope)
2073 return;
2074 }
2076 Vector<ControlFlowContext> savedScopeContextStack;
2077 Vector<SwitchInfo> savedSwitchContextStack;
2078 Vector<ForInContext> savedForInContextStack;
2079 Vector<TryContext> poppedTryContexts;
2080 LabelScopeStore savedLabelScopes;
2081 while (topScope > bottomScope && topScope->isFinallyBlock) {
2082 RefPtr<Label> beforeFinally = emitLabel(newLabel().get());
2084 // Save the current state of the world while instating the state of the world
2085 // for the finally block.
2086 FinallyContext finallyContext = topScope->finallyContext;
2087 bool flipScopes = finallyContext.scopeContextStackSize != m_scopeContextStack.size();
2088 bool flipSwitches = finallyContext.switchContextStackSize != m_switchContextStack.size();
2089 bool flipForIns = finallyContext.forInContextStackSize != m_forInContextStack.size();
2090 bool flipTries = finallyContext.tryContextStackSize != m_tryContextStack.size();
2091 bool flipLabelScopes = finallyContext.labelScopesSize != m_labelScopes.size();
2092 int topScopeIndex = -1;
2093 int bottomScopeIndex = -1;
2094 if (flipScopes) {
2095 topScopeIndex = topScope - m_scopeContextStack.begin();
2096 bottomScopeIndex = bottomScope - m_scopeContextStack.begin();
2097 savedScopeContextStack = m_scopeContextStack;
2098 m_scopeContextStack.shrink(finallyContext.scopeContextStackSize);
2099 }
2100 if (flipSwitches) {
2101 savedSwitchContextStack = m_switchContextStack;
2102 m_switchContextStack.shrink(finallyContext.switchContextStackSize);
2103 }
2104 if (flipForIns) {
2105 savedForInContextStack = m_forInContextStack;
2106 m_forInContextStack.shrink(finallyContext.forInContextStackSize);
2107 }
2108 if (flipTries) {
2109 while (m_tryContextStack.size() != finallyContext.tryContextStackSize) {
2110 ASSERT(m_tryContextStack.size() > finallyContext.tryContextStackSize);
2111 TryContext context = m_tryContextStack.last();
2112 m_tryContextStack.removeLast();
2113 TryRange range;
2114 range.start = context.start;
2115 range.end = beforeFinally;
2116 range.tryData = context.tryData;
2117 m_tryRanges.append(range);
2118 poppedTryContexts.append(context);
2119 }
2120 }
2121 if (flipLabelScopes) {
2122 savedLabelScopes = m_labelScopes;
2123 while (m_labelScopes.size() > finallyContext.labelScopesSize)
2124 m_labelScopes.removeLast();
2125 }
2126 int savedFinallyDepth = m_finallyDepth;
2127 m_finallyDepth = finallyContext.finallyDepth;
2128 int savedDynamicScopeDepth = m_localScopeDepth;
2129 m_localScopeDepth = finallyContext.dynamicScopeDepth;
2131 // Emit the finally block.
2132 emitNode(finallyContext.finallyBlock);
2134 RefPtr<Label> afterFinally = emitLabel(newLabel().get());
2136 // Restore the state of the world.
2137 if (flipScopes) {
2138 m_scopeContextStack = savedScopeContextStack;
2139 topScope = &m_scopeContextStack[topScopeIndex]; // assert it's within bounds
2140 bottomScope = m_scopeContextStack.begin() + bottomScopeIndex; // don't assert, since the index might be -1.
2141 }
2142 if (flipSwitches)
2143 m_switchContextStack = savedSwitchContextStack;
2144 if (flipForIns)
2145 m_forInContextStack = savedForInContextStack;
2146 if (flipTries) {
2147 ASSERT(m_tryContextStack.size() == finallyContext.tryContextStackSize);
2148 for (unsigned i = poppedTryContexts.size(); i--;) {
2149 TryContext context = poppedTryContexts[i];
2150 context.start = afterFinally;
2151 m_tryContextStack.append(context);
2152 }
2153 poppedTryContexts.clear();
2154 }
2155 if (flipLabelScopes)
2156 m_labelScopes = savedLabelScopes;
2157 m_finallyDepth = savedFinallyDepth;
2158 m_localScopeDepth = savedDynamicScopeDepth;
2160 --topScope;
2161 }
2162 }
2163 }
2165 void BytecodeGenerator::emitPopScopes(int targetScopeDepth)
2167 ASSERT(scopeDepth() - targetScopeDepth >= 0);
2169 size_t scopeDelta = scopeDepth() - targetScopeDepth;
2170 ASSERT(scopeDelta <= m_scopeContextStack.size());
2171 if (!scopeDelta)
2172 return;
2174 if (!m_finallyDepth) {
2175 while (scopeDelta--)
2176 emitOpcode(op_pop_scope);
2177 return;
2178 }
2180 emitComplexPopScopes(&m_scopeContextStack.last(), &m_scopeContextStack.last() - scopeDelta);
2181 }
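// Editorial note (summarizing the code below): op_get_pnames and op_next_pname
// drive for-in enumeration; the first captures the property name set and jumps
// to |breakTarget| when there is nothing to enumerate, the second fetches the
// next name and jumps back to |target| while names remain. Both encode their
// jump operand relative to the start of the instruction, like other branches.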
2183 RegisterID* BytecodeGenerator::emitGetPropertyNames(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, Label* breakTarget)
2185 size_t begin = instructions().size();
2187 emitOpcode(op_get_pnames);
2188 instructions().append(dst->index());
2189 instructions().append(base->index());
2190 instructions().append(i->index());
2191 instructions().append(size->index());
2192 instructions().append(breakTarget->bind(begin, instructions().size()));
2193 return dst;
2194 }
2196 RegisterID* BytecodeGenerator::emitNextPropertyName(RegisterID* dst, RegisterID* base, RegisterID* i, RegisterID* size, RegisterID* iter, Label* target)
2198 size_t begin = instructions().size();
2200 emitOpcode(op_next_pname);
2201 instructions().append(dst->index());
2202 instructions().append(base->index());
2203 instructions().append(i->index());
2204 instructions().append(size->index());
2205 instructions().append(iter->index());
2206 instructions().append(target->bind(begin, instructions().size()));
2207 return dst;
2208 }
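// Editorial note (summarizing the code below): pushTry only records where the
// protected region starts; popTryAndEmitCatch closes the matching TryRange at
// |end|, plants the handler label, and emits op_catch so the caught value
// lands in |targetRegister|.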
2210 TryData* BytecodeGenerator::pushTry(Label* start)
2211 {
2212 TryData tryData;
2213 tryData.target = newLabel();
2214 tryData.targetScopeDepth = UINT_MAX;
2215 m_tryData.append(tryData);
2216 TryData* result = &m_tryData.last();
2218 TryContext tryContext;
2219 tryContext.start = start;
2220 tryContext.tryData = result;
2222 m_tryContextStack.append(tryContext);
2224 return result;
2225 }
2227 RegisterID* BytecodeGenerator::popTryAndEmitCatch(TryData* tryData, RegisterID* targetRegister, Label* end)
2229 m_usesExceptions = true;
2231 ASSERT_UNUSED(tryData, m_tryContextStack.last().tryData == tryData);
2233 TryRange tryRange;
2234 tryRange.start = m_tryContextStack.last().start;
2235 tryRange.end = end;
2236 tryRange.tryData = m_tryContextStack.last().tryData;
2237 m_tryRanges.append(tryRange);
2238 m_tryContextStack.removeLast();
2240 emitLabel(tryRange.tryData->target.get());
2241 tryRange.tryData->targetScopeDepth = m_localScopeDepth;
2243 emitOpcode(op_catch);
2244 instructions().append(targetRegister->index());
2245 return targetRegister;
2248 void BytecodeGenerator::emitThrowReferenceError(const String& message)
2250 emitOpcode(op_throw_static_error);
2251 instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, message)))->index());
2252 instructions().append(true);
2255 void BytecodeGenerator::emitPushFunctionNameScope(const Identifier& property, RegisterID* value, unsigned attributes)
2257 emitOpcode(op_push_name_scope);
2258 instructions().append(addConstant(property));
2259 instructions().append(value->index());
2260 instructions().append(attributes);
2263 void BytecodeGenerator::emitPushCatchScope(const Identifier& property, RegisterID* value, unsigned attributes)
2265 createActivationIfNecessary();
2267 ControlFlowContext context;
2268 context.isFinallyBlock = false;
2269 m_scopeContextStack.append(context);
2270 m_localScopeDepth++;
2272 emitOpcode(op_push_name_scope);
2273 instructions().append(addConstant(property));
2274 instructions().append(value->index());
2275 instructions().append(attributes);
2276 }
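// Editorial note (summarizing the code below): beginSwitch emits
// op_switch_imm, op_switch_char or op_switch_string with placeholder operands;
// endSwitch patches in the jump table index and the default target once every
// clause label has been resolved.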
2278 void BytecodeGenerator::beginSwitch(RegisterID* scrutineeRegister, SwitchInfo::SwitchType type)
2280 SwitchInfo info = { static_cast<uint32_t>(instructions().size()), type };
2281 switch (type) {
2282 case SwitchInfo::SwitchImmediate:
2283 emitOpcode(op_switch_imm);
2284 break;
2285 case SwitchInfo::SwitchCharacter:
2286 emitOpcode(op_switch_char);
2287 break;
2288 case SwitchInfo::SwitchString:
2289 emitOpcode(op_switch_string);
2290 break;
2291 default:
2292 RELEASE_ASSERT_NOT_REACHED();
2293 }
2295 instructions().append(0); // place holder for table index
2296 instructions().append(0); // place holder for default target
2297 instructions().append(scrutineeRegister->index());
2298 m_switchContextStack.append(info);
2299 }
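// Editorial illustration (not in the original source): for
//     switch (x) { case 1: ... case 3: ... }
// the immediate jump table built below gets min = 1 and three branch offsets;
// slots left at 0 (here the one for 2) are treated as misses and fall through
// to the default target that endSwitch patches in.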
2301 static int32_t keyForImmediateSwitch(ExpressionNode* node, int32_t min, int32_t max)
2304 ASSERT(node->isNumber());
2305 double value = static_cast<NumberNode*>(node)->value();
2306 int32_t key = static_cast<int32_t>(value);
2307 ASSERT(key == value);
2308 ASSERT(key >= min);
2309 ASSERT(key <= max);
2310 return key;
2311 }
2313 static int32_t keyForCharacterSwitch(ExpressionNode* node, int32_t min, int32_t max)
2316 ASSERT(node->isString());
2317 StringImpl* clause = static_cast<StringNode*>(node)->value().impl();
2318 ASSERT(clause->length() == 1);
2320 int32_t key = (*clause)[0];
2321 ASSERT(key >= min);
2322 ASSERT(key <= max);
2323 return key;
2324 }
2326 static void prepareJumpTableForSwitch(
2327 UnlinkedSimpleJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount,
2328 RefPtr<Label>* labels, ExpressionNode** nodes, int32_t min, int32_t max,
2329 int32_t (*keyGetter)(ExpressionNode*, int32_t min, int32_t max))
2331 jumpTable.min = min;
2332 jumpTable.branchOffsets.resize(max - min + 1);
2333 jumpTable.branchOffsets.fill(0);
2334 for (uint32_t i = 0; i < clauseCount; ++i) {
2335 // We're emitting this after the clause labels should have been fixed, so
2336 // the labels should not be "forward" references
2337 ASSERT(!labels[i]->isForward());
2338 jumpTable.add(keyGetter(nodes[i], min, max), labels[i]->bind(switchAddress, switchAddress + 3));
2339 }
2340 }
2342 static void prepareJumpTableForStringSwitch(UnlinkedStringJumpTable& jumpTable, int32_t switchAddress, uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes)
2344 for (uint32_t i = 0; i < clauseCount; ++i) {
2345 // We're emitting this after the clause labels should have been fixed, so
2346 // the labels should not be "forward" references
2347 ASSERT(!labels[i]->isForward());
2349 ASSERT(nodes[i]->isString());
2350 StringImpl* clause = static_cast<StringNode*>(nodes[i])->value().impl();
2351 jumpTable.offsetTable.add(clause, labels[i]->bind(switchAddress, switchAddress + 3));
2352 }
2353 }
2355 void BytecodeGenerator::endSwitch(uint32_t clauseCount, RefPtr<Label>* labels, ExpressionNode** nodes, Label* defaultLabel, int32_t min, int32_t max)
2357 SwitchInfo switchInfo = m_switchContextStack.last();
2358 m_switchContextStack.removeLast();
2360 switch (switchInfo.switchType) {
2361 case SwitchInfo::SwitchImmediate:
2362 case SwitchInfo::SwitchCharacter: {
2363 instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfSwitchJumpTables();
2364 instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2366 UnlinkedSimpleJumpTable& jumpTable = m_codeBlock->addSwitchJumpTable();
2367 prepareJumpTableForSwitch(
2368 jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes, min, max,
2369 switchInfo.switchType == SwitchInfo::SwitchImmediate
2370 ? keyForImmediateSwitch
2371 : keyForCharacterSwitch);
2372 break;
2373 }
2375 case SwitchInfo::SwitchString: {
2376 instructions()[switchInfo.bytecodeOffset + 1] = m_codeBlock->numberOfStringSwitchJumpTables();
2377 instructions()[switchInfo.bytecodeOffset + 2] = defaultLabel->bind(switchInfo.bytecodeOffset, switchInfo.bytecodeOffset + 3);
2379 UnlinkedStringJumpTable& jumpTable = m_codeBlock->addStringSwitchJumpTable();
2380 prepareJumpTableForStringSwitch(jumpTable, switchInfo.bytecodeOffset, clauseCount, labels, nodes);
2381 break;
2382 }
2384 default:
2385 RELEASE_ASSERT_NOT_REACHED();
2386 break;
2387 }
2388 }
2390 RegisterID* BytecodeGenerator::emitThrowExpressionTooDeepException()
2392 // It would be nice to do an even better job of identifying exactly where the expression is.
2393 // And we could make the caller pass the node pointer in, if there was some way of getting
2394 // that from an arbitrary node. However, calling emitExpressionInfo without any useful data
2395 // is still good enough to get us an accurate line number.
2396 m_expressionTooDeep = true;
2397 return newTemporary();
2400 void BytecodeGenerator::setIsNumericCompareFunction(bool isNumericCompareFunction)
2402 m_codeBlock->setIsNumericCompareFunction(isNumericCompareFunction);
2405 bool BytecodeGenerator::isArgumentNumber(const Identifier& ident, int argumentNumber)
2407 RegisterID* registerID = local(ident).get();
2408 if (!registerID || registerID->index() >= 0)
2409 return false;
2410 return registerID->index() == CallFrame::argumentOffset(argumentNumber);
2413 void BytecodeGenerator::emitReadOnlyExceptionIfNeeded()
2415 if (!isStrictMode())
2416 return;
2417 emitOpcode(op_throw_static_error);
2418 instructions().append(addConstantValue(addStringConstant(Identifier(m_vm, StrictModeReadonlyPropertyWriteError)))->index());
2419 instructions().append(false);
2420 }
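// Editorial note (summarizing the code below): emitEnumeration has two paths:
// a fast loop that walks the arguments object by index when the subject is
// known to resolve to |arguments| (and no slow arguments are in use), and a
// generic loop that fetches the private iterator / iteratorNext properties and
// keeps calling the fetched next function until the iteration terminator value
// comes back.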
2422 void BytecodeGenerator::emitEnumeration(ThrowableExpressionData* node, ExpressionNode* subjectNode, const std::function<void(BytecodeGenerator&, RegisterID*)>& callBack)
2424 if (subjectNode->isResolveNode()
2425 && willResolveToArguments(static_cast<ResolveNode*>(subjectNode)->identifier())
2426 && !symbolTable().slowArguments()) {
2427 RefPtr<RegisterID> index = emitLoad(newTemporary(), jsNumber(0));
2429 LabelScopePtr scope = newLabelScope(LabelScope::Loop);
2430 RefPtr<RegisterID> value = emitLoad(newTemporary(), jsUndefined());
2432 emitJump(scope->continueTarget());
2434 RefPtr<Label> loopStart = newLabel();
2435 emitLabel(loopStart.get());
2437 emitGetArgumentByVal(value.get(), uncheckedRegisterForArguments(), index.get());
2438 callBack(*this, value.get());
2439 emitInc(index.get());
2440 emitLabel(scope->continueTarget());
2442 RefPtr<RegisterID> length = emitGetArgumentsLength(newTemporary(), uncheckedRegisterForArguments());
2443 emitJumpIfTrue(emitEqualityOp(op_less, newTemporary(), index.get(), length.get()), loopStart.get());
2444 emitLabel(scope->breakTarget());
2445 return;
2446 }
2448 LabelScopePtr scope = newLabelScope(LabelScope::Loop);
2449 RefPtr<RegisterID> subject = newTemporary();
2450 emitNode(subject.get(), subjectNode);
2451 RefPtr<RegisterID> iterator = emitGetById(newTemporary(), subject.get(), propertyNames().iteratorPrivateName);
2453 CallArguments args(*this, 0);
2454 emitMove(args.thisRegister(), subject.get());
2455 emitCall(iterator.get(), iterator.get(), NoExpectedFunction, args, node->divot(), node->divotStart(), node->divotEnd());
2457 RefPtr<RegisterID> iteratorNext = emitGetById(newTemporary(), iterator.get(), propertyNames().iteratorNextPrivateName);
2458 RefPtr<RegisterID> value = newTemporary();
2459 emitLoad(value.get(), jsUndefined());
2461 emitJump(scope->continueTarget());
2463 RefPtr<Label> loopStart = newLabel();
2464 emitLabel(loopStart.get());
2466 callBack(*this, value.get());
2467 emitLabel(scope->continueTarget());
2468 CallArguments nextArguments(*this, 0, 1);
2469 emitMove(nextArguments.thisRegister(), iterator.get());
2470 emitMove(nextArguments.argumentRegister(0), value.get());
2471 emitCall(value.get(), iteratorNext.get(), NoExpectedFunction, nextArguments, node->divot(), node->divotStart(), node->divotEnd());
2472 RefPtr<RegisterID> result = newTemporary();
2473 emitJumpIfFalse(emitEqualityOp(op_stricteq, result.get(), value.get(), emitLoad(0, JSValue(vm()->iterationTerminator.get()))), loopStart.get());
2474 emitLabel(scope->breakTarget());