+2015-10-07 Geoffrey Garen <ggaren@apple.com>
+
+ Unreviewed, rolling back in r190450
+ https://bugs.webkit.org/show_bug.cgi?id=149727
+
+ This time for sure?
+
+ The cause of the leak was an invalidated compilation.
+
+ There was vestigial manual memory management code that eagerly removed
+ a CodeBlock from the set of CodeBlocks if compilation was invalidated.
+ That's not cool since we rely on the set of CodeBlocks when we run
+ destructors.
+
+ The fix is to remove the vestigial code.
+
+ I ran the leaks, correctness, and performance tests locally and did not
+ see any problems.
+
+ Restored changesets:
+
+ "CodeBlock should be a GC object"
+ https://bugs.webkit.org/show_bug.cgi?id=149727
+ http://trac.webkit.org/changeset/190450
+
2015-10-07 Mark Lam <mark.lam@apple.com>
Disable tail calls because it is breaking some sites.
namespace JSC {
+const ClassInfo CodeBlock::s_info = {
+ "CodeBlock", 0, 0,
+ CREATE_METHOD_TABLE(CodeBlock)
+};
+
+const ClassInfo FunctionCodeBlock::s_info = {
+ "FunctionCodeBlock", &Base::s_info, 0,
+ CREATE_METHOD_TABLE(FunctionCodeBlock)
+};
+
+#if ENABLE(WEBASSEMBLY)
+const ClassInfo WebAssemblyCodeBlock::s_info = {
+ "WebAssemblyCodeBlock", &Base::s_info, 0,
+ CREATE_METHOD_TABLE(WebAssemblyCodeBlock)
+};
+#endif
+
+const ClassInfo GlobalCodeBlock::s_info = {
+ "GlobalCodeBlock", &Base::s_info, 0,
+ CREATE_METHOD_TABLE(GlobalCodeBlock)
+};
+
+const ClassInfo ProgramCodeBlock::s_info = {
+ "ProgramCodeBlock", &Base::s_info, 0,
+ CREATE_METHOD_TABLE(ProgramCodeBlock)
+};
+
+const ClassInfo ModuleProgramCodeBlock::s_info = {
+ "ModuleProgramCodeBlock", &Base::s_info, 0,
+ CREATE_METHOD_TABLE(ModuleProgramCodeBlock)
+};
+
+const ClassInfo EvalCodeBlock::s_info = {
+ "EvalCodeBlock", &Base::s_info, 0,
+ CREATE_METHOD_TABLE(EvalCodeBlock)
+};
+
+void FunctionCodeBlock::destroy(JSCell* cell)
+{
+ jsCast<FunctionCodeBlock*>(cell)->~FunctionCodeBlock();
+}
+
+#if ENABLE(WEBASSEMBLY)
+void WebAssemblyCodeBlock::destroy(JSCell* cell)
+{
+ jsCast<WebAssemblyCodeBlock*>(cell)->~WebAssemblyCodeBlock();
+}
+#endif
+
+void ProgramCodeBlock::destroy(JSCell* cell)
+{
+ jsCast<ProgramCodeBlock*>(cell)->~ProgramCodeBlock();
+}
+
+void ModuleProgramCodeBlock::destroy(JSCell* cell)
+{
+ jsCast<ModuleProgramCodeBlock*>(cell)->~ModuleProgramCodeBlock();
+}
+
+void EvalCodeBlock::destroy(JSCell* cell)
+{
+ jsCast<EvalCodeBlock*>(cell)->~EvalCodeBlock();
+}
+
CString CodeBlock::inferredName() const
{
switch (codeType()) {
out.print(inferredName(), "#", hashAsStringIfPossible());
out.print(":[", RawPointer(this), "->");
if (!!m_alternative)
- out.print(RawPointer(m_alternative.get()), "->");
+ out.print(RawPointer(alternative()), "->");
out.print(RawPointer(ownerExecutable()), ", ", jitType, codeType());
if (codeType() == FunctionCode)
} // anonymous namespace
-CodeBlock::CodeBlock(CopyParsedBlockTag, CodeBlock& other)
- : m_globalObject(other.m_globalObject)
+CodeBlock::CodeBlock(VM* vm, Structure* structure, CopyParsedBlockTag, CodeBlock& other)
+ : JSCell(*vm, structure)
+ , m_globalObject(other.m_globalObject)
, m_heap(other.m_heap)
, m_numCalleeRegisters(other.m_numCalleeRegisters)
, m_numVars(other.m_numVars)
, m_shouldAlwaysBeInlined(true)
, m_didFailFTLCompilation(false)
, m_hasBeenCompiledWithFTL(false)
- , m_unlinkedCode(*other.m_vm, other.m_ownerExecutable.get(), other.m_unlinkedCode.get())
+ , m_unlinkedCode(*other.m_vm, this, other.m_unlinkedCode.get())
, m_hasDebuggerStatement(false)
, m_steppingMode(SteppingModeDisabled)
, m_numBreakpoints(0)
- , m_ownerExecutable(*other.m_vm, other.m_ownerExecutable.get(), other.m_ownerExecutable.get())
+ , m_ownerExecutable(*other.m_vm, this, other.m_ownerExecutable.get())
, m_vm(other.m_vm)
, m_instructions(other.m_instructions)
, m_thisRegister(other.m_thisRegister)
, m_capabilityLevelState(DFG::CapabilityLevelNotSet)
#endif
{
- m_visitStronglyHasBeenCalled.store(false, std::memory_order_relaxed);
- m_visitAggregateHasBeenCalled.store(false, std::memory_order_relaxed);
+ m_visitWeaklyHasBeenCalled.store(false, std::memory_order_relaxed);
ASSERT(m_heap->isDeferred());
ASSERT(m_scopeRegister.isLocal());
setNumParameters(other.numParameters());
+}
+
+void CodeBlock::finishCreation(VM& vm, CopyParsedBlockTag, CodeBlock& other)
+{
+ Base::finishCreation(vm);
+
optimizeAfterWarmUp();
jitAfterWarmUp();
}
m_heap->m_codeBlocks.add(this);
- m_heap->reportExtraMemoryAllocated(sizeof(CodeBlock));
}
-CodeBlock::CodeBlock(ScriptExecutable* ownerExecutable, UnlinkedCodeBlock* unlinkedCodeBlock, JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, unsigned firstLineColumnOffset)
- : m_globalObject(scope->globalObject()->vm(), ownerExecutable, scope->globalObject())
+CodeBlock::CodeBlock(VM* vm, Structure* structure, ScriptExecutable* ownerExecutable, UnlinkedCodeBlock* unlinkedCodeBlock,
+ JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, unsigned firstLineColumnOffset)
+ : JSCell(*vm, structure)
+ , m_globalObject(scope->globalObject()->vm(), this, scope->globalObject())
, m_heap(&m_globalObject->vm().heap)
, m_numCalleeRegisters(unlinkedCodeBlock->m_numCalleeRegisters)
, m_numVars(unlinkedCodeBlock->m_numVars)
, m_shouldAlwaysBeInlined(true)
, m_didFailFTLCompilation(false)
, m_hasBeenCompiledWithFTL(false)
- , m_unlinkedCode(m_globalObject->vm(), ownerExecutable, unlinkedCodeBlock)
+ , m_unlinkedCode(m_globalObject->vm(), this, unlinkedCodeBlock)
, m_hasDebuggerStatement(false)
, m_steppingMode(SteppingModeDisabled)
, m_numBreakpoints(0)
- , m_ownerExecutable(m_globalObject->vm(), ownerExecutable, ownerExecutable)
+ , m_ownerExecutable(m_globalObject->vm(), this, ownerExecutable)
, m_vm(unlinkedCodeBlock->vm())
, m_thisRegister(unlinkedCodeBlock->thisRegister())
, m_scopeRegister(unlinkedCodeBlock->scopeRegister())
, m_capabilityLevelState(DFG::CapabilityLevelNotSet)
#endif
{
- m_visitStronglyHasBeenCalled.store(false, std::memory_order_relaxed);
- m_visitAggregateHasBeenCalled.store(false, std::memory_order_relaxed);
+ m_visitWeaklyHasBeenCalled.store(false, std::memory_order_relaxed);
ASSERT(m_heap->isDeferred());
ASSERT(m_scopeRegister.isLocal());
ASSERT(m_source);
setNumParameters(unlinkedCodeBlock->numParameters());
+}
+
+void CodeBlock::finishCreation(VM& vm, ScriptExecutable* ownerExecutable, UnlinkedCodeBlock* unlinkedCodeBlock,
+ JSScope* scope)
+{
+ Base::finishCreation(vm);
- if (vm()->typeProfiler() || vm()->controlFlowProfiler())
- vm()->functionHasExecutedCache()->removeUnexecutedRange(ownerExecutable->sourceID(), ownerExecutable->typeProfilingStartOffset(), ownerExecutable->typeProfilingEndOffset());
+ if (vm.typeProfiler() || vm.controlFlowProfiler())
+ vm.functionHasExecutedCache()->removeUnexecutedRange(ownerExecutable->sourceID(), ownerExecutable->typeProfilingStartOffset(), ownerExecutable->typeProfilingEndOffset());
setConstantRegisters(unlinkedCodeBlock->constantRegisters(), unlinkedCodeBlock->constantsSourceCodeRepresentation());
if (unlinkedCodeBlock->usesGlobalObject())
- m_constantRegisters[unlinkedCodeBlock->globalObjectRegister().toConstantIndex()].set(*m_vm, ownerExecutable, m_globalObject.get());
+ m_constantRegisters[unlinkedCodeBlock->globalObjectRegister().toConstantIndex()].set(*m_vm, this, m_globalObject.get());
for (unsigned i = 0; i < LinkTimeConstantCount; i++) {
LinkTimeConstant type = static_cast<LinkTimeConstant>(i);
if (unsigned registerIndex = unlinkedCodeBlock->registerIndexForLinkTimeConstant(type))
- m_constantRegisters[registerIndex].set(*m_vm, ownerExecutable, m_globalObject->jsCellForLinkTimeConstant(type));
+ m_constantRegisters[registerIndex].set(*m_vm, this, m_globalObject->jsCellForLinkTimeConstant(type));
}
HashSet<int, WTF::IntHash<int>, WTF::UnsignedWithZeroKeyHashTraits<int>> clonedConstantSymbolTables;
ConcurrentJITLocker locker(symbolTable->m_lock);
symbolTable->prepareForTypeProfiling(locker);
}
- m_constantRegisters[i].set(*m_vm, ownerExecutable, symbolTable->cloneScopePart(*m_vm));
+ m_constantRegisters[i].set(*m_vm, this, symbolTable->cloneScopePart(*m_vm));
clonedConstantSymbolTables.add(i + FirstConstantRegisterIndex);
}
}
m_functionDecls.resizeToFit(unlinkedCodeBlock->numberOfFunctionDecls());
for (size_t count = unlinkedCodeBlock->numberOfFunctionDecls(), i = 0; i < count; ++i) {
UnlinkedFunctionExecutable* unlinkedExecutable = unlinkedCodeBlock->functionDecl(i);
- if (vm()->typeProfiler() || vm()->controlFlowProfiler())
- vm()->functionHasExecutedCache()->insertUnexecutedRange(ownerExecutable->sourceID(), unlinkedExecutable->typeProfilingStartOffset(), unlinkedExecutable->typeProfilingEndOffset());
- m_functionDecls[i].set(*m_vm, ownerExecutable, unlinkedExecutable->link(*m_vm, ownerExecutable->source()));
+ if (vm.typeProfiler() || vm.controlFlowProfiler())
+ vm.functionHasExecutedCache()->insertUnexecutedRange(ownerExecutable->sourceID(), unlinkedExecutable->typeProfilingStartOffset(), unlinkedExecutable->typeProfilingEndOffset());
+ m_functionDecls[i].set(*m_vm, this, unlinkedExecutable->link(*m_vm, ownerExecutable->source()));
}
m_functionExprs.resizeToFit(unlinkedCodeBlock->numberOfFunctionExprs());
for (size_t count = unlinkedCodeBlock->numberOfFunctionExprs(), i = 0; i < count; ++i) {
UnlinkedFunctionExecutable* unlinkedExecutable = unlinkedCodeBlock->functionExpr(i);
- if (vm()->typeProfiler() || vm()->controlFlowProfiler())
- vm()->functionHasExecutedCache()->insertUnexecutedRange(ownerExecutable->sourceID(), unlinkedExecutable->typeProfilingStartOffset(), unlinkedExecutable->typeProfilingEndOffset());
- m_functionExprs[i].set(*m_vm, ownerExecutable, unlinkedExecutable->link(*m_vm, ownerExecutable->source()));
+ if (vm.typeProfiler() || vm.controlFlowProfiler())
+ vm.functionHasExecutedCache()->insertUnexecutedRange(ownerExecutable->sourceID(), unlinkedExecutable->typeProfilingStartOffset(), unlinkedExecutable->typeProfilingEndOffset());
+ m_functionExprs[i].set(*m_vm, this, unlinkedExecutable->link(*m_vm, ownerExecutable->source()));
}
if (unlinkedCodeBlock->hasRareData()) {
unsigned opLength = opcodeLength(pc[0].u.opcode);
- instructions[i] = vm()->interpreter->getOpcode(pc[0].u.opcode);
+ instructions[i] = vm.interpreter->getOpcode(pc[0].u.opcode);
for (size_t j = 1; j < opLength; ++j) {
if (sizeof(int32_t) != sizeof(intptr_t))
instructions[i + j].u.pointer = 0;
int inferredInlineCapacity = pc[opLength - 2].u.operand;
instructions[i + opLength - 1] = objectAllocationProfile;
- objectAllocationProfile->initialize(*vm(),
- ownerExecutable, m_globalObject->objectPrototype(), inferredInlineCapacity);
+ objectAllocationProfile->initialize(vm,
+ this, m_globalObject->objectPrototype(), inferredInlineCapacity);
break;
}
// Keep the linked module environment strongly referenced.
if (stronglyReferencedModuleEnvironments.add(jsCast<JSModuleEnvironment*>(op.lexicalEnvironment)).isNewEntry)
addConstant(op.lexicalEnvironment);
- instructions[i + 6].u.jsCell.set(*vm(), ownerExecutable, op.lexicalEnvironment);
+ instructions[i + 6].u.jsCell.set(vm, this, op.lexicalEnvironment);
} else
- instructions[i + 6].u.symbolTable.set(*vm(), ownerExecutable, op.lexicalEnvironment->symbolTable());
+ instructions[i + 6].u.symbolTable.set(vm, this, op.lexicalEnvironment->symbolTable());
} else if (JSScope* constantScope = JSScope::constantScopeForCodeBlock(op.type, this))
- instructions[i + 6].u.jsCell.set(*vm(), ownerExecutable, constantScope);
+ instructions[i + 6].u.jsCell.set(vm, this, constantScope);
else
instructions[i + 6].u.pointer = nullptr;
break;
if (op.type == GlobalVar || op.type == GlobalVarWithVarInjectionChecks || op.type == GlobalLexicalVar || op.type == GlobalLexicalVarWithVarInjectionChecks)
instructions[i + 5].u.watchpointSet = op.watchpointSet;
else if (op.structure)
- instructions[i + 5].u.structure.set(*vm(), ownerExecutable, op.structure);
+ instructions[i + 5].u.structure.set(vm, this, op.structure);
instructions[i + 6].u.pointer = reinterpret_cast<void*>(op.operand);
break;
}
if (op.watchpointSet)
op.watchpointSet->invalidate(PutToScopeFireDetail(this, ident));
} else if (op.structure)
- instructions[i + 5].u.structure.set(*vm(), ownerExecutable, op.structure);
+ instructions[i + 5].u.structure.set(vm, this, op.structure);
instructions[i + 6].u.pointer = reinterpret_cast<void*>(op.operand);
break;
}
case op_profile_type: {
- RELEASE_ASSERT(vm()->typeProfiler());
+ RELEASE_ASSERT(vm.typeProfiler());
// The format of this instruction is: op_profile_type regToProfile, TypeLocation*, flag, identifier?, resolveType?
size_t instructionOffset = i + opLength - 1;
unsigned divotStart, divotEnd;
ConcurrentJITLocker locker(symbolTable->m_lock);
// If our parent scope was created while profiling was disabled, it will not have prepared for profiling yet.
symbolTable->prepareForTypeProfiling(locker);
- globalVariableID = symbolTable->uniqueIDForVariable(locker, impl, *vm());
- globalTypeSet = symbolTable->globalTypeSetForVariable(locker, impl, *vm());
+ globalVariableID = symbolTable->uniqueIDForVariable(locker, impl, vm);
+ globalTypeSet = symbolTable->globalTypeSetForVariable(locker, impl, vm);
} else
globalVariableID = TypeProfilerNoGlobalIDExists;
const Identifier& ident = identifier(pc[4].u.operand);
ConcurrentJITLocker locker(symbolTable->m_lock);
// If our parent scope was created while profiling was disabled, it will not have prepared for profiling yet.
- globalVariableID = symbolTable->uniqueIDForVariable(locker, ident.impl(), *vm());
- globalTypeSet = symbolTable->globalTypeSetForVariable(locker, ident.impl(), *vm());
+ globalVariableID = symbolTable->uniqueIDForVariable(locker, ident.impl(), vm);
+ globalTypeSet = symbolTable->globalTypeSetForVariable(locker, ident.impl(), vm);
break;
}
}
}
- std::pair<TypeLocation*, bool> locationPair = vm()->typeProfiler()->typeLocationCache()->getTypeLocation(globalVariableID,
- ownerExecutable->sourceID(), divotStart, divotEnd, globalTypeSet, vm());
+ std::pair<TypeLocation*, bool> locationPair = vm.typeProfiler()->typeLocationCache()->getTypeLocation(globalVariableID,
+ ownerExecutable->sourceID(), divotStart, divotEnd, globalTypeSet, &vm);
TypeLocation* location = locationPair.first;
bool isNewLocation = locationPair.second;
location->m_divotForFunctionOffsetIfReturnStatement = ownerExecutable->typeProfilingStartOffset();
if (shouldAnalyze && isNewLocation)
- vm()->typeProfiler()->insertNewLocation(location);
+ vm.typeProfiler()->insertNewLocation(location);
instructions[i + 2].u.location = location;
break;
i += opLength;
}
- if (vm()->controlFlowProfiler())
+ if (vm.controlFlowProfiler())
insertBasicBlockBoundariesForControlFlowProfiler(instructions);
m_instructions = WTF::RefCountedArray<Instruction>(instructions);
dumpBytecode();
m_heap->m_codeBlocks.add(this);
- m_heap->reportExtraMemoryAllocated(sizeof(CodeBlock) + m_instructions.size() * sizeof(Instruction));
+ m_heap->reportExtraMemoryAllocated(m_instructions.size() * sizeof(Instruction));
}
#if ENABLE(WEBASSEMBLY)
-CodeBlock::CodeBlock(WebAssemblyExecutable* ownerExecutable, VM& vm, JSGlobalObject* globalObject)
- : m_globalObject(globalObject->vm(), ownerExecutable, globalObject)
+CodeBlock::CodeBlock(VM*, Structure* structure, WebAssemblyExecutable* ownerExecutable, VM& vm, JSGlobalObject* globalObject)
+ : JSCell(vm, structure)
+ , m_globalObject(globalObject->vm(), this, globalObject)
, m_heap(&m_globalObject->vm().heap)
, m_numCalleeRegisters(0)
, m_numVars(0)
, m_hasDebuggerStatement(false)
, m_steppingMode(SteppingModeDisabled)
, m_numBreakpoints(0)
- , m_ownerExecutable(m_globalObject->vm(), ownerExecutable, ownerExecutable)
+ , m_ownerExecutable(m_globalObject->vm(), this, ownerExecutable)
, m_vm(&vm)
, m_isStrictMode(false)
, m_needsActivation(false)
#endif
{
ASSERT(m_heap->isDeferred());
+}
+
+void CodeBlock::finishCreation(VM& vm, WebAssemblyExecutable*, JSGlobalObject*)
+{
+ Base::finishCreation(vm);
m_heap->m_codeBlocks.add(this);
- m_heap->reportExtraMemoryAllocated(sizeof(CodeBlock));
}
#endif
#if ENABLE(VERBOSE_VALUE_PROFILE)
dumpValueProfiles();
#endif
- while (m_incomingLLIntCalls.begin() != m_incomingLLIntCalls.end())
- m_incomingLLIntCalls.begin()->remove();
-#if ENABLE(JIT)
+
// We may be destroyed before any CodeBlocks that refer to us are destroyed.
// Consider that two CodeBlocks become unreachable at the same time. There
// is no guarantee about the order in which the CodeBlocks are destroyed.
// So, if we don't remove incoming calls, and get destroyed before the
// CodeBlock(s) that have calls into us, then the CallLinkInfo vector's
// destructor will try to remove nodes from our (no longer valid) linked list.
- while (m_incomingCalls.begin() != m_incomingCalls.end())
- m_incomingCalls.begin()->remove();
- while (m_incomingPolymorphicCalls.begin() != m_incomingPolymorphicCalls.end())
- m_incomingPolymorphicCalls.begin()->remove();
+ unlinkIncomingCalls();
// Note that our outgoing calls will be removed from other CodeBlocks'
// m_incomingCalls linked lists through the execution of the ~CallLinkInfo
// destructors.
+#if ENABLE(JIT)
for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter)
(*iter)->deref();
#endif // ENABLE(JIT)
}
+void CodeBlock::setAlternative(VM& vm, CodeBlock* alternative)
+{
+ m_alternative.set(vm, this, alternative);
+}
+
void CodeBlock::setNumParameters(int newValue)
{
m_numParameters = newValue;
if (jitType() != JITCode::DFGJIT)
return 0;
DFG::JITCode* jitCode = m_jitCode->dfg();
- return jitCode->osrEntryBlock.get();
+ return jitCode->osrEntryBlock();
#else // ENABLE(FTL_JIT)
return 0;
#endif // ENABLE(FTL_JIT)
}
-void CodeBlock::visitStrongly(SlotVisitor& visitor)
+void CodeBlock::visitWeakly(SlotVisitor& visitor)
{
- bool setByMe = m_visitStronglyHasBeenCalled.compareExchangeStrong(false, true);
+ bool setByMe = m_visitWeaklyHasBeenCalled.compareExchangeStrong(false, true);
if (!setByMe)
return;
- visitAggregate(visitor);
-
- stronglyVisitStrongReferences(visitor);
- stronglyVisitWeakReferences(visitor);
- propagateTransitions(visitor);
-}
-
-void CodeBlock::visitAggregate(SlotVisitor& visitor)
-{
- // I may be asked to scan myself more than once, and it may even happen concurrently.
- // To this end, use an atomic operation to check (and set) if I've been called already.
- // Only one thread may proceed past this point - whichever one wins the atomic set race.
- bool setByMe = m_visitAggregateHasBeenCalled.compareExchangeStrong(false, true);
- if (!setByMe)
+ if (Heap::isMarked(this))
return;
-
- if (!!m_alternative)
- m_alternative->visitAggregate(visitor);
-
- if (CodeBlock* otherBlock = specialOSREntryBlockOrNull())
- otherBlock->visitAggregate(visitor);
- visitor.reportExtraMemoryVisited(sizeof(CodeBlock));
- if (m_jitCode)
- visitor.reportExtraMemoryVisited(m_jitCode->size());
- if (m_instructions.size()) {
- // Divide by refCount() because m_instructions points to something that is shared
- // by multiple CodeBlocks, and we only want to count it towards the heap size once.
- // Having each CodeBlock report only its proportional share of the size is one way
- // of accomplishing this.
- visitor.reportExtraMemoryVisited(m_instructions.size() * sizeof(Instruction) / m_instructions.refCount());
+ if (shouldVisitStrongly()) {
+ visitor.appendUnbarrieredReadOnlyPointer(this);
+ return;
}
- visitor.append(&m_unlinkedCode);
-
// There are two things that may use unconditional finalizers: inline cache clearing
// and jettisoning. The probability of us wanting to do at least one of those things
// is probably quite close to 1. So we add one no matter what and when it runs, it
// figures out whether it has any work to do.
- visitor.addUnconditionalFinalizer(this);
-
- m_allTransitionsHaveBeenMarked = false;
-
- if (shouldVisitStrongly()) {
- visitStrongly(visitor);
- return;
- }
-
+ visitor.addUnconditionalFinalizer(&m_unconditionalFinalizer);
+
if (!JITCode::isOptimizingJIT(jitType()))
return;
+ // If we jettison ourselves we'll install our alternative, so make sure that it
+ // survives GC even if we don't.
+ visitor.append(&m_alternative);
+
// There are two things that we use weak reference harvesters for: DFG fixpoint for
// jettisoning, and trying to find structures that would be live based on some
// inline cache. So it makes sense to register them regardless.
- visitor.addWeakReferenceHarvester(this);
+ visitor.addWeakReferenceHarvester(&m_weakReferenceHarvester);
#if ENABLE(DFG_JIT)
// We get here if we're live in the sense that our owner executable is live,
// either us marking additional objects, or by other objects being marked for
// other reasons, that this iteration should run again; it will notify us of this
// decision by calling harvestWeakReferences().
-
- m_jitCode->dfgCommon()->livenessHasBeenProved = false;
-
+
+ m_allTransitionsHaveBeenMarked = false;
propagateTransitions(visitor);
+
+ m_jitCode->dfgCommon()->livenessHasBeenProved = false;
determineLiveness(visitor);
#endif // ENABLE(DFG_JIT)
}
+void CodeBlock::visitChildren(JSCell* cell, SlotVisitor& visitor)
+{
+ CodeBlock* thisObject = jsCast<CodeBlock*>(cell);
+ ASSERT_GC_OBJECT_INHERITS(thisObject, info());
+ JSCell::visitChildren(thisObject, visitor);
+ thisObject->visitChildren(visitor);
+}
+
+void CodeBlock::visitChildren(SlotVisitor& visitor)
+{
+ // There are two things that may use unconditional finalizers: inline cache clearing
+ // and jettisoning. The probability of us wanting to do at least one of those things
+ // is probably quite close to 1. So we add one no matter what and when it runs, it
+ // figures out whether it has any work to do.
+ visitor.addUnconditionalFinalizer(&m_unconditionalFinalizer);
+
+ if (CodeBlock* otherBlock = specialOSREntryBlockOrNull())
+ visitor.appendUnbarrieredReadOnlyPointer(otherBlock);
+
+ if (m_jitCode)
+ visitor.reportExtraMemoryVisited(m_jitCode->size());
+ if (m_instructions.size())
+ visitor.reportExtraMemoryVisited(m_instructions.size() * sizeof(Instruction) / m_instructions.refCount());
+
+ visitor.append(&m_unlinkedCode);
+
+ stronglyVisitStrongReferences(visitor);
+ stronglyVisitWeakReferences(visitor);
+
+ m_allTransitionsHaveBeenMarked = false;
+ propagateTransitions(visitor);
+}
+
bool CodeBlock::shouldVisitStrongly()
{
if (Options::forceCodeBlockLiveness())
// because livenessHasBeenProved would have survived as true.
// - Code blocks that don't have any dead weak references.
- if (m_visitStronglyHasBeenCalled.load(std::memory_order_relaxed))
- return true;
-
-#if ENABLE(DFG_JIT)
- if (JITCode::isOptimizingJIT(jitType())) {
- if (m_jitCode->dfgCommon()->livenessHasBeenProved)
- return true;
- }
-#endif
-
- return false;
+ return Heap::isMarked(this);
}
bool CodeBlock::shouldJettisonDueToWeakReference()
return !isKnownToBeLiveDuringGC();
}
+static std::chrono::milliseconds timeToLive(JITCode::JITType jitType)
+{
+ switch (jitType) {
+ case JITCode::InterpreterThunk:
+ return std::chrono::duration_cast<std::chrono::milliseconds>(
+ std::chrono::seconds(5));
+ case JITCode::BaselineJIT:
+ // Effectively 10 additional seconds, since BaselineJIT and
+ // InterpreterThunk share a CodeBlock.
+ return std::chrono::duration_cast<std::chrono::milliseconds>(
+ std::chrono::seconds(15));
+ case JITCode::DFGJIT:
+ return std::chrono::duration_cast<std::chrono::milliseconds>(
+ std::chrono::seconds(20));
+ case JITCode::FTLJIT:
+ return std::chrono::duration_cast<std::chrono::milliseconds>(
+ std::chrono::seconds(60));
+ default:
+ return std::chrono::milliseconds::max();
+ }
+}
+
bool CodeBlock::shouldJettisonDueToOldAge()
{
- if (m_visitStronglyHasBeenCalled.load(std::memory_order_relaxed))
+ if (Heap::isMarked(this))
return false;
- if (timeSinceCreation() < JITCode::timeToLive(jitType()))
+ if (timeSinceCreation() < timeToLive(jitType()))
return false;
return true;
// All weak references are live. Record this information so we don't
// come back here again, and scan the strong references.
dfgCommon->livenessHasBeenProved = true;
- stronglyVisitStrongReferences(visitor);
+ visitor.appendUnbarrieredReadOnlyPointer(this);
#endif // ENABLE(DFG_JIT)
}
-void CodeBlock::visitWeakReferences(SlotVisitor& visitor)
+void CodeBlock::WeakReferenceHarvester::visitWeakReferences(SlotVisitor& visitor)
{
- propagateTransitions(visitor);
- determineLiveness(visitor);
+ CodeBlock* codeBlock =
+ bitwise_cast<CodeBlock*>(
+ bitwise_cast<char*>(this) - OBJECT_OFFSETOF(CodeBlock, m_weakReferenceHarvester));
+
+ codeBlock->propagateTransitions(visitor);
+ codeBlock->determineLiveness(visitor);
}
void CodeBlock::finalizeLLIntInlineCaches()
#endif
}
-void CodeBlock::finalizeUnconditionally()
+void CodeBlock::UnconditionalFinalizer::finalizeUnconditionally()
{
+ CodeBlock* codeBlock = bitwise_cast<CodeBlock*>(
+ bitwise_cast<char*>(this) - OBJECT_OFFSETOF(CodeBlock, m_unconditionalFinalizer));
+
#if ENABLE(DFG_JIT)
- if (shouldJettisonDueToWeakReference()) {
- jettison(Profiler::JettisonDueToWeakReference);
+ if (codeBlock->shouldJettisonDueToWeakReference()) {
+ codeBlock->jettison(Profiler::JettisonDueToWeakReference);
return;
}
#endif // ENABLE(DFG_JIT)
- if (shouldJettisonDueToOldAge()) {
- jettison(Profiler::JettisonDueToOldAge);
+ if (codeBlock->shouldJettisonDueToOldAge()) {
+ codeBlock->jettison(Profiler::JettisonDueToOldAge);
return;
}
- if (JITCode::couldBeInterpreted(jitType()))
- finalizeLLIntInlineCaches();
+ if (JITCode::couldBeInterpreted(codeBlock->jitType()))
+ codeBlock->finalizeLLIntInlineCaches();
#if ENABLE(JIT)
- if (!!jitCode())
- finalizeBaselineJITInlineCaches();
+ if (!!codeBlock->jitCode())
+ codeBlock->finalizeBaselineJITInlineCaches();
#endif
}
// guaranteeing that it matches the details of the CodeBlock we compiled
// the OSR exit against.
- alternative()->visitStrongly(visitor);
+ visitor.append(&m_alternative);
#if ENABLE(DFG_JIT)
DFG::CommonData* dfgCommon = m_jitCode->dfgCommon();
if (dfgCommon->inlineCallFrames) {
for (auto* inlineCallFrame : *dfgCommon->inlineCallFrames) {
- ASSERT(inlineCallFrame->baselineCodeBlock());
- inlineCallFrame->baselineCodeBlock()->visitStrongly(visitor);
+ ASSERT(inlineCallFrame->baselineCodeBlock);
+ visitor.append(&inlineCallFrame->baselineCodeBlock);
}
}
#endif
while (m_incomingLLIntCalls.begin() != m_incomingLLIntCalls.end())
m_incomingLLIntCalls.begin()->unlink();
#if ENABLE(JIT)
- if (m_incomingCalls.isEmpty() && m_incomingPolymorphicCalls.isEmpty())
- return;
while (m_incomingCalls.begin() != m_incomingCalls.end())
m_incomingCalls.begin()->unlink(*vm());
while (m_incomingPolymorphicCalls.begin() != m_incomingPolymorphicCalls.end())
m_incomingLLIntCalls.push(incoming);
}
-PassRefPtr<CodeBlock> CodeBlock::newReplacement()
+CodeBlock* CodeBlock::newReplacement()
{
return ownerScriptExecutable()->newReplacementCodeBlockFor(specializationKind());
}
#if ENABLE(JIT)
-CodeBlock* ProgramCodeBlock::replacement()
+CodeBlock* CodeBlock::replacement()
{
- return jsCast<ProgramExecutable*>(ownerExecutable())->codeBlock();
-}
+ const ClassInfo* classInfo = this->classInfo();
-CodeBlock* ModuleProgramCodeBlock::replacement()
-{
- return jsCast<ModuleProgramExecutable*>(ownerExecutable())->codeBlock();
-}
+ if (classInfo == FunctionCodeBlock::info())
+ return jsCast<FunctionExecutable*>(ownerExecutable())->codeBlockFor(m_isConstructor ? CodeForConstruct : CodeForCall);
-CodeBlock* EvalCodeBlock::replacement()
-{
- return jsCast<EvalExecutable*>(ownerExecutable())->codeBlock();
-}
+ if (classInfo == EvalCodeBlock::info())
+ return jsCast<EvalExecutable*>(ownerExecutable())->codeBlock();
-CodeBlock* FunctionCodeBlock::replacement()
-{
- return jsCast<FunctionExecutable*>(ownerExecutable())->codeBlockFor(m_isConstructor ? CodeForConstruct : CodeForCall);
-}
+ if (classInfo == ProgramCodeBlock::info())
+ return jsCast<ProgramExecutable*>(ownerExecutable())->codeBlock();
-DFG::CapabilityLevel ProgramCodeBlock::capabilityLevelInternal()
-{
- return DFG::programCapabilityLevel(this);
-}
+ if (classInfo == ModuleProgramCodeBlock::info())
+ return jsCast<ModuleProgramExecutable*>(ownerExecutable())->codeBlock();
-DFG::CapabilityLevel ModuleProgramCodeBlock::capabilityLevelInternal()
-{
- return DFG::programCapabilityLevel(this);
-}
+#if ENABLE(WEBASSEMBLY)
+ if (classInfo == WebAssemblyCodeBlock::info())
+ return nullptr;
+#endif
-DFG::CapabilityLevel EvalCodeBlock::capabilityLevelInternal()
-{
- return DFG::evalCapabilityLevel(this);
+ RELEASE_ASSERT_NOT_REACHED();
+ return nullptr;
}
-DFG::CapabilityLevel FunctionCodeBlock::capabilityLevelInternal()
+DFG::CapabilityLevel CodeBlock::computeCapabilityLevel()
{
- if (m_isConstructor)
- return DFG::functionForConstructCapabilityLevel(this);
- return DFG::functionForCallCapabilityLevel(this);
-}
+ const ClassInfo* classInfo = this->classInfo();
+
+ if (classInfo == FunctionCodeBlock::info()) {
+ if (m_isConstructor)
+ return DFG::functionForConstructCapabilityLevel(this);
+ return DFG::functionForCallCapabilityLevel(this);
+ }
+
+ if (classInfo == EvalCodeBlock::info())
+ return DFG::evalCapabilityLevel(this);
+
+ if (classInfo == ProgramCodeBlock::info())
+ return DFG::programCapabilityLevel(this);
+
+ if (classInfo == ModuleProgramCodeBlock::info())
+ return DFG::programCapabilityLevel(this);
#if ENABLE(WEBASSEMBLY)
-CodeBlock* WebAssemblyCodeBlock::replacement()
-{
- return nullptr;
-}
+ if (classInfo == WebAssemblyCodeBlock::info())
+ return DFG::CannotCompile;
+#endif
-DFG::CapabilityLevel WebAssemblyCodeBlock::capabilityLevelInternal()
-{
+ RELEASE_ASSERT_NOT_REACHED();
return DFG::CannotCompile;
}
-#endif
-#endif
+
+#endif // ENABLE(JIT)
void CodeBlock::jettison(Profiler::JettisonReason reason, ReoptimizationMode mode, const FireDetail* detail)
{
{
if (!codeOrigin.inlineCallFrame)
return globalObject();
- return jsCast<FunctionExecutable*>(codeOrigin.inlineCallFrame->executable.get())->eitherCodeBlock()->globalObject();
+ return codeOrigin.inlineCallFrame->baselineCodeBlock->globalObject();
}
class RecursionCheckFunctor {
#if ENABLE(JIT)
DFG::CapabilityLevel CodeBlock::capabilityLevel()
{
- DFG::CapabilityLevel result = capabilityLevelInternal();
+ DFG::CapabilityLevel result = computeCapabilityLevel();
m_capabilityLevelState = result;
return result;
}
#include "CallReturnOffsetToBytecodeOffset.h"
#include "CodeBlockHash.h"
#include "CodeBlockSet.h"
-#include "ConcurrentJITLock.h"
#include "CodeOrigin.h"
#include "CodeType.h"
#include "CompactJITCodeMap.h"
+#include "ConcurrentJITLock.h"
#include "DFGCommon.h"
#include "DFGExitProfile.h"
#include "DeferredCompilationCallback.h"
#include "ExecutionCounter.h"
#include "ExpressionRangeInfo.h"
#include "HandlerInfo.h"
-#include "ObjectAllocationProfile.h"
-#include "Options.h"
-#include "PutPropertySlot.h"
#include "Instruction.h"
#include "JITCode.h"
#include "JITWriteBarrier.h"
+#include "JSCell.h"
#include "JSGlobalObject.h"
#include "JumpTable.h"
#include "LLIntCallLinkInfo.h"
#include "LazyOperandValueProfile.h"
+#include "ObjectAllocationProfile.h"
+#include "Options.h"
#include "ProfilerCompilation.h"
#include "ProfilerJettisonReason.h"
+#include "PutPropertySlot.h"
#include "RegExpObject.h"
#include "StructureStubInfo.h"
#include "UnconditionalFinalizer.h"
enum ReoptimizationMode { DontCountReoptimization, CountReoptimization };
-class CodeBlock : public ThreadSafeRefCounted<CodeBlock>, public UnconditionalFinalizer, public WeakReferenceHarvester {
- WTF_MAKE_FAST_ALLOCATED;
+class CodeBlock : public JSCell {
+ typedef JSCell Base;
friend class BytecodeLivenessAnalysis;
friend class JIT;
friend class LLIntOffsetsExtractor;
+
+ class UnconditionalFinalizer : public JSC::UnconditionalFinalizer {
+ virtual void finalizeUnconditionally() override;
+ };
+
+ class WeakReferenceHarvester : public JSC::WeakReferenceHarvester {
+ virtual void visitWeakReferences(SlotVisitor&) override;
+ };
+
public:
enum CopyParsedBlockTag { CopyParsedBlock };
+
+ static const unsigned StructureFlags = Base::StructureFlags | StructureIsImmortal;
+
+ DECLARE_INFO;
+
protected:
- CodeBlock(CopyParsedBlockTag, CodeBlock& other);
-
- CodeBlock(ScriptExecutable* ownerExecutable, UnlinkedCodeBlock*, JSScope*, PassRefPtr<SourceProvider>, unsigned sourceOffset, unsigned firstLineColumnOffset);
+ CodeBlock(VM*, Structure*, CopyParsedBlockTag, CodeBlock& other);
+ CodeBlock(VM*, Structure*, ScriptExecutable* ownerExecutable, UnlinkedCodeBlock*, JSScope*, PassRefPtr<SourceProvider>, unsigned sourceOffset, unsigned firstLineColumnOffset);
+#if ENABLE(WEBASSEMBLY)
+ CodeBlock(VM*, Structure*, WebAssemblyExecutable* ownerExecutable, VM&, JSGlobalObject*);
+#endif
+
+ void finishCreation(VM&, CopyParsedBlockTag, CodeBlock& other);
+ void finishCreation(VM&, ScriptExecutable* ownerExecutable, UnlinkedCodeBlock*, JSScope*);
#if ENABLE(WEBASSEMBLY)
- CodeBlock(WebAssemblyExecutable* ownerExecutable, VM&, JSGlobalObject*);
+ void finishCreation(VM&, WebAssemblyExecutable* ownerExecutable, JSGlobalObject*);
#endif
WriteBarrier<JSGlobalObject> m_globalObject;
Heap* m_heap;
public:
- JS_EXPORT_PRIVATE virtual ~CodeBlock();
+ JS_EXPORT_PRIVATE ~CodeBlock();
UnlinkedCodeBlock* unlinkedCodeBlock() const { return m_unlinkedCode.get(); }
int* addressOfNumParameters() { return &m_numParameters; }
static ptrdiff_t offsetOfNumParameters() { return OBJECT_OFFSETOF(CodeBlock, m_numParameters); }
- CodeBlock* alternative() { return m_alternative.get(); }
- void setAlternative(PassRefPtr<CodeBlock> alternative) { m_alternative = alternative; }
+ CodeBlock* alternative() const { return static_cast<CodeBlock*>(m_alternative.get()); }
+ void setAlternative(VM&, CodeBlock*);
template <typename Functor> void forEachRelatedCodeBlock(Functor&& functor)
{
{
return specializationFromIsConstruct(m_isConstructor);
}
-
+
+ CodeBlock* alternativeForJettison();
CodeBlock* baselineAlternative();
// FIXME: Get rid of this.
// https://bugs.webkit.org/show_bug.cgi?id=123677
CodeBlock* baselineVersion();
- void clearMarks();
- void visitAggregate(SlotVisitor&);
- void visitStrongly(SlotVisitor&);
+ static void visitChildren(JSCell*, SlotVisitor&);
+ void visitChildren(SlotVisitor&);
+ void visitWeakly(SlotVisitor&);
+ void clearVisitWeaklyHasBeenCalled();
void dumpSource();
void dumpSource(PrintStream&);
unsigned instructionCount() const { return m_instructions.size(); }
// Exactly equivalent to codeBlock->ownerExecutable()->newReplacementCodeBlockFor(codeBlock->specializationKind())
- PassRefPtr<CodeBlock> newReplacement();
+ CodeBlock* newReplacement();
void setJITCode(PassRefPtr<JITCode> code)
{
}
#if ENABLE(JIT)
- virtual CodeBlock* replacement() = 0;
+ CodeBlock* replacement();
- virtual DFG::CapabilityLevel capabilityLevelInternal() = 0;
+ DFG::CapabilityLevel computeCapabilityLevel();
DFG::CapabilityLevel capabilityLevel();
DFG::CapabilityLevel capabilityLevelState() { return m_capabilityLevelState; }
{
unsigned result = m_constantRegisters.size();
m_constantRegisters.append(WriteBarrier<Unknown>());
- m_constantRegisters.last().set(m_globalObject->vm(), m_ownerExecutable.get(), v);
+ m_constantRegisters.last().set(m_globalObject->vm(), this, v);
m_constantsSourceCodeRepresentation.append(SourceCodeRepresentation::Other);
return result;
}
}
protected:
- virtual void visitWeakReferences(SlotVisitor&) override;
- virtual void finalizeUnconditionally() override;
void finalizeLLIntInlineCaches();
void finalizeBaselineJITInlineCaches();
size_t count = constants.size();
m_constantRegisters.resizeToFit(count);
for (size_t i = 0; i < count; i++)
- m_constantRegisters[i].set(*m_vm, ownerExecutable(), constants[i].get());
+ m_constantRegisters[i].set(*m_vm, this, constants[i].get());
m_constantsSourceCodeRepresentation = constantsSourceCodeRepresentation;
}
void replaceConstant(int index, JSValue value)
{
ASSERT(isConstantRegisterIndex(index) && static_cast<size_t>(index - FirstConstantRegisterIndex) < m_constantRegisters.size());
- m_constantRegisters[index - FirstConstantRegisterIndex].set(m_globalObject->vm(), m_ownerExecutable.get(), value);
+ m_constantRegisters[index - FirstConstantRegisterIndex].set(m_globalObject->vm(), this, value);
}
void dumpBytecode(
bool m_isStrictMode;
bool m_needsActivation;
- Atomic<bool> m_visitAggregateHasBeenCalled;
- Atomic<bool> m_visitStronglyHasBeenCalled;
+ Atomic<bool> m_visitWeaklyHasBeenCalled;
RefPtr<SourceProvider> m_source;
unsigned m_sourceOffset;
Vector<WriteBarrier<FunctionExecutable>> m_functionDecls;
Vector<WriteBarrier<FunctionExecutable>> m_functionExprs;
- RefPtr<CodeBlock> m_alternative;
+ WriteBarrier<CodeBlock> m_alternative;
BaselineExecutionCounter m_llintExecuteCounter;
#if ENABLE(JIT)
DFG::CapabilityLevel m_capabilityLevelState;
#endif
+
+ UnconditionalFinalizer m_unconditionalFinalizer;
+ WeakReferenceHarvester m_weakReferenceHarvester;
};
// Program code is not marked by any function, so we make the global object
// responsible for marking it.
class GlobalCodeBlock : public CodeBlock {
+ typedef CodeBlock Base;
+ DECLARE_INFO;
+
protected:
+ // Constructors now thread the VM* and Structure* through to CodeBlock,
+ // since CodeBlock is a GC cell and needs them for JSCell initialization.
- GlobalCodeBlock(CopyParsedBlockTag, GlobalCodeBlock& other)
- : CodeBlock(CopyParsedBlock, other)
+ GlobalCodeBlock(VM* vm, Structure* structure, CopyParsedBlockTag, GlobalCodeBlock& other)
+ : CodeBlock(vm, structure, CopyParsedBlock, other)
 {
 }
-
- GlobalCodeBlock(ScriptExecutable* ownerExecutable, UnlinkedCodeBlock* unlinkedCodeBlock, JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, unsigned firstLineColumnOffset)
- : CodeBlock(ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, sourceOffset, firstLineColumnOffset)
+
+ GlobalCodeBlock(VM* vm, Structure* structure, ScriptExecutable* ownerExecutable, UnlinkedCodeBlock* unlinkedCodeBlock, JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, unsigned firstLineColumnOffset)
+ : CodeBlock(vm, structure, ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, sourceOffset, firstLineColumnOffset)
 {
 }
};
class ProgramCodeBlock : public GlobalCodeBlock {
public:
- ProgramCodeBlock(CopyParsedBlockTag, ProgramCodeBlock& other)
- : GlobalCodeBlock(CopyParsedBlock, other)
+ typedef GlobalCodeBlock Base;
+ DECLARE_INFO;
+
+ // CodeBlocks are now GC cells, so clients construct them through these
+ // create() factories: allocateCell + placement-new, then finishCreation()
+ // for any initialization that may itself allocate (two-phase construction).
+ static ProgramCodeBlock* create(VM* vm, CopyParsedBlockTag, ProgramCodeBlock& other)
 {
+ ProgramCodeBlock* instance = new (NotNull, allocateCell<ProgramCodeBlock>(vm->heap))
+ ProgramCodeBlock(vm, vm->programCodeBlockStructure.get(), CopyParsedBlock, other);
+ instance->finishCreation(*vm, CopyParsedBlock, other);
+ return instance;
 }
- ProgramCodeBlock(ProgramExecutable* ownerExecutable, UnlinkedProgramCodeBlock* unlinkedCodeBlock, JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned firstLineColumnOffset)
- : GlobalCodeBlock(ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, 0, firstLineColumnOffset)
+ static ProgramCodeBlock* create(VM* vm, ProgramExecutable* ownerExecutable, UnlinkedProgramCodeBlock* unlinkedCodeBlock,
+ JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned firstLineColumnOffset)
 {
+ ProgramCodeBlock* instance = new (NotNull, allocateCell<ProgramCodeBlock>(vm->heap))
+ ProgramCodeBlock(vm, vm->programCodeBlockStructure.get(), ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, firstLineColumnOffset);
+ instance->finishCreation(*vm, ownerExecutable, unlinkedCodeBlock, scope);
+ return instance;
 }
-#if ENABLE(JIT)
-protected:
- virtual CodeBlock* replacement() override;
- virtual DFG::CapabilityLevel capabilityLevelInternal() override;
-#endif
+ // Structure for cells of this class; StructureFlags (from CodeBlock)
+ // includes StructureIsImmortal.
+ static Structure* createStructure(VM& vm, JSGlobalObject* globalObject, JSValue prototype)
+ {
+ return Structure::create(vm, globalObject, prototype, TypeInfo(CellType, StructureFlags), info());
+ }
+
+private:
+ // Constructors are private: all clients must go through create() above.
+ ProgramCodeBlock(VM* vm, Structure* structure, CopyParsedBlockTag, ProgramCodeBlock& other)
+ : GlobalCodeBlock(vm, structure, CopyParsedBlock, other)
+ {
+ }
+
+ ProgramCodeBlock(VM* vm, Structure* structure, ProgramExecutable* ownerExecutable, UnlinkedProgramCodeBlock* unlinkedCodeBlock,
+ JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned firstLineColumnOffset)
+ : GlobalCodeBlock(vm, structure, ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, 0, firstLineColumnOffset)
+ {
+ }
+
+ // Runs the destructor; wired into the class's method table (see
+ // CREATE_METHOD_TABLE in the .cpp).
+ static void destroy(JSCell*);
};
class ModuleProgramCodeBlock : public GlobalCodeBlock {
public:
- ModuleProgramCodeBlock(CopyParsedBlockTag, ModuleProgramCodeBlock& other)
- : GlobalCodeBlock(CopyParsedBlock, other)
+ typedef GlobalCodeBlock Base;
+ DECLARE_INFO;
+
+ // GC-cell factories: allocateCell + placement-new, then finishCreation()
+ // for initialization that may itself allocate.
+ static ModuleProgramCodeBlock* create(VM* vm, CopyParsedBlockTag, ModuleProgramCodeBlock& other)
 {
+ ModuleProgramCodeBlock* instance = new (NotNull, allocateCell<ModuleProgramCodeBlock>(vm->heap))
+ ModuleProgramCodeBlock(vm, vm->moduleProgramCodeBlockStructure.get(), CopyParsedBlock, other);
+ instance->finishCreation(*vm, CopyParsedBlock, other);
+ return instance;
 }
- ModuleProgramCodeBlock(ModuleProgramExecutable* ownerExecutable, UnlinkedModuleProgramCodeBlock* unlinkedCodeBlock, JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned firstLineColumnOffset)
- : GlobalCodeBlock(ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, 0, firstLineColumnOffset)
+ static ModuleProgramCodeBlock* create(VM* vm, ModuleProgramExecutable* ownerExecutable, UnlinkedModuleProgramCodeBlock* unlinkedCodeBlock,
+ JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned firstLineColumnOffset)
 {
+ ModuleProgramCodeBlock* instance = new (NotNull, allocateCell<ModuleProgramCodeBlock>(vm->heap))
+ ModuleProgramCodeBlock(vm, vm->moduleProgramCodeBlockStructure.get(), ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, firstLineColumnOffset);
+ instance->finishCreation(*vm, ownerExecutable, unlinkedCodeBlock, scope);
+ return instance;
 }
-#if ENABLE(JIT)
-protected:
- virtual CodeBlock* replacement() override;
- virtual DFG::CapabilityLevel capabilityLevelInternal() override;
-#endif
+ static Structure* createStructure(VM& vm, JSGlobalObject* globalObject, JSValue prototype)
+ {
+ return Structure::create(vm, globalObject, prototype, TypeInfo(CellType, StructureFlags), info());
+ }
+
+private:
+ // Private: all clients must go through create() above.
+ ModuleProgramCodeBlock(VM* vm, Structure* structure, CopyParsedBlockTag, ModuleProgramCodeBlock& other)
+ : GlobalCodeBlock(vm, structure, CopyParsedBlock, other)
+ {
+ }
+
+ ModuleProgramCodeBlock(VM* vm, Structure* structure, ModuleProgramExecutable* ownerExecutable, UnlinkedModuleProgramCodeBlock* unlinkedCodeBlock,
+ JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned firstLineColumnOffset)
+ : GlobalCodeBlock(vm, structure, ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, 0, firstLineColumnOffset)
+ {
+ }
+
+ // Runs the destructor via the class's method table.
+ static void destroy(JSCell*);
};
class EvalCodeBlock : public GlobalCodeBlock {
public:
- EvalCodeBlock(CopyParsedBlockTag, EvalCodeBlock& other)
- : GlobalCodeBlock(CopyParsedBlock, other)
+ typedef GlobalCodeBlock Base;
+ DECLARE_INFO;
+
+ // GC-cell factories: allocateCell + placement-new, then finishCreation()
+ // for initialization that may itself allocate.
+ static EvalCodeBlock* create(VM* vm, CopyParsedBlockTag, EvalCodeBlock& other)
 {
+ EvalCodeBlock* instance = new (NotNull, allocateCell<EvalCodeBlock>(vm->heap))
+ EvalCodeBlock(vm, vm->evalCodeBlockStructure.get(), CopyParsedBlock, other);
+ instance->finishCreation(*vm, CopyParsedBlock, other);
+ return instance;
 }
-
- EvalCodeBlock(EvalExecutable* ownerExecutable, UnlinkedEvalCodeBlock* unlinkedCodeBlock, JSScope* scope, PassRefPtr<SourceProvider> sourceProvider)
- : GlobalCodeBlock(ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, 0, 1)
+
+ static EvalCodeBlock* create(VM* vm, EvalExecutable* ownerExecutable, UnlinkedEvalCodeBlock* unlinkedCodeBlock,
+ JSScope* scope, PassRefPtr<SourceProvider> sourceProvider)
 {
+ EvalCodeBlock* instance = new (NotNull, allocateCell<EvalCodeBlock>(vm->heap))
+ EvalCodeBlock(vm, vm->evalCodeBlockStructure.get(), ownerExecutable, unlinkedCodeBlock, scope, sourceProvider);
+ instance->finishCreation(*vm, ownerExecutable, unlinkedCodeBlock, scope);
+ return instance;
 }
-
+
+ static Structure* createStructure(VM& vm, JSGlobalObject* globalObject, JSValue prototype)
+ {
+ return Structure::create(vm, globalObject, prototype, TypeInfo(CellType, StructureFlags), info());
+ }
+
 const Identifier& variable(unsigned index) { return unlinkedEvalCodeBlock()->variable(index); }
 unsigned numVariables() { return unlinkedEvalCodeBlock()->numVariables(); }
-#if ENABLE(JIT)
-protected:
- virtual CodeBlock* replacement() override;
- virtual DFG::CapabilityLevel capabilityLevelInternal() override;
-#endif
+private:
+ // Private: all clients must go through create() above. Eval code always
+ // starts at column 1 (last GlobalCodeBlock constructor argument).
+ EvalCodeBlock(VM* vm, Structure* structure, CopyParsedBlockTag, EvalCodeBlock& other)
+ : GlobalCodeBlock(vm, structure, CopyParsedBlock, other)
+ {
+ }
+
+ EvalCodeBlock(VM* vm, Structure* structure, EvalExecutable* ownerExecutable, UnlinkedEvalCodeBlock* unlinkedCodeBlock,
+ JSScope* scope, PassRefPtr<SourceProvider> sourceProvider)
+ : GlobalCodeBlock(vm, structure, ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, 0, 1)
+ {
+ }
+ // Runs the destructor via the class's method table.
+ static void destroy(JSCell*);
+
+ // (This second "private:" below is redundant with the one above, but legal.)
private:
 UnlinkedEvalCodeBlock* unlinkedEvalCodeBlock() const { return jsCast<UnlinkedEvalCodeBlock*>(unlinkedCodeBlock()); }
};
class FunctionCodeBlock : public CodeBlock {
public:
- FunctionCodeBlock(CopyParsedBlockTag, FunctionCodeBlock& other)
- : CodeBlock(CopyParsedBlock, other)
+ typedef CodeBlock Base;
+ DECLARE_INFO;
+
+ // GC-cell factories: allocateCell + placement-new, then finishCreation()
+ // for initialization that may itself allocate.
+ static FunctionCodeBlock* create(VM* vm, CopyParsedBlockTag, FunctionCodeBlock& other)
+ {
+ FunctionCodeBlock* instance = new (NotNull, allocateCell<FunctionCodeBlock>(vm->heap))
+ FunctionCodeBlock(vm, vm->functionCodeBlockStructure.get(), CopyParsedBlock, other);
+ instance->finishCreation(*vm, CopyParsedBlock, other);
+ return instance;
+ }
+
+ static FunctionCodeBlock* create(VM* vm, FunctionExecutable* ownerExecutable, UnlinkedFunctionCodeBlock* unlinkedCodeBlock, JSScope* scope,
+ PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, unsigned firstLineColumnOffset)
 {
+ FunctionCodeBlock* instance = new (NotNull, allocateCell<FunctionCodeBlock>(vm->heap))
+ FunctionCodeBlock(vm, vm->functionCodeBlockStructure.get(), ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, sourceOffset, firstLineColumnOffset);
+ instance->finishCreation(*vm, ownerExecutable, unlinkedCodeBlock, scope);
+ return instance;
 }
- FunctionCodeBlock(FunctionExecutable* ownerExecutable, UnlinkedFunctionCodeBlock* unlinkedCodeBlock, JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, unsigned firstLineColumnOffset)
- : CodeBlock(ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, sourceOffset, firstLineColumnOffset)
+ static Structure* createStructure(VM& vm, JSGlobalObject* globalObject, JSValue prototype)
+ {
+ return Structure::create(vm, globalObject, prototype, TypeInfo(CellType, StructureFlags), info());
+ }
+
+private:
+ // Private: all clients must go through create() above.
+ FunctionCodeBlock(VM* vm, Structure* structure, CopyParsedBlockTag, FunctionCodeBlock& other)
+ : CodeBlock(vm, structure, CopyParsedBlock, other)
+ {
+ }
+
+ FunctionCodeBlock(VM* vm, Structure* structure, FunctionExecutable* ownerExecutable, UnlinkedFunctionCodeBlock* unlinkedCodeBlock, JSScope* scope,
+ PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, unsigned firstLineColumnOffset)
+ : CodeBlock(vm, structure, ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, sourceOffset, firstLineColumnOffset)
 {
 }
-#if ENABLE(JIT)
-protected:
- virtual CodeBlock* replacement() override;
- virtual DFG::CapabilityLevel capabilityLevelInternal() override;
-#endif
+ // Runs the destructor via the class's method table (see the .cpp).
+ static void destroy(JSCell*);
};
#if ENABLE(WEBASSEMBLY)
class WebAssemblyCodeBlock : public CodeBlock {
public:
- WebAssemblyCodeBlock(CopyParsedBlockTag, WebAssemblyCodeBlock& other)
- : CodeBlock(CopyParsedBlock, other)
+ // Base typedef kept consistent with the other CodeBlock subclasses so that
+ // WebAssemblyCodeBlock::s_info can chain to CodeBlock::s_info via
+ // &Base::s_info; without it, Base would resolve through CodeBlock's own
+ // "typedef JSCell Base" to the wrong parent ClassInfo.
+ typedef CodeBlock Base;
+ DECLARE_INFO;
+
+ // GC-cell factories: allocateCell + placement-new, then finishCreation()
+ // for initialization that may itself allocate. The finishCreation() calls
+ // pass the full argument lists so they match the overloads CodeBlock
+ // declares (which hide JSCell::finishCreation(VM&)), mirroring the other
+ // CodeBlock subclasses.
+ static WebAssemblyCodeBlock* create(VM* vm, CopyParsedBlockTag, WebAssemblyCodeBlock& other)
 {
+ WebAssemblyCodeBlock* instance = new (NotNull, allocateCell<WebAssemblyCodeBlock>(vm->heap))
+ WebAssemblyCodeBlock(vm, vm->webAssemblyCodeBlockStructure.get(), CopyParsedBlock, other);
+ instance->finishCreation(*vm, CopyParsedBlock, other);
+ return instance;
 }
- WebAssemblyCodeBlock(WebAssemblyExecutable* ownerExecutable, VM& vm, JSGlobalObject* globalObject)
- : CodeBlock(ownerExecutable, vm, globalObject)
+ static WebAssemblyCodeBlock* create(VM* vm, WebAssemblyExecutable* ownerExecutable, JSGlobalObject* globalObject)
 {
+ WebAssemblyCodeBlock* instance = new (NotNull, allocateCell<WebAssemblyCodeBlock>(vm->heap))
+ WebAssemblyCodeBlock(vm, vm->webAssemblyCodeBlockStructure.get(), ownerExecutable, globalObject);
+ instance->finishCreation(*vm, ownerExecutable, globalObject);
+ return instance;
 }
-#if ENABLE(JIT)
-protected:
- virtual CodeBlock* replacement() override;
- virtual DFG::CapabilityLevel capabilityLevelInternal() override;
-#endif
+ static Structure* createStructure(VM& vm, JSGlobalObject* globalObject, JSValue prototype)
+ {
+ return Structure::create(vm, globalObject, prototype, TypeInfo(CellType, StructureFlags), info());
+ }
+
+private:
+ // Constructors take VM* (not VM&) so they match both the create() call
+ // sites above, which hold a VM*, and the CodeBlock base constructors,
+ // which are declared with a leading VM*.
+ WebAssemblyCodeBlock(VM* vm, Structure* structure, CopyParsedBlockTag, WebAssemblyCodeBlock& other)
+ : CodeBlock(vm, structure, CopyParsedBlock, other)
+ {
+ }
+
+ WebAssemblyCodeBlock(VM* vm, Structure* structure, WebAssemblyExecutable* ownerExecutable, JSGlobalObject* globalObject)
+ : CodeBlock(vm, structure, ownerExecutable, *vm, globalObject)
+ {
+ }
+
+ // Runs the destructor via the class's method table (see the .cpp).
+ static void destroy(JSCell*);
};
#endif
return uncheckedR(reg.offset());
}
-inline void CodeBlock::clearMarks()
+// The separate "visited strongly"/"visited aggregate" flags are gone; only
+// the single visit-weakly flag remains to be reset.
+// NOTE(review): relaxed ordering is carried over from the replaced code —
+// presumably this is only reset while no concurrent visiting is under way;
+// confirm before relying on it.
+inline void CodeBlock::clearVisitWeaklyHasBeenCalled()
{
- m_visitStronglyHasBeenCalled.store(false, std::memory_order_relaxed);
- m_visitAggregateHasBeenCalled.store(false, std::memory_order_relaxed);
+ m_visitWeaklyHasBeenCalled.store(false, std::memory_order_relaxed);
}
inline void CodeBlockSet::mark(void* candidateCodeBlock)
{
if (!codeBlock)
return;
-
- // Force GC to visit all CodeBlocks on the stack, including old CodeBlocks
- // that have not executed a barrier. This is overkill, but we have always
- // done this, and it might help us recover gracefully if we forget to execute
- // a barrier when a CodeBlock needs it.
- codeBlock->clearMarks();
+
+ // Try to recover gracefully if we forget to execute a barrier for a
+ // CodeBlock that does value profiling. This is probably overkill, but we
+ // have always done it.
+ Heap::heap(codeBlock)->writeBarrier(codeBlock);
m_currentlyExecuting.add(codeBlock);
}
{
switch (type()) {
case ProgramExecutableType: {
- if (CodeBlock* codeBlock = jsCast<ProgramExecutable*>(this)->m_programCodeBlock.get())
+ if (CodeBlock* codeBlock = static_cast<CodeBlock*>(jsCast<ProgramExecutable*>(this)->m_programCodeBlock.get()))
codeBlock->forEachRelatedCodeBlock(std::forward<Functor>(functor));
break;
}
case EvalExecutableType: {
- if (CodeBlock* codeBlock = jsCast<EvalExecutable*>(this)->m_evalCodeBlock.get())
+ if (CodeBlock* codeBlock = static_cast<CodeBlock*>(jsCast<EvalExecutable*>(this)->m_evalCodeBlock.get()))
codeBlock->forEachRelatedCodeBlock(std::forward<Functor>(functor));
break;
}
case FunctionExecutableType: {
Functor f(std::forward<Functor>(functor));
FunctionExecutable* executable = jsCast<FunctionExecutable*>(this);
- if (CodeBlock* codeBlock = executable->m_codeBlockForCall.get())
+ if (CodeBlock* codeBlock = static_cast<CodeBlock*>(executable->m_codeBlockForCall.get()))
codeBlock->forEachRelatedCodeBlock(f);
- if (CodeBlock* codeBlock = executable->m_codeBlockForConstruct.get())
+ if (CodeBlock* codeBlock = static_cast<CodeBlock*>(executable->m_codeBlockForConstruct.get()))
codeBlock->forEachRelatedCodeBlock(f);
break;
}
case ModuleProgramExecutableType: {
- if (CodeBlock* codeBlock = jsCast<ModuleProgramExecutable*>(this)->m_moduleProgramCodeBlock.get())
+ if (CodeBlock* codeBlock = static_cast<CodeBlock*>(jsCast<ModuleProgramExecutable*>(this)->m_moduleProgramCodeBlock.get()))
codeBlock->forEachRelatedCodeBlock(std::forward<Functor>(functor));
break;
}
if (!a.inlineCallFrame)
return true;
- if (a.inlineCallFrame->executable.get() != b.inlineCallFrame->executable.get())
+ if (a.inlineCallFrame->baselineCodeBlock.get() != b.inlineCallFrame->baselineCodeBlock.get())
return false;
a = a.inlineCallFrame->directCaller;
if (!codeOrigin.inlineCallFrame)
return result;
- result += WTF::PtrHash<JSCell*>::hash(codeOrigin.inlineCallFrame->executable.get());
+ result += WTF::PtrHash<JSCell*>::hash(codeOrigin.inlineCallFrame->baselineCodeBlock.get());
codeOrigin = codeOrigin.inlineCallFrame->directCaller;
}
return result;
}
-ScriptExecutable* CodeOrigin::codeOriginOwner() const
+CodeBlock* CodeOrigin::codeOriginOwner() const
{
if (!inlineCallFrame)
return 0;
- return inlineCallFrame->executable.get();
+ return inlineCallFrame->baselineCodeBlock.get();
}
int CodeOrigin::stackOffset() const
out.print(" --> ");
if (InlineCallFrame* frame = stack[i].inlineCallFrame) {
- out.print(frame->briefFunctionInformation(), ":<", RawPointer(frame->executable.get()), "> ");
+ out.print(frame->briefFunctionInformation(), ":<", RawPointer(frame->baselineCodeBlock.get()), "> ");
if (frame->isClosureCall)
out.print("(closure) ");
}
// If the code origin corresponds to inlined code, gives you the heap object that
// would have owned the code if it had not been inlined. Otherwise returns 0.
- ScriptExecutable* codeOriginOwner() const;
+ CodeBlock* codeOriginOwner() const;
int stackOffset() const;
DeferredCompilationCallback::DeferredCompilationCallback() { }
DeferredCompilationCallback::~DeferredCompilationCallback() { }
-void DeferredCompilationCallback::compilationDidComplete(CodeBlock* codeBlock, CompilationResult result)
+// Signature now also carries the profiled DFG code block; both CodeBlock
+// parameters are unnamed because this base implementation no longer uses them.
+void DeferredCompilationCallback::compilationDidComplete(CodeBlock*, CodeBlock*, CompilationResult result)
{
 dumpCompiledSourcesIfNeeded();
 switch (result) {
 case CompilationFailed:
 case CompilationInvalidated:
- codeBlock->heap()->removeCodeBlock(codeBlock);
- break;
+ // Vestigial manual memory management removed (this was the leak that got
+ // r190450 rolled out): eagerly removing an invalidated CodeBlock from the
+ // heap's CodeBlock set is wrong now that the set is relied on when running
+ // destructors. Failed/invalidated compilations simply fall through to the
+ // successful case and do nothing here.
 case CompilationSuccessful:
 break;
 case CompilationDeferred:
public:
virtual ~DeferredCompilationCallback();
- virtual void compilationDidBecomeReadyAsynchronously(CodeBlock*) = 0;
- virtual void compilationDidComplete(CodeBlock*, CompilationResult);
+ virtual void compilationDidBecomeReadyAsynchronously(CodeBlock*, CodeBlock* profiledDFGCodeBlock) = 0;
+ virtual void compilationDidComplete(CodeBlock*, CodeBlock* profiledDFGCodeBlock, CompilationResult);
Vector<DeferredSourceDump>& ensureDeferredSourceDump();
return 0;
}
- EvalExecutable* getSlow(ExecState* exec, ScriptExecutable* owner, bool inStrictContext, ThisTDZMode thisTDZMode, const String& evalSource, JSScope* scope)
+ EvalExecutable* getSlow(ExecState* exec, JSCell* owner, bool inStrictContext, ThisTDZMode thisTDZMode, const String& evalSource, JSScope* scope)
{
VariableEnvironment variablesUnderTDZ;
JSScope::collectVariablesUnderTDZ(scope, variablesUnderTDZ);
CodeBlockHash InlineCallFrame::hash() const
{
- return jsCast<FunctionExecutable*>(executable.get())->codeBlockFor(
- specializationKind())->hash();
+ return baselineCodeBlock->hash();
}
CString InlineCallFrame::hashAsStringIfPossible() const
{
- return jsCast<FunctionExecutable*>(executable.get())->codeBlockFor(
- specializationKind())->hashAsStringIfPossible();
+ return baselineCodeBlock->hashAsStringIfPossible();
}
CString InlineCallFrame::inferredName() const
{
- return jsCast<FunctionExecutable*>(executable.get())->inferredName().utf8();
-}
-
-CodeBlock* InlineCallFrame::baselineCodeBlock() const
-{
- return jsCast<FunctionExecutable*>(executable.get())->baselineCodeBlockFor(specializationKind());
+ return jsCast<FunctionExecutable*>(baselineCodeBlock->ownerExecutable())->inferredName().utf8();
}
void InlineCallFrame::dumpBriefFunctionInformation(PrintStream& out) const
void InlineCallFrame::dumpInContext(PrintStream& out, DumpContext* context) const
{
- out.print(briefFunctionInformation(), ":<", RawPointer(executable.get()));
- if (executable->isStrictMode())
+ out.print(briefFunctionInformation(), ":<", RawPointer(baselineCodeBlock.get()));
+ if (isStrictMode())
out.print(" (StrictMode)");
out.print(", bc#", directCaller.bytecodeIndex, ", ", static_cast<Kind>(kind));
if (isClosureCall)
#include "CodeBlock.h"
#include "CodeBlockHash.h"
#include "CodeOrigin.h"
-#include "Executable.h"
#include "ValueRecovery.h"
#include "WriteBarrier.h"
#include <wtf/BitVector.h>
struct InlineCallFrame;
class ExecState;
-class ScriptExecutable;
class JSFunction;
struct InlineCallFrame {
}
Vector<ValueRecovery> arguments; // Includes 'this'.
- WriteBarrier<ScriptExecutable> executable;
+ WriteBarrier<CodeBlock> baselineCodeBlock;
ValueRecovery calleeRecovery;
CodeOrigin directCaller;
CodeBlockHash hash() const;
CString hashAsStringIfPossible() const;
- CodeBlock* baselineCodeBlock() const;
-
void setStackOffset(signed offset)
{
stackOffset = offset;
ptrdiff_t callerFrameOffset() const { return stackOffset * sizeof(Register) + CallFrame::callerFrameOffset(); }
ptrdiff_t returnPCOffset() const { return stackOffset * sizeof(Register) + CallFrame::returnPCOffset(); }
+ bool isStrictMode() const { return baselineCodeBlock->isStrictMode(); }
+
void dumpBriefFunctionInformation(PrintStream&) const;
void dump(PrintStream&) const;
void dumpInContext(PrintStream&, DumpContext*) const;
inline CodeBlock* baselineCodeBlockForInlineCallFrame(InlineCallFrame* inlineCallFrame)
{
RELEASE_ASSERT(inlineCallFrame);
- ScriptExecutable* executable = inlineCallFrame->executable.get();
- RELEASE_ASSERT(executable->structure()->classInfo() == FunctionExecutable::info());
- return static_cast<FunctionExecutable*>(executable)->baselineCodeBlockFor(inlineCallFrame->specializationKind());
+ return inlineCallFrame->baselineCodeBlock.get();
}
inline CodeBlock* baselineCodeBlockForOriginAndBaselineCodeBlock(const CodeOrigin& codeOrigin, CodeBlock* baselineCodeBlock)
}
// We will emit code that has a weak reference that isn't otherwise listed anywhere.
- state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock->ownerExecutable(), structure));
+ state.weakReferences.append(WriteBarrier<JSCell>(vm, codeBlock, structure));
jit.move(CCallHelpers::TrustedImmPtr(condition.object()), scratchGPR);
state.failAndRepatch.append(
for (auto& entry : cases)
doesCalls |= entry->doesCalls();
- m_stubRoutine = createJITStubRoutine(code, vm, codeBlock->ownerExecutable(), doesCalls);
+ m_stubRoutine = createJITStubRoutine(code, vm, codeBlock, doesCalls);
m_watchpoints = WTF::move(state.watchpoints);
if (!state.weakReferences.isEmpty())
m_weakReferences = std::make_unique<Vector<WriteBarrier<JSCell>>>(WTF::move(state.weakReferences));
cacheType = CacheType::GetByIdSelf;
u.byIdSelf.baseObjectStructure.set(
- *codeBlock->vm(), codeBlock->ownerExecutable(), baseObjectStructure);
+ *codeBlock->vm(), codeBlock, baseObjectStructure);
u.byIdSelf.offset = offset;
}
cacheType = CacheType::PutByIdReplace;
u.byIdSelf.baseObjectStructure.set(
- *codeBlock->vm(), codeBlock->ownerExecutable(), baseObjectStructure);
+ *codeBlock->vm(), codeBlock, baseObjectStructure);
u.byIdSelf.offset = offset;
}
Vector<std::unique_ptr<AccessCase>> accessCases;
std::unique_ptr<AccessCase> previousCase =
- AccessCase::fromStructureStubInfo(vm, codeBlock->ownerExecutable(), *this);
+ AccessCase::fromStructureStubInfo(vm, codeBlock, *this);
if (previousCase)
accessCases.append(WTF::move(previousCase));
ASSERT(callsiteBlockHead);
m_inlineCallFrame = byteCodeParser->m_graph.m_plan.inlineCallFrames->add();
- byteCodeParser->m_graph.freeze(codeBlock->ownerExecutable());
+ byteCodeParser->m_graph.freeze(codeBlock->baselineVersion());
// The owner is the machine code block, and we already have a barrier on that when the
// plan finishes.
- m_inlineCallFrame->executable.setWithoutWriteBarrier(codeBlock->ownerScriptExecutable());
+ m_inlineCallFrame->baselineCodeBlock.setWithoutWriteBarrier(codeBlock->baselineVersion());
m_inlineCallFrame->setStackOffset(inlineCallFrameStart.offset() - JSStack::CallFrameHeaderSize);
if (callee) {
m_inlineCallFrame->calleeRecovery = ValueRecovery::constant(callee);
if (Options::verboseDFGByteCodeParsing())
dataLog("Parsing ", *m_codeBlock, "\n");
- m_dfgCodeBlock = m_graph.m_plan.profiledDFGCodeBlock.get();
+ m_dfgCodeBlock = m_graph.m_plan.profiledDFGCodeBlock;
if (isFTL(m_graph.m_plan.mode) && m_dfgCodeBlock
&& Options::enablePolyvariantDevirtualization()) {
if (Options::enablePolyvariantCallInlining())
trackedReferences.check(recovery.constant());
}
- if (ScriptExecutable* executable = inlineCallFrame->executable.get())
- trackedReferences.check(executable);
+ if (CodeBlock* baselineCodeBlock = inlineCallFrame->baselineCodeBlock.get())
+ trackedReferences.check(baselineCodeBlock);
if (inlineCallFrame->calleeRecovery.isConstant())
trackedReferences.check(inlineCallFrame->calleeRecovery.constant());
namespace JSC { namespace DFG {
-DesiredTransition::DesiredTransition(CodeBlock* codeBlock, ScriptExecutable* codeOriginOwner, Structure* oldStructure, Structure* newStructure)
+DesiredTransition::DesiredTransition(CodeBlock* codeBlock, CodeBlock* codeOriginOwner, Structure* oldStructure, Structure* newStructure)
: m_codeBlock(codeBlock)
, m_codeOriginOwner(codeOriginOwner)
, m_oldStructure(oldStructure)
{
common->transitions.append(
WeakReferenceTransition(
- vm, m_codeBlock->ownerExecutable(),
+ vm, m_codeBlock,
m_codeOriginOwner,
m_oldStructure, m_newStructure));
}
{
}
-void DesiredTransitions::addLazily(CodeBlock* codeBlock, ScriptExecutable* codeOriginOwner, Structure* oldStructure, Structure* newStructure)
+void DesiredTransitions::addLazily(CodeBlock* codeBlock, CodeBlock* codeOriginOwner, Structure* oldStructure, Structure* newStructure)
{
m_transitions.append(DesiredTransition(codeBlock, codeOriginOwner, oldStructure, newStructure));
}
class DesiredTransition {
public:
- DesiredTransition(CodeBlock*, ScriptExecutable*, Structure*, Structure*);
+ DesiredTransition(CodeBlock*, CodeBlock* codeOriginOwner, Structure*, Structure*);
void reallyAdd(VM&, CommonData*);
private:
CodeBlock* m_codeBlock;
- ScriptExecutable* m_codeOriginOwner;
+ CodeBlock* m_codeOriginOwner;
Structure* m_oldStructure;
Structure* m_newStructure;
};
DesiredTransitions();
~DesiredTransitions();
- void addLazily(CodeBlock*, ScriptExecutable*, Structure*, Structure*);
+ void addLazily(CodeBlock*, CodeBlock* codeOriginOwner, Structure*, Structure*);
void reallyAdd(VM&, CommonData*);
void visitChildren(SlotVisitor&);
for (JSCell* target : m_references) {
if (Structure* structure = jsDynamicCast<Structure*>(target)) {
common->weakStructureReferences.append(
- WriteBarrier<Structure>(vm, m_codeBlock->ownerExecutable(), structure));
+ WriteBarrier<Structure>(vm, m_codeBlock, structure));
} else {
common->weakReferences.append(
- WriteBarrier<JSCell>(vm, m_codeBlock->ownerExecutable(), target));
+ WriteBarrier<JSCell>(vm, m_codeBlock, target));
}
}
}
vm, codeBlock, profiledDFGCodeBlock, mode, osrEntryBytecodeIndex, mustHandleValues,
callback);
if (result != CompilationDeferred)
- callback->compilationDidComplete(codeBlock, result);
+ callback->compilationDidComplete(codeBlock, profiledDFGCodeBlock, result);
return result;
}
Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
: m_vm(vm)
, m_plan(plan)
- , m_codeBlock(m_plan.codeBlock.get())
+ , m_codeBlock(m_plan.codeBlock)
, m_profiledBlock(m_codeBlock->alternative())
, m_allocator(longLivedState.m_allocator)
, m_nextMachineLocal(0)
if (!inlineCallFrame)
return m_codeBlock->ownerScriptExecutable();
- return inlineCallFrame->executable.get();
+ return inlineCallFrame->baselineCodeBlock->ownerScriptExecutable();
}
ScriptExecutable* executableFor(const CodeOrigin& codeOrigin)
{
if (!codeOrigin.inlineCallFrame)
return m_codeBlock->isStrictMode();
- return jsCast<FunctionExecutable*>(codeOrigin.inlineCallFrame->executable.get())->isStrictMode();
+ return codeOrigin.inlineCallFrame->isStrictMode();
}
ECMAMode ecmaModeFor(CodeOrigin codeOrigin)
#if ENABLE(DFG_JIT)
+#include "CodeBlock.h"
#include "CompilationResult.h"
#include "DFGCommonData.h"
#include "DFGMinifiedGraph.h"
void validateReferences(const TrackedReferences&) override;
void shrinkToFit();
+
+#if ENABLE(FTL_JIT)
+ CodeBlock* osrEntryBlock() { return m_osrEntryBlock.get(); }
+ void setOSREntryBlock(VM& vm, const JSCell* owner, CodeBlock* osrEntryBlock) { m_osrEntryBlock.set(vm, owner, osrEntryBlock); }
+ void clearOSREntryBlock() { m_osrEntryBlock.clear(); }
+#endif
private:
friend class JITCompiler; // Allow JITCompiler to call setCodeRef().
#if ENABLE(FTL_JIT)
uint8_t nestedTriggerIsSet { 0 };
UpperTierExecutionCounter tierUpCounter;
- RefPtr<CodeBlock> osrEntryBlock;
+ WriteBarrier<CodeBlock> m_osrEntryBlock;
unsigned osrEntryRetry;
bool abandonOSREntry;
#endif // ENABLE(FTL_JIT)
bool JITFinalizer::finalize()
{
m_jitCode->initializeCodeRef(
- FINALIZE_DFG_CODE(*m_linkBuffer, ("DFG JIT code for %s", toCString(CodeBlockWithJITType(m_plan.codeBlock.get(), JITCode::DFGJIT)).data())),
+ FINALIZE_DFG_CODE(*m_linkBuffer, ("DFG JIT code for %s", toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::DFGJIT)).data())),
MacroAssemblerCodePtr());
m_plan.codeBlock->setJITCode(m_jitCode);
{
RELEASE_ASSERT(!m_withArityCheck.isEmptyValue());
m_jitCode->initializeCodeRef(
- FINALIZE_DFG_CODE(*m_linkBuffer, ("DFG JIT code for %s", toCString(CodeBlockWithJITType(m_plan.codeBlock.get(), JITCode::DFGJIT)).data())),
+ FINALIZE_DFG_CODE(*m_linkBuffer, ("DFG JIT code for %s", toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::DFGJIT)).data())),
m_withArityCheck);
m_plan.codeBlock->setJITCode(m_jitCode);
void JITFinalizer::finalizeCommon()
{
#if ENABLE(FTL_JIT)
- m_jitCode->optimizeAfterWarmUp(m_plan.codeBlock.get());
+ m_jitCode->optimizeAfterWarmUp(m_plan.codeBlock);
#endif // ENABLE(FTL_JIT)
if (m_plan.compilation)
jit.branchTest8(
AssemblyHelpers::NonZero,
AssemblyHelpers::AbsoluteAddress(
- inlineCallFrame->executable->addressOfDidTryToEnterInLoop())));
+ inlineCallFrame->baselineCodeBlock->ownerScriptExecutable()->addressOfDidTryToEnterInLoop())));
}
jit.move(
void adjustAndJumpToTarget(CCallHelpers& jit, const OSRExitBase& exit, bool isExitingToOpCatch)
{
- jit.move(AssemblyHelpers::TrustedImmPtr(jit.codeBlock()->ownerExecutable()), GPRInfo::argumentGPR1);
+ CodeBlock* baselineCodeBlock = jit.baselineCodeBlockFor(exit.m_codeOrigin);
+ jit.move(AssemblyHelpers::TrustedImmPtr(baselineCodeBlock), GPRInfo::argumentGPR1);
osrWriteBarrier(jit, GPRInfo::argumentGPR1, GPRInfo::nonArgGPR0);
InlineCallFrameSet* inlineCallFrames = jit.codeBlock()->jitCode()->dfgCommon()->inlineCallFrames.get();
if (inlineCallFrames) {
for (InlineCallFrame* inlineCallFrame : *inlineCallFrames) {
- ScriptExecutable* ownerExecutable = inlineCallFrame->executable.get();
- jit.move(AssemblyHelpers::TrustedImmPtr(ownerExecutable), GPRInfo::argumentGPR1);
+ CodeBlock* baselineCodeBlock = inlineCallFrame->baselineCodeBlock.get();
+ jit.move(AssemblyHelpers::TrustedImmPtr(baselineCodeBlock), GPRInfo::argumentGPR1);
osrWriteBarrier(jit, GPRInfo::argumentGPR1, GPRInfo::nonArgGPR0);
}
}
if (exit.m_codeOrigin.inlineCallFrame)
jit.addPtr(AssemblyHelpers::TrustedImm32(exit.m_codeOrigin.inlineCallFrame->stackOffset * sizeof(EncodedJSValue)), GPRInfo::callFrameRegister);
- CodeBlock* baselineCodeBlock = jit.baselineCodeBlockFor(exit.m_codeOrigin);
Vector<BytecodeAndMachineOffset>& decodedCodeMap = jit.decodedCodeMapFor(baselineCodeBlock);
BytecodeAndMachineOffset* mapping = binarySearch<BytecodeAndMachineOffset, unsigned>(decodedCodeMap, decodedCodeMap.size(), exit.m_codeOrigin.bytecodeIndex, BytecodeAndMachineOffset::getBytecodeIndex);
DeferGC deferGC(vm.heap);
for (; codeOrigin.inlineCallFrame; codeOrigin = codeOrigin.inlineCallFrame->directCaller) {
- CodeBlock* codeBlock = codeOrigin.inlineCallFrame->baselineCodeBlock();
+ CodeBlock* codeBlock = codeOrigin.inlineCallFrame->baselineCodeBlock.get();
if (codeBlock->jitType() == JSC::JITCode::BaselineJIT)
continue;
bool didTryToEnterIntoInlinedLoops = false;
for (InlineCallFrame* inlineCallFrame = exit->m_codeOrigin.inlineCallFrame; inlineCallFrame; inlineCallFrame = inlineCallFrame->directCaller.inlineCallFrame) {
- if (inlineCallFrame->executable->didTryToEnterInLoop()) {
+ if (inlineCallFrame->baselineCodeBlock->ownerScriptExecutable()->didTryToEnterInLoop()) {
didTryToEnterIntoInlinedLoops = true;
break;
}
// We need to compile the code.
compile(
- *vm, codeBlock->newReplacement().get(), codeBlock, FTLMode, UINT_MAX,
- Operands<JSValue>(), ToFTLDeferredCompilationCallback::create(codeBlock));
+ *vm, codeBlock->newReplacement(), codeBlock, FTLMode, UINT_MAX,
+ Operands<JSValue>(), ToFTLDeferredCompilationCallback::create());
}
static void triggerTierUpNowCommon(ExecState* exec, bool inLoop)
if (worklistState == Worklist::Compiling)
return 0;
- if (CodeBlock* entryBlock = jitCode->osrEntryBlock.get()) {
+ if (CodeBlock* entryBlock = jitCode->osrEntryBlock()) {
void* address = FTL::prepareOSREntry(
exec, codeBlock, entryBlock, bytecodeIndex, streamIndex);
if (address)
// OSR entry failed. Oh no! This implies that we need to retry. We retry
// without exponential backoff and we only do this for the entry code block.
- jitCode->osrEntryBlock = nullptr;
+ jitCode->clearOSREntryBlock();
jitCode->osrEntryRetry = 0;
return 0;
}
Operands<JSValue> mustHandleValues;
jitCode->reconstruct(
exec, codeBlock, CodeOrigin(bytecodeIndex), streamIndex, mustHandleValues);
- RefPtr<CodeBlock> replacementCodeBlock = codeBlock->newReplacement();
+ CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
CompilationResult forEntryResult = compile(
- *vm, replacementCodeBlock.get(), codeBlock, FTLForOSREntryMode, bytecodeIndex,
- mustHandleValues, ToFTLForOSREntryDeferredCompilationCallback::create(codeBlock));
+ *vm, replacementCodeBlock, codeBlock, FTLForOSREntryMode, bytecodeIndex,
+ mustHandleValues, ToFTLForOSREntryDeferredCompilationCallback::create());
- if (forEntryResult != CompilationSuccessful) {
- ASSERT(forEntryResult == CompilationDeferred || replacementCodeBlock->hasOneRef());
+ if (forEntryResult != CompilationSuccessful)
return 0;
- }
// It's possible that the for-entry compile already succeeded. In that case OSR
// entry will succeed unless we ran out of stack. It's not clear what we should do.
// We signal to try again after a while if that happens.
void* address = FTL::prepareOSREntry(
- exec, codeBlock, jitCode->osrEntryBlock.get(), bytecodeIndex, streamIndex);
+ exec, codeBlock, jitCode->osrEntryBlock(), bytecodeIndex, streamIndex);
return static_cast<char*>(address);
}
#endif // ENABLE(FTL_JIT)
} // anonymous namespace
-Plan::Plan(PassRefPtr<CodeBlock> passedCodeBlock, CodeBlock* profiledDFGCodeBlock,
+Plan::Plan(CodeBlock* passedCodeBlock, CodeBlock* profiledDFGCodeBlock,
CompilationMode mode, unsigned osrEntryBytecodeIndex,
const Operands<JSValue>& mustHandleValues)
: vm(*passedCodeBlock->vm())
, mode(mode)
, osrEntryBytecodeIndex(osrEntryBytecodeIndex)
, mustHandleValues(mustHandleValues)
- , compilation(codeBlock->vm()->m_perBytecodeProfiler ? adoptRef(new Profiler::Compilation(codeBlock->vm()->m_perBytecodeProfiler->ensureBytecodesFor(codeBlock.get()), profilerCompilationKindForMode(mode))) : 0)
+ , compilation(codeBlock->vm()->m_perBytecodeProfiler ? adoptRef(new Profiler::Compilation(codeBlock->vm()->m_perBytecodeProfiler->ensureBytecodesFor(codeBlock), profilerCompilationKindForMode(mode))) : 0)
, inlineCallFrames(adoptRef(new InlineCallFrameSet()))
- , identifiers(codeBlock.get())
- , weakReferences(codeBlock.get())
+ , identifiers(codeBlock)
+ , weakReferences(codeBlock)
, willTryToTierUp(false)
, stage(Preparing)
{
void Plan::reallyAdd(CommonData* commonData)
{
- watchpoints.reallyAdd(codeBlock.get(), *commonData);
+ watchpoints.reallyAdd(codeBlock, *commonData);
identifiers.reallyAdd(vm, commonData);
weakReferences.reallyAdd(vm, commonData);
transitions.reallyAdd(vm, commonData);
void Plan::notifyReady()
{
- callback->compilationDidBecomeReadyAsynchronously(codeBlock.get());
+ callback->compilationDidBecomeReadyAsynchronously(codeBlock, profiledDFGCodeBlock);
stage = Ready;
}
CompilationResult Plan::finalizeWithoutNotifyingCallback()
{
// We will establish new references from the code block to things. So, we need a barrier.
- vm.heap.writeBarrier(codeBlock->ownerExecutable());
+ vm.heap.writeBarrier(codeBlock);
if (!isStillValid())
return CompilationInvalidated;
void Plan::finalizeAndNotifyCallback()
{
- callback->compilationDidComplete(codeBlock.get(), finalizeWithoutNotifyingCallback());
+ callback->compilationDidComplete(codeBlock, profiledDFGCodeBlock, finalizeWithoutNotifyingCallback());
}
CompilationKey Plan::key()
return CompilationKey(codeBlock->alternative(), mode);
}
-void Plan::clearCodeBlockMarks()
+void Plan::rememberCodeBlocks()
{
// Compilation writes lots of values to a CodeBlock without performing
// an explicit barrier. So, we need to be pessimistic and assume that
// all our CodeBlocks must be visited during GC.
- codeBlock->clearMarks();
- codeBlock->alternative()->clearMarks();
+ Heap::heap(codeBlock)->writeBarrier(codeBlock);
if (profiledDFGCodeBlock)
- profiledDFGCodeBlock->clearMarks();
+ Heap::heap(profiledDFGCodeBlock)->writeBarrier(profiledDFGCodeBlock);
}
void Plan::checkLivenessAndVisitChildren(SlotVisitor& visitor)
for (unsigned i = mustHandleValues.size(); i--;)
visitor.appendUnbarrieredValue(&mustHandleValues[i]);
- codeBlock->visitStrongly(visitor);
- codeBlock->alternative()->visitStrongly(visitor);
- if (profiledDFGCodeBlock)
- profiledDFGCodeBlock->visitStrongly(visitor);
+ visitor.appendUnbarrieredReadOnlyPointer(codeBlock);
+ visitor.appendUnbarrieredReadOnlyPointer(profiledDFGCodeBlock);
if (inlineCallFrames) {
for (auto* inlineCallFrame : *inlineCallFrames) {
- ASSERT(inlineCallFrame->baselineCodeBlock());
- inlineCallFrame->baselineCodeBlock()->visitStrongly(visitor);
+ ASSERT(inlineCallFrame->baselineCodeBlock.get());
+ visitor.appendUnbarrieredReadOnlyPointer(inlineCallFrame->baselineCodeBlock.get());
}
}
struct Plan : public ThreadSafeRefCounted<Plan> {
Plan(
- PassRefPtr<CodeBlock> codeBlockToCompile, CodeBlock* profiledDFGCodeBlock,
+ CodeBlock* codeBlockToCompile, CodeBlock* profiledDFGCodeBlock,
CompilationMode, unsigned osrEntryBytecodeIndex,
const Operands<JSValue>& mustHandleValues);
~Plan();
CompilationKey key();
- void clearCodeBlockMarks();
+ void rememberCodeBlocks();
void checkLivenessAndVisitChildren(SlotVisitor&);
bool isKnownToBeLiveDuringGC();
void cancel();
VM& vm;
- RefPtr<CodeBlock> codeBlock;
- RefPtr<CodeBlock> profiledDFGCodeBlock;
+
+ // These can be raw pointers because we visit them during every GC in checkLivenessAndVisitChildren.
+ CodeBlock* codeBlock;
+ CodeBlock* profiledDFGCodeBlock;
+
CompilationMode mode;
const unsigned osrEntryBytecodeIndex;
Operands<JSValue> mustHandleValues;
namespace JSC { namespace DFG {
-ToFTLDeferredCompilationCallback::ToFTLDeferredCompilationCallback(
- PassRefPtr<CodeBlock> dfgCodeBlock)
- : m_dfgCodeBlock(dfgCodeBlock)
+ToFTLDeferredCompilationCallback::ToFTLDeferredCompilationCallback()
{
}
ToFTLDeferredCompilationCallback::~ToFTLDeferredCompilationCallback() { }
-Ref<ToFTLDeferredCompilationCallback> ToFTLDeferredCompilationCallback::create(PassRefPtr<CodeBlock> dfgCodeBlock)
+Ref<ToFTLDeferredCompilationCallback> ToFTLDeferredCompilationCallback::create()
{
- return adoptRef(*new ToFTLDeferredCompilationCallback(dfgCodeBlock));
+ return adoptRef(*new ToFTLDeferredCompilationCallback());
}
void ToFTLDeferredCompilationCallback::compilationDidBecomeReadyAsynchronously(
- CodeBlock* codeBlock)
+ CodeBlock* codeBlock, CodeBlock* profiledDFGCodeBlock)
{
if (Options::verboseOSR()) {
dataLog(
- "Optimizing compilation of ", *codeBlock, " (for ", *m_dfgCodeBlock,
+ "Optimizing compilation of ", *codeBlock, " (for ", *profiledDFGCodeBlock,
") did become ready.\n");
}
- m_dfgCodeBlock->jitCode()->dfg()->forceOptimizationSlowPathConcurrently(
- m_dfgCodeBlock.get());
+ profiledDFGCodeBlock->jitCode()->dfg()->forceOptimizationSlowPathConcurrently(
+ profiledDFGCodeBlock);
}
void ToFTLDeferredCompilationCallback::compilationDidComplete(
- CodeBlock* codeBlock, CompilationResult result)
+ CodeBlock* codeBlock, CodeBlock* profiledDFGCodeBlock, CompilationResult result)
{
if (Options::verboseOSR()) {
dataLog(
- "Optimizing compilation of ", *codeBlock, " (for ", *m_dfgCodeBlock,
+ "Optimizing compilation of ", *codeBlock, " (for ", *profiledDFGCodeBlock,
") result: ", result, "\n");
}
- if (m_dfgCodeBlock->replacement() != m_dfgCodeBlock) {
+ if (profiledDFGCodeBlock->replacement() != profiledDFGCodeBlock) {
if (Options::verboseOSR()) {
dataLog(
"Dropping FTL code block ", *codeBlock, " on the floor because the "
- "DFG code block ", *m_dfgCodeBlock, " was jettisoned.\n");
+ "DFG code block ", *profiledDFGCodeBlock, " was jettisoned.\n");
}
return;
}
if (result == CompilationSuccessful)
codeBlock->ownerScriptExecutable()->installCode(codeBlock);
- m_dfgCodeBlock->jitCode()->dfg()->setOptimizationThresholdBasedOnCompilationResult(
- m_dfgCodeBlock.get(), result);
+ profiledDFGCodeBlock->jitCode()->dfg()->setOptimizationThresholdBasedOnCompilationResult(
+ profiledDFGCodeBlock, result);
- DeferredCompilationCallback::compilationDidComplete(codeBlock, result);
+ DeferredCompilationCallback::compilationDidComplete(codeBlock, profiledDFGCodeBlock, result);
}
} } // JSC::DFG
class ToFTLDeferredCompilationCallback : public DeferredCompilationCallback {
protected:
- ToFTLDeferredCompilationCallback(PassRefPtr<CodeBlock> dfgCodeBlock);
+ ToFTLDeferredCompilationCallback();
public:
virtual ~ToFTLDeferredCompilationCallback();
- static Ref<ToFTLDeferredCompilationCallback> create(PassRefPtr<CodeBlock> dfgCodeBlock);
+ static Ref<ToFTLDeferredCompilationCallback> create();
- virtual void compilationDidBecomeReadyAsynchronously(CodeBlock*);
- virtual void compilationDidComplete(CodeBlock*, CompilationResult);
-
-private:
- RefPtr<CodeBlock> m_dfgCodeBlock;
+ virtual void compilationDidBecomeReadyAsynchronously(CodeBlock*, CodeBlock* profiledDFGCodeBlock);
+ virtual void compilationDidComplete(CodeBlock*, CodeBlock* profiledDFGCodeBlock, CompilationResult);
};
} } // namespace JSC::DFG
namespace JSC { namespace DFG {
-ToFTLForOSREntryDeferredCompilationCallback::ToFTLForOSREntryDeferredCompilationCallback(
- PassRefPtr<CodeBlock> dfgCodeBlock)
- : m_dfgCodeBlock(dfgCodeBlock)
+ToFTLForOSREntryDeferredCompilationCallback::ToFTLForOSREntryDeferredCompilationCallback()
{
}
{
}
-Ref<ToFTLForOSREntryDeferredCompilationCallback>ToFTLForOSREntryDeferredCompilationCallback::create(
- PassRefPtr<CodeBlock> dfgCodeBlock)
+Ref<ToFTLForOSREntryDeferredCompilationCallback> ToFTLForOSREntryDeferredCompilationCallback::create()
{
- return adoptRef(*new ToFTLForOSREntryDeferredCompilationCallback(dfgCodeBlock));
+ return adoptRef(*new ToFTLForOSREntryDeferredCompilationCallback());
}
void ToFTLForOSREntryDeferredCompilationCallback::compilationDidBecomeReadyAsynchronously(
- CodeBlock* codeBlock)
+ CodeBlock* codeBlock, CodeBlock* profiledDFGCodeBlock)
{
if (Options::verboseOSR()) {
dataLog(
- "Optimizing compilation of ", *codeBlock, " (for ", *m_dfgCodeBlock,
+ "Optimizing compilation of ", *codeBlock, " (for ", *profiledDFGCodeBlock,
") did become ready.\n");
}
- m_dfgCodeBlock->jitCode()->dfg()->forceOptimizationSlowPathConcurrently(
- m_dfgCodeBlock.get());
+ profiledDFGCodeBlock->jitCode()->dfg()->forceOptimizationSlowPathConcurrently(
+ profiledDFGCodeBlock);
}
void ToFTLForOSREntryDeferredCompilationCallback::compilationDidComplete(
- CodeBlock* codeBlock, CompilationResult result)
+ CodeBlock* codeBlock, CodeBlock* profiledDFGCodeBlock, CompilationResult result)
{
if (Options::verboseOSR()) {
dataLog(
- "Optimizing compilation of ", *codeBlock, " (for ", *m_dfgCodeBlock,
+ "Optimizing compilation of ", *codeBlock, " (for ", *profiledDFGCodeBlock,
") result: ", result, "\n");
}
- JITCode* jitCode = m_dfgCodeBlock->jitCode()->dfg();
+ JITCode* jitCode = profiledDFGCodeBlock->jitCode()->dfg();
switch (result) {
case CompilationSuccessful:
- jitCode->osrEntryBlock = codeBlock;
+ jitCode->setOSREntryBlock(*codeBlock->vm(), profiledDFGCodeBlock, codeBlock);
break;
case CompilationFailed:
jitCode->osrEntryRetry = 0;
break;
}
- DeferredCompilationCallback::compilationDidComplete(codeBlock, result);
+ DeferredCompilationCallback::compilationDidComplete(codeBlock, profiledDFGCodeBlock, result);
}
} } // JSC::DFG
class ToFTLForOSREntryDeferredCompilationCallback : public DeferredCompilationCallback {
protected:
- ToFTLForOSREntryDeferredCompilationCallback(PassRefPtr<CodeBlock> dfgCodeBlock);
+ ToFTLForOSREntryDeferredCompilationCallback();
public:
virtual ~ToFTLForOSREntryDeferredCompilationCallback();
- static Ref<ToFTLForOSREntryDeferredCompilationCallback> create(PassRefPtr<CodeBlock> dfgCodeBlock);
+ static Ref<ToFTLForOSREntryDeferredCompilationCallback> create();
- virtual void compilationDidBecomeReadyAsynchronously(CodeBlock*);
- virtual void compilationDidComplete(CodeBlock*, CompilationResult);
-
-private:
- RefPtr<CodeBlock> m_dfgCodeBlock;
+ virtual void compilationDidBecomeReadyAsynchronously(CodeBlock*, CodeBlock* profiledDFGCodeBlock);
+ virtual void compilationDidComplete(CodeBlock*, CodeBlock* profiledDFGCodeBlock, CompilationResult);
};
} } // namespace JSC::DFG
completeAllReadyPlansForVM(vm);
}
-void Worklist::clearCodeBlockMarks(VM& vm)
+void Worklist::rememberCodeBlocks(VM& vm)
{
LockHolder locker(m_lock);
for (PlanMap::iterator iter = m_plans.begin(); iter != m_plans.end(); ++iter) {
Plan* plan = iter->value.get();
if (&plan->vm != &vm)
continue;
- plan->clearCodeBlockMarks();
+ plan->rememberCodeBlocks();
}
}
}
}
-void clearCodeBlockMarks(VM& vm)
+void rememberCodeBlocks(VM& vm)
{
for (unsigned i = DFG::numberOfWorklists(); i--;) {
if (DFG::Worklist* worklist = DFG::worklistForIndexOrNull(i))
- worklist->clearCodeBlockMarks(vm);
+ worklist->rememberCodeBlocks(vm);
}
}
// worklist->completeAllReadyPlansForVM(vm);
void completeAllPlansForVM(VM&);
- void clearCodeBlockMarks(VM&);
+ void rememberCodeBlocks(VM&);
void waitUntilAllPlansForVMAreReady(VM&);
State completeAllReadyPlansForVM(VM&, CompilationKey = CompilationKey());
}
void completeAllPlansForVM(VM&);
-void clearCodeBlockMarks(VM&);
+void rememberCodeBlocks(VM&);
} } // namespace JSC::DFG
jitCode->initializeExitThunks(
FINALIZE_DFG_CODE(
*exitThunksLinkBuffer,
- ("FTL exit thunks for %s", toCString(CodeBlockWithJITType(m_plan.codeBlock.get(), JITCode::FTLJIT)).data())));
+ ("FTL exit thunks for %s", toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::FTLJIT)).data())));
} // else this function had no OSR exits, so no exit thunks.
if (sideCodeLinkBuffer) {
jitCode->addHandle(FINALIZE_DFG_CODE(
*sideCodeLinkBuffer,
("FTL side code for %s",
- toCString(CodeBlockWithJITType(m_plan.codeBlock.get(), JITCode::FTLJIT)).data()))
+ toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::FTLJIT)).data()))
.executableMemory());
}
jitCode->addHandle(FINALIZE_DFG_CODE(
*handleExceptionsLinkBuffer,
("FTL exception handler for %s",
- toCString(CodeBlockWithJITType(m_plan.codeBlock.get(), JITCode::FTLJIT)).data()))
+ toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::FTLJIT)).data()))
.executableMemory());
}
jitCode->initializeArityCheckEntrypoint(
FINALIZE_DFG_CODE(
*entrypointLinkBuffer,
- ("FTL entrypoint thunk for %s with LLVM generated code at %p", toCString(CodeBlockWithJITType(m_plan.codeBlock.get(), JITCode::FTLJIT)).data(), function)));
+ ("FTL entrypoint thunk for %s with LLVM generated code at %p", toCString(CodeBlockWithJITType(m_plan.codeBlock, JITCode::FTLJIT)).data(), function)));
m_plan.codeBlock->setJITCode(jitCode);
CodeBlockSet::~CodeBlockSet()
{
- for (CodeBlock* codeBlock : m_oldCodeBlocks)
- codeBlock->deref();
-
- for (CodeBlock* codeBlock : m_newCodeBlocks)
- codeBlock->deref();
}
-void CodeBlockSet::add(PassRefPtr<CodeBlock> codeBlock)
+void CodeBlockSet::add(CodeBlock* codeBlock)
{
- CodeBlock* block = codeBlock.leakRef();
- bool isNewEntry = m_newCodeBlocks.add(block).isNewEntry;
+ bool isNewEntry = m_newCodeBlocks.add(codeBlock).isNewEntry;
ASSERT_UNUSED(isNewEntry, isNewEntry);
}
void CodeBlockSet::clearMarksForFullCollection()
{
for (CodeBlock* codeBlock : m_oldCodeBlocks)
- codeBlock->clearMarks();
+ codeBlock->clearVisitWeaklyHasBeenCalled();
// We promote after we clear marks on the old generation CodeBlocks because
// none of the young generations CodeBlocks need to be cleared.
promoteYoungCodeBlocks();
}
-void CodeBlockSet::clearMarksForEdenCollection(const Vector<const JSCell*>& rememberedSet)
+void CodeBlockSet::lastChanceToFinalize()
{
- // This ensures that we will revisit CodeBlocks in remembered Executables even if they were previously marked.
- for (const JSCell* cell : rememberedSet) {
- ScriptExecutable* executable = const_cast<ScriptExecutable*>(jsDynamicCast<const ScriptExecutable*>(cell));
- if (!executable)
- continue;
- executable->forEachCodeBlock([this](CodeBlock* codeBlock) {
- codeBlock->clearMarks();
- m_remembered.add(codeBlock);
- });
- }
+ for (CodeBlock* codeBlock : m_newCodeBlocks)
+ codeBlock->classInfo()->methodTable.destroy(codeBlock);
+
+ for (CodeBlock* codeBlock : m_oldCodeBlocks)
+ codeBlock->classInfo()->methodTable.destroy(codeBlock);
}
void CodeBlockSet::deleteUnmarkedAndUnreferenced(HeapOperation collectionType)
{
HashSet<CodeBlock*>& set = collectionType == EdenCollection ? m_newCodeBlocks : m_oldCodeBlocks;
+ Vector<CodeBlock*> unmarked;
+ for (CodeBlock* codeBlock : set) {
+ if (Heap::isMarked(codeBlock))
+ continue;
+ unmarked.append(codeBlock);
+ }
- // This needs to be a fixpoint because code blocks that are unmarked may
- // refer to each other. For example, a DFG code block that is owned by
- // the GC may refer to an FTL for-entry code block that is also owned by
- // the GC.
- Vector<CodeBlock*, 16> toRemove;
- if (verbose)
- dataLog("Fixpointing over unmarked, set size = ", set.size(), "...\n");
- for (;;) {
- for (CodeBlock* codeBlock : set) {
- if (!codeBlock->hasOneRef())
- continue;
- codeBlock->deref();
- toRemove.append(codeBlock);
- }
- if (verbose)
- dataLog(" Removing ", toRemove.size(), " blocks.\n");
- if (toRemove.isEmpty())
- break;
- for (CodeBlock* codeBlock : toRemove)
- set.remove(codeBlock);
- toRemove.resize(0);
+ for (CodeBlock* codeBlock : unmarked) {
+ codeBlock->classInfo()->methodTable.destroy(codeBlock);
+ set.remove(codeBlock);
}
// Any remaining young CodeBlocks are live and need to be promoted to the set of old CodeBlocks.
void CodeBlockSet::remove(CodeBlock* codeBlock)
{
- codeBlock->deref();
if (m_oldCodeBlocks.contains(codeBlock)) {
m_oldCodeBlocks.remove(codeBlock);
return;
m_newCodeBlocks.remove(codeBlock);
}
-void CodeBlockSet::traceMarked(SlotVisitor& visitor)
-{
- if (verbose)
- dataLog("Tracing ", m_currentlyExecuting.size(), " code blocks.\n");
-
- // We strongly visit the currently executing set because jettisoning code
- // is not valuable once it's on the stack. We're past the point where
- // jettisoning would avoid the cost of OSR exit.
- for (const RefPtr<CodeBlock>& codeBlock : m_currentlyExecuting)
- codeBlock->visitStrongly(visitor);
-
- // We strongly visit the remembered set because jettisoning old code during
- // Eden GC is unsound. There might be an old object with a strong reference
- // to the code.
- for (const RefPtr<CodeBlock>& codeBlock : m_remembered)
- codeBlock->visitStrongly(visitor);
-}
-
-void CodeBlockSet::rememberCurrentlyExecutingCodeBlocks(Heap* heap)
+void CodeBlockSet::writeBarrierCurrentlyExecutingCodeBlocks(Heap* heap)
{
if (verbose)
dataLog("Remembering ", m_currentlyExecuting.size(), " code blocks.\n");
- for (const RefPtr<CodeBlock>& codeBlock : m_currentlyExecuting)
- heap->writeBarrier(codeBlock->ownerExecutable());
+ for (CodeBlock* codeBlock : m_currentlyExecuting)
+ heap->writeBarrier(codeBlock);
- // It's safe to clear these RefPtr sets because we won't delete the CodeBlocks
- // in them until the next GC, and we'll recompute them at that time.
+ // It's safe to clear this set because we won't delete the CodeBlocks
+ // in it until the next GC, and we'll recompute it at that time.
m_currentlyExecuting.clear();
- m_remembered.clear();
}
void CodeBlockSet::dump(PrintStream& out) const
out.print(comma, pointerDump(codeBlock));
out.print("], currentlyExecuting = [");
comma = CommaPrinter();
- for (const RefPtr<CodeBlock>& codeBlock : m_currentlyExecuting)
- out.print(comma, pointerDump(codeBlock.get()));
+ for (CodeBlock* codeBlock : m_currentlyExecuting)
+ out.print(comma, pointerDump(codeBlock));
out.print("]}");
}
public:
CodeBlockSet();
~CodeBlockSet();
+
+ void lastChanceToFinalize();
// Add a CodeBlock. This is only called by CodeBlock constructors.
- void add(PassRefPtr<CodeBlock>);
+ void add(CodeBlock*);
- // Clear mark bits for certain CodeBlocks depending on the type of collection.
- void clearMarksForEdenCollection(const Vector<const JSCell*>&);
-
// Clear all mark bits for all CodeBlocks.
void clearMarksForFullCollection();
void remove(CodeBlock*);
- // Trace all marked code blocks. The CodeBlock is free to make use of
- // mayBeExecuting.
- void traceMarked(SlotVisitor&);
-
// Add all currently executing CodeBlocks to the remembered set to be
// re-scanned during the next collection.
- void rememberCurrentlyExecutingCodeBlocks(Heap*);
+ void writeBarrierCurrentlyExecutingCodeBlocks(Heap*);
// Visits each CodeBlock in the heap until the visitor function returns true
// to indicate that it is done iterating, or until every CodeBlock has been
void dump(PrintStream&) const;
private:
- void clearMarksForCodeBlocksInRememberedExecutables(const Vector<const JSCell*>&);
void promoteYoungCodeBlocks();
- // This is not a set of RefPtr<CodeBlock> because we need to be able to find
- // arbitrary bogus pointers. I could have written a thingy that had peek types
- // and all, but that seemed like overkill.
HashSet<CodeBlock*> m_oldCodeBlocks;
HashSet<CodeBlock*> m_newCodeBlocks;
- HashSet<RefPtr<CodeBlock>> m_currentlyExecuting;
- HashSet<RefPtr<CodeBlock>> m_remembered;
+ HashSet<CodeBlock*> m_currentlyExecuting;
};
} // namespace JSC
RELEASE_ASSERT(!m_vm->entryScope);
RELEASE_ASSERT(m_operationInProgress == NoOperation);
+ m_codeBlocks.lastChanceToFinalize();
m_objectSpace.lastChanceToFinalize();
releaseDelayedReleasedObjects();
GCPHASE(MarkRoots);
ASSERT(isValidThreadState(m_vm));
- Vector<const JSCell*> rememberedSet(m_slotVisitor.markStack().size());
- m_slotVisitor.markStack().fillVector(rememberedSet);
-
-#if ENABLE(DFG_JIT)
- DFG::clearCodeBlockMarks(*m_vm);
-#endif
- if (m_operationInProgress == EdenCollection)
- m_codeBlocks.clearMarksForEdenCollection(rememberedSet);
- else
- m_codeBlocks.clearMarksForFullCollection();
-
// We gather conservative roots before clearing mark bits because conservative
// gathering uses the mark bits to determine whether a reference is valid.
ConservativeRoots conservativeRoots(&m_objectSpace.blocks(), &m_storageSpace);
gatherJSStackRoots(conservativeRoots);
gatherScratchBufferRoots(conservativeRoots);
- clearLivenessData();
+#if ENABLE(DFG_JIT)
+ DFG::rememberCodeBlocks(*m_vm);
+#endif
- if (m_operationInProgress == FullCollection)
+ if (m_operationInProgress == FullCollection) {
m_opaqueRoots.clear();
+ m_slotVisitor.clearMarkStack();
+ }
+
+ clearLivenessData();
m_parallelMarkersShouldExit = false;
{
ParallelModeEnabler enabler(m_slotVisitor);
+ m_slotVisitor.donateAndDrain();
visitExternalRememberedSet();
visitSmallStrings();
visitConservativeRoots(conservativeRoots);
void Heap::clearLivenessData()
{
GCPHASE(ClearLivenessData);
+ if (m_operationInProgress == FullCollection)
+ m_codeBlocks.clearMarksForFullCollection();
+
m_objectSpace.clearNewlyAllocated();
m_objectSpace.clearMarks();
}
void Heap::traceCodeBlocksAndJITStubRoutines()
{
GCPHASE(TraceCodeBlocksAndJITStubRoutines);
- m_codeBlocks.traceMarked(m_slotVisitor);
m_jitStubRoutines.traceMarkedStubRoutines(m_slotVisitor);
if (Options::logGC() == GCLogging::Verbose)
// If JavaScript is running, it's not safe to delete all JavaScript code, since
// we'll end up returning to deleted code.
RELEASE_ASSERT(!m_vm->entryScope);
+ ASSERT(m_operationInProgress == NoOperation);
completeAllDFGPlans();
- for (ExecutableBase* current : m_executables) {
- if (!current->isFunctionExecutable())
- continue;
- static_cast<FunctionExecutable*>(current)->clearCode();
- }
-
- ASSERT(m_operationInProgress == FullCollection || m_operationInProgress == NoOperation);
- m_codeBlocks.clearMarksForFullCollection();
- m_codeBlocks.deleteUnmarkedAndUnreferenced(FullCollection);
+ for (ExecutableBase* executable : m_executables)
+ executable->clearCode();
}
void Heap::deleteAllUnlinkedCodeBlocks()
if (isMarked(current))
continue;
- // We do this because executable memory is limited on some platforms and because
- // CodeBlock requires eager finalization.
- ExecutableBase::clearCodeVirtual(current);
+ // Eagerly dereference the Executable's JITCode in order to run watchpoint
+ // destructors. Otherwise, watchpoints might fire for deleted CodeBlocks.
+ current->clearCode();
std::swap(m_executables[i], m_executables.last());
m_executables.removeLast();
}
deleteUnmarkedCompiledCode();
deleteSourceProviderCaches();
notifyIncrementalSweeper();
- rememberCurrentlyExecutingCodeBlocks();
+ writeBarrierCurrentlyExecutingCodeBlocks();
resetAllocators();
updateAllocationLimits();
GCPHASE(StartingCollection);
if (shouldDoFullCollection(collectionType)) {
m_operationInProgress = FullCollection;
- m_slotVisitor.clearMarkStack();
m_shouldDoFullCollection = false;
if (Options::logGC())
dataLog("FullCollection, ");
m_sweeper->startSweeping();
}
-void Heap::rememberCurrentlyExecutingCodeBlocks()
+void Heap::writeBarrierCurrentlyExecutingCodeBlocks()
{
- GCPHASE(RememberCurrentlyExecutingCodeBlocks);
- m_codeBlocks.rememberCurrentlyExecutingCodeBlocks(this);
+ GCPHASE(WriteBarrierCurrentlyExecutingCodeBlocks);
+ m_codeBlocks.writeBarrierCurrentlyExecutingCodeBlocks(this);
}
void Heap::resetAllocators()
template<typename T> void releaseSoon(RetainPtr<T>&&);
#endif
- void removeCodeBlock(CodeBlock* cb) { m_codeBlocks.remove(cb); }
-
static bool isZombified(JSCell* cell) { return *(void**)cell == zombifiedBits; }
void registerWeakGCMap(void* weakGCMap, std::function<void()> pruningCallback);
void snapshotMarkedSpace();
void deleteSourceProviderCaches();
void notifyIncrementalSweeper();
- void rememberCurrentlyExecutingCodeBlocks();
+ void writeBarrierCurrentlyExecutingCodeBlocks();
void resetAllocators();
void copyBackingStores();
void harvestWeakReferences();
ASSERT(!callFrame->vm().exception());
ThisTDZMode thisTDZMode = callerCodeBlock->unlinkedCodeBlock()->constructorKind() == ConstructorKind::Derived ? ThisTDZMode::AlwaysCheck : ThisTDZMode::CheckIfNeeded;
- eval = callerCodeBlock->evalCodeCache().getSlow(callFrame, callerCodeBlock->ownerScriptExecutable(), callerCodeBlock->isStrictMode(), thisTDZMode, programSource, callerScopeChain);
+ eval = callerCodeBlock->evalCodeCache().getSlow(callFrame, callerCodeBlock, callerCodeBlock->isStrictMode(), thisTDZMode, programSource, callerScopeChain);
if (!eval)
return jsUndefined();
}
m_frame.m_argumentCountIncludingThis = callFrame->r(inlineCallFrame->argumentCountRegister.offset()).unboxedInt32();
else
m_frame.m_argumentCountIncludingThis = inlineCallFrame->arguments.size();
- m_frame.m_codeBlock = inlineCallFrame->baselineCodeBlock();
+ m_frame.m_codeBlock = inlineCallFrame->baselineCodeBlock.get();
m_frame.m_bytecodeOffset = codeOrigin->bytecodeIndex;
JSFunction* callee = inlineCallFrame->calleeForCallFrame(callFrame);
if (!codeOrigin.inlineCallFrame)
return m_codeBlock->ownerExecutable();
- return codeOrigin.inlineCallFrame->executable.get();
+ return codeOrigin.inlineCallFrame->baselineCodeBlock->ownerExecutable();
}
Vector<BytecodeAndMachineOffset>& AssemblyHelpers::decodedCodeMapFor(CodeBlock* codeBlock)
{
if (!codeOrigin.inlineCallFrame)
return codeBlock()->isStrictMode();
- return jsCast<FunctionExecutable*>(codeOrigin.inlineCallFrame->executable.get())->isStrictMode();
+ return codeOrigin.inlineCallFrame->isStrictMode();
}
ECMAMode ecmaModeFor(CodeOrigin codeOrigin)
// Helper for the creation of simple stub routines that need no help from the GC. Note
// that codeBlock gets "executed" more than once.
#define FINALIZE_CODE_FOR_GC_AWARE_STUB(codeBlock, patchBuffer, makesCalls, cell, dataLogFArguments) \
- (createJITStubRoutine(FINALIZE_CODE_FOR((codeBlock), (patchBuffer), dataLogFArguments), *(codeBlock)->vm(), (codeBlock)->ownerExecutable(), (makesCalls), (cell)))
+ (createJITStubRoutine(FINALIZE_CODE_FOR((codeBlock), (patchBuffer), dataLogFArguments), *(codeBlock)->vm(), (codeBlock), (makesCalls), (cell)))
} // namespace JSC
}
}
- static std::chrono::milliseconds timeToLive(JITType jitType)
- {
- switch (jitType) {
- case InterpreterThunk:
- return std::chrono::duration_cast<std::chrono::milliseconds>(
- std::chrono::seconds(5));
- case BaselineJIT:
- // Effectively 10 additional seconds, since BaselineJIT and
- // InterpreterThunk share a CodeBlock.
- return std::chrono::duration_cast<std::chrono::milliseconds>(
- std::chrono::seconds(15));
- case DFGJIT:
- return std::chrono::duration_cast<std::chrono::milliseconds>(
- std::chrono::seconds(20));
- case FTLJIT:
- return std::chrono::duration_cast<std::chrono::milliseconds>(
- std::chrono::seconds(60));
- default:
- return std::chrono::milliseconds::max();
- }
- }
-
static bool isLowerTier(JITType expectedLower, JITType expectedHigher)
{
RELEASE_ASSERT(isExecutableScript(expectedLower));
for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
emitInitRegister(virtualRegisterForLocal(j).offset());
- emitWriteBarrier(m_codeBlock->ownerExecutable());
+ emitWriteBarrier(m_codeBlock);
emitEnterOptimizationCheck();
}
mustHandleValues[i] = exec->uncheckedR(operand).jsValue();
}
- RefPtr<CodeBlock> replacementCodeBlock = codeBlock->newReplacement();
+ CodeBlock* replacementCodeBlock = codeBlock->newReplacement();
CompilationResult result = DFG::compile(
- vm, replacementCodeBlock.get(), 0, DFG::DFGMode, bytecodeIndex,
+ vm, replacementCodeBlock, nullptr, DFG::DFGMode, bytecodeIndex,
mustHandleValues, JITToDFGDeferredCompilationCallback::create());
- if (result != CompilationSuccessful) {
- ASSERT(result == CompilationDeferred || replacementCodeBlock->hasOneRef());
+ if (result != CompilationSuccessful)
return encodeResult(0, 0);
- }
}
CodeBlock* optimizedCodeBlock = codeBlock->replacement();
}
void JITToDFGDeferredCompilationCallback::compilationDidBecomeReadyAsynchronously(
- CodeBlock* codeBlock)
+ CodeBlock* codeBlock, CodeBlock* profiledDFGCodeBlock)
{
+ ASSERT_UNUSED(profiledDFGCodeBlock, !profiledDFGCodeBlock);
ASSERT(codeBlock->alternative()->jitType() == JITCode::BaselineJIT);
if (Options::verboseOSR())
}
void JITToDFGDeferredCompilationCallback::compilationDidComplete(
- CodeBlock* codeBlock, CompilationResult result)
+ CodeBlock* codeBlock, CodeBlock* profiledDFGCodeBlock, CompilationResult result)
{
+ ASSERT(!profiledDFGCodeBlock);
ASSERT(codeBlock->alternative()->jitType() == JITCode::BaselineJIT);
if (Options::verboseOSR())
codeBlock->alternative()->setOptimizationThresholdBasedOnCompilationResult(result);
- DeferredCompilationCallback::compilationDidComplete(codeBlock, result);
+ DeferredCompilationCallback::compilationDidComplete(codeBlock, profiledDFGCodeBlock, result);
}
} // JSC
static Ref<JITToDFGDeferredCompilationCallback> create();
- virtual void compilationDidBecomeReadyAsynchronously(CodeBlock*) override;
- virtual void compilationDidComplete(CodeBlock*, CompilationResult) override;
+ virtual void compilationDidBecomeReadyAsynchronously(CodeBlock*, CodeBlock* profiledDFGCodeBlock) override;
+ virtual void compilationDidComplete(CodeBlock*, CodeBlock* profiledDFGCodeBlock, CompilationResult) override;
};
} // namespace JSC
return GiveUpOnCache;
CodeBlock* codeBlock = exec->codeBlock();
- ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
VM& vm = exec->vm();
std::unique_ptr<AccessCase> newCase;
if (isJSArray(baseValue) && propertyName == exec->propertyNames().length)
- newCase = AccessCase::getLength(vm, owner, AccessCase::ArrayLength);
+ newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
else if (isJSString(baseValue) && propertyName == exec->propertyNames().length)
- newCase = AccessCase::getLength(vm, owner, AccessCase::StringLength);
+ newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
else {
if (!slot.isCacheable() && !slot.isUnset())
return GiveUpOnCache;
if (slot.isUnset()) {
conditionSet = generateConditionsForPropertyMiss(
- vm, codeBlock->ownerExecutable(), exec, structure, propertyName.impl());
+ vm, codeBlock, exec, structure, propertyName.impl());
} else {
conditionSet = generateConditionsForPrototypePropertyHit(
- vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(),
+ vm, codeBlock, exec, structure, slot.slotBase(),
propertyName.impl());
}
type = AccessCase::CustomGetter;
newCase = AccessCase::get(
- vm, owner, type, offset, structure, conditionSet, loadTargetFromProxy,
+ vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
slot.isCacheableCustom() ? slot.slotBase() : nullptr);
}
return GiveUpOnCache;
CodeBlock* codeBlock = exec->codeBlock();
- ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
VM& vm = exec->vm();
if (!baseValue.isCell())
return RetryCacheLater;
}
- newCase = AccessCase::replace(vm, owner, structure, slot.cachedOffset());
+ newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
} else {
ASSERT(slot.type() == PutPropertySlot::NewProperty);
if (putKind == NotDirect) {
conditionSet =
generateConditionsForPropertySetterMiss(
- vm, owner, exec, newStructure, ident.impl());
+ vm, codeBlock, exec, newStructure, ident.impl());
if (!conditionSet.isValid())
return GiveUpOnCache;
}
- newCase = AccessCase::transition(vm, owner, structure, newStructure, offset, conditionSet);
+ newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
}
} else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
if (slot.isCacheableCustom()) {
if (slot.base() != baseValue) {
conditionSet =
generateConditionsForPrototypePropertyHitCustom(
- vm, owner, exec, structure, slot.base(), ident.impl());
+ vm, codeBlock, exec, structure, slot.base(), ident.impl());
if (!conditionSet.isValid())
return GiveUpOnCache;
}
newCase = AccessCase::setter(
- vm, owner, AccessCase::CustomSetter, structure, invalidOffset, conditionSet,
+ vm, codeBlock, AccessCase::CustomSetter, structure, invalidOffset, conditionSet,
slot.customSetter(), slot.base());
} else {
ObjectPropertyConditionSet conditionSet;
if (slot.base() != baseValue) {
conditionSet =
generateConditionsForPrototypePropertyHit(
- vm, owner, exec, structure, slot.base(), ident.impl());
+ vm, codeBlock, exec, structure, slot.base(), ident.impl());
if (!conditionSet.isValid())
return GiveUpOnCache;
offset = conditionSet.slotBaseCondition().offset();
offset = slot.cachedOffset();
newCase = AccessCase::setter(
- vm, owner, AccessCase::Setter, structure, offset, conditionSet);
+ vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
}
}
}
CodeBlock* codeBlock = exec->codeBlock();
- ScriptExecutable* owner = codeBlock->ownerScriptExecutable();
VM& vm = exec->vm();
Structure* structure = base->structure(vm);
if (wasFound) {
if (slot.slotBase() != base) {
conditionSet = generateConditionsForPrototypePropertyHit(
- vm, codeBlock->ownerExecutable(), exec, structure, slot.slotBase(), ident.impl());
+ vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
}
} else {
conditionSet = generateConditionsForPropertyMiss(
- vm, codeBlock->ownerExecutable(), exec, structure, ident.impl());
+ vm, codeBlock, exec, structure, ident.impl());
}
if (!conditionSet.isValid())
return GiveUpOnCache;
std::unique_ptr<AccessCase> newCase = AccessCase::in(
- vm, owner, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);
+ vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);
MacroAssemblerCodePtr codePtr = stubInfo.addAccessCase(codeBlock, ident, WTF::move(newCase));
if (!codePtr)
VM* vm = callerCodeBlock->vm();
ASSERT(!callLinkInfo.isLinked());
- callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock->ownerExecutable(), callee);
- callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
+ callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock, callee);
+ callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
if (shouldShowDisassemblyFor(callerCodeBlock))
dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));
("Polymorphic call stub for %s, return point %p, targets %s",
toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
toCString(listDump(callCases)).data())),
- *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
+ *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
WTF::move(fastCounts)));
MacroAssembler::replaceWithJump(
if (!structure->isUncacheableDictionary()
&& !structure->typeInfo().prohibitsPropertyCaching()
&& !structure->typeInfo().newImpurePropertyFiresWatchpoints()) {
- vm.heap.writeBarrier(codeBlock->ownerExecutable());
+ vm.heap.writeBarrier(codeBlock);
ConcurrentJITLocker locker(codeBlock->m_lock);
&& !structure->typeInfo().prohibitsPropertyCaching()
&& baseCell == slot.base()) {
- vm.heap.writeBarrier(codeBlock->ownerExecutable());
+ vm.heap.writeBarrier(codeBlock);
if (slot.type() == PutPropertySlot::NewProperty) {
GCSafeConcurrentJITLocker locker(codeBlock->m_lock, vm.heap);
StructureChain* chain = structure->prototypeChain(exec);
ASSERT(chain);
pc[7].u.structureChain.set(
- vm, codeBlock->ownerExecutable(), chain);
+ vm, codeBlock, chain);
}
pc[8].u.putByIdFlags = static_cast<PutByIdFlags>(
pc[8].u.putByIdFlags |
if (callLinkInfo->isOnList())
callLinkInfo->remove();
- callLinkInfo->callee.set(vm, callerCodeBlock->ownerExecutable(), callee);
- callLinkInfo->lastSeenCallee.set(vm, callerCodeBlock->ownerExecutable(), callee);
+ callLinkInfo->callee.set(vm, callerCodeBlock, callee);
+ callLinkInfo->lastSeenCallee.set(vm, callerCodeBlock, callee);
callLinkInfo->machineCodeTarget = codePtr;
if (codeBlock)
codeBlock->linkIncomingCall(exec, callLinkInfo);
for (unsigned i = 1; i < stack.size(); ++i) {
append(Origin(
- database.ensureBytecodesFor(stack[i].inlineCallFrame->baselineCodeBlock()),
+ database.ensureBytecodesFor(stack[i].inlineCallFrame->baselineCodeBlock.get()),
stack[i].bytecodeIndex));
}
}
auto& cacheWriteBarrier = pc[4].u.jsCell;
if (!cacheWriteBarrier)
- cacheWriteBarrier.set(exec->vm(), exec->codeBlock()->ownerExecutable(), constructor);
+ cacheWriteBarrier.set(exec->vm(), exec->codeBlock(), constructor);
else if (cacheWriteBarrier.unvalidatedGet() != JSCell::seenMultipleCalleeObjects() && cacheWriteBarrier.get() != constructor)
cacheWriteBarrier.setWithoutWriteBarrier(JSCell::seenMultipleCalleeObjects());
if (myStructure != otherStructure) {
if (otherStructure)
pc[3].u.toThisStatus = ToThisConflicted;
- pc[2].u.structure.set(vm, exec->codeBlock()->ownerExecutable(), myStructure);
+ pc[2].u.structure.set(vm, exec->codeBlock(), myStructure);
}
} else {
pc[3].u.toThisStatus = ToThisConflicted;
SLOW_PATH_DECL(slow_path_enter)
{
BEGIN();
- ExecutableBase* ownerExecutable = exec->codeBlock()->ownerExecutable();
- Heap::heap(ownerExecutable)->writeBarrier(ownerExecutable);
+ CodeBlock* codeBlock = exec->codeBlock();
+ Heap::heap(codeBlock)->writeBarrier(codeBlock);
END();
}
scope->structure()->didCachePropertyReplacement(exec->vm(), slot.cachedOffset());
ConcurrentJITLocker locker(codeBlock->m_lock);
- pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), scope->structure());
+ pc[5].u.structure.set(exec->vm(), codeBlock, scope->structure());
pc[6].u.operand = slot.cachedOffset();
}
}
Structure* structure = scope->structure(vm);
{
ConcurrentJITLocker locker(codeBlock->m_lock);
- pc[5].u.structure.set(exec->vm(), codeBlock->ownerExecutable(), structure);
+ pc[5].u.structure.set(exec->vm(), codeBlock, structure);
pc[6].u.operand = slot.cachedOffset();
}
structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
#endif
m_numParametersForCall = NUM_PARAMETERS_NOT_COMPILED;
m_numParametersForConstruct = NUM_PARAMETERS_NOT_COMPILED;
+
+ if (classInfo() == FunctionExecutable::info()) {
+ FunctionExecutable* executable = jsCast<FunctionExecutable*>(this);
+ executable->m_codeBlockForCall.clear();
+ executable->m_codeBlockForConstruct.clear();
+ return;
+ }
+
+ if (classInfo() == EvalExecutable::info()) {
+ EvalExecutable* executable = jsCast<EvalExecutable*>(this);
+ executable->m_evalCodeBlock.clear();
+ executable->m_unlinkedEvalCodeBlock.clear();
+ return;
+ }
+
+ if (classInfo() == ProgramExecutable::info()) {
+ ProgramExecutable* executable = jsCast<ProgramExecutable*>(this);
+ executable->m_programCodeBlock.clear();
+ executable->m_unlinkedProgramCodeBlock.clear();
+ return;
+ }
+
+ if (classInfo() == ModuleProgramExecutable::info()) {
+ ModuleProgramExecutable* executable = jsCast<ModuleProgramExecutable*>(this);
+ executable->m_moduleProgramCodeBlock.clear();
+ executable->m_unlinkedModuleProgramCodeBlock.clear();
+ executable->m_moduleEnvironmentSymbolTable.clear();
+ return;
+ }
+
+#if ENABLE(WEBASSEMBLY)
+ if (classInfo() == WebAssemblyExecutable::info()) {
+ WebAssemblyExecutable* executable = jsCast<WebAssemblyExecutable*>(this);
+ executable->m_codeBlockForCall.clear();
+ return;
+ }
+#endif
+
+ ASSERT(classInfo() == NativeExecutable::info());
}
#if ENABLE(DFG_JIT)
{
ASSERT(vm.heap.isDeferred());
- RefPtr<CodeBlock> oldCodeBlock;
+ CodeBlock* oldCodeBlock = nullptr;
switch (codeType) {
case GlobalCode: {
ASSERT(kind == CodeForCall);
- oldCodeBlock = executable->m_programCodeBlock;
- executable->m_programCodeBlock = codeBlock;
+ oldCodeBlock = executable->m_programCodeBlock.get();
+ executable->m_programCodeBlock.setMayBeNull(vm, this, codeBlock);
break;
}
ASSERT(kind == CodeForCall);
- oldCodeBlock = executable->m_moduleProgramCodeBlock;
- executable->m_moduleProgramCodeBlock = codeBlock;
+ oldCodeBlock = executable->m_moduleProgramCodeBlock.get();
+ executable->m_moduleProgramCodeBlock.setMayBeNull(vm, this, codeBlock);
break;
}
ASSERT(kind == CodeForCall);
- oldCodeBlock = executable->m_evalCodeBlock;
- executable->m_evalCodeBlock = codeBlock;
+ oldCodeBlock = executable->m_evalCodeBlock.get();
+ executable->m_evalCodeBlock.setMayBeNull(vm, this, codeBlock);
break;
}
switch (kind) {
case CodeForCall:
- oldCodeBlock = executable->m_codeBlockForCall;
- executable->m_codeBlockForCall = codeBlock;
+ oldCodeBlock = executable->m_codeBlockForCall.get();
+ executable->m_codeBlockForCall.setMayBeNull(vm, this, codeBlock);
break;
case CodeForConstruct:
- oldCodeBlock = executable->m_codeBlockForConstruct;
- executable->m_codeBlockForConstruct = codeBlock;
+ oldCodeBlock = executable->m_codeBlockForConstruct.get();
+ executable->m_codeBlockForConstruct.setMayBeNull(vm, this, codeBlock);
break;
}
break;
vm.heap.writeBarrier(this);
}
-RefPtr<CodeBlock> ScriptExecutable::newCodeBlockFor(
+CodeBlock* ScriptExecutable::newCodeBlockFor(
CodeSpecializationKind kind, JSFunction* function, JSScope* scope, JSObject*& exception)
{
VM* vm = scope->vm();
RELEASE_ASSERT(kind == CodeForCall);
RELEASE_ASSERT(!executable->m_evalCodeBlock);
RELEASE_ASSERT(!function);
- return adoptRef(new EvalCodeBlock(
+ return EvalCodeBlock::create(vm,
executable, executable->m_unlinkedEvalCodeBlock.get(), scope,
- executable->source().provider()));
+ executable->source().provider());
}
if (classInfo() == ProgramExecutable::info()) {
RELEASE_ASSERT(kind == CodeForCall);
RELEASE_ASSERT(!executable->m_programCodeBlock);
RELEASE_ASSERT(!function);
- return adoptRef(new ProgramCodeBlock(
+ return ProgramCodeBlock::create(vm,
executable, executable->m_unlinkedProgramCodeBlock.get(), scope,
- executable->source().provider(), executable->source().startColumn()));
+ executable->source().provider(), executable->source().startColumn());
}
if (classInfo() == ModuleProgramExecutable::info()) {
RELEASE_ASSERT(kind == CodeForCall);
RELEASE_ASSERT(!executable->m_moduleProgramCodeBlock);
RELEASE_ASSERT(!function);
- return adoptRef(new ModuleProgramCodeBlock(
+ return ModuleProgramCodeBlock::create(vm,
executable, executable->m_unlinkedModuleProgramCodeBlock.get(), scope,
- executable->source().provider(), executable->source().startColumn()));
+ executable->source().provider(), executable->source().startColumn());
}
RELEASE_ASSERT(classInfo() == FunctionExecutable::info());
unsigned sourceOffset = executable->source().startOffset();
unsigned startColumn = executable->source().startColumn();
- return adoptRef(new FunctionCodeBlock(
- executable, unlinkedCodeBlock, scope, provider, sourceOffset, startColumn));
+ return FunctionCodeBlock::create(vm,
+ executable, unlinkedCodeBlock, scope, provider, sourceOffset, startColumn);
}
-PassRefPtr<CodeBlock> ScriptExecutable::newReplacementCodeBlockFor(
+CodeBlock* ScriptExecutable::newReplacementCodeBlockFor(
CodeSpecializationKind kind)
{
if (classInfo() == EvalExecutable::info()) {
EvalExecutable* executable = jsCast<EvalExecutable*>(this);
EvalCodeBlock* baseline = static_cast<EvalCodeBlock*>(
executable->m_evalCodeBlock->baselineVersion());
- RefPtr<EvalCodeBlock> result = adoptRef(new EvalCodeBlock(
- CodeBlock::CopyParsedBlock, *baseline));
- result->setAlternative(baseline);
+ EvalCodeBlock* result = EvalCodeBlock::create(vm(),
+ CodeBlock::CopyParsedBlock, *baseline);
+ result->setAlternative(*vm(), baseline);
return result;
}
ProgramExecutable* executable = jsCast<ProgramExecutable*>(this);
ProgramCodeBlock* baseline = static_cast<ProgramCodeBlock*>(
executable->m_programCodeBlock->baselineVersion());
- RefPtr<ProgramCodeBlock> result = adoptRef(new ProgramCodeBlock(
- CodeBlock::CopyParsedBlock, *baseline));
- result->setAlternative(baseline);
+ ProgramCodeBlock* result = ProgramCodeBlock::create(vm(),
+ CodeBlock::CopyParsedBlock, *baseline);
+ result->setAlternative(*vm(), baseline);
return result;
}
ModuleProgramExecutable* executable = jsCast<ModuleProgramExecutable*>(this);
ModuleProgramCodeBlock* baseline = static_cast<ModuleProgramCodeBlock*>(
executable->m_moduleProgramCodeBlock->baselineVersion());
- RefPtr<ModuleProgramCodeBlock> result = adoptRef(new ModuleProgramCodeBlock(
- CodeBlock::CopyParsedBlock, *baseline));
- result->setAlternative(baseline);
+ ModuleProgramCodeBlock* result = ModuleProgramCodeBlock::create(vm(),
+ CodeBlock::CopyParsedBlock, *baseline);
+ result->setAlternative(*vm(), baseline);
return result;
}
FunctionExecutable* executable = jsCast<FunctionExecutable*>(this);
FunctionCodeBlock* baseline = static_cast<FunctionCodeBlock*>(
executable->codeBlockFor(kind)->baselineVersion());
- RefPtr<FunctionCodeBlock> result = adoptRef(new FunctionCodeBlock(
- CodeBlock::CopyParsedBlock, *baseline));
- result->setAlternative(baseline);
+ FunctionCodeBlock* result = FunctionCodeBlock::create(vm(),
+ CodeBlock::CopyParsedBlock, *baseline);
+ result->setAlternative(*vm(), baseline);
return result;
}
DeferGC deferGC(vm.heap);
JSObject* exception = 0;
- RefPtr<CodeBlock> codeBlock = newCodeBlockFor(kind, function, scope, exception);
+ CodeBlock* codeBlock = newCodeBlockFor(kind, function, scope, exception);
if (!codeBlock) {
RELEASE_ASSERT(exception);
return exception;
codeBlock->validate();
if (Options::useLLInt())
- setupLLInt(vm, codeBlock.get());
+ setupLLInt(vm, codeBlock);
else
- setupJIT(vm, codeBlock.get());
+ setupJIT(vm, codeBlock);
- installCode(*codeBlock->vm(), codeBlock.get(), codeBlock->codeType(), codeBlock->specializationKind());
+ installCode(*codeBlock->vm(), codeBlock, codeBlock->codeType(), codeBlock->specializationKind());
return 0;
}
EvalExecutable* thisObject = jsCast<EvalExecutable*>(cell);
ASSERT_GC_OBJECT_INHERITS(thisObject, info());
ScriptExecutable::visitChildren(thisObject, visitor);
- if (thisObject->m_evalCodeBlock)
- thisObject->m_evalCodeBlock->visitAggregate(visitor);
visitor.append(&thisObject->m_unlinkedEvalCodeBlock);
-}
-
-void EvalExecutable::clearCode()
-{
- m_evalCodeBlock = nullptr;
- m_unlinkedEvalCodeBlock.clear();
- Base::clearCode();
+ if (thisObject->m_evalCodeBlock)
+ thisObject->m_evalCodeBlock->visitWeakly(visitor);
}
JSObject* ProgramExecutable::checkSyntax(ExecState* exec)
ScriptExecutable::visitChildren(thisObject, visitor);
visitor.append(&thisObject->m_unlinkedProgramCodeBlock);
if (thisObject->m_programCodeBlock)
- thisObject->m_programCodeBlock->visitAggregate(visitor);
-}
-
-void ProgramExecutable::clearCode()
-{
- m_programCodeBlock = nullptr;
- m_unlinkedProgramCodeBlock.clear();
- Base::clearCode();
+ thisObject->m_programCodeBlock->visitWeakly(visitor);
}
void ModuleProgramExecutable::visitChildren(JSCell* cell, SlotVisitor& visitor)
visitor.append(&thisObject->m_unlinkedModuleProgramCodeBlock);
visitor.append(&thisObject->m_moduleEnvironmentSymbolTable);
if (thisObject->m_moduleProgramCodeBlock)
- thisObject->m_moduleProgramCodeBlock->visitAggregate(visitor);
-}
-
-void ModuleProgramExecutable::clearCode()
-{
- m_moduleProgramCodeBlock = nullptr;
- m_unlinkedModuleProgramCodeBlock.clear();
- m_moduleEnvironmentSymbolTable.clear();
- Base::clearCode();
+ thisObject->m_moduleProgramCodeBlock->visitWeakly(visitor);
}
FunctionCodeBlock* FunctionExecutable::baselineCodeBlockFor(CodeSpecializationKind kind)
ASSERT_GC_OBJECT_INHERITS(thisObject, info());
ScriptExecutable::visitChildren(thisObject, visitor);
if (thisObject->m_codeBlockForCall)
- thisObject->m_codeBlockForCall->visitAggregate(visitor);
+ thisObject->m_codeBlockForCall->visitWeakly(visitor);
if (thisObject->m_codeBlockForConstruct)
- thisObject->m_codeBlockForConstruct->visitAggregate(visitor);
+ thisObject->m_codeBlockForConstruct->visitWeakly(visitor);
visitor.append(&thisObject->m_unlinkedExecutable);
visitor.append(&thisObject->m_singletonFunction);
}
-void FunctionExecutable::clearCode()
-{
- m_codeBlockForCall = nullptr;
- m_codeBlockForConstruct = nullptr;
- Base::clearCode();
-}
-
FunctionExecutable* FunctionExecutable::fromGlobalCode(
const Identifier& name, ExecState& exec, const SourceCode& source,
JSObject*& exception, int overrideLineNumber)
ASSERT_GC_OBJECT_INHERITS(thisObject, info());
ExecutableBase::visitChildren(thisObject, visitor);
if (thisObject->m_codeBlockForCall)
- thisObject->m_codeBlockForCall->visitAggregate(visitor);
+ thisObject->m_codeBlockForCall->visitWeakly(visitor);
visitor.append(&thisObject->m_module);
}
-void WebAssemblyExecutable::clearCode()
-{
- m_codeBlockForCall = nullptr;
- Base::clearCode();
-}
-
void WebAssemblyExecutable::prepareForExecution(ExecState* exec)
{
if (hasJITCodeForCall())
VM& vm = exec->vm();
DeferGC deferGC(vm.heap);
- RefPtr<WebAssemblyCodeBlock> codeBlock = adoptRef(new WebAssemblyCodeBlock(
- this, vm, exec->lexicalGlobalObject()));
+ WebAssemblyCodeBlock* codeBlock = WebAssemblyCodeBlock::create(vm,
+ this, exec->lexicalGlobalObject());
- WASMFunctionParser::compile(vm, codeBlock.get(), m_module.get(), m_source, m_functionIndex);
+ WASMFunctionParser::compile(vm, codeBlock, m_module.get(), m_source, m_functionIndex);
m_jitCodeForCall = codeBlock->jitCode();
m_jitCodeForCallWithArityCheck = MacroAssemblerCodePtr();
m_numParametersForCall = codeBlock->numParameters();
- m_codeBlockForCall = codeBlock;
+ m_codeBlockForCall.set(vm, this, codeBlock);
Heap::heap(this)->writeBarrier(this);
}
int m_numParametersForConstruct;
public:
- static void clearCodeVirtual(ExecutableBase*);
-
PassRefPtr<JITCode> generatedJITCodeForCall()
{
ASSERT(m_jitCodeForCall);
}
private:
+ friend class ExecutableBase;
+
NativeExecutable(VM& vm, NativeFunction function, NativeFunction constructor)
: ExecutableBase(vm, vm.nativeExecutableStructure.get(), NUM_PARAMETERS_IS_HOST)
, m_function(function)
void installCode(CodeBlock*);
void installCode(VM&, CodeBlock*, CodeType, CodeSpecializationKind);
- RefPtr<CodeBlock> newCodeBlockFor(CodeSpecializationKind, JSFunction*, JSScope*, JSObject*& exception);
- PassRefPtr<CodeBlock> newReplacementCodeBlockFor(CodeSpecializationKind);
+ CodeBlock* newCodeBlockFor(CodeSpecializationKind, JSFunction*, JSScope*, JSObject*& exception);
+ CodeBlock* newReplacementCodeBlockFor(CodeSpecializationKind);
JSObject* prepareForExecution(ExecState* exec, JSFunction* function, JSScope* scope, CodeSpecializationKind kind)
{
template <typename Functor> void forEachCodeBlock(Functor&&);
private:
+ friend class ExecutableBase;
JSObject* prepareForExecutionImpl(ExecState*, JSFunction*, JSScope*, CodeSpecializationKind);
protected:
DECLARE_INFO;
- void clearCode();
-
ExecutableInfo executableInfo() const { return ExecutableInfo(needsActivation(), usesEval(), isStrictMode(), false, false, ConstructorKind::None, false); }
unsigned numVariables() { return m_unlinkedEvalCodeBlock->numVariables(); }
unsigned numberOfFunctionDecls() { return m_unlinkedEvalCodeBlock->numberOfFunctionDecls(); }
private:
+ friend class ExecutableBase;
friend class ScriptExecutable;
EvalExecutable(ExecState*, const SourceCode&, bool);
static void visitChildren(JSCell*, SlotVisitor&);
- RefPtr<EvalCodeBlock> m_evalCodeBlock;
+ WriteBarrier<EvalCodeBlock> m_evalCodeBlock;
WriteBarrier<UnlinkedEvalCodeBlock> m_unlinkedEvalCodeBlock;
};
DECLARE_INFO;
- void clearCode();
-
ExecutableInfo executableInfo() const { return ExecutableInfo(needsActivation(), usesEval(), isStrictMode(), false, false, ConstructorKind::None, false); }
private:
+ friend class ExecutableBase;
friend class ScriptExecutable;
ProgramExecutable(ExecState*, const SourceCode&);
static void visitChildren(JSCell*, SlotVisitor&);
WriteBarrier<UnlinkedProgramCodeBlock> m_unlinkedProgramCodeBlock;
- RefPtr<ProgramCodeBlock> m_programCodeBlock;
+ WriteBarrier<ProgramCodeBlock> m_programCodeBlock;
};
class ModuleProgramExecutable final : public ScriptExecutable {
DECLARE_INFO;
- void clearCode();
-
ExecutableInfo executableInfo() const { return ExecutableInfo(needsActivation(), usesEval(), isStrictMode(), false, false, ConstructorKind::None, false); }
UnlinkedModuleProgramCodeBlock* unlinkedModuleProgramCodeBlock() { return m_unlinkedModuleProgramCodeBlock.get(); }
SymbolTable* moduleEnvironmentSymbolTable() { return m_moduleEnvironmentSymbolTable.get(); }
private:
+ friend class ExecutableBase;
friend class ScriptExecutable;
ModuleProgramExecutable(ExecState*, const SourceCode&);
WriteBarrier<UnlinkedModuleProgramCodeBlock> m_unlinkedModuleProgramCodeBlock;
WriteBarrier<SymbolTable> m_moduleEnvironmentSymbolTable;
- RefPtr<ModuleProgramCodeBlock> m_moduleProgramCodeBlock;
+ WriteBarrier<ModuleProgramCodeBlock> m_moduleProgramCodeBlock;
};
class FunctionExecutable final : public ScriptExecutable {
bool isGeneratedForCall() const
{
- return m_codeBlockForCall;
+ return !!m_codeBlockForCall;
}
FunctionCodeBlock* codeBlockForCall()
bool isGeneratedForConstruct() const
{
- return m_codeBlockForConstruct;
+ return !!m_codeBlockForConstruct;
}
FunctionCodeBlock* codeBlockForConstruct()
DECLARE_INFO;
- void clearCode();
-
InferredValue* singletonFunction() { return m_singletonFunction.get(); }
private:
+ friend class ExecutableBase;
FunctionExecutable(
VM&, const SourceCode&, UnlinkedFunctionExecutable*, unsigned firstLine,
unsigned lastLine, unsigned startColumn, unsigned endColumn);
friend class ScriptExecutable;
WriteBarrier<UnlinkedFunctionExecutable> m_unlinkedExecutable;
- RefPtr<FunctionCodeBlock> m_codeBlockForCall;
- RefPtr<FunctionCodeBlock> m_codeBlockForConstruct;
+ WriteBarrier<FunctionCodeBlock> m_codeBlockForCall;
+ WriteBarrier<FunctionCodeBlock> m_codeBlockForConstruct;
RefPtr<TypeSet> m_returnStatementTypeSet;
unsigned m_parametersStartOffset;
WriteBarrier<InferredValue> m_singletonFunction;
DECLARE_INFO;
- void clearCode();
-
void prepareForExecution(ExecState*);
WebAssemblyCodeBlock* codeBlockForCall()
}
private:
+ friend class ExecutableBase;
WebAssemblyExecutable(VM&, const SourceCode&, JSWASMModule*, unsigned functionIndex);
static void visitChildren(JSCell*, SlotVisitor&);
WriteBarrier<JSWASMModule> m_module;
unsigned m_functionIndex;
- RefPtr<WebAssemblyCodeBlock> m_codeBlockForCall;
+ WriteBarrier<WebAssemblyCodeBlock> m_codeBlockForCall;
};
#endif
-inline void ExecutableBase::clearCodeVirtual(ExecutableBase* executable)
-{
- switch (executable->type()) {
- case EvalExecutableType:
- return jsCast<EvalExecutable*>(executable)->clearCode();
- case ProgramExecutableType:
- return jsCast<ProgramExecutable*>(executable)->clearCode();
- case FunctionExecutableType:
- return jsCast<FunctionExecutable*>(executable)->clearCode();
-#if ENABLE(WEBASSEMBLY)
- case WebAssemblyExecutableType:
- return jsCast<WebAssemblyExecutable*>(executable)->clearCode();
-#endif
- case ModuleProgramExecutableType:
- return jsCast<ModuleProgramExecutable*>(executable)->clearCode();
- default:
- return jsCast<NativeExecutable*>(executable)->clearCode();
- }
-}
-
-}
+} // namespace JSC
-#endif
+#endif // Executable_h
exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
+ programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
+ moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
+ evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
+ functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
+#if ENABLE(WEBASSEMBLY)
+ webAssemblyCodeBlockStructure.set(*this, WebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
+#endif
+
iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
smallStrings.initializeCommonStrings(*this);
Strong<Structure> promiseDeferredStructure;
Strong<Structure> internalPromiseDeferredStructure;
Strong<Structure> nativeStdFunctionCellStructure;
+ Strong<Structure> programCodeBlockStructure;
+ Strong<Structure> moduleProgramCodeBlockStructure;
+ Strong<Structure> evalCodeBlockStructure;
+ Strong<Structure> functionCodeBlockStructure;
+ Strong<Structure> webAssemblyCodeBlockStructure;
+
Strong<JSCell> iterationTerminator;
Strong<JSCell> emptyPropertyNameEnumerator;