Harden how the compiler references GC objects
diff --git a/Source/JavaScriptCore/bytecode/CodeBlock.cpp b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
index fbd8549..1cc783c 100644
--- a/Source/JavaScriptCore/bytecode/CodeBlock.cpp
+++ b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
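
The recurring change throughout this diff is the shape of SlotVisitor's marking interface. A minimal sketch of the before/after API, with deliberately simplified signatures (the real class carries more overloads and takes WriteBarrierBase, not WriteBarrier, but the idea is the same):

    class SlotVisitor {
    public:
        // New style: append() takes the WriteBarrier slot by reference, so a raw
        // cell pointer can no longer be passed where a barriered slot is expected.
        template<typename T> void append(const WriteBarrier<T>&);

        // Raw cell pointers must go through one explicitly named entry point
        // (replacing appendUnbarrieredReadOnlyPointer() and friends).
        void appendUnbarriered(JSCell*);
    };

    // Call sites change accordingly:
    //   visitor.append(&m_alternative);                    // before
    //   visitor.append(m_alternative);                     // after
    //   visitor.appendUnbarrieredReadOnlyPointer(this);    // before
    //   visitor.appendUnbarriered(this);                   // after
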
@@ -74,8 +74,6 @@
 #include "TypeProfiler.h"
 #include "UnlinkedInstructionStream.h"
 #include "VMInlines.h"
-#include "WebAssemblyCodeBlock.h"
-#include "WebAssemblyExecutable.h"
 #include <wtf/BagToHashMap.h>
 #include <wtf/CommaPrinter.h>
 #include <wtf/SimpleStats.h>
@@ -1771,8 +1769,8 @@ void CodeBlock::dumpBytecode(
     if (!exitSites.isEmpty()) {
         out.print(" !! frequent exits: ");
         CommaPrinter comma;
-        for (unsigned i = 0; i < exitSites.size(); ++i)
-            out.print(comma, exitSites[i].kind(), " ", exitSites[i].jitType());
+        for (auto& exitSite : exitSites)
+            out.print(comma, exitSite.kind(), " ", exitSite.jitType());
     }
 #else // ENABLE(DFG_JIT)
     UNUSED_PARAM(location);
@@ -1797,17 +1795,6 @@ void CodeBlock::dumpBytecode(
     macro(functionExpressions) \
     macro(constantRegisters)
 
-#define FOR_EACH_MEMBER_VECTOR_RARE_DATA(macro) \
-    macro(regexps) \
-    macro(functions) \
-    macro(exceptionHandlers) \
-    macro(switchJumpTables) \
-    macro(stringSwitchJumpTables) \
-    macro(evalCodeCache) \
-    macro(expressionInfo) \
-    macro(lineInfo) \
-    macro(callReturnIndexVector)
-
 template<typename T>
 static size_t sizeInBytes(const Vector<T>& vector)
 {
@@ -1873,7 +1860,7 @@ CodeBlock::CodeBlock(VM* vm, Structure* structure, CopyParsedBlockTag, CodeBlock
     , m_reoptimizationRetryCounter(0)
     , m_creationTime(std::chrono::steady_clock::now())
 {
-    m_visitWeaklyHasBeenCalled.store(false, std::memory_order_relaxed);
+    m_visitWeaklyHasBeenCalled = false;
 
     ASSERT(heap()->isDeferred());
     ASSERT(m_scopeRegister.isLocal());
@@ -1932,7 +1919,7 @@ CodeBlock::CodeBlock(VM* vm, Structure* structure, ScriptExecutable* ownerExecut
     , m_reoptimizationRetryCounter(0)
     , m_creationTime(std::chrono::steady_clock::now())
 {
-    m_visitWeaklyHasBeenCalled.store(false, std::memory_order_relaxed);
+    m_visitWeaklyHasBeenCalled = false;
 
     ASSERT(heap()->isDeferred());
     ASSERT(m_scopeRegister.isLocal());
@@ -2375,43 +2362,6 @@ void CodeBlock::finishCreation(VM& vm, ScriptExecutable* ownerExecutable, Unlink
     heap()->reportExtraMemoryAllocated(m_instructions.size() * sizeof(Instruction));
 }
 
-#if ENABLE(WEBASSEMBLY)
-CodeBlock::CodeBlock(VM* vm, Structure* structure, WebAssemblyExecutable* ownerExecutable, JSGlobalObject* globalObject)
-    : JSCell(*vm, structure)
-    , m_globalObject(globalObject->vm(), this, globalObject)
-    , m_numCalleeLocals(0)
-    , m_numVars(0)
-    , m_shouldAlwaysBeInlined(false)
-#if ENABLE(JIT)
-    , m_capabilityLevelState(DFG::CannotCompile)
-#endif
-    , m_didFailJITCompilation(false)
-    , m_didFailFTLCompilation(false)
-    , m_hasBeenCompiledWithFTL(false)
-    , m_isConstructor(false)
-    , m_isStrictMode(false)
-    , m_codeType(FunctionCode)
-    , m_hasDebuggerStatement(false)
-    , m_steppingMode(SteppingModeDisabled)
-    , m_numBreakpoints(0)
-    , m_ownerExecutable(m_globalObject->vm(), this, ownerExecutable)
-    , m_vm(vm)
-    , m_osrExitCounter(0)
-    , m_optimizationDelayCounter(0)
-    , m_reoptimizationRetryCounter(0)
-    , m_creationTime(std::chrono::steady_clock::now())
-{
-    ASSERT(heap()->isDeferred());
-}
-
-void CodeBlock::finishCreation(VM& vm, WebAssemblyExecutable*, JSGlobalObject*)
-{
-    Base::finishCreation(vm);
-
-    heap()->m_codeBlocks->add(this);
-}
-#endif
-
 CodeBlock::~CodeBlock()
 {
     if (m_vm->m_perBytecodeProfiler)
@@ -2487,13 +2437,6 @@ void CodeBlock::setNumParameters(int newValue)
     m_argumentValueProfiles = RefCountedArray<ValueProfile>(newValue);
 }
 
-void EvalCodeCache::visitAggregate(SlotVisitor& visitor)
-{
-    EvalCacheMap::iterator end = m_cacheMap.end();
-    for (EvalCacheMap::iterator ptr = m_cacheMap.begin(); ptr != end; ++ptr)
-        visitor.append(&ptr->value);
-}
-
 CodeBlock* CodeBlock::specialOSREntryBlockOrNull()
 {
 #if ENABLE(FTL_JIT)
@@ -2508,18 +2451,20 @@ CodeBlock* CodeBlock::specialOSREntryBlockOrNull()
 
 void CodeBlock::visitWeakly(SlotVisitor& visitor)
 {
-    bool setByMe = !m_visitWeaklyHasBeenCalled.compareExchangeStrong(false, true);
-    if (!setByMe)
+    ConcurrentJSLocker locker(m_lock);
+    if (m_visitWeaklyHasBeenCalled)
         return;
+    
+    m_visitWeaklyHasBeenCalled = true;
 
     if (Heap::isMarkedConcurrently(this))
         return;
 
-    if (shouldVisitStrongly()) {
-        visitor.appendUnbarrieredReadOnlyPointer(this);
+    if (shouldVisitStrongly(locker)) {
+        visitor.appendUnbarriered(this);
         return;
     }
-
+    
     // There are two things that may use unconditional finalizers: inline cache clearing
     // and jettisoning. The probability of us wanting to do at least one of those things
     // is probably quite close to 1. So we add one no matter what and when it runs, it
@@ -2531,7 +2476,7 @@ void CodeBlock::visitWeakly(SlotVisitor& visitor)
 
     // If we jettison ourselves we'll install our alternative, so make sure that it
     // survives GC even if we don't.
-    visitor.append(&m_alternative);
+    visitor.append(m_alternative);
     
     // There are two things that we use weak reference harvesters for: DFG fixpoint for
     // jettisoning, and trying to find structures that would be live based on some
@@ -2549,10 +2494,10 @@ void CodeBlock::visitWeakly(SlotVisitor& visitor)
     // decision by calling harvestWeakReferences().
 
     m_allTransitionsHaveBeenMarked = false;
-    propagateTransitions(visitor);
+    propagateTransitions(locker, visitor);
 
     m_jitCode->dfgCommon()->livenessHasBeenProved = false;
-    determineLiveness(visitor);
+    determineLiveness(locker, visitor);
 #endif // ENABLE(DFG_JIT)
 }
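
The guard at the top of visitWeakly() changes from a lock-free flag to an ordinary bool protected by the CodeBlock's lock, which also serializes this function against visitChildren() below. A sketch of the two idioms side by side:

    // Before (lock-free): WTF's compareExchangeStrong(expected, desired)
    // returns the value it found, so only the thread that observed `false`
    // continues past the guard.
    //
    //     if (m_visitWeaklyHasBeenCalled.compareExchangeStrong(false, true))
    //         return; // someone else got here first
    //
    // After (lock-based): the flag is a plain bool, but the check and the set
    // both happen under m_lock, so they cannot interleave across threads:
    ConcurrentJSLocker locker(m_lock);
    if (m_visitWeaklyHasBeenCalled)
        return;
    m_visitWeaklyHasBeenCalled = true;
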
 
@@ -2575,6 +2520,7 @@ void CodeBlock::visitChildren(JSCell* cell, SlotVisitor& visitor)
 
 void CodeBlock::visitChildren(SlotVisitor& visitor)
 {
+    ConcurrentJSLocker locker(m_lock);
     // There are two things that may use unconditional finalizers: inline cache clearing
     // and jettisoning. The probability of us wanting to do at least one of those things
     // is probably quite close to 1. So we add one no matter what and when it runs, it
@@ -2582,7 +2528,7 @@ void CodeBlock::visitChildren(SlotVisitor& visitor)
     visitor.addUnconditionalFinalizer(&m_unconditionalFinalizer);
 
     if (CodeBlock* otherBlock = specialOSREntryBlockOrNull())
-        visitor.appendUnbarrieredReadOnlyPointer(otherBlock);
+        visitor.appendUnbarriered(otherBlock);
 
     if (m_jitCode)
         visitor.reportExtraMemoryVisited(m_jitCode->size());
@@ -2592,19 +2538,19 @@ void CodeBlock::visitChildren(SlotVisitor& visitor)
         visitor.reportExtraMemoryVisited(m_instructions.size() * sizeof(Instruction) / refCount);
     }
 
-    stronglyVisitStrongReferences(visitor);
-    stronglyVisitWeakReferences(visitor);
+    stronglyVisitStrongReferences(locker, visitor);
+    stronglyVisitWeakReferences(locker, visitor);
 
     m_allTransitionsHaveBeenMarked = false;
-    propagateTransitions(visitor);
+    propagateTransitions(locker, visitor);
 }
 
-bool CodeBlock::shouldVisitStrongly()
+bool CodeBlock::shouldVisitStrongly(const ConcurrentJSLocker& locker)
 {
     if (Options::forceCodeBlockLiveness())
         return true;
 
-    if (shouldJettisonDueToOldAge())
+    if (shouldJettisonDueToOldAge(locker))
         return false;
 
     // Interpreter and Baseline JIT CodeBlocks don't need to be jettisoned when
@@ -2656,7 +2602,7 @@ static std::chrono::milliseconds timeToLive(JITCode::JITType jitType)
     }
 }
 
-bool CodeBlock::shouldJettisonDueToOldAge()
+bool CodeBlock::shouldJettisonDueToOldAge(const ConcurrentJSLocker&)
 {
     if (Heap::isMarkedConcurrently(this))
         return false;
@@ -2683,7 +2629,7 @@ static bool shouldMarkTransition(DFG::WeakReferenceTransition& transition)
 }
 #endif // ENABLE(DFG_JIT)
 
-void CodeBlock::propagateTransitions(SlotVisitor& visitor)
+void CodeBlock::propagateTransitions(const ConcurrentJSLocker&, SlotVisitor& visitor)
 {
     UNUSED_PARAM(visitor);
 
@@ -2708,7 +2654,7 @@ void CodeBlock::propagateTransitions(SlotVisitor& visitor)
                 Structure* newStructure =
                     m_vm->heap.structureIDTable().get(newStructureID);
                 if (Heap::isMarkedConcurrently(oldStructure))
-                    visitor.appendUnbarrieredReadOnlyPointer(newStructure);
+                    visitor.appendUnbarriered(newStructure);
                 else
                     allAreMarkedSoFar = false;
                 break;
@@ -2731,9 +2677,9 @@ void CodeBlock::propagateTransitions(SlotVisitor& visitor)
         DFG::CommonData* dfgCommon = m_jitCode->dfgCommon();
         for (auto& weakReference : dfgCommon->weakStructureReferences)
             allAreMarkedSoFar &= weakReference->markIfCheap(visitor);
-        
-        for (unsigned i = 0; i < dfgCommon->transitions.size(); ++i) {
-            if (shouldMarkTransition(dfgCommon->transitions[i])) {
+
+        for (auto& transition : dfgCommon->transitions) {
+            if (shouldMarkTransition(transition)) {
                 // If the following three things are live, then the target of the
                 // transition is also live:
                 //
@@ -2752,8 +2698,8 @@ void CodeBlock::propagateTransitions(SlotVisitor& visitor)
                 // We also short-circuit the liveness if the structure is harmless
                 // to mark (i.e. its global object and prototype are both already
                 // live).
-                
-                visitor.append(&dfgCommon->transitions[i].m_to);
+
+                visitor.append(transition.m_to);
             } else
                 allAreMarkedSoFar = false;
         }
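
The loop above applies the rule spelled out in the comment: mark a transition's target only once everything that can reach the transition is itself live. shouldMarkTransition(), whose body is truncated at the hunk boundary further up, amounts to roughly this sketch:

    static bool shouldMarkTransition(DFG::WeakReferenceTransition& transition)
    {
        // The code origin, if there is one, must be live...
        if (!!transition.m_codeOrigin && !Heap::isMarkedConcurrently(transition.m_codeOrigin.get()))
            return false;
        // ...and so must the structure we are transitioning from.
        if (!Heap::isMarkedConcurrently(transition.m_from.get()))
            return false;
        return true;
    }
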
@@ -2764,7 +2710,7 @@ void CodeBlock::propagateTransitions(SlotVisitor& visitor)
         m_allTransitionsHaveBeenMarked = true;
 }
 
-void CodeBlock::determineLiveness(SlotVisitor& visitor)
+void CodeBlock::determineLiveness(const ConcurrentJSLocker&, SlotVisitor& visitor)
 {
     UNUSED_PARAM(visitor);
     
@@ -2779,6 +2725,7 @@ void CodeBlock::determineLiveness(SlotVisitor& visitor)
     // GC we still have not proved liveness, then this code block is toast.
     bool allAreLiveSoFar = true;
     for (unsigned i = 0; i < dfgCommon->weakReferences.size(); ++i) {
+        ASSERT(!jsDynamicCast<CodeBlock*>(dfgCommon->weakReferences[i].get()));
         if (!Heap::isMarkedConcurrently(dfgCommon->weakReferences[i].get())) {
             allAreLiveSoFar = false;
             break;
@@ -2801,7 +2748,7 @@ void CodeBlock::determineLiveness(SlotVisitor& visitor)
     // All weak references are live. Record this information so we don't
     // come back here again, and scan the strong references.
     dfgCommon->livenessHasBeenProved = true;
-    visitor.appendUnbarrieredReadOnlyPointer(this);
+    visitor.appendUnbarriered(this);
 #endif // ENABLE(DFG_JIT)
 }
 
@@ -2810,9 +2757,9 @@ void CodeBlock::WeakReferenceHarvester::visitWeakReferences(SlotVisitor& visitor
     CodeBlock* codeBlock =
         bitwise_cast<CodeBlock*>(
             bitwise_cast<char*>(this) - OBJECT_OFFSETOF(CodeBlock, m_weakReferenceHarvester));
-
-    codeBlock->propagateTransitions(visitor);
-    codeBlock->determineLiveness(visitor);
+    
+    codeBlock->propagateTransitions(NoLockingNecessary, visitor);
+    codeBlock->determineLiveness(NoLockingNecessary, visitor);
 }
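
WeakReferenceHarvester is a member object embedded by value in CodeBlock, so the callback recovers its owner by subtracting the member's byte offset from its own address, C's classic container_of idiom. A generic sketch of the pattern (bitwise_cast and OBJECT_OFFSETOF are WTF utilities; the helper name is illustrative, not WebKit API):

    // Recover a pointer to the enclosing object from a pointer to one of its
    // by-value members. Equivalent to the Linux kernel's container_of().
    template<typename Container>
    static Container* containerOf(void* member, ptrdiff_t offsetOfMember)
    {
        return bitwise_cast<Container*>(
            bitwise_cast<char*>(member) - offsetOfMember);
    }

    // Hypothetical call site mirroring the code above:
    //   CodeBlock* codeBlock = containerOf<CodeBlock>(
    //       this, OBJECT_OFFSETOF(CodeBlock, m_weakReferenceHarvester));
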
 
 void CodeBlock::clearLLIntGetByIdCache(Instruction* instruction)
@@ -2825,11 +2772,6 @@ void CodeBlock::clearLLIntGetByIdCache(Instruction* instruction)
 
 void CodeBlock::finalizeLLIntInlineCaches()
 {
-#if ENABLE(WEBASSEMBLY)
-    if (m_ownerExecutable->isWebAssemblyExecutable())
-        return;
-#endif
-
     Interpreter* interpreter = m_vm->interpreter;
     const Vector<unsigned>& propertyAccessInstructions = m_unlinkedCode->propertyAccessInstructions();
     for (size_t size = propertyAccessInstructions.size(), i = 0; i < size; ++i) {
@@ -2951,16 +2893,14 @@ void CodeBlock::UnconditionalFinalizer::finalizeUnconditionally()
 {
     CodeBlock* codeBlock = bitwise_cast<CodeBlock*>(
         bitwise_cast<char*>(this) - OBJECT_OFFSETOF(CodeBlock, m_unconditionalFinalizer));
-
-#if ENABLE(DFG_JIT)
-    if (codeBlock->shouldJettisonDueToWeakReference()) {
-        codeBlock->jettison(Profiler::JettisonDueToWeakReference);
-        return;
-    }
-#endif // ENABLE(DFG_JIT)
-
-    if (codeBlock->shouldJettisonDueToOldAge()) {
-        codeBlock->jettison(Profiler::JettisonDueToOldAge);
+    
+    codeBlock->updateAllPredictions();
+    
+    if (!Heap::isMarked(codeBlock)) {
+        if (codeBlock->shouldJettisonDueToWeakReference())
+            codeBlock->jettison(Profiler::JettisonDueToWeakReference);
+        else
+            codeBlock->jettison(Profiler::JettisonDueToOldAge);
         return;
     }
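
Two behavioral points are folded into this rewrite of the finalizer: updateAllPredictions() now runs here on every GC (it is dropped from stronglyVisitStrongReferences() further down), and a CodeBlock that ends the cycle unmarked is jettisoned unconditionally, with the weak-reference test only selecting which reason the profiler records. As a decision-tree sketch:

    // After this patch, finalizeUnconditionally() reduces to roughly:
    codeBlock->updateAllPredictions();
    if (!Heap::isMarked(codeBlock)) {
        auto reason = codeBlock->shouldJettisonDueToWeakReference()
            ? Profiler::JettisonDueToWeakReference
            : Profiler::JettisonDueToOldAge;
        codeBlock->jettison(reason);
        return;
    }
    // ...otherwise fall through to the inline-cache cleanup below.
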
 
@@ -3099,55 +3039,55 @@ void CodeBlock::resetJITData()
 }
 #endif
 
-void CodeBlock::visitOSRExitTargets(SlotVisitor& visitor)
+void CodeBlock::visitOSRExitTargets(const ConcurrentJSLocker&, SlotVisitor& visitor)
 {
     // We strongly visit OSR exits targets because we don't want to deal with
     // the complexity of generating an exit target CodeBlock on demand and
     // guaranteeing that it matches the details of the CodeBlock we compiled
     // the OSR exit against.
 
-    visitor.append(&m_alternative);
+    visitor.append(m_alternative);
 
 #if ENABLE(DFG_JIT)
     DFG::CommonData* dfgCommon = m_jitCode->dfgCommon();
     if (dfgCommon->inlineCallFrames) {
         for (auto* inlineCallFrame : *dfgCommon->inlineCallFrames) {
             ASSERT(inlineCallFrame->baselineCodeBlock);
-            visitor.append(&inlineCallFrame->baselineCodeBlock);
+            visitor.append(inlineCallFrame->baselineCodeBlock);
         }
     }
 #endif
 }
 
-void CodeBlock::stronglyVisitStrongReferences(SlotVisitor& visitor)
+void CodeBlock::stronglyVisitStrongReferences(const ConcurrentJSLocker& locker, SlotVisitor& visitor)
 {
-    visitor.append(&m_globalObject);
-    visitor.append(&m_ownerExecutable);
-    visitor.append(&m_unlinkedCode);
+    UNUSED_PARAM(locker);
+    
+    visitor.append(m_globalObject);
+    visitor.append(m_ownerExecutable);
+    visitor.append(m_unlinkedCode);
     if (m_rareData)
-        m_rareData->m_evalCodeCache.visitAggregate(visitor);
+        m_rareData->m_directEvalCodeCache.visitAggregate(visitor);
     visitor.appendValues(m_constantRegisters.data(), m_constantRegisters.size());
-    for (size_t i = 0; i < m_functionExprs.size(); ++i)
-        visitor.append(&m_functionExprs[i]);
-    for (size_t i = 0; i < m_functionDecls.size(); ++i)
-        visitor.append(&m_functionDecls[i]);
-    for (unsigned i = 0; i < m_objectAllocationProfiles.size(); ++i)
-        m_objectAllocationProfiles[i].visitAggregate(visitor);
+    for (auto& functionExpr : m_functionExprs)
+        visitor.append(functionExpr);
+    for (auto& functionDecl : m_functionDecls)
+        visitor.append(functionDecl);
+    for (auto& objectAllocationProfile : m_objectAllocationProfiles)
+        objectAllocationProfile.visitAggregate(visitor);
 
 #if ENABLE(JIT)
     for (ByValInfo* byValInfo : m_byValInfos)
-        visitor.append(&byValInfo->cachedSymbol);
+        visitor.append(byValInfo->cachedSymbol);
 #endif
 
 #if ENABLE(DFG_JIT)
     if (JITCode::isOptimizingJIT(jitType()))
-        visitOSRExitTargets(visitor);
+        visitOSRExitTargets(locker, visitor);
 #endif
-
-    updateAllPredictions();
 }
 
-void CodeBlock::stronglyVisitWeakReferences(SlotVisitor& visitor)
+void CodeBlock::stronglyVisitWeakReferences(const ConcurrentJSLocker&, SlotVisitor& visitor)
 {
     UNUSED_PARAM(visitor);
 
@@ -3157,18 +3097,18 @@ void CodeBlock::stronglyVisitWeakReferences(SlotVisitor& visitor)
     
     DFG::CommonData* dfgCommon = m_jitCode->dfgCommon();
 
-    for (unsigned i = 0; i < dfgCommon->transitions.size(); ++i) {
-        if (!!dfgCommon->transitions[i].m_codeOrigin)
-            visitor.append(&dfgCommon->transitions[i].m_codeOrigin); // Almost certainly not necessary, since the code origin should also be a weak reference. Better to be safe, though.
-        visitor.append(&dfgCommon->transitions[i].m_from);
-        visitor.append(&dfgCommon->transitions[i].m_to);
+    for (auto& transition : dfgCommon->transitions) {
+        if (!!transition.m_codeOrigin)
+            visitor.append(transition.m_codeOrigin); // Almost certainly not necessary, since the code origin should also be a weak reference. Better to be safe, though.
+        visitor.append(transition.m_from);
+        visitor.append(transition.m_to);
     }
-    
-    for (unsigned i = 0; i < dfgCommon->weakReferences.size(); ++i)
-        visitor.append(&dfgCommon->weakReferences[i]);
 
-    for (unsigned i = 0; i < dfgCommon->weakStructureReferences.size(); ++i)
-        visitor.append(&dfgCommon->weakStructureReferences[i]);
+    for (auto& weakReference : dfgCommon->weakReferences)
+        visitor.append(weakReference);
+
+    for (auto& weakStructureReference : dfgCommon->weakStructureReferences)
+        visitor.append(weakStructureReference);
 
     dfgCommon->livenessHasBeenProved = true;
 #endif    
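
A pattern repeated across these signature changes: the added ConcurrentJSLocker parameter carries no data and is usually unnamed; it exists so the type system records that the caller holds m_lock, with NoLockingNecessary available for call sites (like the weak-reference harvester above) that run when nothing can race with them. A minimal sketch of the idiom, with a hypothetical class:

    class Thing {
    public:
        void doStuff()
        {
            ConcurrentJSLocker locker(m_lock); // take the lock at the boundary...
            doStuffImpl(locker);               // ...and pass the proof down
        }

        // The unnamed parameter has no runtime cost; its type records in the
        // signature that the caller either holds the lock or has deliberately
        // opted out by passing NoLockingNecessary.
        void doStuffImpl(const ConcurrentJSLocker&)
        {
            // ... touch state guarded by m_lock ...
        }

    private:
        ConcurrentJSLock m_lock;
    };
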
@@ -3382,11 +3322,6 @@ CodeBlock* CodeBlock::replacement()
     if (classInfo == ModuleProgramCodeBlock::info())
         return jsCast<ModuleProgramExecutable*>(ownerExecutable())->codeBlock();
 
-#if ENABLE(WEBASSEMBLY)
-    if (classInfo == WebAssemblyCodeBlock::info())
-        return nullptr;
-#endif
-
     RELEASE_ASSERT_NOT_REACHED();
     return nullptr;
 }
@@ -3410,11 +3345,6 @@ DFG::CapabilityLevel CodeBlock::computeCapabilityLevel()
     if (classInfo == ModuleProgramCodeBlock::info())
         return DFG::programCapabilityLevel(this);
 
-#if ENABLE(WEBASSEMBLY)
-    if (classInfo == WebAssemblyCodeBlock::info())
-        return DFG::CannotCompile;
-#endif
-
     RELEASE_ASSERT_NOT_REACHED();
     return DFG::CannotCompile;
 }
@@ -3447,8 +3377,7 @@ void CodeBlock::jettison(Profiler::JettisonReason reason, ReoptimizationMode mod
         if (DFG::shouldDumpDisassembly()) {
             dataLog(*this, " will be jettisoned because of the following dead references:\n");
             DFG::CommonData* dfgCommon = m_jitCode->dfgCommon();
-            for (unsigned i = 0; i < dfgCommon->transitions.size(); ++i) {
-                DFG::WeakReferenceTransition& transition = dfgCommon->transitions[i];
+            for (auto& transition : dfgCommon->transitions) {
                 JSCell* origin = transition.m_codeOrigin.get();
                 JSCell* from = transition.m_from.get();
                 JSCell* to = transition.m_to.get();
@@ -3977,9 +3906,9 @@ bool CodeBlock::shouldReoptimizeFromLoopNow()
 
 ArrayProfile* CodeBlock::getArrayProfile(const ConcurrentJSLocker&, unsigned bytecodeOffset)
 {
-    for (unsigned i = 0; i < m_arrayProfiles.size(); ++i) {
-        if (m_arrayProfiles[i].bytecodeOffset() == bytecodeOffset)
-            return &m_arrayProfiles[i];
+    for (auto& arrayProfile : m_arrayProfiles) {
+        if (arrayProfile.bytecodeOffset() == bytecodeOffset)
+            return &arrayProfile;
     }
     return 0;
 }
@@ -4086,10 +4015,6 @@ void CodeBlock::updateAllArrayPredictions()
 
 void CodeBlock::updateAllPredictions()
 {
-#if ENABLE(WEBASSEMBLY)
-    if (m_ownerExecutable->isWebAssemblyExecutable())
-        return;
-#endif
     updateAllValueProfilePredictions();
     updateAllArrayPredictions();
 }
@@ -4139,10 +4064,8 @@ void CodeBlock::tallyFrequentExitSites()
     switch (jitType()) {
     case JITCode::DFGJIT: {
         DFG::JITCode* jitCode = m_jitCode->dfg();
-        for (unsigned i = 0; i < jitCode->osrExit.size(); ++i) {
-            DFG::OSRExit& exit = jitCode->osrExit[i];
+        for (auto& exit : jitCode->osrExit)
             exit.considerAddingAsFrequentExitSite(profiledBlock);
-        }
         break;
     }
 
@@ -4280,10 +4203,10 @@ bool CodeBlock::usesOpcode(OpcodeID opcodeID)
 
 String CodeBlock::nameForRegister(VirtualRegister virtualRegister)
 {
-    for (unsigned i = 0; i < m_constantRegisters.size(); i++) {
-        if (m_constantRegisters[i].get().isEmpty())
+    for (auto& constantRegister : m_constantRegisters) {
+        if (constantRegister.get().isEmpty())
             continue;
-        if (SymbolTable* symbolTable = jsDynamicCast<SymbolTable*>(m_constantRegisters[i].get())) {
+        if (SymbolTable* symbolTable = jsDynamicCast<SymbolTable*>(constantRegister.get())) {
             ConcurrentJSLocker locker(symbolTable->m_lock);
             auto end = symbolTable->end(locker);
             for (auto ptr = symbolTable->begin(locker); ptr != end; ++ptr) {
@@ -4507,29 +4430,29 @@ void CodeBlock::setPCToCodeOriginMap(std::unique_ptr<PCToCodeOriginMap>&& map)
     m_pcToCodeOriginMap = WTFMove(map);
 }
 
-Optional<CodeOrigin> CodeBlock::findPC(void* pc)
+std::optional<CodeOrigin> CodeBlock::findPC(void* pc)
 {
     if (m_pcToCodeOriginMap) {
-        if (Optional<CodeOrigin> codeOrigin = m_pcToCodeOriginMap->findPC(pc))
+        if (std::optional<CodeOrigin> codeOrigin = m_pcToCodeOriginMap->findPC(pc))
             return codeOrigin;
     }
 
     for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter) {
         StructureStubInfo* stub = *iter;
         if (stub->containsPC(pc))
-            return Optional<CodeOrigin>(stub->codeOrigin);
+            return std::optional<CodeOrigin>(stub->codeOrigin);
     }
 
-    if (Optional<CodeOrigin> codeOrigin = m_jitCode->findPC(this, pc))
+    if (std::optional<CodeOrigin> codeOrigin = m_jitCode->findPC(this, pc))
         return codeOrigin;
 
-    return Nullopt;
+    return std::nullopt;
 }
 #endif // ENABLE(JIT)
 
-Optional<unsigned> CodeBlock::bytecodeOffsetFromCallSiteIndex(CallSiteIndex callSiteIndex)
+std::optional<unsigned> CodeBlock::bytecodeOffsetFromCallSiteIndex(CallSiteIndex callSiteIndex)
 {
-    Optional<unsigned> bytecodeOffset;
+    std::optional<unsigned> bytecodeOffset;
     JITCode::JITType jitType = this->jitType();
     if (jitType == JITCode::InterpreterThunk || jitType == JITCode::BaselineJIT) {
 #if USE(JSVALUE64)
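
The final hunks belong to the mechanical migration from WTF's Optional/Nullopt to the standard std::optional/std::nullopt; the lookup logic itself is untouched. For reference, the pattern in isolation (lookupInMap and findThing are hypothetical stand-ins, not WebKit functions):

    #include <optional>

    std::optional<int> lookupInMap(void* pc); // hypothetical helper

    std::optional<int> findThing(void* pc)
    {
        if (std::optional<int> result = lookupInMap(pc))
            return result;   // engaged optional propagates as-is
        return std::nullopt; // replaces the old `return Nullopt;`
    }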