Failing baseline JIT compilation of a code block and then trying to compile it from OSR from DFG/FTL will corrupt the CodeBlock
https://bugs.webkit.org/show_bug.cgi?id=158806
Reviewed by Saam Barati.
If we try to compile a CodeBlock that we already tried compiling in the past, then we need
to clean up the data structures that were partly filled in by the failed compile. That
clean-up races with the DFG, which may be parsing those data structures while we are
clearing them. This patch introduces such a clean-up (CodeBlock::resetJITData()) and fixes
the races.
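The race fix boils down to a small reader/writer protocol around CodeBlock's concurrent
lock. The sketch below is a minimal stand-alone illustration of that protocol, not the
actual JSC code: MiniCodeBlock, hasJITCode and snapshotStubInfos are invented stand-ins
for CodeBlock, JITCode::isJIT(jitType()) and getStubInfoMap(), which live in
bytecode/CodeBlock.cpp and bytecode/CodeBlock.h.

    #include <cassert>
    #include <mutex>
    #include <vector>

    struct StubInfo { unsigned bytecodeIndex; };

    struct MiniCodeBlock {
        std::mutex lock;                  // stands in for CodeBlock::m_lock / ConcurrentJITLocker
        bool hasJITCode { false };        // stands in for JITCode::isJIT(jitType())
        std::vector<StubInfo> stubInfos;  // stands in for m_stubInfos

        // Writer side: only legal once the failed baseline compile's code has been
        // thrown away, so no other thread can still hold references into the container.
        void resetJITData()
        {
            assert(!hasJITCode); // mirrors the RELEASE_ASSERT in the real resetJITData()
            std::lock_guard<std::mutex> locker(lock);
            stubInfos.clear();
        }

        // Reader side (e.g. a concurrent DFG compile): takes the same lock and returns
        // nothing unless JIT code is installed, so it never observes a container that
        // is in the middle of being cleared.
        std::vector<StubInfo> snapshotStubInfos()
        {
            std::lock_guard<std::mutex> locker(lock);
            if (!hasJITCode)
                return { };
            return stubInfos;
        }
    };

The same shape appears three times in the patch below: getStubInfoMap(), getCallLinkInfoMap()
and getByValInfoMap() each gained the isJIT() guard on the reader side, and resetJITData()
is the new writer that clears under the lock.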
* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::dumpBytecode):
(JSC::CodeBlock::getStubInfoMap):
(JSC::CodeBlock::getCallLinkInfoMap):
(JSC::CodeBlock::getByValInfoMap):
(JSC::CodeBlock::getCallLinkInfoForBytecodeIndex):
(JSC::CodeBlock::resetJITData):
(JSC::CodeBlock::visitOSRExitTargets):
(JSC::CodeBlock::setSteppingMode):
(JSC::CodeBlock::addRareCaseProfile):
(JSC::CodeBlock::rareCaseProfileForBytecodeOffset):
(JSC::CodeBlock::rareCaseProfileCountForBytecodeOffset):
(JSC::CodeBlock::resultProfileForBytecodeOffset):
(JSC::CodeBlock::specialFastCaseProfileCountForBytecodeOffset):
(JSC::CodeBlock::couldTakeSpecialFastCase):
(JSC::CodeBlock::ensureResultProfile):
* bytecode/CodeBlock.h:
(JSC::CodeBlock::getFromAllValueProfiles):
(JSC::CodeBlock::numberOfRareCaseProfiles):
(JSC::CodeBlock::numberOfResultProfiles):
(JSC::CodeBlock::numberOfArrayProfiles):
(JSC::CodeBlock::arrayProfiles):
(JSC::CodeBlock::addRareCaseProfile): Deleted.
(JSC::CodeBlock::specialFastCaseProfileCountForBytecodeOffset): Deleted.
(JSC::CodeBlock::couldTakeSpecialFastCase): Deleted.
* dfg/DFGByteCodeParser.cpp:
(JSC::DFG::ByteCodeParser::makeSafe):
* dfg/DFGGraph.cpp:
(JSC::DFG::Graph::methodOfGettingAValueProfileFor):
* jit/JIT.cpp:
(JSC::JIT::link):
* jit/JITWorklist.cpp:
(JSC::JITWorklist::compileNow):
git-svn-id: https://svn.webkit.org/repository/webkit/trunk@202397 268f45cc-cd09-0410-ab3c-d52691b4dbfc
+2016-06-23 Filip Pizlo <fpizlo@apple.com>
+
+ Failing baseline JIT compilation of a code block and then trying to compile it from OSR from DFG/FTL will corrupt the CodeBlock
+ https://bugs.webkit.org/show_bug.cgi?id=158806
+
+ Reviewed by Saam Barati.
+
+ If we try to compile a CodeBlock that we already tried compiling in the past, then we need
+ to clean up the data structures that were partly filled in by the failed compile. That
+ clean-up races with the DFG, which may be parsing those data structures while we are
+ clearing them. This patch introduces such a clean-up (CodeBlock::resetJITData()) and fixes
+ the races.
+
+ * bytecode/CodeBlock.cpp:
+ (JSC::CodeBlock::dumpBytecode):
+ (JSC::CodeBlock::getStubInfoMap):
+ (JSC::CodeBlock::getCallLinkInfoMap):
+ (JSC::CodeBlock::getByValInfoMap):
+ (JSC::CodeBlock::getCallLinkInfoForBytecodeIndex):
+ (JSC::CodeBlock::resetJITData):
+ (JSC::CodeBlock::visitOSRExitTargets):
+ (JSC::CodeBlock::setSteppingMode):
+ (JSC::CodeBlock::addRareCaseProfile):
+ (JSC::CodeBlock::rareCaseProfileForBytecodeOffset):
+ (JSC::CodeBlock::rareCaseProfileCountForBytecodeOffset):
+ (JSC::CodeBlock::resultProfileForBytecodeOffset):
+ (JSC::CodeBlock::specialFastCaseProfileCountForBytecodeOffset):
+ (JSC::CodeBlock::couldTakeSpecialFastCase):
+ (JSC::CodeBlock::ensureResultProfile):
+ * bytecode/CodeBlock.h:
+ (JSC::CodeBlock::getFromAllValueProfiles):
+ (JSC::CodeBlock::numberOfRareCaseProfiles):
+ (JSC::CodeBlock::numberOfResultProfiles):
+ (JSC::CodeBlock::numberOfArrayProfiles):
+ (JSC::CodeBlock::arrayProfiles):
+ (JSC::CodeBlock::addRareCaseProfile): Deleted.
+ (JSC::CodeBlock::specialFastCaseProfileCountForBytecodeOffset): Deleted.
+ (JSC::CodeBlock::couldTakeSpecialFastCase): Deleted.
+ * dfg/DFGByteCodeParser.cpp:
+ (JSC::DFG::ByteCodeParser::makeSafe):
+ * dfg/DFGGraph.cpp:
+ (JSC::DFG::Graph::methodOfGettingAValueProfileFor):
+ * jit/JIT.cpp:
+ (JSC::JIT::link):
+ * jit/JITWorklist.cpp:
+ (JSC::JITWorklist::compileNow):
+
2016-06-23 Joseph Pecoraro <pecoraro@apple.com>
Web Inspector: Memory Timeline sometimes shows impossible value for bmalloc size (underflowed)
}
dumpRareCaseProfile(out, "rare case: ", rareCaseProfileForBytecodeOffset(location), hasPrintedProfiling);
- dumpResultProfile(out, resultProfileForBytecodeOffset(location), hasPrintedProfiling);
+ {
+ ConcurrentJITLocker locker(m_lock);
+ dumpResultProfile(out, resultProfileForBytecodeOffset(locker, location), hasPrintedProfiling);
+ }
#if ENABLE(DFG_JIT)
Vector<DFG::FrequentExitSite> exitSites = exitProfile().exitSitesFor(location);
void CodeBlock::getStubInfoMap(const ConcurrentJITLocker&, StubInfoMap& result)
{
#if ENABLE(JIT)
- toHashMap(m_stubInfos, getStructureStubInfoCodeOrigin, result);
+ if (JITCode::isJIT(jitType()))
+ toHashMap(m_stubInfos, getStructureStubInfoCodeOrigin, result);
#else
UNUSED_PARAM(result);
#endif
void CodeBlock::getCallLinkInfoMap(const ConcurrentJITLocker&, CallLinkInfoMap& result)
{
#if ENABLE(JIT)
- toHashMap(m_callLinkInfos, getCallLinkInfoCodeOrigin, result);
+ if (JITCode::isJIT(jitType()))
+ toHashMap(m_callLinkInfos, getCallLinkInfoCodeOrigin, result);
#else
UNUSED_PARAM(result);
#endif
void CodeBlock::getByValInfoMap(const ConcurrentJITLocker&, ByValInfoMap& result)
{
#if ENABLE(JIT)
- for (auto* byValInfo : m_byValInfos)
- result.add(CodeOrigin(byValInfo->bytecodeIndex), byValInfo);
+ if (JITCode::isJIT(jitType())) {
+ for (auto* byValInfo : m_byValInfos)
+ result.add(CodeOrigin(byValInfo->bytecodeIndex), byValInfo);
+ }
#else
UNUSED_PARAM(result);
#endif
}
return nullptr;
}
+
+void CodeBlock::resetJITData()
+{
+ RELEASE_ASSERT(!JITCode::isJIT(jitType()));
+ ConcurrentJITLocker locker(m_lock);
+
+ // We can clear these because no other thread will have references to any stub infos, call
+ // link infos, or by val infos if we don't have JIT code. Attempts to query these data
+ // structures using the concurrent API (getStubInfoMap and friends) will return nothing if we
+ // don't have JIT code.
+ m_stubInfos.clear();
+ m_callLinkInfos.clear();
+ m_byValInfos.clear();
+
+ // We can clear this because the DFG's queries to these data structures are guarded by whether
+ // there is JIT code.
+ m_rareCaseProfiles.clear();
+
+ // We can clear these because the DFG only accesses members of this data structure when
+ // holding the lock or after querying whether we have JIT code.
+ m_resultProfiles.clear();
+ m_bytecodeOffsetToResultProfileIndexMap = nullptr;
+}
#endif
void CodeBlock::visitOSRExitTargets(SlotVisitor& visitor)
jettison(Profiler::JettisonDueToDebuggerStepping);
}
+RareCaseProfile* CodeBlock::addRareCaseProfile(int bytecodeOffset)
+{
+ m_rareCaseProfiles.append(RareCaseProfile(bytecodeOffset));
+ return &m_rareCaseProfiles.last();
+}
+
RareCaseProfile* CodeBlock::rareCaseProfileForBytecodeOffset(int bytecodeOffset)
{
return tryBinarySearch<RareCaseProfile, int>(
return 0;
}
-ResultProfile* CodeBlock::resultProfileForBytecodeOffset(int bytecodeOffset)
-{
- ConcurrentJITLocker locker(m_lock);
- return resultProfileForBytecodeOffset(locker, bytecodeOffset);
-}
-
ResultProfile* CodeBlock::resultProfileForBytecodeOffset(const ConcurrentJITLocker&, int bytecodeOffset)
{
if (!m_bytecodeOffsetToResultProfileIndexMap)
return &m_resultProfiles[iterator->value];
}
+unsigned CodeBlock::specialFastCaseProfileCountForBytecodeOffset(int bytecodeOffset)
+{
+ ConcurrentJITLocker locker(m_lock);
+ return specialFastCaseProfileCountForBytecodeOffset(locker, bytecodeOffset);
+}
+
+unsigned CodeBlock::specialFastCaseProfileCountForBytecodeOffset(const ConcurrentJITLocker& locker, int bytecodeOffset)
+{
+ ResultProfile* profile = resultProfileForBytecodeOffset(locker, bytecodeOffset);
+ if (!profile)
+ return 0;
+ return profile->specialFastPathCount();
+}
+
+bool CodeBlock::couldTakeSpecialFastCase(int bytecodeOffset)
+{
+ if (!hasBaselineJITProfiling())
+ return false;
+ unsigned specialFastCaseCount = specialFastCaseProfileCountForBytecodeOffset(bytecodeOffset);
+ return specialFastCaseCount >= Options::couldTakeSlowCaseMinimumCount();
+}
ResultProfile* CodeBlock::ensureResultProfile(int bytecodeOffset)
{
// that there had been inlining. Chances are if you want to use this, you're really
// looking for a CallLinkInfoMap to amortize the cost of calling this.
CallLinkInfo* getCallLinkInfoForBytecodeIndex(unsigned bytecodeIndex);
+
+ // We call this when we want to reattempt compiling something with the baseline JIT. Ideally
+ // the baseline JIT would not add data to CodeBlock, but instead it would put its data into
+ // a newly created JITCode, which could be thrown away if we bail on JIT compilation. Then we
+ // would be able to get rid of this silly function.
+ // FIXME: https://bugs.webkit.org/show_bug.cgi?id=159061
+ void resetJITData();
#endif // ENABLE(JIT)
void unlinkIncomingCalls();
return valueProfile(index - numberOfArgumentValueProfiles());
}
- RareCaseProfile* addRareCaseProfile(int bytecodeOffset)
- {
- m_rareCaseProfiles.append(RareCaseProfile(bytecodeOffset));
- return &m_rareCaseProfiles.last();
- }
+ RareCaseProfile* addRareCaseProfile(int bytecodeOffset);
unsigned numberOfRareCaseProfiles() { return m_rareCaseProfiles.size(); }
RareCaseProfile* rareCaseProfileForBytecodeOffset(int bytecodeOffset);
unsigned rareCaseProfileCountForBytecodeOffset(int bytecodeOffset);
ResultProfile* ensureResultProfile(int bytecodeOffset);
ResultProfile* ensureResultProfile(const ConcurrentJITLocker&, int bytecodeOffset);
unsigned numberOfResultProfiles() { return m_resultProfiles.size(); }
- ResultProfile* resultProfileForBytecodeOffset(int bytecodeOffset);
ResultProfile* resultProfileForBytecodeOffset(const ConcurrentJITLocker&, int bytecodeOffset);
- unsigned specialFastCaseProfileCountForBytecodeOffset(int bytecodeOffset)
- {
- ResultProfile* profile = resultProfileForBytecodeOffset(bytecodeOffset);
- if (!profile)
- return 0;
- return profile->specialFastPathCount();
- }
+ unsigned specialFastCaseProfileCountForBytecodeOffset(const ConcurrentJITLocker&, int bytecodeOffset);
+ unsigned specialFastCaseProfileCountForBytecodeOffset(int bytecodeOffset);
- bool couldTakeSpecialFastCase(int bytecodeOffset)
- {
- if (!hasBaselineJITProfiling())
- return false;
- unsigned specialFastCaseCount = specialFastCaseProfileCountForBytecodeOffset(bytecodeOffset);
- return specialFastCaseCount >= Options::couldTakeSlowCaseMinimumCount();
- }
+ bool couldTakeSpecialFastCase(int bytecodeOffset);
unsigned numberOfArrayProfiles() const { return m_arrayProfiles.size(); }
const ArrayProfileVector& arrayProfiles() { return m_arrayProfiles; }
if (!isX86() && node->op() == ArithMod)
return node;
- ResultProfile* resultProfile = m_inlineStackTop->m_profiledBlock->resultProfileForBytecodeOffset(m_currentIndex);
- if (resultProfile) {
- switch (node->op()) {
- case ArithAdd:
- case ArithSub:
- case ValueAdd:
- if (resultProfile->didObserveDouble())
- node->mergeFlags(NodeMayHaveDoubleResult);
- if (resultProfile->didObserveNonNumber())
- node->mergeFlags(NodeMayHaveNonNumberResult);
- break;
+ {
+ ConcurrentJITLocker locker(m_inlineStackTop->m_profiledBlock->m_lock);
+ ResultProfile* resultProfile = m_inlineStackTop->m_profiledBlock->resultProfileForBytecodeOffset(locker, m_currentIndex);
+ if (resultProfile) {
+ switch (node->op()) {
+ case ArithAdd:
+ case ArithSub:
+ case ValueAdd:
+ if (resultProfile->didObserveDouble())
+ node->mergeFlags(NodeMayHaveDoubleResult);
+ if (resultProfile->didObserveNonNumber())
+ node->mergeFlags(NodeMayHaveNonNumberResult);
+ break;
- case ArithMul: {
- if (resultProfile->didObserveInt52Overflow())
- node->mergeFlags(NodeMayOverflowInt52);
- if (resultProfile->didObserveInt32Overflow() || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
- node->mergeFlags(NodeMayOverflowInt32InBaseline);
- if (resultProfile->didObserveNegZeroDouble() || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
- node->mergeFlags(NodeMayNegZeroInBaseline);
- if (resultProfile->didObserveDouble())
- node->mergeFlags(NodeMayHaveDoubleResult);
- if (resultProfile->didObserveNonNumber())
- node->mergeFlags(NodeMayHaveNonNumberResult);
- break;
- }
+ case ArithMul: {
+ if (resultProfile->didObserveInt52Overflow())
+ node->mergeFlags(NodeMayOverflowInt52);
+ if (resultProfile->didObserveInt32Overflow() || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
+ node->mergeFlags(NodeMayOverflowInt32InBaseline);
+ if (resultProfile->didObserveNegZeroDouble() || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
+ node->mergeFlags(NodeMayNegZeroInBaseline);
+ if (resultProfile->didObserveDouble())
+ node->mergeFlags(NodeMayHaveDoubleResult);
+ if (resultProfile->didObserveNonNumber())
+ node->mergeFlags(NodeMayHaveNonNumberResult);
+ break;
+ }
- default:
- break;
+ default:
+ break;
+ }
}
}
if (node->hasHeapPrediction())
return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
- if (ResultProfile* result = profiledBlock->resultProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex))
- return result;
+ {
+ ConcurrentJITLocker locker(profiledBlock->m_lock);
+ if (profiledBlock->hasBaselineJITProfiling()) {
+ if (ResultProfile* result = profiledBlock->resultProfileForBytecodeOffset(locker, node->origin.semantic.bytecodeIndex))
+ return result;
+ }
+ }
switch (node->op()) {
case Identity:
if (patchBuffer.didFailToAllocate())
return CompilationFailed;
- m_codeBlock->setCalleeSaveRegisters(RegisterSet::llintBaselineCalleeSaveRegisters()); // Might be able to remove as this is probably already set to this value.
-
// Translate vPC offsets into addresses in JIT generated code, for switch tables.
for (unsigned i = 0; i < m_switches.size(); ++i) {
SwitchRecord record = m_switches[i];
void JITWorklist::compileNow(CodeBlock* codeBlock)
{
- // If this ever happens, we'll have a bad time because the baseline JIT does not clean up its
- // changes to the CodeBlock after a failed compilation.
- // FIXME: https://bugs.webkit.org/show_bug.cgi?id=158806
- RELEASE_ASSERT(!codeBlock->m_didFailJITCompilation);
-
DeferGC deferGC(codeBlock->vm()->heap);
if (codeBlock->jitType() != JITCode::InterpreterThunk)
return;
if (codeBlock->jitType() != JITCode::InterpreterThunk)
return;
+ // We do this in case we had previously attempted, and then failed, to compile with the
+ // baseline JIT.
+ codeBlock->resetJITData();
+
// OK, just compile it.
JIT::compile(codeBlock->vm(), codeBlock, JITCompilationMustSucceed);
codeBlock->ownerScriptExecutable()->installCode(codeBlock);