https://bugs.webkit.org/show_bug.cgi?id=148741
Reviewed by Geoffrey Garen.
This removes our reliance on RepatchBuffer having a pointer to CodeBlock. This is in
preparation for removing RepatchBuffer entirely (see
https://bugs.webkit.org/show_bug.cgi?id=148742). In the longer term, this is necessary
for making inline cache code, particularly in StructureStubInfo, more self-contained.
Currently StructureStubInfo relies on very pointless-looking methods in CodeBlock to
clear itself, and the only thing that those methods do is create a RepatchBuffer. It's
quite silly.
* assembler/LinkBuffer.cpp:
(JSC::LinkBuffer::allocate):
(JSC::LinkBuffer::performFinalization):
* assembler/RepatchBuffer.h:
(JSC::RepatchBuffer::RepatchBuffer):
(JSC::RepatchBuffer::~RepatchBuffer):
(JSC::RepatchBuffer::relink):
(JSC::RepatchBuffer::revertJumpReplacementToPatchableBranch32WithPatch):
(JSC::RepatchBuffer::codeBlock): Deleted.
* bytecode/CallLinkInfo.cpp:
(JSC::CallLinkInfo::clearStub):
(JSC::CallLinkInfo::unlink):
(JSC::CallLinkInfo::visitWeak):
* bytecode/CallLinkInfo.h:
(JSC::CallLinkInfo::registerPreservationMode):
(JSC::CallLinkInfo::isLinked):
(JSC::CallLinkInfo::setUpCall):
(JSC::CallLinkInfo::codeOrigin):
* bytecode/CodeBlock.cpp:
(JSC::CodeBlock::finalizeUnconditionally):
(JSC::CodeBlock::resetStubInternal):
(JSC::CodeBlock::unlinkIncomingCalls):
* bytecode/PolymorphicGetByIdList.cpp:
(JSC::GetByIdAccess::fromStructureStubInfo):
(JSC::GetByIdAccess::visitWeak):
(JSC::PolymorphicGetByIdList::didSelfPatching):
(JSC::PolymorphicGetByIdList::visitWeak):
* bytecode/PolymorphicGetByIdList.h:
(JSC::GetByIdAccess::doesCalls):
* bytecode/PolymorphicPutByIdList.cpp:
(JSC::PutByIdAccess::fromStructureStubInfo):
(JSC::PutByIdAccess::visitWeak):
(JSC::PolymorphicPutByIdList::addAccess):
(JSC::PolymorphicPutByIdList::visitWeak):
* bytecode/PolymorphicPutByIdList.h:
(JSC::PutByIdAccess::customSetter):
(JSC::PolymorphicPutByIdList::kind):
* bytecode/StructureStubInfo.cpp:
(JSC::StructureStubInfo::deref):
(JSC::StructureStubInfo::visitWeakReferences):
* bytecode/StructureStubInfo.h:
(JSC::StructureStubInfo::seenOnce):
* jit/AccessorCallJITStubRoutine.cpp:
(JSC::AccessorCallJITStubRoutine::~AccessorCallJITStubRoutine):
(JSC::AccessorCallJITStubRoutine::visitWeak):
* jit/AccessorCallJITStubRoutine.h:
* jit/ExecutableAllocator.h:
(JSC::ExecutableAllocator::makeWritable): Deleted.
(JSC::ExecutableAllocator::makeExecutable): Deleted.
(JSC::ExecutableAllocator::allocator): Deleted.
* jit/JITStubRoutine.cpp:
(JSC::JITStubRoutine::~JITStubRoutine):
(JSC::JITStubRoutine::visitWeak):
* jit/JITStubRoutine.h:
* jit/PolymorphicCallStubRoutine.cpp:
(JSC::PolymorphicCallNode::~PolymorphicCallNode):
(JSC::PolymorphicCallNode::unlink):
(JSC::PolymorphicCallStubRoutine::clearCallNodesFor):
(JSC::PolymorphicCallStubRoutine::visitWeak):
* jit/PolymorphicCallStubRoutine.h:
(JSC::PolymorphicCallNode::hasCallLinkInfo):
* jit/Repatch.cpp:
(JSC::readCallTarget):
(JSC::repatchCall):
(JSC::repatchByIdSelfAccess):
(JSC::tryCacheGetByID):
(JSC::tryCachePutByID):
(JSC::tryBuildPutByIdList):
(JSC::revertCall):
(JSC::unlinkFor):
(JSC::linkVirtualFor):
(JSC::linkPolymorphicCall):
(JSC::resetGetByID):
(JSC::resetPutByID):
(JSC::resetIn):
* jit/Repatch.h:
git-svn-id: https://svn.webkit.org/repository/webkit/trunk@189278 268f45cc-cd09-0410-ab3c-d52691b4dbfc
+2015-09-03 Filip Pizlo <fpizlo@apple.com>
+
+ RepatchBuffer should be stateless
+ https://bugs.webkit.org/show_bug.cgi?id=148741
+
+ Reviewed by Geoffrey Garen.
+
+ This removes our reliance on RepatchBuffer having a pointer to CodeBlock. This is in
+ preparation for removing RepatchBuffer entirely (see
+ https://bugs.webkit.org/show_bug.cgi?id=148742). In the longer term, this is necessary
+ for making inline cache code, particularly in StructureStubInfo, more self-contained.
+ Currently StructureStubInfo relies on very pointless-looking methods in CodeBlock to
+ clear itself, and the only thing that those methods do is create a RepatchBuffer. It's
+ quite silly.
+
+ * assembler/LinkBuffer.cpp:
+ (JSC::LinkBuffer::allocate):
+ (JSC::LinkBuffer::performFinalization):
+ * assembler/RepatchBuffer.h:
+ (JSC::RepatchBuffer::RepatchBuffer):
+ (JSC::RepatchBuffer::~RepatchBuffer):
+ (JSC::RepatchBuffer::relink):
+ (JSC::RepatchBuffer::revertJumpReplacementToPatchableBranch32WithPatch):
+ (JSC::RepatchBuffer::codeBlock): Deleted.
+ * bytecode/CallLinkInfo.cpp:
+ (JSC::CallLinkInfo::clearStub):
+ (JSC::CallLinkInfo::unlink):
+ (JSC::CallLinkInfo::visitWeak):
+ * bytecode/CallLinkInfo.h:
+ (JSC::CallLinkInfo::registerPreservationMode):
+ (JSC::CallLinkInfo::isLinked):
+ (JSC::CallLinkInfo::setUpCall):
+ (JSC::CallLinkInfo::codeOrigin):
+ * bytecode/CodeBlock.cpp:
+ (JSC::CodeBlock::finalizeUnconditionally):
+ (JSC::CodeBlock::resetStubInternal):
+ (JSC::CodeBlock::unlinkIncomingCalls):
+ * bytecode/PolymorphicGetByIdList.cpp:
+ (JSC::GetByIdAccess::fromStructureStubInfo):
+ (JSC::GetByIdAccess::visitWeak):
+ (JSC::PolymorphicGetByIdList::didSelfPatching):
+ (JSC::PolymorphicGetByIdList::visitWeak):
+ * bytecode/PolymorphicGetByIdList.h:
+ (JSC::GetByIdAccess::doesCalls):
+ * bytecode/PolymorphicPutByIdList.cpp:
+ (JSC::PutByIdAccess::fromStructureStubInfo):
+ (JSC::PutByIdAccess::visitWeak):
+ (JSC::PolymorphicPutByIdList::addAccess):
+ (JSC::PolymorphicPutByIdList::visitWeak):
+ * bytecode/PolymorphicPutByIdList.h:
+ (JSC::PutByIdAccess::customSetter):
+ (JSC::PolymorphicPutByIdList::kind):
+ * bytecode/StructureStubInfo.cpp:
+ (JSC::StructureStubInfo::deref):
+ (JSC::StructureStubInfo::visitWeakReferences):
+ * bytecode/StructureStubInfo.h:
+ (JSC::StructureStubInfo::seenOnce):
+ * jit/AccessorCallJITStubRoutine.cpp:
+ (JSC::AccessorCallJITStubRoutine::~AccessorCallJITStubRoutine):
+ (JSC::AccessorCallJITStubRoutine::visitWeak):
+ * jit/AccessorCallJITStubRoutine.h:
+ * jit/ExecutableAllocator.h:
+ (JSC::ExecutableAllocator::makeWritable): Deleted.
+ (JSC::ExecutableAllocator::makeExecutable): Deleted.
+ (JSC::ExecutableAllocator::allocator): Deleted.
+ * jit/JITStubRoutine.cpp:
+ (JSC::JITStubRoutine::~JITStubRoutine):
+ (JSC::JITStubRoutine::visitWeak):
+ * jit/JITStubRoutine.h:
+ * jit/PolymorphicCallStubRoutine.cpp:
+ (JSC::PolymorphicCallNode::~PolymorphicCallNode):
+ (JSC::PolymorphicCallNode::unlink):
+ (JSC::PolymorphicCallStubRoutine::clearCallNodesFor):
+ (JSC::PolymorphicCallStubRoutine::visitWeak):
+ * jit/PolymorphicCallStubRoutine.h:
+ (JSC::PolymorphicCallNode::hasCallLinkInfo):
+ * jit/Repatch.cpp:
+ (JSC::readCallTarget):
+ (JSC::repatchCall):
+ (JSC::repatchByIdSelfAccess):
+ (JSC::tryCacheGetByID):
+ (JSC::tryCachePutByID):
+ (JSC::tryBuildPutByIdList):
+ (JSC::revertCall):
+ (JSC::unlinkFor):
+ (JSC::linkVirtualFor):
+ (JSC::linkPolymorphicCall):
+ (JSC::resetGetByID):
+ (JSC::resetPutByID):
+ (JSC::resetIn):
+ * jit/Repatch.h:
+
2015-09-02 Filip Pizlo <fpizlo@apple.com>
Replace all the various forms of branchStructure() with a single method in AssemblyHelpers
m_executableMemory = m_vm->executableAllocator.allocate(*m_vm, initialSize, ownerUID, effort);
if (!m_executableMemory)
return;
- ExecutableAllocator::makeWritable(m_executableMemory->start(), m_executableMemory->sizeInBytes());
m_code = m_executableMemory->start();
m_size = initialSize;
m_didAllocate = true;
m_completed = true;
#endif
-#if ENABLE(BRANCH_COMPACTION)
- ExecutableAllocator::makeExecutable(code(), m_initialSize);
-#else
- ExecutableAllocator::makeExecutable(code(), m_size);
-#endif
MacroAssembler::cacheFlush(code(), m_size);
}
/*
- * Copyright (C) 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2009, 2015 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
typedef MacroAssemblerCodePtr CodePtr;
public:
- RepatchBuffer(CodeBlock* codeBlock)
- : m_codeBlock(codeBlock)
+ RepatchBuffer(CodeBlock*)
{
-#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
- RefPtr<JITCode> code = codeBlock->jitCode();
- m_start = code->start();
- m_size = code->size();
-
- ExecutableAllocator::makeWritable(m_start, m_size);
-#endif
}
~RepatchBuffer()
{
-#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
- ExecutableAllocator::makeExecutable(m_start, m_size);
-#endif
}
- CodeBlock* codeBlock() const { return m_codeBlock; }
-
void relink(CodeLocationJump jump, CodeLocationLabel destination)
{
MacroAssembler::repatchJump(jump, destination);
{
MacroAssembler::revertJumpReplacementToPatchableBranch32WithPatch(instructionStart, address, value);
}
-
-private:
- CodeBlock* m_codeBlock;
-#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
- void* m_start;
- size_t m_size;
-#endif
};
} // namespace JSC
m_stub = nullptr;
}
-void CallLinkInfo::unlink(RepatchBuffer& repatchBuffer)
+void CallLinkInfo::unlink(VM& vm, RepatchBuffer& repatchBuffer)
{
if (!isLinked()) {
// We could be called even if we're not linked anymore because of how polymorphic calls
return;
}
- unlinkFor(repatchBuffer, *this);
+ unlinkFor(vm, repatchBuffer, *this);
// It will be on a list if the callee has a code block.
if (isOnList())
remove();
}
-void CallLinkInfo::visitWeak(RepatchBuffer& repatchBuffer)
+void CallLinkInfo::visitWeak(VM& vm, RepatchBuffer& repatchBuffer)
{
auto handleSpecificCallee = [&] (JSFunction* callee) {
if (Heap::isMarked(callee->executable()))
if (isLinked()) {
if (stub()) {
- if (!stub()->visitWeak(repatchBuffer)) {
+ if (!stub()->visitWeak(vm, repatchBuffer)) {
if (Options::verboseOSR()) {
dataLog(
- "Clearing closure call from ", *repatchBuffer.codeBlock(), " to ",
+ "Clearing closure call to ",
listDump(stub()->variants()), ", stub routine ", RawPointer(stub()),
".\n");
}
- unlink(repatchBuffer);
+ unlink(vm, repatchBuffer);
m_clearedByGC = true;
}
} else if (!Heap::isMarked(m_callee.get())) {
if (Options::verboseOSR()) {
dataLog(
- "Clearing call from ", *repatchBuffer.codeBlock(), " to ",
+ "Clearing call to ",
RawPointer(m_callee.get()), " (",
m_callee.get()->executable()->hashFor(specializationKind()),
").\n");
}
handleSpecificCallee(m_callee.get());
- unlink(repatchBuffer);
+ unlink(vm, repatchBuffer);
}
}
if (haveLastSeenCallee() && !Heap::isMarked(lastSeenCallee())) {
}
bool isLinked() { return m_stub || m_callee; }
- void unlink(RepatchBuffer&);
+ void unlink(VM&, RepatchBuffer&);
void setUpCall(CallType callType, CodeOrigin codeOrigin, unsigned calleeGPR)
{
return m_codeOrigin;
}
- void visitWeak(RepatchBuffer&);
+ void visitWeak(VM&, RepatchBuffer&);
private:
CodeLocationNearCall m_callReturnLocation;
RepatchBuffer repatchBuffer(this);
for (auto iter = callLinkInfosBegin(); !!iter; ++iter)
- (*iter)->visitWeak(repatchBuffer);
+ (*iter)->visitWeak(*vm(), repatchBuffer);
for (Bag<StructureStubInfo>::iterator iter = m_stubInfos.begin(); !!iter; ++iter) {
StructureStubInfo& stubInfo = **iter;
- if (stubInfo.visitWeakReferences(repatchBuffer))
+ if (stubInfo.visitWeakReferences(*vm(), repatchBuffer))
continue;
resetStubDuringGCInternal(repatchBuffer, stubInfo);
RELEASE_ASSERT(JITCode::isJIT(jitType()));
if (isGetByIdAccess(accessType))
- resetGetByID(repatchBuffer, stubInfo);
+ resetGetByID(repatchBuffer, this, stubInfo);
else if (isPutByIdAccess(accessType))
- resetPutByID(repatchBuffer, stubInfo);
+ resetPutByID(repatchBuffer, this, stubInfo);
else {
RELEASE_ASSERT(isInAccess(accessType));
- resetIn(repatchBuffer, stubInfo);
+ resetIn(repatchBuffer, this, stubInfo);
}
stubInfo.reset();
return;
RepatchBuffer repatchBuffer(this);
while (m_incomingCalls.begin() != m_incomingCalls.end())
- m_incomingCalls.begin()->unlink(repatchBuffer);
+ m_incomingCalls.begin()->unlink(*vm(), repatchBuffer);
while (m_incomingPolymorphicCalls.begin() != m_incomingPolymorphicCalls.end())
- m_incomingPolymorphicCalls.begin()->unlink(repatchBuffer);
+ m_incomingPolymorphicCalls.begin()->unlink(*vm(), repatchBuffer);
#endif // ENABLE(JIT)
}
return result;
}
-bool GetByIdAccess::visitWeak(RepatchBuffer& repatchBuffer) const
+bool GetByIdAccess::visitWeak(VM& vm, RepatchBuffer& repatchBuffer) const
{
if (m_structure && !Heap::isMarked(m_structure.get()))
return false;
if (!m_conditionSet.areStillLive())
return false;
- if (!m_stubRoutine->visitWeak(repatchBuffer))
+ if (!m_stubRoutine->visitWeak(vm, repatchBuffer))
return false;
return true;
}
return false;
}
-bool PolymorphicGetByIdList::visitWeak(RepatchBuffer& repatchBuffer) const
+bool PolymorphicGetByIdList::visitWeak(VM& vm, RepatchBuffer& repatchBuffer) const
{
for (unsigned i = size(); i--;) {
- if (!at(i).visitWeak(repatchBuffer))
+ if (!at(i).visitWeak(vm, repatchBuffer))
return false;
}
return true;
bool doesCalls() const { return type() == Getter || type() == CustomGetter; }
- bool visitWeak(RepatchBuffer&) const;
+ bool visitWeak(VM&, RepatchBuffer&) const;
private:
friend class CodeBlock;
bool didSelfPatching() const; // Are any of the accesses SimpleInline?
- bool visitWeak(RepatchBuffer&) const;
+ bool visitWeak(VM&, RepatchBuffer&) const;
private:
friend class CodeBlock;
return result;
}
-bool PutByIdAccess::visitWeak(RepatchBuffer& repatchBuffer) const
+bool PutByIdAccess::visitWeak(VM& vm, RepatchBuffer& repatchBuffer) const
{
if (!m_conditionSet.areStillLive())
return false;
RELEASE_ASSERT_NOT_REACHED();
return false;
}
- if (!m_stubRoutine->visitWeak(repatchBuffer))
+ if (!m_stubRoutine->visitWeak(vm, repatchBuffer))
return false;
return true;
}
m_list.last() = putByIdAccess;
}
-bool PolymorphicPutByIdList::visitWeak(RepatchBuffer& repatchBuffer) const
+bool PolymorphicPutByIdList::visitWeak(VM& vm, RepatchBuffer& repatchBuffer) const
{
for (unsigned i = 0; i < size(); ++i) {
- if (!at(i).visitWeak(repatchBuffer))
+ if (!at(i).visitWeak(vm, repatchBuffer))
return false;
}
return true;
return m_customSetter;
}
- bool visitWeak(RepatchBuffer&) const;
+ bool visitWeak(VM&, RepatchBuffer&) const;
private:
friend class CodeBlock;
PutKind kind() const { return m_kind; }
- bool visitWeak(RepatchBuffer&) const;
+ bool visitWeak(VM&, RepatchBuffer&) const;
private:
friend class CodeBlock;
}
}
-bool StructureStubInfo::visitWeakReferences(RepatchBuffer& repatchBuffer)
+bool StructureStubInfo::visitWeakReferences(VM& vm, RepatchBuffer& repatchBuffer)
{
switch (accessType) {
case access_get_by_id_self:
return false;
break;
case access_get_by_id_list: {
- if (!u.getByIdList.list->visitWeak(repatchBuffer))
+ if (!u.getByIdList.list->visitWeak(vm, repatchBuffer))
return false;
break;
}
return false;
break;
case access_put_by_id_list:
- if (!u.putByIdList.list->visitWeak(repatchBuffer))
+ if (!u.putByIdList.list->visitWeak(vm, repatchBuffer))
return false;
break;
case access_in_list: {
// outgoing GC pointers are known to point to currently marked objects; this method is
// allowed to accomplish this by either clearing those pointers somehow or by proving that
// they have already been marked. It is not allowed to mark new objects.
- bool visitWeakReferences(RepatchBuffer&);
+ bool visitWeakReferences(VM&, RepatchBuffer&);
bool seenOnce()
{
/*
- * Copyright (C) 2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2014, 2015 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
{
}
-bool AccessorCallJITStubRoutine::visitWeak(RepatchBuffer& repatchBuffer)
+bool AccessorCallJITStubRoutine::visitWeak(VM& vm, RepatchBuffer& repatchBuffer)
{
- m_callLinkInfo->visitWeak(repatchBuffer);
+ m_callLinkInfo->visitWeak(vm, repatchBuffer);
return true;
}
/*
- * Copyright (C) 2014 Apple Inc. All rights reserved.
+ * Copyright (C) 2014, 2015 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
virtual ~AccessorCallJITStubRoutine();
- virtual bool visitWeak(RepatchBuffer&) override;
+ virtual bool visitWeak(VM&, RepatchBuffer&) override;
std::unique_ptr<CallLinkInfo> m_callLinkInfo;
};
#define JIT_ALLOCATOR_LARGE_ALLOC_SIZE (pageSize() * 4)
-#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
-#define PROTECTION_FLAGS_RW (PROT_READ | PROT_WRITE)
-#define PROTECTION_FLAGS_RX (PROT_READ | PROT_EXEC)
-#define EXECUTABLE_POOL_WRITABLE false
-#else
#define EXECUTABLE_POOL_WRITABLE true
-#endif
namespace JSC {
RefPtr<ExecutableMemoryHandle> allocate(VM&, size_t sizeInBytes, void* ownerUID, JITCompilationEffort);
-#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
- static void makeWritable(void* start, size_t size)
- {
- reprotectRegion(start, size, Writable);
- }
-
- static void makeExecutable(void* start, size_t size)
- {
- reprotectRegion(start, size, Executable);
- }
-#else
- static void makeWritable(void*, size_t) {}
- static void makeExecutable(void*, size_t) {}
-#endif
-
static size_t committedByteCount();
-
-private:
-
-#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
- static void reprotectRegion(void*, size_t, ProtectionSetting);
-#if ENABLE(EXECUTABLE_ALLOCATOR_DEMAND)
- // We create a MetaAllocator for each JS global object.
- std::unique_ptr<DemandExecutableAllocator> m_allocator;
- DemandExecutableAllocator* allocator() { return m_allocator.get(); }
-#endif
-#endif
-
};
#endif // ENABLE(JIT) && ENABLE(ASSEMBLER)
JITStubRoutine::~JITStubRoutine() { }
-bool JITStubRoutine::visitWeak(RepatchBuffer&)
+bool JITStubRoutine::visitWeak(VM&, RepatchBuffer&)
{
return true;
}
// Return true if you are still valid after. Return false if you are now invalid. If you return
// false, you will usually not do any clearing because the idea is that you will simply be
// destroyed.
- virtual bool visitWeak(RepatchBuffer&);
+ virtual bool visitWeak(VM&, RepatchBuffer&);
protected:
virtual void observeZeroRefCount();
remove();
}
-void PolymorphicCallNode::unlink(RepatchBuffer& repatchBuffer)
+void PolymorphicCallNode::unlink(VM& vm, RepatchBuffer& repatchBuffer)
{
if (m_callLinkInfo) {
if (Options::showDisassembly())
dataLog("Unlinking polymorphic call at ", m_callLinkInfo->callReturnLocation(), ", ", m_callLinkInfo->codeOrigin(), "\n");
- m_callLinkInfo->unlink(repatchBuffer);
+ m_callLinkInfo->unlink(vm, repatchBuffer);
}
if (isOnList())
}
}
-bool PolymorphicCallStubRoutine::visitWeak(RepatchBuffer&)
+bool PolymorphicCallStubRoutine::visitWeak(VM&, RepatchBuffer&)
{
for (auto& variant : m_variants) {
if (!Heap::isMarked(variant.get()))
~PolymorphicCallNode();
- void unlink(RepatchBuffer&);
+ void unlink(VM&, RepatchBuffer&);
bool hasCallLinkInfo(CallLinkInfo* info) { return m_callLinkInfo == info; }
void clearCallLinkInfo();
void clearCallNodesFor(CallLinkInfo*);
- bool visitWeak(RepatchBuffer&) override;
+ bool visitWeak(VM&, RepatchBuffer&) override;
protected:
virtual void markRequiredObjectsInternal(SlotVisitor&) override;
// - tagTypeNumberRegister
// - tagMaskRegister
-static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
+static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
- CodeBlock* codeBlock = repatchBuffer.codeBlock();
if (codeBlock->jitType() == JITCode::FTLJIT) {
return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
MacroAssemblerCodePtr::createFromExecutableAddress(
result.executableAddress())).callTarget());
}
#else
- UNUSED_PARAM(repatchBuffer);
+ UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
return result;
}
-static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
+static void repatchCall(RepatchBuffer& repatchBuffer, CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
- CodeBlock* codeBlock = repatchBuffer.codeBlock();
if (codeBlock->jitType() == JITCode::FTLJIT) {
VM& vm = *codeBlock->vm();
FTL::Thunks& thunks = *vm.ftlThunks;
newCalleeFunction = FunctionPtr(
thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
}
+#else // ENABLE(FTL_JIT)
+ UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
repatchBuffer.relink(call, newCalleeFunction);
}
-static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
+static void repatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
- RepatchBuffer repatchBuffer(codeblock);
- repatchCall(repatchBuffer, call, newCalleeFunction);
+ RepatchBuffer repatchBuffer(codeBlock);
+ repatchCall(repatchBuffer, codeBlock, call, newCalleeFunction);
}
static void repatchByIdSelfAccess(
RepatchBuffer repatchBuffer(codeBlock);
// Only optimize once!
- repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);
+ repatchCall(repatchBuffer, codeBlock, stubInfo.callReturnLocation, slowPathFunction);
// Patch the structure check & the offset of the load.
repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
RepatchBuffer repatchBuffer(codeBlock);
replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
- repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
+ repatchCall(repatchBuffer, codeBlock, stubInfo.callReturnLocation, operationGetById);
return RetryCacheLater;
}
RepatchBuffer repatchBuffer(codeBlock);
replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
- repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);
+ repatchCall(repatchBuffer, codeBlock, stubInfo.callReturnLocation, operationGetById);
return RetryCacheLater;
}
stubInfo.callReturnLocation.jumpAtOffset(
stubInfo.patch.deltaCallToJump),
CodeLocationLabel(stubInfo.stubRoutine->code().code()));
- repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
+ repatchCall(repatchBuffer, codeBlock, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, conditionSet, putKind == Direct);
RepatchBuffer repatchBuffer(codeBlock);
repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
- repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
+ repatchCall(repatchBuffer, codeBlock, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
RELEASE_ASSERT(!list->isFull());
return RetryCacheLater;
}
RepatchBuffer repatchBuffer(codeBlock);
repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
if (list->isFull())
- repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
+ repatchCall(repatchBuffer, codeBlock, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
return RetryCacheLater;
}
RepatchBuffer repatchBuffer(codeBlock);
repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
if (list->isFull())
- repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
+ repatchCall(repatchBuffer, codeBlock, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
return RetryCacheLater;
}
}
void unlinkFor(
- RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo)
+ VM& vm, RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo)
{
if (Options::showDisassembly())
- dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), " in request from ", pointerDump(repatchBuffer.codeBlock()), "\n");
+ dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), "\n");
- VM* vm = repatchBuffer.codeBlock()->vm();
- revertCall(repatchBuffer, vm, callLinkInfo, vm->getCTIStub(linkCallThunkGenerator));
+ revertCall(repatchBuffer, &vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}
void linkVirtualFor(
callLinkInfo.remove();
}
-void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
+void resetGetByID(RepatchBuffer& repatchBuffer, CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
- repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
+ repatchCall(repatchBuffer, codeBlock, stubInfo.callReturnLocation, operationGetByIdOptimize);
CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
-void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
+void resetPutByID(RepatchBuffer& repatchBuffer, CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
- V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
+ V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.callReturnLocation).executableAddress());
V_JITOperation_ESsiJJI optimizedFunction;
if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
optimizedFunction = operationPutByIdStrictOptimize;
ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
optimizedFunction = operationPutByIdDirectNonStrictOptimize;
}
- repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
+ repatchCall(repatchBuffer, codeBlock, stubInfo.callReturnLocation, optimizedFunction);
CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
-void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
+void resetIn(RepatchBuffer& repatchBuffer, CodeBlock*, StructureStubInfo& stubInfo)
{
repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
void repatchIn(ExecState*, JSCell*, const Identifier&, bool wasFound, const PropertySlot&, StructureStubInfo&);
void linkFor(ExecState*, CallLinkInfo&, CodeBlock*, JSFunction* callee, MacroAssemblerCodePtr);
void linkSlowFor(ExecState*, CallLinkInfo&);
-void unlinkFor(RepatchBuffer&, CallLinkInfo&);
+void unlinkFor(VM&, RepatchBuffer&, CallLinkInfo&);
void linkVirtualFor(ExecState*, CallLinkInfo&);
void linkPolymorphicCall(ExecState*, CallLinkInfo&, CallVariant);
-void resetGetByID(RepatchBuffer&, StructureStubInfo&);
-void resetPutByID(RepatchBuffer&, StructureStubInfo&);
-void resetIn(RepatchBuffer&, StructureStubInfo&);
+void resetGetByID(RepatchBuffer&, CodeBlock*, StructureStubInfo&);
+void resetPutByID(RepatchBuffer&, CodeBlock*, StructureStubInfo&);
+void resetIn(RepatchBuffer&, CodeBlock*, StructureStubInfo&);
} // namespace JSC