https://bugs.webkit.org/show_bug.cgi?id=189166
Reviewed by Mark Lam.
* bytecode/AccessCase.cpp:
(JSC::AccessCase::generateImpl):
* bytecode/GetterSetterAccessCase.cpp:
(JSC::GetterSetterAccessCase::emitDOMJITGetter):
* bytecode/InlineAccess.cpp:
(JSC::getScratchRegister):
* bytecode/PolymorphicAccess.cpp:
(JSC::PolymorphicAccess::regenerate):
* bytecode/StructureStubInfo.h:
(JSC::StructureStubInfo::valueRegs const):
* jit/JITInlineCacheGenerator.cpp:
(JSC::JITByIdGenerator::JITByIdGenerator):
(JSC::JITGetByIdWithThisGenerator::JITGetByIdWithThisGenerator):
(JSC::JITInstanceOfGenerator::JITInstanceOfGenerator):
git-svn-id: https://svn.webkit.org/repository/webkit/trunk@235527 268f45cc-cd09-0410-ab3c-d52691b4dbfc
2018-08-30 Saam barati <sbarati@apple.com>
+ Switch int8_t to GPRReg in StructureStubInfo because sizeof(GPRReg) == sizeof(int8_t)
+ https://bugs.webkit.org/show_bug.cgi?id=189166
+
+ Reviewed by Mark Lam.
+
+ * bytecode/AccessCase.cpp:
+ (JSC::AccessCase::generateImpl):
+ * bytecode/GetterSetterAccessCase.cpp:
+ (JSC::GetterSetterAccessCase::emitDOMJITGetter):
+ * bytecode/InlineAccess.cpp:
+ (JSC::getScratchRegister):
+ * bytecode/PolymorphicAccess.cpp:
+ (JSC::PolymorphicAccess::regenerate):
+ * bytecode/StructureStubInfo.h:
+ (JSC::StructureStubInfo::valueRegs const):
+ * jit/JITInlineCacheGenerator.cpp:
+ (JSC::JITByIdGenerator::JITByIdGenerator):
+ (JSC::JITGetByIdWithThisGenerator::JITGetByIdWithThisGenerator):
+ (JSC::JITInstanceOfGenerator::JITInstanceOfGenerator):
+
+2018-08-30 Saam barati <sbarati@apple.com>
+
InlineAccess should do StringLength
https://bugs.webkit.org/show_bug.cgi?id=158911
ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
- allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
+ allocator.lock(stubInfo.patch.baseTagGPR);
#endif
allocator.lock(valueRegs);
allocator.lock(scratchGPR);
ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
- allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
+ allocator.lock(stubInfo.patch.baseTagGPR);
#endif
allocator.lock(valueRegs);
allocator.lock(scratchGPR);
{
ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
allocator.lock(stubInfo.baseGPR());
- allocator.lock(static_cast<GPRReg>(stubInfo.patch.valueGPR));
+ allocator.lock(stubInfo.patch.valueGPR);
#if USE(JSVALUE32_64)
- allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
- allocator.lock(static_cast<GPRReg>(stubInfo.patch.valueTagGPR));
+ allocator.lock(stubInfo.patch.baseTagGPR);
+ allocator.lock(stubInfo.patch.valueTagGPR);
#endif
GPRReg scratch = allocator.allocateScratchGPR();
if (allocator.didReuseRegisters())
state.ident = &ident;
state.baseGPR = stubInfo.baseGPR();
- state.thisGPR = static_cast<GPRReg>(stubInfo.patch.thisGPR);
+ state.thisGPR = stubInfo.patch.thisGPR;
state.valueRegs = stubInfo.valueRegs();
ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
allocator.lock(state.thisGPR);
allocator.lock(state.valueRegs);
#if USE(JSVALUE32_64)
- allocator.lock(static_cast<GPRReg>(stubInfo.patch.baseTagGPR));
+ allocator.lock(stubInfo.patch.baseTagGPR);
#endif
state.scratchGPR = allocator.allocateScratchGPR();
int32_t deltaFromStartToSlowPathCallLocation;
int32_t deltaFromStartToSlowPathStart;
- int8_t baseGPR;
- int8_t valueGPR;
- int8_t thisGPR;
+ GPRReg baseGPR;
+ GPRReg valueGPR;
+ GPRReg thisGPR;
#if USE(JSVALUE32_64)
- int8_t valueTagGPR;
- int8_t baseTagGPR;
- int8_t thisTagGPR;
+ GPRReg valueTagGPR;
+ GPRReg baseTagGPR;
+ GPRReg thisTagGPR;
#endif
} patch;
GPRReg baseGPR() const
{
- return static_cast<GPRReg>(patch.baseGPR);
+ return patch.baseGPR;
}
CodeLocationCall<JSInternalPtrTag> slowPathCallLocation() { return patch.start.callAtOffset<JSInternalPtrTag>(patch.deltaFromStartToSlowPathCallLocation); }
{
return JSValueRegs(
#if USE(JSVALUE32_64)
- static_cast<GPRReg>(patch.valueTagGPR),
+ patch.valueTagGPR,
#endif
- static_cast<GPRReg>(patch.valueGPR));
+ patch.valueGPR);
}
, m_base(base)
, m_value(value)
{
- m_stubInfo->patch.baseGPR = static_cast<int8_t>(base.payloadGPR());
- m_stubInfo->patch.valueGPR = static_cast<int8_t>(value.payloadGPR());
- m_stubInfo->patch.thisGPR = static_cast<int8_t>(InvalidGPRReg);
+ m_stubInfo->patch.baseGPR = base.payloadGPR();
+ m_stubInfo->patch.valueGPR = value.payloadGPR();
+ m_stubInfo->patch.thisGPR = InvalidGPRReg;
#if USE(JSVALUE32_64)
- m_stubInfo->patch.baseTagGPR = static_cast<int8_t>(base.tagGPR());
- m_stubInfo->patch.valueTagGPR = static_cast<int8_t>(value.tagGPR());
- m_stubInfo->patch.thisTagGPR = static_cast<int8_t>(InvalidGPRReg);
+ m_stubInfo->patch.baseTagGPR = base.tagGPR();
+ m_stubInfo->patch.valueTagGPR = value.tagGPR();
+ m_stubInfo->patch.thisTagGPR = InvalidGPRReg;
#endif
}
{
RELEASE_ASSERT(thisRegs.payloadGPR() != thisRegs.tagGPR());
- m_stubInfo->patch.thisGPR = static_cast<int8_t>(thisRegs.payloadGPR());
+ m_stubInfo->patch.thisGPR = thisRegs.payloadGPR();
#if USE(JSVALUE32_64)
- m_stubInfo->patch.thisTagGPR = static_cast<int8_t>(thisRegs.tagGPR());
+ m_stubInfo->patch.thisTagGPR = thisRegs.tagGPR();
#endif
}
: JITInlineCacheGenerator(
codeBlock, codeOrigin, callSiteIndex, AccessType::InstanceOf, usedRegisters)
{
- m_stubInfo->patch.baseGPR = static_cast<int8_t>(value);
- m_stubInfo->patch.valueGPR = static_cast<int8_t>(result);
- m_stubInfo->patch.thisGPR = static_cast<int8_t>(prototype);
+ m_stubInfo->patch.baseGPR = value;
+ m_stubInfo->patch.valueGPR = result;
+ m_stubInfo->patch.thisGPR = prototype;
#if USE(JSVALUE32_64)
- m_stubInfo->patch.baseTagGPR = static_cast<int8_t>(InvalidGPRReg);
- m_stubInfo->patch.valueTagGPR = static_cast<int8_t>(InvalidGPRReg);
- m_stubInfo->patch.thisTagGPR = static_cast<int8_t>(InvalidGPRReg);
+ m_stubInfo->patch.baseTagGPR = InvalidGPRReg;
+ m_stubInfo->patch.valueTagGPR = InvalidGPRReg;
+ m_stubInfo->patch.thisTagGPR = InvalidGPRReg;
#endif
m_stubInfo->patch.usedRegisters.clear(result);