Inline caches should handle out-of-line offsets out-of-line
author fpizlo@apple.com <fpizlo@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Wed, 7 Oct 2015 17:28:38 +0000 (17:28 +0000)
committer fpizlo@apple.com <fpizlo@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Wed, 7 Oct 2015 17:28:38 +0000 (17:28 +0000)
https://bugs.webkit.org/show_bug.cgi?id=149869

Reviewed by Saam Barati.

If we want to have a concurrent copying GC, then we need a read barrier on copied space
pointers. That makes the convertible load portion of the get_by_id/put_by_id inline caches
rather challenging. Currently we have a load instruction that we can turn into an add
instruction - the add case is when we have an inline offset, and the load case is when we
have an out-of-line offset and we need to load a copied space pointer. But if the load from
copied space requires a barrier, then there is no easy way to convert that back to the inline
case.
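
For reference, a rough sketch of the two shapes the patched access can take today,
in illustrative C++ standing in for the generated code (Cell, Value, and the field
layout are stand-ins, not JSC source):

    #include <cstddef>
    #include <cstdint>

    using Value = uint64_t;

    struct Cell {
        uint32_t structureID;
        Value* outOfLineStorage;  // lives in copied space
        Value inlineStorage[6];   // lives directly inside the cell
    };

    // "Add" shape: an inline offset only needs an address computation on the cell.
    Value getInline(Cell* base, std::size_t index)
    {
        return base->inlineStorage[index];
    }

    // "Load" shape: an out-of-line offset must first load the copied-space pointer,
    // which is exactly the load that would need a read barrier.
    Value getOutOfLine(Cell* base, std::size_t index)
    {
        return base->outOfLineStorage[index];
    }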

This patch removes the convertible load. The inline path of get_by_id/put_by_id only handles
the inline offsets. Out-of-line offsets are now handled using out-of-line stubs.
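
To make the base-relative patching concrete, here is a minimal sketch assuming a
simplified object layout; the real helpers are offsetRelativeToBase() and
maxOffsetRelativeToBase() in runtime/JSObject.h (see the diff below), and the field
names and inline capacity used here are illustrative only:

    #include <cstddef>
    #include <cstdint>

    using EncodedJSValue = uint64_t;

    struct ObjectSketch {
        uint32_t structureID;
        uint32_t typeInfoAndFlags;
        void* butterfly;
        EncodedJSValue inlineStorage[6];
    };

    // With the convertible butterfly load gone, the constant patched into the
    // get_by_id/put_by_id fast-path load/store is measured from the object base:
    inline std::size_t inlineOffsetRelativeToBase(std::size_t inlineSlot)
    {
        return offsetof(ObjectSketch, inlineStorage) + inlineSlot * sizeof(EncodedJSValue);
    }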

* bytecode/StructureStubInfo.h:
* ftl/FTLInlineCacheSize.cpp:
(JSC::FTL::sizeOfGetById):
(JSC::FTL::sizeOfPutById):
* jit/JITInlineCacheGenerator.cpp:
(JSC::JITByIdGenerator::finalize):
(JSC::JITByIdGenerator::generateFastPathChecks):
(JSC::JITGetByIdGenerator::JITGetByIdGenerator):
(JSC::JITGetByIdGenerator::generateFastPath):
(JSC::JITPutByIdGenerator::JITPutByIdGenerator):
(JSC::JITPutByIdGenerator::generateFastPath):
* jit/JITInlineCacheGenerator.h:
* jit/Repatch.cpp:
(JSC::repatchByIdSelfAccess):
(JSC::tryCacheGetByID):
(JSC::tryCachePutByID):
* runtime/JSObject.h:
(JSC::JSObject::butterflyTotalSize):
(JSC::indexRelativeToBase):
(JSC::offsetRelativeToBase):
(JSC::maxOffsetRelativeToBase):
(JSC::makeIdentifier):
(JSC::offsetRelativeToPatchedStorage): Deleted.
(JSC::maxOffsetRelativeToPatchedStorage): Deleted.

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@190672 268f45cc-cd09-0410-ab3c-d52691b4dbfc

Source/JavaScriptCore/ChangeLog
Source/JavaScriptCore/bytecode/StructureStubInfo.h
Source/JavaScriptCore/ftl/FTLInlineCacheSize.cpp
Source/JavaScriptCore/jit/JITInlineCacheGenerator.cpp
Source/JavaScriptCore/jit/JITInlineCacheGenerator.h
Source/JavaScriptCore/jit/Repatch.cpp
Source/JavaScriptCore/runtime/JSObject.h

index fdb1d11..903923e 100644
@@ -1,3 +1,46 @@
+2015-10-06  Filip Pizlo  <fpizlo@apple.com>
+
+        Inline caches should handle out-of-line offsets out-of-line
+        https://bugs.webkit.org/show_bug.cgi?id=149869
+
+        Reviewed by Saam Barati.
+
+        If we want to have a concurrent copying GC, then we need a read barrier on copied space
+        pointers. That makes the convertible load portion of the get_by_id/put_by_id inline caches
+        rather challenging. Currently we have a load instruction that we can turn into an add
+        instruction - the add case is when we have an inline offset, and the load case is when we
+        have an out-of-line offset and we need to load a copied space pointer. But if the load from
+        copied space requires a barrier, then there is no easy way to convert that back to the inline
+        case.
+
+        This patch removes the convertible load. The inline path of get_by_id/put_by_id only handles
+        the inline offsets. Out-of-line offsets are now handled using out-of-line stubs.
+
+        * bytecode/StructureStubInfo.h:
+        * ftl/FTLInlineCacheSize.cpp:
+        (JSC::FTL::sizeOfGetById):
+        (JSC::FTL::sizeOfPutById):
+        * jit/JITInlineCacheGenerator.cpp:
+        (JSC::JITByIdGenerator::finalize):
+        (JSC::JITByIdGenerator::generateFastPathChecks):
+        (JSC::JITGetByIdGenerator::JITGetByIdGenerator):
+        (JSC::JITGetByIdGenerator::generateFastPath):
+        (JSC::JITPutByIdGenerator::JITPutByIdGenerator):
+        (JSC::JITPutByIdGenerator::generateFastPath):
+        * jit/JITInlineCacheGenerator.h:
+        * jit/Repatch.cpp:
+        (JSC::repatchByIdSelfAccess):
+        (JSC::tryCacheGetByID):
+        (JSC::tryCachePutByID):
+        * runtime/JSObject.h:
+        (JSC::JSObject::butterflyTotalSize):
+        (JSC::indexRelativeToBase):
+        (JSC::offsetRelativeToBase):
+        (JSC::maxOffsetRelativeToBase):
+        (JSC::makeIdentifier):
+        (JSC::offsetRelativeToPatchedStorage): Deleted.
+        (JSC::maxOffsetRelativeToPatchedStorage): Deleted.
+
 2015-10-07  Commit Queue  <commit-queue@webkit.org>
 
         Unreviewed, rolling out r190664.
index 68df0c9..30eaa51 100644
@@ -139,7 +139,6 @@ public:
         int8_t valueGPR;
         RegisterSet usedRegisters;
         int32_t deltaCallToDone;
-        int32_t deltaCallToStorageLoad;
         int32_t deltaCallToJump;
         int32_t deltaCallToSlowCase;
         int32_t deltaCheckImmToCall;
index 94c7ace..4124427 100644
@@ -49,18 +49,18 @@ using namespace DFG;
 size_t sizeOfGetById()
 {
 #if CPU(ARM64)
-    return 36;
+    return 32;
 #else
-    return 30;
+    return 27;
 #endif
 }
 
 size_t sizeOfPutById()
 {
 #if CPU(ARM64)
-    return 44;
+    return 40;
 #else
-    return 32;
+    return 29;
 #endif
 }
 
index 53af500..898a690 100644
@@ -93,8 +93,6 @@ void JITByIdGenerator::finalize(LinkBuffer& fastPath, LinkBuffer& slowPath)
         callReturnLocation, slowPath.locationOf(m_slowPathBegin));
     m_stubInfo->patch.deltaCallToDone = MacroAssembler::differenceBetweenCodePtr(
         callReturnLocation, fastPath.locationOf(m_done));
-    m_stubInfo->patch.deltaCallToStorageLoad = MacroAssembler::differenceBetweenCodePtr(
-        callReturnLocation, fastPath.locationOf(m_propertyStorageLoad));
 }
 
 void JITByIdGenerator::finalize(LinkBuffer& linkBuffer)
@@ -102,15 +100,12 @@ void JITByIdGenerator::finalize(LinkBuffer& linkBuffer)
     finalize(linkBuffer, linkBuffer);
 }
 
-void JITByIdGenerator::generateFastPathChecks(MacroAssembler& jit, GPRReg butterfly)
+void JITByIdGenerator::generateFastPathChecks(MacroAssembler& jit)
 {
     m_structureCheck = jit.patchableBranch32WithPatch(
         MacroAssembler::NotEqual,
         MacroAssembler::Address(m_base.payloadGPR(), JSCell::structureIDOffset()),
         m_structureImm, MacroAssembler::TrustedImm32(0));
-    
-    m_propertyStorageLoad = jit.convertibleLoadPtr(
-        MacroAssembler::Address(m_base.payloadGPR(), JSObject::butterflyOffset()), butterfly);
 }
 
 JITGetByIdGenerator::JITGetByIdGenerator(
@@ -124,16 +119,16 @@ JITGetByIdGenerator::JITGetByIdGenerator(
 
 void JITGetByIdGenerator::generateFastPath(MacroAssembler& jit)
 {
-    generateFastPathChecks(jit, m_value.payloadGPR());
+    generateFastPathChecks(jit);
     
 #if USE(JSVALUE64)
     m_loadOrStore = jit.load64WithCompactAddressOffsetPatch(
-        MacroAssembler::Address(m_value.payloadGPR(), 0), m_value.payloadGPR()).label();
+        MacroAssembler::Address(m_base.payloadGPR(), 0), m_value.payloadGPR()).label();
 #else
     m_tagLoadOrStore = jit.load32WithCompactAddressOffsetPatch(
-        MacroAssembler::Address(m_value.payloadGPR(), 0), m_value.tagGPR()).label();
+        MacroAssembler::Address(m_base.payloadGPR(), 0), m_value.tagGPR()).label();
     m_loadOrStore = jit.load32WithCompactAddressOffsetPatch(
-        MacroAssembler::Address(m_value.payloadGPR(), 0), m_value.payloadGPR()).label();
+        MacroAssembler::Address(m_base.payloadGPR(), 0), m_value.payloadGPR()).label();
 #endif
     
     m_done = jit.label();
@@ -145,7 +140,6 @@ JITPutByIdGenerator::JITPutByIdGenerator(
     ECMAMode ecmaMode, PutKind putKind)
     : JITByIdGenerator(
         codeBlock, codeOrigin, callSite, AccessType::Put, usedRegisters, base, value, spillMode)
-    , m_scratch(scratch)
     , m_ecmaMode(ecmaMode)
     , m_putKind(putKind)
 {
@@ -154,16 +148,16 @@ JITPutByIdGenerator::JITPutByIdGenerator(
 
 void JITPutByIdGenerator::generateFastPath(MacroAssembler& jit)
 {
-    generateFastPathChecks(jit, m_scratch);
+    generateFastPathChecks(jit);
     
 #if USE(JSVALUE64)
     m_loadOrStore = jit.store64WithAddressOffsetPatch(
-        m_value.payloadGPR(), MacroAssembler::Address(m_scratch, 0)).label();
+        m_value.payloadGPR(), MacroAssembler::Address(m_base.payloadGPR(), 0)).label();
 #else
     m_tagLoadOrStore = jit.store32WithAddressOffsetPatch(
-        m_value.tagGPR(), MacroAssembler::Address(m_scratch, 0)).label();
+        m_value.tagGPR(), MacroAssembler::Address(m_base.payloadGPR(), 0)).label();
     m_loadOrStore = jit.store32WithAddressOffsetPatch(
-        m_value.payloadGPR(), MacroAssembler::Address(m_scratch, 0)).label();
+        m_value.payloadGPR(), MacroAssembler::Address(m_base.payloadGPR(), 0)).label();
 #endif
     
     m_done = jit.label();
index ebede9a..65fae04 100644
@@ -74,14 +74,13 @@ public:
     void finalize(LinkBuffer&);
     
 protected:
-    void generateFastPathChecks(MacroAssembler&, GPRReg butterfly);
+    void generateFastPathChecks(MacroAssembler&);
     
     JSValueRegs m_base;
     JSValueRegs m_value;
     
     MacroAssembler::DataLabel32 m_structureImm;
     MacroAssembler::PatchableJump m_structureCheck;
-    MacroAssembler::ConvertibleLoadLabel m_propertyStorageLoad;
     AssemblerLabel m_loadOrStore;
 #if USE(JSVALUE32_64)
     AssemblerLabel m_tagLoadOrStore;
@@ -115,7 +114,6 @@ public:
     V_JITOperation_ESsiJJI slowPathFunction();
 
 private:
-    GPRReg m_scratch;
     ECMAMode m_ecmaMode;
     PutKind m_putKind;
 };
index 1e895a5..1db251f 100644
@@ -104,23 +104,18 @@ static void repatchByIdSelfAccess(
     MacroAssembler::repatchInt32(
         stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall),
         bitwise_cast<int32_t>(structure->id()));
-    CodeLocationConvertibleLoad convertibleLoad = stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad);
-    if (isOutOfLineOffset(offset))
-        MacroAssembler::replaceWithLoad(convertibleLoad);
-    else
-        MacroAssembler::replaceWithAddressComputation(convertibleLoad);
 #if USE(JSVALUE64)
     if (compact)
-        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
+        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
     else
-        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
+        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToBase(offset));
 #elif USE(JSVALUE32_64)
     if (compact) {
-        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
-        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+        MacroAssembler::repatchCompact(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
     } else {
-        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
-        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
+        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
+        MacroAssembler::repatchInt32(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToBase(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
     }
 #endif
 }
@@ -260,7 +255,8 @@ static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, con
             && slot.isCacheableValue()
             && slot.slotBase() == baseValue
             && !slot.watchpointSet()
-            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))
+            && isInlineOffset(slot.cachedOffset())
+            && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
             && action == AttemptToCache
             && !structure->needImpurePropertyWatchpoint()
             && !loadTargetFromProxy) {
@@ -375,9 +371,9 @@ static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Str
         if (slot.type() == PutPropertySlot::ExistingProperty) {
             structure->didCachePropertyReplacement(vm, slot.cachedOffset());
         
-            ptrdiff_t offsetToPatchedStorage = offsetRelativeToPatchedStorage(slot.cachedOffset());
             if (stubInfo.cacheType == CacheType::Unset
-                && MacroAssembler::isPtrAlignedAddressOffset(offsetToPatchedStorage)
+                && isInlineOffset(slot.cachedOffset())
+                && MacroAssembler::isPtrAlignedAddressOffset(maxOffsetRelativeToBase(slot.cachedOffset()))
                 && !structure->needImpurePropertyWatchpoint()
                 && !structure->inferredTypeFor(ident.impl())) {
 
index 47eb78c..93fcb69 100644
@@ -1406,28 +1406,6 @@ inline size_t JSObject::butterflyTotalSize()
     return Butterfly::totalSize(preCapacity, structure->outOfLineCapacity(), hasIndexingHeader, indexingPayloadSizeInBytes);
 }
 
-// Helpers for patching code where you want to emit a load or store and
-// the base is:
-// For inline offsets: a pointer to the out-of-line storage pointer.
-// For out-of-line offsets: the base of the out-of-line storage.
-inline size_t offsetRelativeToPatchedStorage(PropertyOffset offset)
-{
-    if (isOutOfLineOffset(offset))
-        return sizeof(EncodedJSValue) * offsetInButterfly(offset);
-    return JSObject::offsetOfInlineStorage() - JSObject::butterflyOffset() + sizeof(EncodedJSValue) * offsetInInlineStorage(offset);
-}
-
-// Returns the maximum offset (away from zero) a load instruction will encode.
-inline size_t maxOffsetRelativeToPatchedStorage(PropertyOffset offset)
-{
-    ptrdiff_t addressOffset = static_cast<ptrdiff_t>(offsetRelativeToPatchedStorage(offset));
-#if USE(JSVALUE32_64)
-    if (addressOffset >= 0)
-        return static_cast<size_t>(addressOffset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag);
-#endif
-    return static_cast<size_t>(addressOffset);
-}
-
 inline int indexRelativeToBase(PropertyOffset offset)
 {
     if (isOutOfLineOffset(offset))
@@ -1443,6 +1421,17 @@ inline int offsetRelativeToBase(PropertyOffset offset)
     return JSObject::offsetOfInlineStorage() + offsetInInlineStorage(offset) * sizeof(EncodedJSValue);
 }
 
+// Returns the maximum offset (away from zero) a load instruction will encode.
+inline size_t maxOffsetRelativeToBase(PropertyOffset offset)
+{
+    ptrdiff_t addressOffset = offsetRelativeToBase(offset);
+#if USE(JSVALUE32_64)
+    if (addressOffset >= 0)
+        return static_cast<size_t>(addressOffset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag);
+#endif
+    return static_cast<size_t>(addressOffset);
+}
+
 COMPILE_ASSERT(!(sizeof(JSObject) % sizeof(WriteBarrierBase<Unknown>)), JSObject_inline_storage_has_correct_alignment);
 
 ALWAYS_INLINE Identifier makeIdentifier(VM& vm, const char* name)