[JSC] Rename LargeAllocation to PreciseAllocation
authorysuzuki@apple.com <ysuzuki@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Sat, 9 Nov 2019 03:45:18 +0000 (03:45 +0000)
committerysuzuki@apple.com <ysuzuki@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc>
Sat, 9 Nov 2019 03:45:18 +0000 (03:45 +0000)
https://bugs.webkit.org/show_bug.cgi?id=204040

Reviewed by Keith Miller.

After r252298, LargeAllocation is also used for small allocations.
This patch renames LargeAllocation to PreciseAllocation, since the new name better reflects the behavior.

* CMakeLists.txt:
* JavaScriptCore.xcodeproj/project.pbxproj:
* Sources.txt:
* heap/CellContainer.cpp:
(JSC::CellContainer::isNewlyAllocated const):
* heap/CellContainer.h:
(JSC::CellContainer::CellContainer):
(JSC::CellContainer::isMarkedBlock const):
(JSC::CellContainer::isPreciseAllocation const):
(JSC::CellContainer::preciseAllocation const):
(JSC::CellContainer::isLargeAllocation const): Deleted.
(JSC::CellContainer::largeAllocation const): Deleted.
* heap/CellContainerInlines.h:
(JSC::CellContainer::vm const):
(JSC::CellContainer::isMarked const):
(JSC::CellContainer::noteMarked):
(JSC::CellContainer::assertValidCell const):
(JSC::CellContainer::cellSize const):
(JSC::CellContainer::weakSet const):
(JSC::CellContainer::aboutToMark):
(JSC::CellContainer::areMarksStale const):
* heap/CompleteSubspace.cpp:
(JSC::CompleteSubspace::tryAllocateSlow):
(JSC::CompleteSubspace::reallocatePreciseAllocationNonVirtual):
(JSC::CompleteSubspace::reallocateLargeAllocationNonVirtual): Deleted.
* heap/CompleteSubspace.h:
* heap/Heap.cpp:
(JSC::Heap::sweepInFinalize):
* heap/HeapCell.cpp:
(JSC::HeapCell::isLive):
* heap/HeapCell.h:
* heap/HeapCellInlines.h:
(JSC::HeapCell::isPreciseAllocation const):
(JSC::HeapCell::cellContainer const):
(JSC::HeapCell::preciseAllocation const):
(JSC::HeapCell::vm const):
(JSC::HeapCell::cellSize const):
(JSC::HeapCell::cellAttributes const):
(JSC::HeapCell::subspace const):
(JSC::HeapCell::isLargeAllocation const): Deleted.
(JSC::HeapCell::largeAllocation const): Deleted.
* heap/HeapInlines.h:
(JSC::Heap::isMarked):
(JSC::Heap::testAndSetMarked):
* heap/HeapUtil.h:
(JSC::HeapUtil::findGCObjectPointersForMarking):
(JSC::HeapUtil::isPointerGCObjectJSCell):
(JSC::HeapUtil::isValueGCObject):
* heap/IsoAlignedMemoryAllocator.cpp:
(JSC::IsoAlignedMemoryAllocator::tryReallocateMemory):
* heap/IsoCellSetInlines.h:
(JSC::IsoCellSet::add):
(JSC::IsoCellSet::remove):
(JSC::IsoCellSet::contains const):
(JSC::IsoCellSet::forEachMarkedCell):
(JSC::IsoCellSet::forEachMarkedCellInParallel):
(JSC::IsoCellSet::forEachLiveCell):
* heap/IsoSubspace.cpp:
(JSC::IsoSubspace::tryAllocateFromLowerTier):
(JSC::IsoSubspace::sweepLowerTierCell):
(JSC::IsoSubspace::destroyLowerTierFreeList):
* heap/IsoSubspace.h:
* heap/MarkedSpace.cpp:
(JSC::MarkedSpace::freeMemory):
(JSC::MarkedSpace::lastChanceToFinalize):
(JSC::MarkedSpace::sweepPreciseAllocations):
(JSC::MarkedSpace::prepareForAllocation):
(JSC::MarkedSpace::enablePreciseAllocationTracking):
(JSC::MarkedSpace::prepareForConservativeScan):
(JSC::MarkedSpace::prepareForMarking):
(JSC::MarkedSpace::resumeAllocating):
(JSC::MarkedSpace::isPagedOut):
(JSC::MarkedSpace::beginMarking):
(JSC::MarkedSpace::endMarking):
(JSC::MarkedSpace::objectCount):
(JSC::MarkedSpace::size):
(JSC::MarkedSpace::sweepLargeAllocations): Deleted.
(JSC::MarkedSpace::enableLargeAllocationTracking): Deleted.
* heap/MarkedSpace.h:
(JSC::MarkedSpace:: const):
(JSC::MarkedSpace::preciseAllocationsNurseryOffset const):
(JSC::MarkedSpace::preciseAllocationsOffsetForThisCollection const):
(JSC::MarkedSpace::preciseAllocationsForThisCollectionBegin const):
(JSC::MarkedSpace::preciseAllocationsForThisCollectionEnd const):
(JSC::MarkedSpace::preciseAllocationsForThisCollectionSize const):
(JSC::MarkedSpace::largeAllocationsNurseryOffset const): Deleted.
(JSC::MarkedSpace::largeAllocationsOffsetForThisCollection const): Deleted.
(JSC::MarkedSpace::largeAllocationsForThisCollectionBegin const): Deleted.
(JSC::MarkedSpace::largeAllocationsForThisCollectionEnd const): Deleted.
(JSC::MarkedSpace::largeAllocationsForThisCollectionSize const): Deleted.
* heap/MarkedSpaceInlines.h:
(JSC::MarkedSpace::forEachLiveCell):
(JSC::MarkedSpace::forEachDeadCell):
* heap/PreciseAllocation.cpp: Renamed from Source/JavaScriptCore/heap/LargeAllocation.cpp.
(JSC::isAlignedForPreciseAllocation):
(JSC::PreciseAllocation::tryCreate):
(JSC::PreciseAllocation::tryReallocate):
(JSC::PreciseAllocation::createForLowerTier):
(JSC::PreciseAllocation::reuseForLowerTier):
(JSC::PreciseAllocation::PreciseAllocation):
(JSC::PreciseAllocation::~PreciseAllocation):
(JSC::PreciseAllocation::lastChanceToFinalize):
(JSC::PreciseAllocation::shrink):
(JSC::PreciseAllocation::visitWeakSet):
(JSC::PreciseAllocation::reapWeakSet):
(JSC::PreciseAllocation::flip):
(JSC::PreciseAllocation::isEmpty):
(JSC::PreciseAllocation::sweep):
(JSC::PreciseAllocation::destroy):
(JSC::PreciseAllocation::dump const):
(JSC::PreciseAllocation::assertValidCell const):
* heap/PreciseAllocation.h: Renamed from Source/JavaScriptCore/heap/LargeAllocation.h.
(JSC::PreciseAllocation::fromCell):
(JSC::PreciseAllocation::isPreciseAllocation):
(JSC::PreciseAllocation::headerSize):
(JSC::PreciseAllocation::basePointer const):
* heap/SlotVisitor.cpp:
(JSC::SlotVisitor::appendHiddenSlowImpl):
(JSC::SlotVisitor::appendToMarkStack):
* heap/SlotVisitorInlines.h:
(JSC::SlotVisitor::appendUnbarriered):
(JSC::SlotVisitor::appendHiddenUnbarriered):
* heap/Subspace.h:
* heap/SubspaceInlines.h:
(JSC::Subspace::forEachPreciseAllocation):
(JSC::Subspace::forEachMarkedCell):
(JSC::Subspace::forEachMarkedCellInParallel):
(JSC::Subspace::forEachLiveCell):
(JSC::Subspace::forEachLargeAllocation): Deleted.
* heap/WeakBlock.cpp:
(JSC::WeakBlock::visit):
* heap/WeakSet.cpp:
(JSC::WeakSet::sweep):
* llint/LowLevelInterpreter.asm:
* runtime/ButterflyInlines.h:
(JSC::Butterfly::reallocArrayRightIfPossible):
* runtime/OptionsList.h:
* runtime/SamplingProfiler.cpp:
(JSC::SamplingProfiler::SamplingProfiler):
* tools/VMInspector.cpp:
(JSC::VMInspector::isInHeap):
* tools/VMInspectorInlines.h:
(JSC::VMInspector::verifyCell):

git-svn-id: https://svn.webkit.org/repository/webkit/trunk@252302 268f45cc-cd09-0410-ab3c-d52691b4dbfc

36 files changed:
Source/JavaScriptCore/CMakeLists.txt
Source/JavaScriptCore/ChangeLog
Source/JavaScriptCore/JavaScriptCore.xcodeproj/project.pbxproj
Source/JavaScriptCore/Sources.txt
Source/JavaScriptCore/heap/CellContainer.cpp
Source/JavaScriptCore/heap/CellContainer.h
Source/JavaScriptCore/heap/CellContainerInlines.h
Source/JavaScriptCore/heap/CompleteSubspace.cpp
Source/JavaScriptCore/heap/CompleteSubspace.h
Source/JavaScriptCore/heap/Heap.cpp
Source/JavaScriptCore/heap/HeapCell.cpp
Source/JavaScriptCore/heap/HeapCell.h
Source/JavaScriptCore/heap/HeapCellInlines.h
Source/JavaScriptCore/heap/HeapInlines.h
Source/JavaScriptCore/heap/HeapUtil.h
Source/JavaScriptCore/heap/IsoAlignedMemoryAllocator.cpp
Source/JavaScriptCore/heap/IsoCellSetInlines.h
Source/JavaScriptCore/heap/IsoSubspace.cpp
Source/JavaScriptCore/heap/IsoSubspace.h
Source/JavaScriptCore/heap/MarkedSpace.cpp
Source/JavaScriptCore/heap/MarkedSpace.h
Source/JavaScriptCore/heap/MarkedSpaceInlines.h
Source/JavaScriptCore/heap/PreciseAllocation.cpp [moved from Source/JavaScriptCore/heap/LargeAllocation.cpp with 72% similarity]
Source/JavaScriptCore/heap/PreciseAllocation.h [moved from Source/JavaScriptCore/heap/LargeAllocation.h with 82% similarity]
Source/JavaScriptCore/heap/SlotVisitor.cpp
Source/JavaScriptCore/heap/SlotVisitorInlines.h
Source/JavaScriptCore/heap/Subspace.h
Source/JavaScriptCore/heap/SubspaceInlines.h
Source/JavaScriptCore/heap/WeakBlock.cpp
Source/JavaScriptCore/heap/WeakSet.cpp
Source/JavaScriptCore/llint/LowLevelInterpreter.asm
Source/JavaScriptCore/runtime/ButterflyInlines.h
Source/JavaScriptCore/runtime/OptionsList.h
Source/JavaScriptCore/runtime/SamplingProfiler.cpp
Source/JavaScriptCore/tools/VMInspector.cpp
Source/JavaScriptCore/tools/VMInspectorInlines.h

index 0e96650..dceb3fc 100644 (file)
@@ -616,7 +616,6 @@ set(JavaScriptCore_PRIVATE_FRAMEWORK_HEADERS
     heap/IsoSubspace.h
     heap/IsoSubspaceInlines.h
     heap/IsoSubspacePerVM.h
-    heap/LargeAllocation.h
     heap/LocalAllocator.h
     heap/LocalAllocatorInlines.h
     heap/LockDuringMarking.h
@@ -629,6 +628,7 @@ set(JavaScriptCore_PRIVATE_FRAMEWORK_HEADERS
     heap/MarkingConstraint.h
     heap/MutatorState.h
     heap/PackedCellPtr.h
+    heap/PreciseAllocation.h
     heap/RegisterState.h
     heap/RunningScope.h
     heap/SimpleMarkingConstraint.h
index 2934634..042ab58 100644 (file)
@@ -1,5 +1,159 @@
 2019-11-08  Yusuke Suzuki  <ysuzuki@apple.com>
 
+        [JSC] Rename LargeAllocation to PreciseAllocation
+        https://bugs.webkit.org/show_bug.cgi?id=204040
+
+        Reviewed by Keith Miller.
+
+        After r252298, LargeAllocation is also used for small allocations.
+        This patch renames LargeAllocation to PreciseAllocation, since the new name better reflects the behavior.
+
+        * CMakeLists.txt:
+        * JavaScriptCore.xcodeproj/project.pbxproj:
+        * Sources.txt:
+        * heap/CellContainer.cpp:
+        (JSC::CellContainer::isNewlyAllocated const):
+        * heap/CellContainer.h:
+        (JSC::CellContainer::CellContainer):
+        (JSC::CellContainer::isMarkedBlock const):
+        (JSC::CellContainer::isPreciseAllocation const):
+        (JSC::CellContainer::preciseAllocation const):
+        (JSC::CellContainer::isLargeAllocation const): Deleted.
+        (JSC::CellContainer::largeAllocation const): Deleted.
+        * heap/CellContainerInlines.h:
+        (JSC::CellContainer::vm const):
+        (JSC::CellContainer::isMarked const):
+        (JSC::CellContainer::noteMarked):
+        (JSC::CellContainer::assertValidCell const):
+        (JSC::CellContainer::cellSize const):
+        (JSC::CellContainer::weakSet const):
+        (JSC::CellContainer::aboutToMark):
+        (JSC::CellContainer::areMarksStale const):
+        * heap/CompleteSubspace.cpp:
+        (JSC::CompleteSubspace::tryAllocateSlow):
+        (JSC::CompleteSubspace::reallocatePreciseAllocationNonVirtual):
+        (JSC::CompleteSubspace::reallocateLargeAllocationNonVirtual): Deleted.
+        * heap/CompleteSubspace.h:
+        * heap/Heap.cpp:
+        (JSC::Heap::sweepInFinalize):
+        * heap/HeapCell.cpp:
+        (JSC::HeapCell::isLive):
+        * heap/HeapCell.h:
+        * heap/HeapCellInlines.h:
+        (JSC::HeapCell::isPreciseAllocation const):
+        (JSC::HeapCell::cellContainer const):
+        (JSC::HeapCell::preciseAllocation const):
+        (JSC::HeapCell::vm const):
+        (JSC::HeapCell::cellSize const):
+        (JSC::HeapCell::cellAttributes const):
+        (JSC::HeapCell::subspace const):
+        (JSC::HeapCell::isLargeAllocation const): Deleted.
+        (JSC::HeapCell::largeAllocation const): Deleted.
+        * heap/HeapInlines.h:
+        (JSC::Heap::isMarked):
+        (JSC::Heap::testAndSetMarked):
+        * heap/HeapUtil.h:
+        (JSC::HeapUtil::findGCObjectPointersForMarking):
+        (JSC::HeapUtil::isPointerGCObjectJSCell):
+        (JSC::HeapUtil::isValueGCObject):
+        * heap/IsoAlignedMemoryAllocator.cpp:
+        (JSC::IsoAlignedMemoryAllocator::tryReallocateMemory):
+        * heap/IsoCellSetInlines.h:
+        (JSC::IsoCellSet::add):
+        (JSC::IsoCellSet::remove):
+        (JSC::IsoCellSet::contains const):
+        (JSC::IsoCellSet::forEachMarkedCell):
+        (JSC::IsoCellSet::forEachMarkedCellInParallel):
+        (JSC::IsoCellSet::forEachLiveCell):
+        * heap/IsoSubspace.cpp:
+        (JSC::IsoSubspace::tryAllocateFromLowerTier):
+        (JSC::IsoSubspace::sweepLowerTierCell):
+        (JSC::IsoSubspace::destroyLowerTierFreeList):
+        * heap/IsoSubspace.h:
+        * heap/MarkedSpace.cpp:
+        (JSC::MarkedSpace::freeMemory):
+        (JSC::MarkedSpace::lastChanceToFinalize):
+        (JSC::MarkedSpace::sweepPreciseAllocations):
+        (JSC::MarkedSpace::prepareForAllocation):
+        (JSC::MarkedSpace::enablePreciseAllocationTracking):
+        (JSC::MarkedSpace::prepareForConservativeScan):
+        (JSC::MarkedSpace::prepareForMarking):
+        (JSC::MarkedSpace::resumeAllocating):
+        (JSC::MarkedSpace::isPagedOut):
+        (JSC::MarkedSpace::beginMarking):
+        (JSC::MarkedSpace::endMarking):
+        (JSC::MarkedSpace::objectCount):
+        (JSC::MarkedSpace::size):
+        (JSC::MarkedSpace::sweepLargeAllocations): Deleted.
+        (JSC::MarkedSpace::enableLargeAllocationTracking): Deleted.
+        * heap/MarkedSpace.h:
+        (JSC::MarkedSpace:: const):
+        (JSC::MarkedSpace::preciseAllocationsNurseryOffset const):
+        (JSC::MarkedSpace::preciseAllocationsOffsetForThisCollection const):
+        (JSC::MarkedSpace::preciseAllocationsForThisCollectionBegin const):
+        (JSC::MarkedSpace::preciseAllocationsForThisCollectionEnd const):
+        (JSC::MarkedSpace::preciseAllocationsForThisCollectionSize const):
+        (JSC::MarkedSpace::largeAllocationsNurseryOffset const): Deleted.
+        (JSC::MarkedSpace::largeAllocationsOffsetForThisCollection const): Deleted.
+        (JSC::MarkedSpace::largeAllocationsForThisCollectionBegin const): Deleted.
+        (JSC::MarkedSpace::largeAllocationsForThisCollectionEnd const): Deleted.
+        (JSC::MarkedSpace::largeAllocationsForThisCollectionSize const): Deleted.
+        * heap/MarkedSpaceInlines.h:
+        (JSC::MarkedSpace::forEachLiveCell):
+        (JSC::MarkedSpace::forEachDeadCell):
+        * heap/PreciseAllocation.cpp: Renamed from Source/JavaScriptCore/heap/LargeAllocation.cpp.
+        (JSC::isAlignedForPreciseAllocation):
+        (JSC::PreciseAllocation::tryCreate):
+        (JSC::PreciseAllocation::tryReallocate):
+        (JSC::PreciseAllocation::createForLowerTier):
+        (JSC::PreciseAllocation::reuseForLowerTier):
+        (JSC::PreciseAllocation::PreciseAllocation):
+        (JSC::PreciseAllocation::~PreciseAllocation):
+        (JSC::PreciseAllocation::lastChanceToFinalize):
+        (JSC::PreciseAllocation::shrink):
+        (JSC::PreciseAllocation::visitWeakSet):
+        (JSC::PreciseAllocation::reapWeakSet):
+        (JSC::PreciseAllocation::flip):
+        (JSC::PreciseAllocation::isEmpty):
+        (JSC::PreciseAllocation::sweep):
+        (JSC::PreciseAllocation::destroy):
+        (JSC::PreciseAllocation::dump const):
+        (JSC::PreciseAllocation::assertValidCell const):
+        * heap/PreciseAllocation.h: Renamed from Source/JavaScriptCore/heap/LargeAllocation.h.
+        (JSC::PreciseAllocation::fromCell):
+        (JSC::PreciseAllocation::isPreciseAllocation):
+        (JSC::PreciseAllocation::headerSize):
+        (JSC::PreciseAllocation::basePointer const):
+        * heap/SlotVisitor.cpp:
+        (JSC::SlotVisitor::appendHiddenSlowImpl):
+        (JSC::SlotVisitor::appendToMarkStack):
+        * heap/SlotVisitorInlines.h:
+        (JSC::SlotVisitor::appendUnbarriered):
+        (JSC::SlotVisitor::appendHiddenUnbarriered):
+        * heap/Subspace.h:
+        * heap/SubspaceInlines.h:
+        (JSC::Subspace::forEachPreciseAllocation):
+        (JSC::Subspace::forEachMarkedCell):
+        (JSC::Subspace::forEachMarkedCellInParallel):
+        (JSC::Subspace::forEachLiveCell):
+        (JSC::Subspace::forEachLargeAllocation): Deleted.
+        * heap/WeakBlock.cpp:
+        (JSC::WeakBlock::visit):
+        * heap/WeakSet.cpp:
+        (JSC::WeakSet::sweep):
+        * llint/LowLevelInterpreter.asm:
+        * runtime/ButterflyInlines.h:
+        (JSC::Butterfly::reallocArrayRightIfPossible):
+        * runtime/OptionsList.h:
+        * runtime/SamplingProfiler.cpp:
+        (JSC::SamplingProfiler::SamplingProfiler):
+        * tools/VMInspector.cpp:
+        (JSC::VMInspector::isInHeap):
+        * tools/VMInspectorInlines.h:
+        (JSC::VMInspector::verifyCell):
+
+2019-11-08  Yusuke Suzuki  <ysuzuki@apple.com>
+
         [JSC] Make IsoSubspace scalable
         https://bugs.webkit.org/show_bug.cgi?id=201908
 
index ad07f68..803e08d 100644 (file)
                0F070A471D543A8B006E7232 /* CellContainer.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F070A421D543A89006E7232 /* CellContainer.h */; settings = {ATTRIBUTES = (Private, ); }; };
                0F070A481D543A90006E7232 /* CellContainerInlines.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F070A431D543A89006E7232 /* CellContainerInlines.h */; settings = {ATTRIBUTES = (Private, ); }; };
                0F070A491D543A93006E7232 /* HeapCellInlines.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F070A441D543A89006E7232 /* HeapCellInlines.h */; settings = {ATTRIBUTES = (Private, ); }; };
-               0F070A4B1D543A98006E7232 /* LargeAllocation.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F070A461D543A89006E7232 /* LargeAllocation.h */; settings = {ATTRIBUTES = (Private, ); }; };
+               0F070A4B1D543A98006E7232 /* PreciseAllocation.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F070A461D543A89006E7232 /* PreciseAllocation.h */; settings = {ATTRIBUTES = (Private, ); }; };
                0F0776BF14FF002B00102332 /* JITCompilationEffort.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F0776BD14FF002800102332 /* JITCompilationEffort.h */; settings = {ATTRIBUTES = (Private, ); }; };
                0F0B286B1EB8E6CF000EB5D2 /* JSWeakPrivate.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F0B286A1EB8E6CD000EB5D2 /* JSWeakPrivate.h */; settings = {ATTRIBUTES = (Private, ); }; };
                0F0B286D1EB8E6D5000EB5D2 /* JSMarkingConstraintPrivate.h in Headers */ = {isa = PBXBuildFile; fileRef = 0F0B28681EB8E6CD000EB5D2 /* JSMarkingConstraintPrivate.h */; settings = {ATTRIBUTES = (Private, ); }; };
                0F070A421D543A89006E7232 /* CellContainer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CellContainer.h; sourceTree = "<group>"; };
                0F070A431D543A89006E7232 /* CellContainerInlines.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = CellContainerInlines.h; sourceTree = "<group>"; };
                0F070A441D543A89006E7232 /* HeapCellInlines.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HeapCellInlines.h; sourceTree = "<group>"; };
-               0F070A451D543A89006E7232 /* LargeAllocation.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = LargeAllocation.cpp; sourceTree = "<group>"; };
-               0F070A461D543A89006E7232 /* LargeAllocation.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = LargeAllocation.h; sourceTree = "<group>"; };
+               0F070A451D543A89006E7232 /* PreciseAllocation.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = PreciseAllocation.cpp; sourceTree = "<group>"; };
+               0F070A461D543A89006E7232 /* PreciseAllocation.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PreciseAllocation.h; sourceTree = "<group>"; };
                0F0776BD14FF002800102332 /* JITCompilationEffort.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = JITCompilationEffort.h; sourceTree = "<group>"; };
                0F0B28671EB8E6CD000EB5D2 /* JSMarkingConstraintPrivate.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = JSMarkingConstraintPrivate.cpp; sourceTree = "<group>"; };
                0F0B28681EB8E6CD000EB5D2 /* JSMarkingConstraintPrivate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = JSMarkingConstraintPrivate.h; sourceTree = "<group>"; };
                                0F5E0FE52086AD460097F0DE /* IsoSubspacePerVM.h */,
                                0F766D2915A8CC34008F363E /* JITStubRoutineSet.cpp */,
                                0F766D2A15A8CC34008F363E /* JITStubRoutineSet.h */,
-                               0F070A451D543A89006E7232 /* LargeAllocation.cpp */,
-                               0F070A461D543A89006E7232 /* LargeAllocation.h */,
                                0F75A059200D25F00038E2CF /* LocalAllocator.cpp */,
                                0F75A057200D25F00038E2CF /* LocalAllocator.h */,
                                0F75A05A200D25F00038E2CF /* LocalAllocatorInlines.h */,
                                0FA762031DB9242300B7A2FD /* MutatorState.h */,
                                E356987122841183008CDCCB /* PackedCellPtr.h */,
                                0F9DAA081FD1C3C80079C5B2 /* ParallelSourceAdapter.h */,
+                               0F070A451D543A89006E7232 /* PreciseAllocation.cpp */,
+                               0F070A461D543A89006E7232 /* PreciseAllocation.h */,
                                0FBB73B61DEF3AAC002C009E /* PreventCollectionScope.h */,
                                0FD0E5EF1E46BF230006AB08 /* RegisterState.h */,
                                0F7CF94E1DBEEE860098CC12 /* ReleaseHeapAccessScope.h */,
                                0F725CA81C503DED00AD943A /* B3EliminateCommonSubexpressions.h in Headers */,
                                3395C70722555F6D00BDBFAD /* B3EliminateDeadCode.h in Headers */,
                                0F5BF1711F23A5A10029D91D /* B3EnsureLoopPreHeaders.h in Headers */,
-                               522927D5235FD0B9005CB169 /* GCMemoryOperations.h in Headers */,
                                5318045C22EAAC4B004A7342 /* B3ExtractValue.h in Headers */,
                                0F6971EA1D92F42400BA02A5 /* B3FenceValue.h in Headers */,
                                0F6B8AE51C4EFE1700969052 /* B3FixSSA.h in Headers */,
                                0F2B66AE17B6B54500A7AE3F /* GCIncomingRefCountedSet.h in Headers */,
                                0F2B66AF17B6B54500A7AE3F /* GCIncomingRefCountedSetInlines.h in Headers */,
                                2AABCDE718EF294200002096 /* GCLogging.h in Headers */,
+                               522927D5235FD0B9005CB169 /* GCMemoryOperations.h in Headers */,
                                0F9715311EB28BEE00A1645D /* GCRequest.h in Headers */,
                                A54E8EB018BFFBBB00556D28 /* GCSegmentedArray.h in Headers */,
                                A54E8EB118BFFBBE00556D28 /* GCSegmentedArrayInlines.h in Headers */,
                                A72FFD64139985A800E5365A /* KeywordLookup.h in Headers */,
                                969A072A0ED1CE6900F1F681 /* Label.h in Headers */,
                                960097A60EBABB58007A7297 /* LabelScope.h in Headers */,
-                               0F070A4B1D543A98006E7232 /* LargeAllocation.h in Headers */,
                                DCF3D56A1CD29470003D5C65 /* LazyClassStructure.h in Headers */,
                                DCF3D56B1CD29472003D5C65 /* LazyClassStructureInlines.h in Headers */,
                                0FB5467714F59B5C002C2989 /* LazyOperandValueProfile.h in Headers */,
                                0FF9CE741B9CD6D0004EDCA6 /* PolymorphicAccess.h in Headers */,
                                0FE834181A6EF97B00D04847 /* PolymorphicCallStubRoutine.h in Headers */,
                                521131F71F82BF14007CCEEE /* PolyProtoAccessChain.h in Headers */,
+                               0F070A4B1D543A98006E7232 /* PreciseAllocation.h in Headers */,
                                0F98206116BFE38300240D02 /* PreciseJumpTargets.h in Headers */,
                                E3A421431D6F58930007C617 /* PreciseJumpTargetsInlines.h in Headers */,
                                0FBB73B81DEF3AAE002C009E /* PreventCollectionScope.h in Headers */,
index 51921d7..2d7da42 100644 (file)
@@ -524,7 +524,7 @@ heap/IsoCellSet.cpp
 heap/IsoSubspace.cpp
 heap/IsoSubspacePerVM.cpp
 heap/JITStubRoutineSet.cpp
-heap/LargeAllocation.cpp
+heap/PreciseAllocation.cpp
 heap/LocalAllocator.cpp
 heap/MachineStackMarker.cpp
 heap/MarkStack.cpp
index 7e26678..6c01012 100644 (file)
@@ -32,8 +32,8 @@ namespace JSC {
 
 bool CellContainer::isNewlyAllocated(HeapCell* cell) const
 {
-    if (isLargeAllocation())
-        return largeAllocation().isNewlyAllocated();
+    if (isPreciseAllocation())
+        return preciseAllocation().isNewlyAllocated();
     MarkedBlock& block = markedBlock();
     return !block.isNewlyAllocatedStale()
         && block.isNewlyAllocated(cell);
index 1025ac2..0654153 100644 (file)
@@ -31,14 +31,14 @@ namespace JSC {
 
 class Heap;
 class HeapCell;
-class LargeAllocation;
+class PreciseAllocation;
 class MarkedBlock;
 class WeakSet;
 class VM;
 
 typedef uint32_t HeapVersion;
 
-// This is how we abstract over either MarkedBlock& or LargeAllocation&. Put things in here as you
+// This is how we abstract over either MarkedBlock& or PreciseAllocation&. Put things in here as you
 // find need for them.
 
 class CellContainer {
@@ -53,8 +53,8 @@ public:
     {
     }
     
-    CellContainer(LargeAllocation& largeAllocation)
-        : m_encodedPointer(bitwise_cast<uintptr_t>(&largeAllocation) | isLargeAllocationBit)
+    CellContainer(PreciseAllocation& preciseAllocation)
+        : m_encodedPointer(bitwise_cast<uintptr_t>(&preciseAllocation) | isPreciseAllocationBit)
     {
     }
     
@@ -63,8 +63,8 @@ public:
     
     explicit operator bool() const { return !!m_encodedPointer; }
     
-    bool isMarkedBlock() const { return m_encodedPointer && !(m_encodedPointer & isLargeAllocationBit); }
-    bool isLargeAllocation() const { return m_encodedPointer & isLargeAllocationBit; }
+    bool isMarkedBlock() const { return m_encodedPointer && !(m_encodedPointer & isPreciseAllocationBit); }
+    bool isPreciseAllocation() const { return m_encodedPointer & isPreciseAllocationBit; }
     
     MarkedBlock& markedBlock() const
     {
@@ -72,10 +72,10 @@ public:
         return *bitwise_cast<MarkedBlock*>(m_encodedPointer);
     }
     
-    LargeAllocation& largeAllocation() const
+    PreciseAllocation& preciseAllocation() const
     {
-        ASSERT(isLargeAllocation());
-        return *bitwise_cast<LargeAllocation*>(m_encodedPointer - isLargeAllocationBit);
+        ASSERT(isPreciseAllocation());
+        return *bitwise_cast<PreciseAllocation*>(m_encodedPointer - isPreciseAllocationBit);
     }
     
     void aboutToMark(HeapVersion markingVersion);
@@ -94,7 +94,7 @@ public:
     WeakSet& weakSet() const;
     
 private:
-    static constexpr uintptr_t isLargeAllocationBit = 1;
+    static constexpr uintptr_t isPreciseAllocationBit = 1;
     uintptr_t m_encodedPointer;
 };
 
index 2c09b24..ef48a6d 100644 (file)
 
 #include "CellContainer.h"
 #include "JSCast.h"
-#include "LargeAllocation.h"
 #include "MarkedBlock.h"
+#include "PreciseAllocation.h"
 #include "VM.h"
 
 namespace JSC {
 
 inline VM& CellContainer::vm() const
 {
-    if (isLargeAllocation())
-        return largeAllocation().vm();
+    if (isPreciseAllocation())
+        return preciseAllocation().vm();
     return markedBlock().vm();
 }
 
@@ -47,55 +47,55 @@ inline Heap* CellContainer::heap() const
 
 inline bool CellContainer::isMarked(HeapCell* cell) const
 {
-    if (isLargeAllocation())
-        return largeAllocation().isMarked();
+    if (isPreciseAllocation())
+        return preciseAllocation().isMarked();
     return markedBlock().isMarked(cell);
 }
 
 inline bool CellContainer::isMarked(HeapVersion markingVersion, HeapCell* cell) const
 {
-    if (isLargeAllocation())
-        return largeAllocation().isMarked();
+    if (isPreciseAllocation())
+        return preciseAllocation().isMarked();
     return markedBlock().isMarked(markingVersion, cell);
 }
 
 inline void CellContainer::noteMarked()
 {
-    if (!isLargeAllocation())
+    if (!isPreciseAllocation())
         markedBlock().noteMarked();
 }
 
 inline void CellContainer::assertValidCell(VM& vm, HeapCell* cell) const
 {
-    if (isLargeAllocation())
-        largeAllocation().assertValidCell(vm, cell);
+    if (isPreciseAllocation())
+        preciseAllocation().assertValidCell(vm, cell);
     else
         markedBlock().assertValidCell(vm, cell);
 }
 
 inline size_t CellContainer::cellSize() const
 {
-    if (isLargeAllocation())
-        return largeAllocation().cellSize();
+    if (isPreciseAllocation())
+        return preciseAllocation().cellSize();
     return markedBlock().cellSize();
 }
 
 inline WeakSet& CellContainer::weakSet() const
 {
-    if (isLargeAllocation())
-        return largeAllocation().weakSet();
+    if (isPreciseAllocation())
+        return preciseAllocation().weakSet();
     return markedBlock().weakSet();
 }
 
 inline void CellContainer::aboutToMark(HeapVersion markingVersion)
 {
-    if (!isLargeAllocation())
+    if (!isPreciseAllocation())
         markedBlock().aboutToMark(markingVersion);
 }
 
 inline bool CellContainer::areMarksStale() const
 {
-    if (isLargeAllocation())
+    if (isPreciseAllocation())
         return false;
     return markedBlock().areMarksStale();
 }
index 403c13a..2cb952f 100644 (file)
@@ -129,7 +129,7 @@ void* CompleteSubspace::tryAllocateSlow(VM& vm, size_t size, GCDeferralContext*
     if (Allocator allocator = allocatorFor(size, AllocatorForMode::EnsureAllocator))
         return allocator.allocate(deferralContext, AllocationFailureMode::ReturnNull);
     
-    if (size <= Options::largeAllocationCutoff()
+    if (size <= Options::preciseAllocationCutoff()
         && size <= MarkedSpace::largeCutoff) {
         dataLog("FATAL: attampting to allocate small object using large allocation.\n");
         dataLog("Requested allocation size: ", size, "\n");
@@ -139,31 +139,31 @@ void* CompleteSubspace::tryAllocateSlow(VM& vm, size_t size, GCDeferralContext*
     vm.heap.collectIfNecessaryOrDefer(deferralContext);
     
     size = WTF::roundUpToMultipleOf<MarkedSpace::sizeStep>(size);
-    LargeAllocation* allocation = LargeAllocation::tryCreate(vm.heap, size, this, m_space.m_largeAllocations.size());
+    PreciseAllocation* allocation = PreciseAllocation::tryCreate(vm.heap, size, this, m_space.m_preciseAllocations.size());
     if (!allocation)
         return nullptr;
     
-    m_space.m_largeAllocations.append(allocation);
-    if (auto* set = m_space.largeAllocationSet())
+    m_space.m_preciseAllocations.append(allocation);
+    if (auto* set = m_space.preciseAllocationSet())
         set->add(allocation->cell());
-    ASSERT(allocation->indexInSpace() == m_space.m_largeAllocations.size() - 1);
+    ASSERT(allocation->indexInSpace() == m_space.m_preciseAllocations.size() - 1);
     vm.heap.didAllocate(size);
     m_space.m_capacity += size;
     
-    m_largeAllocations.append(allocation);
+    m_preciseAllocations.append(allocation);
         
     return allocation->cell();
 }
 
-void* CompleteSubspace::reallocateLargeAllocationNonVirtual(VM& vm, HeapCell* oldCell, size_t size, GCDeferralContext* deferralContext, AllocationFailureMode failureMode)
+void* CompleteSubspace::reallocatePreciseAllocationNonVirtual(VM& vm, HeapCell* oldCell, size_t size, GCDeferralContext* deferralContext, AllocationFailureMode failureMode)
 {
     if (validateDFGDoesGC)
         RELEASE_ASSERT(vm.heap.expectDoesGC());
 
     // The following conditions are met in Butterfly for example.
-    ASSERT(oldCell->isLargeAllocation());
+    ASSERT(oldCell->isPreciseAllocation());
 
-    LargeAllocation* oldAllocation = &oldCell->largeAllocation();
+    PreciseAllocation* oldAllocation = &oldCell->preciseAllocation();
     ASSERT(oldAllocation->cellSize() <= size);
     ASSERT(oldAllocation->weakSet().isTriviallyDestructible());
     ASSERT(oldAllocation->attributes().destruction == DoesNotNeedDestruction);
@@ -172,7 +172,7 @@ void* CompleteSubspace::reallocateLargeAllocationNonVirtual(VM& vm, HeapCell* ol
 
     sanitizeStackForVM(vm);
 
-    if (size <= Options::largeAllocationCutoff()
+    if (size <= Options::preciseAllocationCutoff()
         && size <= MarkedSpace::largeCutoff) {
         dataLog("FATAL: attampting to allocate small object using large allocation.\n");
         dataLog("Requested allocation size: ", size, "\n");
@@ -187,27 +187,27 @@ void* CompleteSubspace::reallocateLargeAllocationNonVirtual(VM& vm, HeapCell* ol
     if (oldAllocation->isOnList())
         oldAllocation->remove();
 
-    LargeAllocation* allocation = oldAllocation->tryReallocate(size, this);
+    PreciseAllocation* allocation = oldAllocation->tryReallocate(size, this);
     if (!allocation) {
         RELEASE_ASSERT(failureMode != AllocationFailureMode::Assert);
-        m_largeAllocations.append(oldAllocation);
+        m_preciseAllocations.append(oldAllocation);
         return nullptr;
     }
     ASSERT(oldIndexInSpace == allocation->indexInSpace());
 
     // If reallocation changes the address, we should update HashSet.
     if (oldAllocation != allocation) {
-        if (auto* set = m_space.largeAllocationSet()) {
+        if (auto* set = m_space.preciseAllocationSet()) {
             set->remove(oldAllocation->cell());
             set->add(allocation->cell());
         }
     }
 
-    m_space.m_largeAllocations[oldIndexInSpace] = allocation;
+    m_space.m_preciseAllocations[oldIndexInSpace] = allocation;
     vm.heap.didAllocate(difference);
     m_space.m_capacity += difference;
 
-    m_largeAllocations.append(allocation);
+    m_preciseAllocations.append(allocation);
 
     return allocation->cell();
 }
index 070f65a..4c8a864 100644 (file)
@@ -44,7 +44,7 @@ public:
     
     void* allocate(VM&, size_t, GCDeferralContext*, AllocationFailureMode) override;
     void* allocateNonVirtual(VM&, size_t, GCDeferralContext*, AllocationFailureMode);
-    void* reallocateLargeAllocationNonVirtual(VM&, HeapCell*, size_t, GCDeferralContext*, AllocationFailureMode);
+    void* reallocatePreciseAllocationNonVirtual(VM&, HeapCell*, size_t, GCDeferralContext*, AllocationFailureMode);
     
     static ptrdiff_t offsetOfAllocatorForSizeStep() { return OBJECT_OFFSETOF(CompleteSubspace, m_allocatorForSizeStep); }
     
index cdf44e4..89fdc1c 100644 (file)
@@ -2153,7 +2153,7 @@ void Heap::waitForCollection(Ticket ticket)
 
 void Heap::sweepInFinalize()
 {
-    m_objectSpace.sweepLargeAllocations();
+    m_objectSpace.sweepPreciseAllocations();
     vm().eagerlySweptDestructibleObjectSpace.sweep();
 }
 
index 386fcd1..0a72b1e 100644 (file)
@@ -34,8 +34,8 @@ namespace JSC {
 
 bool HeapCell::isLive()
 {
-    if (isLargeAllocation())
-        return largeAllocation().isLive();
+    if (isPreciseAllocation())
+        return preciseAllocation().isLive();
     auto& markedBlockHandle = markedBlock().handle();
     if (markedBlockHandle.isFreeListed())
         return !markedBlockHandle.isFreeListedCell(this);
index e1ae0bb..e270b65 100644 (file)
@@ -31,7 +31,7 @@ namespace JSC {
 
 class CellContainer;
 class Heap;
-class LargeAllocation;
+class PreciseAllocation;
 class MarkedBlock;
 class Subspace;
 class VM;
@@ -61,13 +61,13 @@ public:
 
     bool isLive();
 
-    bool isLargeAllocation() const;
+    bool isPreciseAllocation() const;
     CellContainer cellContainer() const;
     MarkedBlock& markedBlock() const;
-    LargeAllocation& largeAllocation() const;
+    PreciseAllocation& preciseAllocation() const;
 
     // If you want performance and you know that your cell is small, you can do this instead:
-    // ASSERT(!cell->isLargeAllocation());
+    // ASSERT(!cell->isPreciseAllocation());
     // cell->markedBlock().vm()
     // We currently only use this hack for callees to make CallFrame::vm() fast. It's not
-    // recommended to use it for too many other things, since the large allocation cutoff is
+    // recommended to use it for too many other things, since the precise allocation cutoff is
index 6548f16..6782d64 100644 (file)
 
 #include "CellContainer.h"
 #include "HeapCell.h"
-#include "LargeAllocation.h"
+#include "PreciseAllocation.h"
 #include "VM.h"
 
 namespace JSC {
 
-ALWAYS_INLINE bool HeapCell::isLargeAllocation() const
+ALWAYS_INLINE bool HeapCell::isPreciseAllocation() const
 {
-    return LargeAllocation::isLargeAllocation(const_cast<HeapCell*>(this));
+    return PreciseAllocation::isPreciseAllocation(const_cast<HeapCell*>(this));
 }
 
 ALWAYS_INLINE CellContainer HeapCell::cellContainer() const
 {
-    if (isLargeAllocation())
-        return largeAllocation();
+    if (isPreciseAllocation())
+        return preciseAllocation();
     return markedBlock();
 }
 
@@ -49,9 +49,9 @@ ALWAYS_INLINE MarkedBlock& HeapCell::markedBlock() const
     return *MarkedBlock::blockFor(this);
 }
 
-ALWAYS_INLINE LargeAllocation& HeapCell::largeAllocation() const
+ALWAYS_INLINE PreciseAllocation& HeapCell::preciseAllocation() const
 {
-    return *LargeAllocation::fromCell(const_cast<HeapCell*>(this));
+    return *PreciseAllocation::fromCell(const_cast<HeapCell*>(this));
 }
 
 ALWAYS_INLINE Heap* HeapCell::heap() const
@@ -61,22 +61,22 @@ ALWAYS_INLINE Heap* HeapCell::heap() const
 
 ALWAYS_INLINE VM& HeapCell::vm() const
 {
-    if (isLargeAllocation())
-        return largeAllocation().vm();
+    if (isPreciseAllocation())
+        return preciseAllocation().vm();
     return markedBlock().vm();
 }
     
 ALWAYS_INLINE size_t HeapCell::cellSize() const
 {
-    if (isLargeAllocation())
-        return largeAllocation().cellSize();
+    if (isPreciseAllocation())
+        return preciseAllocation().cellSize();
     return markedBlock().cellSize();
 }
 
 ALWAYS_INLINE CellAttributes HeapCell::cellAttributes() const
 {
-    if (isLargeAllocation())
-        return largeAllocation().attributes();
+    if (isPreciseAllocation())
+        return preciseAllocation().attributes();
     return markedBlock().attributes();
 }
 
@@ -92,8 +92,8 @@ ALWAYS_INLINE HeapCell::Kind HeapCell::cellKind() const
 
 ALWAYS_INLINE Subspace* HeapCell::subspace() const
 {
-    if (isLargeAllocation())
-        return largeAllocation().subspace();
+    if (isPreciseAllocation())
+        return preciseAllocation().subspace();
     return markedBlock().subspace();
 }
 
index 78be9da..17f2411 100644 (file)
@@ -70,8 +70,8 @@ inline bool Heap::worldIsStopped() const
 ALWAYS_INLINE bool Heap::isMarked(const void* rawCell)
 {
     HeapCell* cell = bitwise_cast<HeapCell*>(rawCell);
-    if (cell->isLargeAllocation())
-        return cell->largeAllocation().isMarked();
+    if (cell->isPreciseAllocation())
+        return cell->preciseAllocation().isMarked();
     MarkedBlock& block = cell->markedBlock();
     return block.isMarked(m_objectSpace.markingVersion(), cell);
 }
@@ -79,8 +79,8 @@ ALWAYS_INLINE bool Heap::isMarked(const void* rawCell)
 ALWAYS_INLINE bool Heap::testAndSetMarked(HeapVersion markingVersion, const void* rawCell)
 {
     HeapCell* cell = bitwise_cast<HeapCell*>(rawCell);
-    if (cell->isLargeAllocation())
-        return cell->largeAllocation().testAndSetMarked();
+    if (cell->isPreciseAllocation())
+        return cell->preciseAllocation().testAndSetMarked();
     MarkedBlock& block = cell->markedBlock();
     Dependency dependency = block.aboutToMark(markingVersion);
     return block.testAndSetMarked(cell, dependency);
index 8f053c2..dc95a1c 100644 (file)
@@ -57,24 +57,24 @@ public:
         char* pointer = static_cast<char*>(passedPointer);
         
-        // It could point to a large allocation.
+        // It could point to a precise allocation.
-        if (heap.objectSpace().largeAllocationsForThisCollectionSize()) {
-            if (heap.objectSpace().largeAllocationsForThisCollectionBegin()[0]->aboveLowerBound(pointer)
-                && heap.objectSpace().largeAllocationsForThisCollectionEnd()[-1]->belowUpperBound(pointer)) {
-                LargeAllocation** result = approximateBinarySearch<LargeAllocation*>(
-                    heap.objectSpace().largeAllocationsForThisCollectionBegin(),
-                    heap.objectSpace().largeAllocationsForThisCollectionSize(),
-                    LargeAllocation::fromCell(pointer),
-                    [] (LargeAllocation** ptr) -> LargeAllocation* { return *ptr; });
+        if (heap.objectSpace().preciseAllocationsForThisCollectionSize()) {
+            if (heap.objectSpace().preciseAllocationsForThisCollectionBegin()[0]->aboveLowerBound(pointer)
+                && heap.objectSpace().preciseAllocationsForThisCollectionEnd()[-1]->belowUpperBound(pointer)) {
+                PreciseAllocation** result = approximateBinarySearch<PreciseAllocation*>(
+                    heap.objectSpace().preciseAllocationsForThisCollectionBegin(),
+                    heap.objectSpace().preciseAllocationsForThisCollectionSize(),
+                    PreciseAllocation::fromCell(pointer),
+                    [] (PreciseAllocation** ptr) -> PreciseAllocation* { return *ptr; });
                 if (result) {
-                    auto attemptLarge = [&] (LargeAllocation* allocation) {
+                    auto attemptLarge = [&] (PreciseAllocation* allocation) {
                         if (allocation->contains(pointer))
                             func(allocation->cell(), allocation->attributes().cellKind);
                     };
                     
-                    if (result > heap.objectSpace().largeAllocationsForThisCollectionBegin())
+                    if (result > heap.objectSpace().preciseAllocationsForThisCollectionBegin())
                         attemptLarge(result[-1]);
                     attemptLarge(result[0]);
-                    if (result + 1 < heap.objectSpace().largeAllocationsForThisCollectionEnd())
+                    if (result + 1 < heap.objectSpace().preciseAllocationsForThisCollectionEnd())
                         attemptLarge(result[1]);
                 }
             }
@@ -131,8 +131,8 @@ public:
     static bool isPointerGCObjectJSCell(Heap& heap, TinyBloomFilter filter, JSCell* pointer)
     {
-        // It could point to a large allocation.
+        // It could point to a precise allocation.
-        if (pointer->isLargeAllocation()) {
-            auto* set = heap.objectSpace().largeAllocationSet();
+        if (pointer->isPreciseAllocation()) {
+            auto* set = heap.objectSpace().preciseAllocationSet();
             ASSERT(set);
             if (set->isEmpty())
                 return false;
@@ -166,7 +166,7 @@ public:
     static bool isValueGCObject(
         Heap& heap, TinyBloomFilter filter, JSValue value)
     {
-        ASSERT(heap.objectSpace().largeAllocationSet());
+        ASSERT(heap.objectSpace().preciseAllocationSet());
         if (!value.isCell())
             return false;
         return isPointerGCObjectJSCell(heap, filter, value.asCell());
index 6365820..1774200 100644 (file)
@@ -101,7 +101,7 @@ void IsoAlignedMemoryAllocator::freeMemory(void* pointer)
 
 void* IsoAlignedMemoryAllocator::tryReallocateMemory(void*, size_t)
 {
-    // In IsoSubspace-managed LargeAllocation, we must not perform realloc.
+    // In IsoSubspace-managed PreciseAllocation, we must not perform realloc.
     RELEASE_ASSERT_NOT_REACHED();
 }
 
index 5149b97..afee239 100644 (file)
@@ -33,8 +33,8 @@ namespace JSC {
 
 inline bool IsoCellSet::add(HeapCell* cell)
 {
-    if (cell->isLargeAllocation())
-        return !m_lowerTierBits.concurrentTestAndSet(cell->largeAllocation().lowerTierIndex());
+    if (cell->isPreciseAllocation())
+        return !m_lowerTierBits.concurrentTestAndSet(cell->preciseAllocation().lowerTierIndex());
     AtomIndices atomIndices(cell);
     auto& bitsPtrRef = m_bits[atomIndices.blockIndex];
     auto* bits = bitsPtrRef.get();
@@ -45,8 +45,8 @@ inline bool IsoCellSet::add(HeapCell* cell)
 
 inline bool IsoCellSet::remove(HeapCell* cell)
 {
-    if (cell->isLargeAllocation())
-        return !m_lowerTierBits.concurrentTestAndClear(cell->largeAllocation().lowerTierIndex());
+    if (cell->isPreciseAllocation())
+        return !m_lowerTierBits.concurrentTestAndClear(cell->preciseAllocation().lowerTierIndex());
     AtomIndices atomIndices(cell);
     auto& bitsPtrRef = m_bits[atomIndices.blockIndex];
     auto* bits = bitsPtrRef.get();
@@ -57,8 +57,8 @@ inline bool IsoCellSet::remove(HeapCell* cell)
 
 inline bool IsoCellSet::contains(HeapCell* cell) const
 {
-    if (cell->isLargeAllocation())
-        return !m_lowerTierBits.get(cell->largeAllocation().lowerTierIndex());
+    if (cell->isPreciseAllocation())
+        return !m_lowerTierBits.get(cell->preciseAllocation().lowerTierIndex());
     AtomIndices atomIndices(cell);
     auto* bits = m_bits[atomIndices.blockIndex].get();
     if (bits)
@@ -84,8 +84,8 @@ void IsoCellSet::forEachMarkedCell(const Func& func)
         });
 
     CellAttributes attributes = m_subspace.attributes();
-    m_subspace.forEachLargeAllocation(
-        [&] (LargeAllocation* allocation) {
+    m_subspace.forEachPreciseAllocation(
+        [&] (PreciseAllocation* allocation) {
             if (m_lowerTierBits.get(allocation->lowerTierIndex()) && allocation->isMarked())
                 func(allocation->cell(), attributes.cellKind);
         });
@@ -118,14 +118,14 @@ Ref<SharedTask<void(SlotVisitor&)>> IsoCellSet::forEachMarkedCellInParallel(cons
 
             {
                 auto locker = holdLock(m_lock);
-                if (!m_needToVisitLargeAllocations)
+                if (!m_needToVisitPreciseAllocations)
                     return;
-                m_needToVisitLargeAllocations = false;
+                m_needToVisitPreciseAllocations = false;
             }
 
             CellAttributes attributes = m_set.m_subspace.attributes();
-            m_set.m_subspace.forEachLargeAllocation(
-                [&] (LargeAllocation* allocation) {
+            m_set.m_subspace.forEachPreciseAllocation(
+                [&] (PreciseAllocation* allocation) {
                     if (m_set.m_lowerTierBits.get(allocation->lowerTierIndex()) && allocation->isMarked())
                         m_func(visitor, allocation->cell(), attributes.cellKind);
                 });
@@ -136,7 +136,7 @@ Ref<SharedTask<void(SlotVisitor&)>> IsoCellSet::forEachMarkedCellInParallel(cons
         Ref<SharedTask<MarkedBlock::Handle*()>> m_blockSource;
         Func m_func;
         Lock m_lock;
-        bool m_needToVisitLargeAllocations { true };
+        bool m_needToVisitPreciseAllocations { true };
     };
     
     return adoptRef(*new Task(*this, func));
@@ -160,8 +160,8 @@ void IsoCellSet::forEachLiveCell(const Func& func)
         });
 
     CellAttributes attributes = m_subspace.attributes();
-    m_subspace.forEachLargeAllocation(
-        [&] (LargeAllocation* allocation) {
+    m_subspace.forEachPreciseAllocation(
+        [&] (PreciseAllocation* allocation) {
             if (m_lowerTierBits.get(allocation->lowerTierIndex()) && allocation->isLive())
                 func(allocation->cell(), attributes.cellKind);
         });
index 200d14c..2c1b95f 100644 (file)
@@ -92,35 +92,35 @@ void IsoSubspace::didBeginSweepingToFreeList(MarkedBlock::Handle* block)
 
 void* IsoSubspace::tryAllocateFromLowerTier()
 {
-    auto revive = [&] (LargeAllocation* allocation) {
-        allocation->setIndexInSpace(m_space.m_largeAllocations.size());
+    auto revive = [&] (PreciseAllocation* allocation) {
+        allocation->setIndexInSpace(m_space.m_preciseAllocations.size());
         allocation->m_hasValidCell = true;
-        m_space.m_largeAllocations.append(allocation);
-        if (auto* set = m_space.largeAllocationSet())
+        m_space.m_preciseAllocations.append(allocation);
+        if (auto* set = m_space.preciseAllocationSet())
             set->add(allocation->cell());
-        ASSERT(allocation->indexInSpace() == m_space.m_largeAllocations.size() - 1);
-        m_largeAllocations.append(allocation);
+        ASSERT(allocation->indexInSpace() == m_space.m_preciseAllocations.size() - 1);
+        m_preciseAllocations.append(allocation);
         return allocation->cell();
     };
 
     if (!m_lowerTierFreeList.isEmpty()) {
-        LargeAllocation* allocation = m_lowerTierFreeList.begin();
+        PreciseAllocation* allocation = m_lowerTierFreeList.begin();
         allocation->remove();
         return revive(allocation);
     }
     if (m_lowerTierCellCount != MarkedBlock::numberOfLowerTierCells) {
         size_t size = WTF::roundUpToMultipleOf<MarkedSpace::sizeStep>(m_size);
-        LargeAllocation* allocation = LargeAllocation::createForLowerTier(*m_space.heap(), size, this, m_lowerTierCellCount++);
+        PreciseAllocation* allocation = PreciseAllocation::createForLowerTier(*m_space.heap(), size, this, m_lowerTierCellCount++);
         return revive(allocation);
     }
     return nullptr;
 }
 
-void IsoSubspace::sweepLowerTierCell(LargeAllocation* largeAllocation)
+void IsoSubspace::sweepLowerTierCell(PreciseAllocation* preciseAllocation)
 {
-    unsigned lowerTierIndex = largeAllocation->lowerTierIndex();
-    largeAllocation = largeAllocation->reuseForLowerTier();
-    m_lowerTierFreeList.append(largeAllocation);
+    unsigned lowerTierIndex = preciseAllocation->lowerTierIndex();
+    preciseAllocation = preciseAllocation->reuseForLowerTier();
+    m_lowerTierFreeList.append(preciseAllocation);
     m_cellSets.forEach(
         [&] (IsoCellSet* set) {
             set->sweepLowerTierCell(lowerTierIndex);
@@ -129,7 +129,7 @@ void IsoSubspace::sweepLowerTierCell(LargeAllocation* largeAllocation)
 
 void IsoSubspace::destroyLowerTierFreeList()
 {
-    m_lowerTierFreeList.forEach([&](LargeAllocation* allocation) {
+    m_lowerTierFreeList.forEach([&](PreciseAllocation* allocation) {
         allocation->destroy();
     });
 }
index 187226a..bfc3de9 100644 (file)
@@ -48,7 +48,7 @@ public:
     void* allocate(VM&, size_t, GCDeferralContext*, AllocationFailureMode) override;
     void* allocateNonVirtual(VM&, size_t, GCDeferralContext*, AllocationFailureMode);
 
-    void sweepLowerTierCell(LargeAllocation*);
+    void sweepLowerTierCell(PreciseAllocation*);
 
     void* tryAllocateFromLowerTier();
     void destroyLowerTierFreeList();
@@ -64,7 +64,7 @@ private:
     BlockDirectory m_directory;
     LocalAllocator m_localAllocator;
     std::unique_ptr<IsoAlignedMemoryAllocator> m_isoAlignedMemoryAllocator;
-    SentinelLinkedList<LargeAllocation, PackedRawSentinelNode<LargeAllocation>> m_lowerTierFreeList;
+    SentinelLinkedList<PreciseAllocation, PackedRawSentinelNode<PreciseAllocation>> m_lowerTierFreeList;
     SentinelLinkedList<IsoCellSet, PackedRawSentinelNode<IsoCellSet>> m_cellSets;
     uint8_t m_lowerTierCellCount { 0 };
 };
index 02e750b..73cec8e 100644 (file)
@@ -130,7 +130,7 @@ const Vector<size_t>& sizeClasses()
                 
                 // This is usually how we get out of the loop.
                 if (betterSizeClass > MarkedSpace::largeCutoff
-                    || betterSizeClass > Options::largeAllocationCutoff())
+                    || betterSizeClass > Options::preciseAllocationCutoff())
                     break;
                 
                 add(betterSizeClass);
@@ -211,7 +211,7 @@ void MarkedSpace::freeMemory()
         [&] (MarkedBlock::Handle* block) {
             freeBlock(block);
         });
-    for (LargeAllocation* allocation : m_largeAllocations)
+    for (PreciseAllocation* allocation : m_preciseAllocations)
         allocation->destroy();
     forEachSubspace([&](Subspace& subspace) {
         if (subspace.isIsoSubspace())
@@ -227,7 +227,7 @@ void MarkedSpace::lastChanceToFinalize()
             directory.lastChanceToFinalize();
             return IterationStatus::Continue;
         });
-    for (LargeAllocation* allocation : m_largeAllocations)
+    for (PreciseAllocation* allocation : m_preciseAllocations)
         allocation->lastChanceToFinalize();
     // We do not call lastChanceToFinalize for lower-tier swept cells since we need nothing to do.
 }
@@ -242,16 +242,16 @@ void MarkedSpace::sweep()
         });
 }
 
-void MarkedSpace::sweepLargeAllocations()
+void MarkedSpace::sweepPreciseAllocations()
 {
-    RELEASE_ASSERT(m_largeAllocationsNurseryOffset == m_largeAllocations.size());
-    unsigned srcIndex = m_largeAllocationsNurseryOffsetForSweep;
+    RELEASE_ASSERT(m_preciseAllocationsNurseryOffset == m_preciseAllocations.size());
+    unsigned srcIndex = m_preciseAllocationsNurseryOffsetForSweep;
     unsigned dstIndex = srcIndex;
-    while (srcIndex < m_largeAllocations.size()) {
-        LargeAllocation* allocation = m_largeAllocations[srcIndex++];
+    while (srcIndex < m_preciseAllocations.size()) {
+        PreciseAllocation* allocation = m_preciseAllocations[srcIndex++];
         allocation->sweep();
         if (allocation->isEmpty()) {
-            if (auto* set = largeAllocationSet())
+            if (auto* set = preciseAllocationSet())
                 set->remove(allocation->cell());
             if (allocation->isLowerTier())
                 static_cast<IsoSubspace*>(allocation->subspace())->sweepLowerTierCell(allocation);
@@ -262,10 +262,10 @@ void MarkedSpace::sweepLargeAllocations()
             continue;
         }
         allocation->setIndexInSpace(dstIndex);
-        m_largeAllocations[dstIndex++] = allocation;
+        m_preciseAllocations[dstIndex++] = allocation;
     }
-    m_largeAllocations.shrink(dstIndex);
-    m_largeAllocationsNurseryOffset = m_largeAllocations.size();
+    m_preciseAllocations.shrink(dstIndex);
+    m_preciseAllocationsNurseryOffset = m_preciseAllocations.size();
 }
 
 void MarkedSpace::prepareForAllocation()
@@ -277,17 +277,17 @@ void MarkedSpace::prepareForAllocation()
     m_activeWeakSets.takeFrom(m_newActiveWeakSets);
     
     if (m_heap->collectionScope() == CollectionScope::Eden)
-        m_largeAllocationsNurseryOffsetForSweep = m_largeAllocationsNurseryOffset;
+        m_preciseAllocationsNurseryOffsetForSweep = m_preciseAllocationsNurseryOffset;
     else
-        m_largeAllocationsNurseryOffsetForSweep = 0;
-    m_largeAllocationsNurseryOffset = m_largeAllocations.size();
+        m_preciseAllocationsNurseryOffsetForSweep = 0;
+    m_preciseAllocationsNurseryOffset = m_preciseAllocations.size();
 }
 
-void MarkedSpace::enableLargeAllocationTracking()
+void MarkedSpace::enablePreciseAllocationTracking()
 {
-    m_largeAllocationSet = makeUnique<HashSet<HeapCell*>>();
-    for (auto* allocation : m_largeAllocations)
-        m_largeAllocationSet->add(allocation->cell());
+    m_preciseAllocationSet = makeUnique<HashSet<HeapCell*>>();
+    for (auto* allocation : m_preciseAllocations)
+        m_preciseAllocationSet->add(allocation->cell());
 }
 
 void MarkedSpace::visitWeakSets(SlotVisitor& visitor)
@@ -336,30 +336,30 @@ void MarkedSpace::stopAllocatingForGood()
 
 void MarkedSpace::prepareForConservativeScan()
 {
-    m_largeAllocationsForThisCollectionBegin = m_largeAllocations.begin() + m_largeAllocationsOffsetForThisCollection;
-    m_largeAllocationsForThisCollectionSize = m_largeAllocations.size() - m_largeAllocationsOffsetForThisCollection;
-    m_largeAllocationsForThisCollectionEnd = m_largeAllocations.end();
-    RELEASE_ASSERT(m_largeAllocationsForThisCollectionEnd == m_largeAllocationsForThisCollectionBegin + m_largeAllocationsForThisCollectionSize);
+    m_preciseAllocationsForThisCollectionBegin = m_preciseAllocations.begin() + m_preciseAllocationsOffsetForThisCollection;
+    m_preciseAllocationsForThisCollectionSize = m_preciseAllocations.size() - m_preciseAllocationsOffsetForThisCollection;
+    m_preciseAllocationsForThisCollectionEnd = m_preciseAllocations.end();
+    RELEASE_ASSERT(m_preciseAllocationsForThisCollectionEnd == m_preciseAllocationsForThisCollectionBegin + m_preciseAllocationsForThisCollectionSize);
     
     std::sort(
-        m_largeAllocationsForThisCollectionBegin, m_largeAllocationsForThisCollectionEnd,
-        [&] (LargeAllocation* a, LargeAllocation* b) {
+        m_preciseAllocationsForThisCollectionBegin, m_preciseAllocationsForThisCollectionEnd,
+        [&] (PreciseAllocation* a, PreciseAllocation* b) {
             return a < b;
         });
-    unsigned index = m_largeAllocationsOffsetForThisCollection;
-    for (auto* start = m_largeAllocationsForThisCollectionBegin; start != m_largeAllocationsForThisCollectionEnd; ++start, ++index) {
+    unsigned index = m_preciseAllocationsOffsetForThisCollection;
+    for (auto* start = m_preciseAllocationsForThisCollectionBegin; start != m_preciseAllocationsForThisCollectionEnd; ++start, ++index) {
         (*start)->setIndexInSpace(index);
-        ASSERT(m_largeAllocations[index] == *start);
-        ASSERT(m_largeAllocations[index]->indexInSpace() == index);
+        ASSERT(m_preciseAllocations[index] == *start);
+        ASSERT(m_preciseAllocations[index]->indexInSpace() == index);
     }
 }
 
 void MarkedSpace::prepareForMarking()
 {
     if (m_heap->collectionScope() == CollectionScope::Eden)
-        m_largeAllocationsOffsetForThisCollection = m_largeAllocationsNurseryOffset;
+        m_preciseAllocationsOffsetForThisCollection = m_preciseAllocationsNurseryOffset;
     else
-        m_largeAllocationsOffsetForThisCollection = 0;
+        m_preciseAllocationsOffsetForThisCollection = 0;
 }
 
 void MarkedSpace::resumeAllocating()
@@ -369,7 +369,7 @@ void MarkedSpace::resumeAllocating()
             directory.resumeAllocating();
             return IterationStatus::Continue;
         });
-    // Nothing to do for LargeAllocations.
+    // Nothing to do for PreciseAllocations.
 }
 
 bool MarkedSpace::isPagedOut(MonotonicTime deadline)
@@ -383,7 +383,7 @@ bool MarkedSpace::isPagedOut(MonotonicTime deadline)
             }
             return IterationStatus::Continue;
         });
-    // FIXME: Consider taking LargeAllocations into account here.
+    // FIXME: Consider taking PreciseAllocations into account here.
     return result;
 }
 
@@ -432,7 +432,7 @@ void MarkedSpace::beginMarking()
         
         m_markingVersion = nextVersion(m_markingVersion);
         
-        for (LargeAllocation* allocation : m_largeAllocations)
+        for (PreciseAllocation* allocation : m_preciseAllocations)
             allocation->flip();
     }
 
@@ -459,11 +459,11 @@ void MarkedSpace::endMarking()
     
     m_newlyAllocatedVersion = nextVersion(m_newlyAllocatedVersion);
     
-    for (unsigned i = m_largeAllocationsOffsetForThisCollection; i < m_largeAllocations.size(); ++i)
-        m_largeAllocations[i]->clearNewlyAllocated();
+    for (unsigned i = m_preciseAllocationsOffsetForThisCollection; i < m_preciseAllocations.size(); ++i)
+        m_preciseAllocations[i]->clearNewlyAllocated();
 
     if (!ASSERT_DISABLED) {
-        for (LargeAllocation* allocation : m_largeAllocations)
+        for (PreciseAllocation* allocation : m_preciseAllocations)
             ASSERT_UNUSED(allocation, !allocation->isNewlyAllocated());
     }
 
@@ -497,7 +497,7 @@ size_t MarkedSpace::objectCount()
         [&] (MarkedBlock::Handle* block) {
             result += block->markCount();
         });
-    for (LargeAllocation* allocation : m_largeAllocations) {
+    for (PreciseAllocation* allocation : m_preciseAllocations) {
         if (allocation->isMarked())
             result++;
     }
@@ -511,7 +511,7 @@ size_t MarkedSpace::size()
         [&] (MarkedBlock::Handle* block) {
             result += block->markCount() * block->cellSize();
         });
-    for (LargeAllocation* allocation : m_largeAllocations) {
+    for (PreciseAllocation* allocation : m_preciseAllocations) {
         if (allocation->isMarked())
             result += allocation->cellSize();
     }
index 017db7a..e1fe7eb 100644 (file)
@@ -23,9 +23,9 @@
 
 #include "BlockDirectory.h"
 #include "IterationStatus.h"
-#include "LargeAllocation.h"
 #include "MarkedBlock.h"
 #include "MarkedBlockSet.h"
+#include "PreciseAllocation.h"
 #include <array>
 #include <wtf/Bag.h>
 #include <wtf/HashSet.h>
@@ -141,7 +141,7 @@ public:
     void snapshotUnswept();
     void clearNewlyAllocated();
     void sweep();
-    void sweepLargeAllocations();
+    void sweepPreciseAllocations();
     void assertNoUnswept();
     size_t objectCount();
     size_t size();
@@ -152,18 +152,18 @@ public:
     HeapVersion markingVersion() const { return m_markingVersion; }
     HeapVersion newlyAllocatedVersion() const { return m_newlyAllocatedVersion; }
 
-    const Vector<LargeAllocation*>& largeAllocations() const { return m_largeAllocations; }
-    unsigned largeAllocationsNurseryOffset() const { return m_largeAllocationsNurseryOffset; }
-    unsigned largeAllocationsOffsetForThisCollection() const { return m_largeAllocationsOffsetForThisCollection; }
-    HashSet<HeapCell*>* largeAllocationSet() const { return m_largeAllocationSet.get(); }
+    const Vector<PreciseAllocation*>& preciseAllocations() const { return m_preciseAllocations; }
+    unsigned preciseAllocationsNurseryOffset() const { return m_preciseAllocationsNurseryOffset; }
+    unsigned preciseAllocationsOffsetForThisCollection() const { return m_preciseAllocationsOffsetForThisCollection; }
+    HashSet<HeapCell*>* preciseAllocationSet() const { return m_preciseAllocationSet.get(); }
 
-    void enableLargeAllocationTracking();
+    void enablePreciseAllocationTracking();
     
-    // These are cached pointers and offsets for quickly searching the large allocations that are
+    // These are cached pointers and offsets for quickly searching the precise allocations that are
     // relevant to this collection.
-    LargeAllocation** largeAllocationsForThisCollectionBegin() const { return m_largeAllocationsForThisCollectionBegin; }
-    LargeAllocation** largeAllocationsForThisCollectionEnd() const { return m_largeAllocationsForThisCollectionEnd; }
-    unsigned largeAllocationsForThisCollectionSize() const { return m_largeAllocationsForThisCollectionSize; }
+    PreciseAllocation** preciseAllocationsForThisCollectionBegin() const { return m_preciseAllocationsForThisCollectionBegin; }
+    PreciseAllocation** preciseAllocationsForThisCollectionEnd() const { return m_preciseAllocationsForThisCollectionEnd; }
+    unsigned preciseAllocationsForThisCollectionSize() const { return m_preciseAllocationsForThisCollectionSize; }
     
     BlockDirectory* firstDirectory() const { return m_directories.first(); }
     
@@ -204,14 +204,14 @@ private:
 
     Vector<Subspace*> m_subspaces;
 
-    std::unique_ptr<HashSet<HeapCell*>> m_largeAllocationSet;
-    Vector<LargeAllocation*> m_largeAllocations;
-    unsigned m_largeAllocationsNurseryOffset { 0 };
-    unsigned m_largeAllocationsOffsetForThisCollection { 0 };
-    unsigned m_largeAllocationsNurseryOffsetForSweep { 0 };
-    unsigned m_largeAllocationsForThisCollectionSize { 0 };
-    LargeAllocation** m_largeAllocationsForThisCollectionBegin { nullptr };
-    LargeAllocation** m_largeAllocationsForThisCollectionEnd { nullptr };
+    std::unique_ptr<HashSet<HeapCell*>> m_preciseAllocationSet;
+    Vector<PreciseAllocation*> m_preciseAllocations;
+    unsigned m_preciseAllocationsNurseryOffset { 0 };
+    unsigned m_preciseAllocationsOffsetForThisCollection { 0 };
+    unsigned m_preciseAllocationsNurseryOffsetForSweep { 0 };
+    unsigned m_preciseAllocationsForThisCollectionSize { 0 };
+    PreciseAllocation** m_preciseAllocationsForThisCollectionBegin { nullptr };
+    PreciseAllocation** m_preciseAllocationsForThisCollectionEnd { nullptr };
 
     Heap* m_heap;
     size_t m_capacity { 0 };
index 755ab31..df8531d 100644 (file)
@@ -47,7 +47,7 @@ template<typename Functor> inline void MarkedSpace::forEachLiveCell(const Functo
         if (result == IterationStatus::Done)
             return;
     }
-    for (LargeAllocation* allocation : m_largeAllocations) {
+    for (PreciseAllocation* allocation : m_preciseAllocations) {
         if (allocation->isLive()) {
             if (functor(allocation->cell(), allocation->attributes().cellKind) == IterationStatus::Done)
                 return;
@@ -63,7 +63,7 @@ template<typename Functor> inline void MarkedSpace::forEachDeadCell(HeapIteratio
         if ((*it)->handle().forEachDeadCell(functor) == IterationStatus::Done)
             return;
     }
-    for (LargeAllocation* allocation : m_largeAllocations) {
+    for (PreciseAllocation* allocation : m_preciseAllocations) {
         if (!allocation->isLive()) {
             if (functor(allocation->cell(), allocation->attributes().cellKind) == IterationStatus::Done)
                 return;
@@ -24,7 +24,7 @@
  */
 
 #include "config.h"
-#include "LargeAllocation.h"
+#include "PreciseAllocation.h"
 
 #include "AlignedMemoryAllocator.h"
 #include "Heap.h"
 
 namespace JSC {
 
-static inline bool isAlignedForLargeAllocation(void* memory)
+static inline bool isAlignedForPreciseAllocation(void* memory)
 {
     uintptr_t allocatedPointer = bitwise_cast<uintptr_t>(memory);
-    return !(allocatedPointer & (LargeAllocation::alignment - 1));
+    return !(allocatedPointer & (PreciseAllocation::alignment - 1));
 }
 
-LargeAllocation* LargeAllocation::tryCreate(Heap& heap, size_t size, Subspace* subspace, unsigned indexInSpace)
+PreciseAllocation* PreciseAllocation::tryCreate(Heap& heap, size_t size, Subspace* subspace, unsigned indexInSpace)
 {
     if (validateDFGDoesGC)
         RELEASE_ASSERT(heap.expectDoesGC());
@@ -54,18 +54,18 @@ LargeAllocation* LargeAllocation::tryCreate(Heap& heap, size_t size, Subspace* s
         return nullptr;
 
     bool adjustedAlignment = false;
-    if (!isAlignedForLargeAllocation(space)) {
+    if (!isAlignedForPreciseAllocation(space)) {
         space = bitwise_cast<void*>(bitwise_cast<uintptr_t>(space) + halfAlignment);
         adjustedAlignment = true;
-        ASSERT(isAlignedForLargeAllocation(space));
+        ASSERT(isAlignedForPreciseAllocation(space));
     }
     
     if (scribbleFreeCells())
         scribble(space, size);
-    return new (NotNull, space) LargeAllocation(heap, size, subspace, indexInSpace, adjustedAlignment);
+    return new (NotNull, space) PreciseAllocation(heap, size, subspace, indexInSpace, adjustedAlignment);
 }
 
-LargeAllocation* LargeAllocation::tryReallocate(size_t size, Subspace* subspace)
+PreciseAllocation* PreciseAllocation::tryReallocate(size_t size, Subspace* subspace)
 {
     ASSERT(!isLowerTier());
     size_t adjustedAlignmentAllocationSize = headerSize() + size + halfAlignment;
@@ -81,12 +81,12 @@ LargeAllocation* LargeAllocation::tryReallocate(size_t size, Subspace* subspace)
     if (!newBasePointer)
         return nullptr;
 
-    LargeAllocation* newAllocation = bitwise_cast<LargeAllocation*>(newBasePointer);
+    PreciseAllocation* newAllocation = bitwise_cast<PreciseAllocation*>(newBasePointer);
     bool newAdjustedAlignment = false;
-    if (!isAlignedForLargeAllocation(newBasePointer)) {
+    if (!isAlignedForPreciseAllocation(newBasePointer)) {
         newAdjustedAlignment = true;
-        newAllocation = bitwise_cast<LargeAllocation*>(bitwise_cast<uintptr_t>(newBasePointer) + halfAlignment);
-        ASSERT(isAlignedForLargeAllocation(static_cast<void*>(newAllocation)));
+        newAllocation = bitwise_cast<PreciseAllocation*>(bitwise_cast<uintptr_t>(newBasePointer) + halfAlignment);
+        ASSERT(isAlignedForPreciseAllocation(static_cast<void*>(newAllocation)));
     }
 
     // We have 4 patterns.
@@ -102,7 +102,7 @@ LargeAllocation* LargeAllocation::tryReallocate(size_t size, Subspace* subspace)
             // Old   [ 8 ][  content  ]
             // Now   [   ][  content  ]
             // New   [  content  ]...
-            memmove(newBasePointer, bitwise_cast<char*>(newBasePointer) + halfAlignment, oldCellSize + LargeAllocation::headerSize());
+            memmove(newBasePointer, bitwise_cast<char*>(newBasePointer) + halfAlignment, oldCellSize + PreciseAllocation::headerSize());
         } else {
             ASSERT(newAdjustedAlignment);
             ASSERT(newAllocation != newBasePointer);
@@ -110,7 +110,7 @@ LargeAllocation* LargeAllocation::tryReallocate(size_t size, Subspace* subspace)
             // Old   [  content  ]
             // Now   [  content  ][   ]
             // New   [ 8 ][  content  ]
-            memmove(bitwise_cast<char*>(newBasePointer) + halfAlignment, newBasePointer, oldCellSize + LargeAllocation::headerSize());
+            memmove(bitwise_cast<char*>(newBasePointer) + halfAlignment, newBasePointer, oldCellSize + PreciseAllocation::headerSize());
         }
     }
 
@@ -120,7 +120,7 @@ LargeAllocation* LargeAllocation::tryReallocate(size_t size, Subspace* subspace)
 }
 
 
-LargeAllocation* LargeAllocation::createForLowerTier(Heap& heap, size_t size, Subspace* subspace, uint8_t lowerTierIndex)
+PreciseAllocation* PreciseAllocation::createForLowerTier(Heap& heap, size_t size, Subspace* subspace, uint8_t lowerTierIndex)
 {
     if (validateDFGDoesGC)
         RELEASE_ASSERT(heap.expectDoesGC());
@@ -132,20 +132,20 @@ LargeAllocation* LargeAllocation::createForLowerTier(Heap& heap, size_t size, Su
     RELEASE_ASSERT(space);
 
     bool adjustedAlignment = false;
-    if (!isAlignedForLargeAllocation(space)) {
+    if (!isAlignedForPreciseAllocation(space)) {
         space = bitwise_cast<void*>(bitwise_cast<uintptr_t>(space) + halfAlignment);
         adjustedAlignment = true;
-        ASSERT(isAlignedForLargeAllocation(space));
+        ASSERT(isAlignedForPreciseAllocation(space));
     }
 
     if (scribbleFreeCells())
         scribble(space, size);
-    LargeAllocation* largeAllocation = new (NotNull, space) LargeAllocation(heap, size, subspace, 0, adjustedAlignment);
-    largeAllocation->m_lowerTierIndex = lowerTierIndex;
-    return largeAllocation;
+    PreciseAllocation* preciseAllocation = new (NotNull, space) PreciseAllocation(heap, size, subspace, 0, adjustedAlignment);
+    preciseAllocation->m_lowerTierIndex = lowerTierIndex;
+    return preciseAllocation;
 }
 
-LargeAllocation* LargeAllocation::reuseForLowerTier()
+PreciseAllocation* PreciseAllocation::reuseForLowerTier()
 {
     Heap& heap = *this->heap();
     size_t size = m_cellSize;
@@ -154,15 +154,15 @@ LargeAllocation* LargeAllocation::reuseForLowerTier()
     uint8_t lowerTierIndex = m_lowerTierIndex;
 
     void* space = this->basePointer();
-    this->~LargeAllocation();
+    this->~PreciseAllocation();
 
-    LargeAllocation* largeAllocation = new (NotNull, space) LargeAllocation(heap, size, subspace, 0, adjustedAlignment);
-    largeAllocation->m_lowerTierIndex = lowerTierIndex;
-    largeAllocation->m_hasValidCell = false;
-    return largeAllocation;
+    PreciseAllocation* preciseAllocation = new (NotNull, space) PreciseAllocation(heap, size, subspace, 0, adjustedAlignment);
+    preciseAllocation->m_lowerTierIndex = lowerTierIndex;
+    preciseAllocation->m_hasValidCell = false;
+    return preciseAllocation;
 }
 
-LargeAllocation::LargeAllocation(Heap& heap, size_t size, Subspace* subspace, unsigned indexInSpace, bool adjustedAlignment)
+PreciseAllocation::PreciseAllocation(Heap& heap, size_t size, Subspace* subspace, unsigned indexInSpace, bool adjustedAlignment)
     : m_indexInSpace(indexInSpace)
     , m_cellSize(size)
     , m_isNewlyAllocated(true)
@@ -175,13 +175,13 @@ LargeAllocation::LargeAllocation(Heap& heap, size_t size, Subspace* subspace, un
     m_isMarked.store(0);
 }
 
-LargeAllocation::~LargeAllocation()
+PreciseAllocation::~PreciseAllocation()
 {
     if (isOnList())
         remove();
 }
 
-void LargeAllocation::lastChanceToFinalize()
+void PreciseAllocation::lastChanceToFinalize()
 {
     m_weakSet.lastChanceToFinalize();
     clearMarked();
@@ -189,33 +189,33 @@ void LargeAllocation::lastChanceToFinalize()
     sweep();
 }
 
-void LargeAllocation::shrink()
+void PreciseAllocation::shrink()
 {
     m_weakSet.shrink();
 }
 
-void LargeAllocation::visitWeakSet(SlotVisitor& visitor)
+void PreciseAllocation::visitWeakSet(SlotVisitor& visitor)
 {
     m_weakSet.visit(visitor);
 }
 
-void LargeAllocation::reapWeakSet()
+void PreciseAllocation::reapWeakSet()
 {
     return m_weakSet.reap();
 }
 
-void LargeAllocation::flip()
+void PreciseAllocation::flip()
 {
     ASSERT(heap()->collectionScope() == CollectionScope::Full);
     clearMarked();
 }
 
-bool LargeAllocation::isEmpty()
+bool PreciseAllocation::isEmpty()
 {
     return !isMarked() && m_weakSet.isEmpty() && !isNewlyAllocated();
 }
 
-void LargeAllocation::sweep()
+void PreciseAllocation::sweep()
 {
     m_weakSet.sweep();
     
@@ -226,21 +226,21 @@ void LargeAllocation::sweep()
     }
 }
 
-void LargeAllocation::destroy()
+void PreciseAllocation::destroy()
 {
     AlignedMemoryAllocator* allocator = m_subspace->alignedMemoryAllocator();
     void* basePointer = this->basePointer();
-    this->~LargeAllocation();
+    this->~PreciseAllocation();
     allocator->freeMemory(basePointer);
 }
 
-void LargeAllocation::dump(PrintStream& out) const
+void PreciseAllocation::dump(PrintStream& out) const
 {
     out.print(RawPointer(this), ":(cell at ", RawPointer(cell()), " with size ", m_cellSize, " and attributes ", m_attributes, ")");
 }
 
 #if !ASSERT_DISABLED
-void LargeAllocation::assertValidCell(VM& vm, HeapCell* cell) const
+void PreciseAllocation::assertValidCell(VM& vm, HeapCell* cell) const
 {
     ASSERT(&vm == &this->vm());
     ASSERT(cell == this->cell());
@@ -34,27 +34,27 @@ class IsoSubspace;
 class SlotVisitor;
 
 // WebKit has a good malloc that already knows what to do for large allocations. The GC shouldn't
-// have to think about such things. That's where LargeAllocation comes in. We will allocate large
-// objects directly using malloc, and put the LargeAllocation header just before them. We can detect
-// when a HeapCell* is a LargeAllocation because it will have the MarkedBlock::atomSize / 2 bit set.
+// have to think about such things. That's where PreciseAllocation comes in. We will allocate large
+// objects directly using malloc, and put the PreciseAllocation header just before them. We can detect
+// when a HeapCell* is a PreciseAllocation because it will have the MarkedBlock::atomSize / 2 bit set.
 
-class LargeAllocation : public PackedRawSentinelNode<LargeAllocation> {
+class PreciseAllocation : public PackedRawSentinelNode<PreciseAllocation> {
 public:
     friend class LLIntOffsetsExtractor;
     friend class IsoSubspace;
 
-    static LargeAllocation* tryCreate(Heap&, size_t, Subspace*, unsigned indexInSpace);
+    static PreciseAllocation* tryCreate(Heap&, size_t, Subspace*, unsigned indexInSpace);
 
-    static LargeAllocation* createForLowerTier(Heap&, size_t, Subspace*, uint8_t lowerTierIndex);
-    LargeAllocation* reuseForLowerTier();
+    static PreciseAllocation* createForLowerTier(Heap&, size_t, Subspace*, uint8_t lowerTierIndex);
+    PreciseAllocation* reuseForLowerTier();
 
-    LargeAllocation* tryReallocate(size_t, Subspace*);
+    PreciseAllocation* tryReallocate(size_t, Subspace*);
     
-    ~LargeAllocation();
+    ~PreciseAllocation();
     
-    static LargeAllocation* fromCell(const void* cell)
+    static PreciseAllocation* fromCell(const void* cell)
     {
-        return bitwise_cast<LargeAllocation*>(bitwise_cast<char*>(cell) - headerSize());
+        return bitwise_cast<PreciseAllocation*>(bitwise_cast<char*>(cell) - headerSize());
     }
     
     HeapCell* cell() const
@@ -62,7 +62,7 @@ public:
         return bitwise_cast<HeapCell*>(bitwise_cast<char*>(this) + headerSize());
     }
     
-    static bool isLargeAllocation(HeapCell* cell)
+    static bool isPreciseAllocation(HeapCell* cell)
     {
         return bitwise_cast<uintptr_t>(cell) & halfAlignment;
     }
@@ -158,10 +158,10 @@ public:
     
     static constexpr unsigned alignment = MarkedBlock::atomSize;
     static constexpr unsigned halfAlignment = alignment / 2;
-    static constexpr unsigned headerSize() { return ((sizeof(LargeAllocation) + halfAlignment - 1) & ~(halfAlignment - 1)) | halfAlignment; }
+    static constexpr unsigned headerSize() { return ((sizeof(PreciseAllocation) + halfAlignment - 1) & ~(halfAlignment - 1)) | halfAlignment; }
 
 private:
-    LargeAllocation(Heap&, size_t, Subspace*, unsigned indexInSpace, bool adjustedAlignment);
+    PreciseAllocation(Heap&, size_t, Subspace*, unsigned indexInSpace, bool adjustedAlignment);
     
     void* basePointer() const;
     
@@ -177,7 +177,7 @@ private:
     WeakSet m_weakSet;
 };
 
-inline void* LargeAllocation::basePointer() const
+inline void* PreciseAllocation::basePointer() const
 {
     if (m_adjustedAlignment)
         return bitwise_cast<char*>(this) - halfAlignment;
index e0e9071..0c2eb41 100644 (file)
@@ -257,8 +257,8 @@ ALWAYS_INLINE void SlotVisitor::appendHiddenSlowImpl(JSCell* cell, Dependency de
     validate(cell);
 #endif
     
-    if (cell->isLargeAllocation())
-        setMarkedAndAppendToMarkStack(cell->largeAllocation(), cell, dependency);
+    if (cell->isPreciseAllocation())
+        setMarkedAndAppendToMarkStack(cell->preciseAllocation(), cell, dependency);
     else
         setMarkedAndAppendToMarkStack(cell->markedBlock(), cell, dependency);
 }
@@ -281,8 +281,8 @@ ALWAYS_INLINE void SlotVisitor::setMarkedAndAppendToMarkStack(ContainerType& con
 
 void SlotVisitor::appendToMarkStack(JSCell* cell)
 {
-    if (cell->isLargeAllocation())
-        appendToMarkStack(cell->largeAllocation(), cell);
+    if (cell->isPreciseAllocation())
+        appendToMarkStack(cell->preciseAllocation(), cell);
     else
         appendToMarkStack(cell->markedBlock(), cell);
 }
index 2a29771..4aad9ad 100644 (file)
@@ -46,8 +46,8 @@ ALWAYS_INLINE void SlotVisitor::appendUnbarriered(JSCell* cell)
         return;
     
     Dependency dependency;
-    if (UNLIKELY(cell->isLargeAllocation())) {
-        if (LIKELY(cell->largeAllocation().isMarked())) {
+    if (UNLIKELY(cell->isPreciseAllocation())) {
+        if (LIKELY(cell->preciseAllocation().isMarked())) {
             if (LIKELY(!m_heapAnalyzer))
                 return;
         }
@@ -84,8 +84,8 @@ ALWAYS_INLINE void SlotVisitor::appendHiddenUnbarriered(JSCell* cell)
         return;
     
     Dependency dependency;
-    if (UNLIKELY(cell->isLargeAllocation())) {
-        if (LIKELY(cell->largeAllocation().isMarked()))
+    if (UNLIKELY(cell->isPreciseAllocation())) {
+        if (LIKELY(cell->preciseAllocation().isMarked()))
             return;
     } else {
         MarkedBlock& block = cell->markedBlock();
index ebb72c2..0e917a0 100644 (file)
@@ -81,7 +81,7 @@ public:
     JS_EXPORT_PRIVATE Ref<SharedTask<MarkedBlock::Handle*()>> parallelNotEmptyMarkedBlockSource();
     
     template<typename Func>
-    void forEachLargeAllocation(const Func&);
+    void forEachPreciseAllocation(const Func&);
     
     template<typename Func>
     void forEachMarkedCell(const Func&);
@@ -113,7 +113,7 @@ protected:
     
     BlockDirectory* m_firstDirectory { nullptr };
     BlockDirectory* m_directoryForEmptyAllocation { nullptr }; // Uses the MarkedSpace linked list of blocks.
-    SentinelLinkedList<LargeAllocation, PackedRawSentinelNode<LargeAllocation>> m_largeAllocations;
+    SentinelLinkedList<PreciseAllocation, PackedRawSentinelNode<PreciseAllocation>> m_preciseAllocations;
     Subspace* m_nextSubspaceInAlignedMemoryAllocator { nullptr };
 
     CString m_name;
index 20b41bb..f4ff70c 100644 (file)
@@ -60,9 +60,9 @@ void Subspace::forEachNotEmptyMarkedBlock(const Func& func)
 }
 
 template<typename Func>
-void Subspace::forEachLargeAllocation(const Func& func)
+void Subspace::forEachPreciseAllocation(const Func& func)
 {
-    for (LargeAllocation* allocation = m_largeAllocations.begin(); allocation != m_largeAllocations.end(); allocation = allocation->next())
+    for (PreciseAllocation* allocation = m_preciseAllocations.begin(); allocation != m_preciseAllocations.end(); allocation = allocation->next())
         func(allocation);
 }
 
@@ -78,8 +78,8 @@ void Subspace::forEachMarkedCell(const Func& func)
                 });
         });
     CellAttributes attributes = this->attributes();
-    forEachLargeAllocation(
-        [&] (LargeAllocation* allocation) {
+    forEachPreciseAllocation(
+        [&] (PreciseAllocation* allocation) {
             if (allocation->isMarked())
                 func(allocation->cell(), attributes.cellKind);
         });
@@ -109,14 +109,14 @@ Ref<SharedTask<void(SlotVisitor&)>> Subspace::forEachMarkedCellInParallel(const
             
             {
                 auto locker = holdLock(m_lock);
-                if (!m_needToVisitLargeAllocations)
+                if (!m_needToVisitPreciseAllocations)
                     return;
-                m_needToVisitLargeAllocations = false;
+                m_needToVisitPreciseAllocations = false;
             }
             
             CellAttributes attributes = m_subspace.attributes();
-            m_subspace.forEachLargeAllocation(
-                [&] (LargeAllocation* allocation) {
+            m_subspace.forEachPreciseAllocation(
+                [&] (PreciseAllocation* allocation) {
                     if (allocation->isMarked())
                         m_func(visitor, allocation->cell(), attributes.cellKind);
                 });
@@ -127,7 +127,7 @@ Ref<SharedTask<void(SlotVisitor&)>> Subspace::forEachMarkedCellInParallel(const
         Ref<SharedTask<MarkedBlock::Handle*()>> m_blockSource;
         Func m_func;
         Lock m_lock;
-        bool m_needToVisitLargeAllocations { true };
+        bool m_needToVisitPreciseAllocations { true };
     };
     
     return adoptRef(*new Task(*this, func));
@@ -145,8 +145,8 @@ void Subspace::forEachLiveCell(const Func& func)
                 });
         });
     CellAttributes attributes = this->attributes();
-    forEachLargeAllocation(
-        [&] (LargeAllocation* allocation) {
+    forEachPreciseAllocation(
+        [&] (PreciseAllocation* allocation) {
             if (allocation->isLive())
                 func(allocation->cell(), attributes.cellKind);
         });
index 9071bf9..db21d03 100644 (file)
@@ -141,8 +141,8 @@ void WeakBlock::visit(SlotVisitor& visitor)
     // If this WeakBlock doesn't belong to a CellContainer, we won't even be here.
     ASSERT(m_container);
     
-    if (m_container.isLargeAllocation())
-        specializedVisit(m_container.largeAllocation(), visitor);
+    if (m_container.isPreciseAllocation())
+        specializedVisit(m_container.preciseAllocation(), visitor);
     else
         specializedVisit(m_container.markedBlock(), visitor);
 }
index 8340f36..fe33163 100644 (file)
@@ -56,7 +56,7 @@ void WeakSet::sweep()
         if (block->isLogicallyEmptyButNotFree()) {
             // If this WeakBlock is logically empty, but still has Weaks pointing into it,
             // we can't destroy it just yet. Detach it from the WeakSet and hand ownership
-            // to the Heap so we don't pin down the entire MarkedBlock or LargeAllocation.
+            // to the Heap so we don't pin down the entire MarkedBlock or PreciseAllocation.
             m_blocks.remove(block);
             heap()->addLogicallyEmptyWeakBlock(block);
             block->disconnectContainer();
index 9ce34c8..a8800b0 100644 (file)
@@ -557,8 +557,8 @@ const NotInitialization = constexpr InitializationMode::NotInitialization
 const MarkedBlockSize = constexpr MarkedBlock::blockSize
 const MarkedBlockMask = ~(MarkedBlockSize - 1)
 const MarkedBlockFooterOffset = constexpr MarkedBlock::offsetOfFooter
-const LargeAllocationHeaderSize = constexpr (LargeAllocation::headerSize())
-const LargeAllocationVMOffset = (LargeAllocation::m_weakSet + WeakSet::m_vm - LargeAllocationHeaderSize)
+const PreciseAllocationHeaderSize = constexpr (PreciseAllocation::headerSize())
+const PreciseAllocationVMOffset = (PreciseAllocation::m_weakSet + WeakSet::m_vm - PreciseAllocationHeaderSize)
 
 const BlackThreshold = constexpr blackThreshold
 
@@ -1165,12 +1165,12 @@ macro notFunctionCodeBlockSetter(sourceRegister)
 end
 
 macro convertCalleeToVM(callee)
-    btpnz callee, (constexpr LargeAllocation::halfAlignment), .largeAllocation
+    btpnz callee, (constexpr PreciseAllocation::halfAlignment), .preciseAllocation
     andp MarkedBlockMask, callee
     loadp MarkedBlockFooterOffset + MarkedBlock::Footer::m_vm[callee], callee
     jmp .done
-.largeAllocation:
-    loadp LargeAllocationVMOffset[callee], callee
+.preciseAllocation:
+    loadp PreciseAllocationVMOffset[callee], callee
 .done:
 end
 
index 4860605..035e416 100644 (file)
@@ -206,12 +206,12 @@ inline Butterfly* Butterfly::reallocArrayRightIfPossible(
     size_t newSize = totalSize(0, propertyCapacity, true, newIndexingPayloadSizeInBytes);
     ASSERT(newSize >= oldSize);
 
-    // We can eagerly destroy butterfly backed by LargeAllocation if (1) concurrent collector is not active and (2) the butterfly does not contain any property storage.
+    // We can eagerly destroy butterfly backed by PreciseAllocation if (1) concurrent collector is not active and (2) the butterfly does not contain any property storage.
     // This is because during deallocation concurrent collector can access butterfly and DFG concurrent compilers accesses properties.
     // Objects with no properties are common in arrays, and we are focusing on very large array crafted by repeating Array#push, so... that's fine!
-    bool canRealloc = !propertyCapacity && !vm.heap.mutatorShouldBeFenced() && bitwise_cast<HeapCell*>(theBase)->isLargeAllocation();
+    bool canRealloc = !propertyCapacity && !vm.heap.mutatorShouldBeFenced() && bitwise_cast<HeapCell*>(theBase)->isPreciseAllocation();
     if (canRealloc) {
-        void* newBase = vm.jsValueGigacageAuxiliarySpace.reallocateLargeAllocationNonVirtual(vm, bitwise_cast<HeapCell*>(theBase), newSize, &deferralContext, AllocationFailureMode::ReturnNull);
+        void* newBase = vm.jsValueGigacageAuxiliarySpace.reallocatePreciseAllocationNonVirtual(vm, bitwise_cast<HeapCell*>(theBase), newSize, &deferralContext, AllocationFailureMode::ReturnNull);
         if (!newBase)
             return nullptr;
         return fromBase(newBase, 0, propertyCapacity);
index 5320fc1..28c0acf 100644 (file)
@@ -183,7 +183,7 @@ namespace JSC {
     v(Double, gcIncrementScale, 0, Normal, nullptr) \
     v(Bool, scribbleFreeCells, false, Normal, nullptr) \
     v(Double, sizeClassProgression, 1.4, Normal, nullptr) \
-    v(Unsigned, largeAllocationCutoff, 100000, Normal, nullptr) \
+    v(Unsigned, preciseAllocationCutoff, 100000, Normal, nullptr) \
     v(Bool, dumpSizeClasses, false, Normal, nullptr) \
     v(Bool, useBumpAllocator, true, Normal, nullptr) \
     v(Bool, stealEmptyBlocksFromOtherAllocators, true, Normal, nullptr) \
index 0751f9b..61d71e2 100644 (file)
@@ -307,7 +307,7 @@ SamplingProfiler::SamplingProfiler(VM& vm, RefPtr<Stopwatch>&& stopwatch)
     }
 
     m_currentFrames.grow(256);
-    vm.heap.objectSpace().enableLargeAllocationTracking();
+    vm.heap.objectSpace().enablePreciseAllocationTracking();
 }
 
 SamplingProfiler::~SamplingProfiler()
index 016caad..f03f16f 100644 (file)
@@ -231,7 +231,7 @@ bool VMInspector::isInHeap(Heap* heap, void* ptr)
     MarkedBlock* candidate = MarkedBlock::blockFor(ptr);
     if (heap->objectSpace().blocks().set().contains(candidate))
         return true;
-    for (LargeAllocation* allocation : heap->objectSpace().largeAllocations()) {
+    for (PreciseAllocation* allocation : heap->objectSpace().preciseAllocations()) {
         if (allocation->contains(ptr))
             return true;
     }
index 75e6308..d9f9546 100644 (file)
@@ -59,20 +59,20 @@ template<VMInspector::VerifierAction action, VMInspector::VerifyFunctor verifier
 bool VMInspector::verifyCell(VM& vm, JSCell* cell)
 {
     size_t allocatorCellSize = 0;
-    if (cell->isLargeAllocation()) {
-        LargeAllocation& largeAllocation = cell->largeAllocation();
-        AUDIT_VERIFY(action, verifier, &largeAllocation.vm() == &vm, cell, cell->type(), &largeAllocation.vm(), &vm);
-
-        bool isValidLargeAllocation = false;
-        for (auto* i : vm.heap.objectSpace().largeAllocations()) {
-            if (i == &largeAllocation) {
-                isValidLargeAllocation = true;
+    if (cell->isPreciseAllocation()) {
+        PreciseAllocation& preciseAllocation = cell->preciseAllocation();
+        AUDIT_VERIFY(action, verifier, &preciseAllocation.vm() == &vm, cell, cell->type(), &preciseAllocation.vm(), &vm);
+
+        bool isValidPreciseAllocation = false;
+        for (auto* i : vm.heap.objectSpace().preciseAllocations()) {
+            if (i == &preciseAllocation) {
+                isValidPreciseAllocation = true;
                 break;
             }
         }
-        AUDIT_VERIFY(action, verifier, isValidLargeAllocation, cell, cell->type());
+        AUDIT_VERIFY(action, verifier, isValidPreciseAllocation, cell, cell->type());
 
-        allocatorCellSize = largeAllocation.cellSize();
+        allocatorCellSize = preciseAllocation.cellSize();
     } else {
         MarkedBlock& block = cell->markedBlock();
         MarkedBlock::Handle& blockHandle = block.handle();