/*
 * Copyright (C) 2011, 2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "MarkedBlock.h"

#include "JSCell.h"
#include "JSDestructibleObject.h"
#include "JSCInlines.h"
#include "MarkedBlockInlines.h"
#include "SuperSampler.h"

namespace JSC {

static const bool computeBalance = false;
static size_t balance;

MarkedBlock::Handle* MarkedBlock::tryCreate(Heap& heap)
{
    if (computeBalance) {
        balance++;
        if (!(balance % 10))
            dataLog("MarkedBlock Balance: ", balance, "\n");
    }
    void* blockSpace = tryFastAlignedMalloc(blockSize, blockSize);
    if (!blockSpace)
        return nullptr;
    if (scribbleFreeCells())
        scribble(blockSpace, blockSize);
    return new Handle(heap, blockSpace);
}

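// Aside: requesting blockSize alignment above is load-bearing. Because every MarkedBlock
// starts on a blockSize boundary, a cell pointer can be mapped back to its owning block
// with plain bit math. A minimal sketch of that computation (the real helper is
// MarkedBlock::blockFor() in MarkedBlock.h):
//
//     MarkedBlock* block = reinterpret_cast<MarkedBlock*>(
//         reinterpret_cast<uintptr_t>(cell) & ~(blockSize - 1));
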
MarkedBlock::Handle::Handle(Heap& heap, void* blockSpace)
    : m_weakSet(heap.vm(), CellContainer())
{
    m_block = new (NotNull, blockSpace) MarkedBlock(*heap.vm(), *this);

    m_weakSet.setContainer(*m_block);

    heap.didAllocateBlock(blockSize);
}

MarkedBlock::Handle::~Handle()
{
    Heap& heap = *this->heap();
    if (computeBalance) {
        balance--;
        if (!(balance % 10))
            dataLog("MarkedBlock Balance: ", balance, "\n");
    }
    removeFromAllocator();
    m_block->~MarkedBlock();
    fastAlignedFree(m_block);
    heap.didFreeBlock(blockSize);
}

MarkedBlock::MarkedBlock(VM& vm, Handle& handle)
    : m_version(MarkedSpace::nullVersion)
    , m_handle(handle)
    , m_vm(&vm)
{
}

template<MarkedBlock::Handle::EmptyMode emptyMode, MarkedBlock::Handle::SweepMode sweepMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode, MarkedBlock::Handle::NewlyAllocatedMode newlyAllocatedMode, MarkedBlock::Handle::FlipMode flipMode>
FreeList MarkedBlock::Handle::specializedSweep()
{
    RELEASE_ASSERT(!(destructionMode == DoesNotNeedDestruction && sweepMode == SweepOnly));

    SuperSamplerScope superSamplerScope(false);

    MarkedBlock& block = this->block();

    if (false)
        dataLog(RawPointer(this), ": MarkedBlock::Handle::specializedSweep!\n");

    if (Options::useBumpAllocator()
        && emptyMode == IsEmpty
        && newlyAllocatedMode == DoesNotHaveNewlyAllocated) {
        ASSERT(flipMode == NeedsFlip);

        char* startOfLastCell = static_cast<char*>(cellAlign(block.atoms() + m_endAtom - 1));
        char* payloadEnd = startOfLastCell + cellSize();
        RELEASE_ASSERT(payloadEnd - MarkedBlock::blockSize <= bitwise_cast<char*>(&block));
        char* payloadBegin = bitwise_cast<char*>(block.atoms() + firstAtom());
        if (scribbleMode == Scribble)
            scribble(payloadBegin, payloadEnd - payloadBegin);
        if (sweepMode == SweepToFreeList)
            setIsFreeListed();
        else
            m_allocator->setIsEmpty(this, true);
        FreeList result = FreeList::bump(payloadEnd, payloadEnd - payloadBegin);
        if (false)
            dataLog("Quickly swept block ", RawPointer(this), " with cell size ", cellSize(), " and attributes ", m_attributes, ": ", result, "\n");
        return result;
    }

    // This produces a free list that is ordered in reverse through the block.
    // This is fine, since the allocation code makes no assumptions about the
    // order of the free list.
    FreeCell* head = nullptr;
    size_t count = 0;
    for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell) {
        if (emptyMode == NotEmpty
            && ((flipMode == DoesNotNeedFlip && block.m_marks.get(i))
                || (newlyAllocatedMode == HasNewlyAllocated && m_newlyAllocated->get(i)))) {
            // Live cell: skip it.
            continue;
        }

        HeapCell* cell = reinterpret_cast_ptr<HeapCell*>(&block.atoms()[i]);

        if (destructionMode == NeedsDestruction && emptyMode == NotEmpty)
            static_cast<JSCell*>(cell)->callDestructor(*vm());

        if (sweepMode == SweepToFreeList) {
            FreeCell* freeCell = reinterpret_cast<FreeCell*>(cell);
            if (scribbleMode == Scribble)
                scribble(freeCell, cellSize());
            freeCell->next = head;
            head = freeCell;
            ++count;
        }
    }

    // We only want to discard the newlyAllocated bits if we're creating a FreeList,
    // otherwise we would lose information on what's currently alive.
    if (sweepMode == SweepToFreeList && newlyAllocatedMode == HasNewlyAllocated)
        m_newlyAllocated = nullptr;

    FreeList result = FreeList::list(head, count * cellSize());
    if (sweepMode == SweepToFreeList)
        setIsFreeListed();
    else if (emptyMode == IsEmpty)
        m_allocator->setIsEmpty(this, true);
    if (false)
        dataLog("Slowly swept block ", RawPointer(&block), " with cell size ", cellSize(), " and attributes ", m_attributes, ": ", result, "\n");
    return result;
}

FreeList MarkedBlock::Handle::sweep(SweepMode sweepMode)
{
    m_allocator->setIsUnswept(this, false);

    m_weakSet.sweep();

    if (sweepMode == SweepOnly && m_attributes.destruction == DoesNotNeedDestruction)
        return FreeList();

    if (UNLIKELY(m_isFreeListed)) {
        RELEASE_ASSERT(sweepMode == SweepToFreeList);
        return FreeList();
    }

    ASSERT(!m_allocator->isAllocated(this));

    if (m_attributes.destruction == NeedsDestruction)
        return sweepHelperSelectScribbleMode<NeedsDestruction>(sweepMode);
    return sweepHelperSelectScribbleMode<DoesNotNeedDestruction>(sweepMode);
}

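// Note how sweep()'s SweepOnly early return pairs with the RELEASE_ASSERT at the top of
// specializedSweep(): a (DoesNotNeedDestruction, SweepOnly) sweep would neither build a
// free list nor run destructors, so it returns above and never reaches the sweep loop.
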
template<DestructionMode destructionMode>
FreeList MarkedBlock::Handle::sweepHelperSelectScribbleMode(SweepMode sweepMode)
{
    if (scribbleFreeCells())
        return sweepHelperSelectEmptyMode<destructionMode, Scribble>(sweepMode);
    return sweepHelperSelectEmptyMode<destructionMode, DontScribble>(sweepMode);
}

template<DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode>
FreeList MarkedBlock::Handle::sweepHelperSelectEmptyMode(SweepMode sweepMode)
{
    // It's not obvious, but this is the only way to know if the block is empty. It's the only
    // bit that captures these caveats:
    // - It's true when the block is freshly allocated.
    // - It's true if the block had been swept in the past, all destructors were called, and that
    //   sweep proved that the block is empty.
    // - It's false if there are any destructors that need to be called, even if the block has no
    //   live objects in it.
    if (m_allocator->isEmpty(this))
        return sweepHelperSelectHasNewlyAllocated<IsEmpty, destructionMode, scribbleMode>(sweepMode);
    return sweepHelperSelectHasNewlyAllocated<NotEmpty, destructionMode, scribbleMode>(sweepMode);
}

template<MarkedBlock::Handle::EmptyMode emptyMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode>
FreeList MarkedBlock::Handle::sweepHelperSelectHasNewlyAllocated(SweepMode sweepMode)
{
    if (m_newlyAllocated)
        return sweepHelperSelectSweepMode<emptyMode, destructionMode, scribbleMode, HasNewlyAllocated>(sweepMode);
    return sweepHelperSelectSweepMode<emptyMode, destructionMode, scribbleMode, DoesNotHaveNewlyAllocated>(sweepMode);
}

template<MarkedBlock::Handle::EmptyMode emptyMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode, MarkedBlock::Handle::NewlyAllocatedMode newlyAllocatedMode>
FreeList MarkedBlock::Handle::sweepHelperSelectSweepMode(SweepMode sweepMode)
{
    if (sweepMode == SweepToFreeList)
        return sweepHelperSelectFlipMode<emptyMode, SweepToFreeList, destructionMode, scribbleMode, newlyAllocatedMode>();
    return sweepHelperSelectFlipMode<emptyMode, SweepOnly, destructionMode, scribbleMode, newlyAllocatedMode>();
}

template<MarkedBlock::Handle::EmptyMode emptyMode, MarkedBlock::Handle::SweepMode sweepMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode, MarkedBlock::Handle::NewlyAllocatedMode newlyAllocatedMode>
FreeList MarkedBlock::Handle::sweepHelperSelectFlipMode()
{
    if (needsFlip())
        return specializedSweep<emptyMode, sweepMode, destructionMode, scribbleMode, newlyAllocatedMode, NeedsFlip>();
    return specializedSweep<emptyMode, sweepMode, destructionMode, scribbleMode, newlyAllocatedMode, DoesNotNeedFlip>();
}

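// Taken together, sweep() and the helpers above form a compile-time decision tree:
// destruction, scribble, empty, newlyAllocated, sweep, and flip modes are each pinned to
// a template argument in turn, so every reachable combination gets its own instantiation
// of specializedSweep() with the mode checks folded away by the compiler. For example, a
// non-empty, non-destructible block that has newlyAllocated bits and needs a flip, swept
// to a free list, bottoms out in:
//
//     specializedSweep<NotEmpty, SweepToFreeList, DoesNotNeedDestruction,
//         DontScribble, HasNewlyAllocated, NeedsFlip>();
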
void MarkedBlock::Handle::unsweepWithNoNewlyAllocated()
{
    RELEASE_ASSERT(m_isFreeListed);
    m_isFreeListed = false;
}

void MarkedBlock::Handle::setIsFreeListed()
{
    m_allocator->setIsEmpty(this, false);
    m_isFreeListed = true;
}

class SetNewlyAllocatedFunctor : public MarkedBlock::VoidFunctor {
public:
    SetNewlyAllocatedFunctor(MarkedBlock::Handle* block)
        : m_block(block)
    {
    }

    IterationStatus operator()(HeapCell* cell, HeapCell::Kind) const
    {
        ASSERT(MarkedBlock::blockFor(cell) == &m_block->block());
        m_block->setNewlyAllocated(cell);
        return IterationStatus::Continue;
    }

private:
    MarkedBlock::Handle* m_block;
};

void MarkedBlock::Handle::stopAllocating(const FreeList& freeList)
{
    if (false)
        dataLog(RawPointer(this), ": MarkedBlock::Handle::stopAllocating!\n");
    ASSERT(!allocator()->isAllocated(this));

    if (!isFreeListed()) {
        // This means that we either didn't use this block at all for allocation since last GC,
        // or someone had already done stopAllocating() before.
        ASSERT(freeList.allocationWillFail());
        return;
    }

    // Roll back to a coherent state for Heap introspection. Cells newly
    // allocated from our free list are not currently marked, so we need another
    // way to tell what's live vs dead.

    ASSERT(!m_newlyAllocated);
    m_newlyAllocated = std::make_unique<WTF::Bitmap<atomsPerBlock>>();

    SetNewlyAllocatedFunctor functor(this);
    forEachCell(functor);

    forEachFreeCell(
        freeList,
        [&] (HeapCell* cell) {
            if (m_attributes.destruction == NeedsDestruction)
                cell->zap();
            clearNewlyAllocated(cell);
        });

    m_isFreeListed = false;
}

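// A cell handed out from the free list since the last GC has no mark bit, so once
// stopAllocating() has run, the newlyAllocated bitmap built above is the only record
// that such a cell is live. That is why the liveness queries further down ask "marked
// or newly allocated" rather than consulting the mark bits alone.
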
void MarkedBlock::Handle::lastChanceToFinalize()
{
    allocator()->setIsAllocated(this, false);
    m_block->clearMarks();
    m_weakSet.lastChanceToFinalize();

    clearNewlyAllocated();
    sweep();
}

FreeList MarkedBlock::Handle::resumeAllocating()
{
    ASSERT(!allocator()->isAllocated(this));
    ASSERT(!isFreeListed());

    if (!m_newlyAllocated) {
        // This means we had already exhausted the block when we stopped allocation.
        return FreeList();
    }

    // Re-create our free list from before stopping allocation. Note that this may return an empty
    // freelist, in which case the block will still be Marked!
    return sweep(SweepToFreeList);
}

void MarkedBlock::Handle::zap(const FreeList& freeList)
{
    forEachFreeCell(
        freeList,
        [&] (HeapCell* cell) {
            if (m_attributes.destruction == NeedsDestruction)
                cell->zap();
        });
}

template<typename Func>
void MarkedBlock::Handle::forEachFreeCell(const FreeList& freeList, const Func& func)
{
    if (freeList.remaining) {
        // Bump-allocated free list: the free cells are the tail of the payload.
        for (unsigned remaining = freeList.remaining; remaining; remaining -= cellSize())
            func(bitwise_cast<HeapCell*>(freeList.payloadEnd - remaining));
    } else {
        // Linked free list: grab each next pointer before the callback can clobber the cell.
        for (FreeCell* current = freeList.head; current;) {
            FreeCell* next = current->next;
            func(bitwise_cast<HeapCell*>(current));
            current = next;
        }
    }
}

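// The two branches above mirror the two FreeList shapes produced by specializedSweep():
// a bump range (payloadEnd/remaining, from the empty-block fast path) and a singly
// linked list threaded through the dead cells (from the slow path). Callers such as
// stopAllocating() and zap() are agnostic about which shape they are handed.
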
void MarkedBlock::aboutToMarkSlow(HeapVersion heapVersion)
{
    ASSERT(vm()->heap.objectSpace().isMarking());
    LockHolder locker(m_lock);
    if (needsFlip(heapVersion)) {
        clearMarks(heapVersion);
        // This means we're the first ones to mark any object in this block.
        handle().allocator()->atomicSetAndCheckIsMarkingNotEmpty(&handle(), true);
    }
}

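// The version check above is what makes mark-bit clearing cheap at the start of a
// marking cycle: MarkedSpace just bumps its HeapVersion, leaving every block's bits
// untouched, and each block lazily clears its stale bits here the first time someone
// tries to mark an object in it.
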
void MarkedBlock::clearMarks()
{
    clearMarks(vm()->heap.objectSpace().version());
}

void MarkedBlock::clearMarks(HeapVersion heapVersion)
{
    m_marks.clearAll();
    clearHasAnyMarked();
    // Make sure the cleared mark bits are visible before anyone observes the new version.
    WTF::storeStoreFence();
    m_version = heapVersion;
}

#if !ASSERT_DISABLED
void MarkedBlock::assertFlipped()
{
    ASSERT(m_version == vm()->heap.objectSpace().version());
}
#endif // !ASSERT_DISABLED

bool MarkedBlock::needsFlip()
{
    return needsFlip(vm()->heap.objectSpace().version());
}

bool MarkedBlock::Handle::needsFlip()
{
    return m_block->needsFlip();
}

bool MarkedBlock::isMarked(const void* p)
{
    return isMarked(vm()->heap.objectSpace().version(), p);
}

bool MarkedBlock::Handle::isMarkedOrNewlyAllocated(const HeapCell* cell)
{
    return isMarkedOrNewlyAllocated(vm()->heap.objectSpace().version(), cell);
}

bool MarkedBlock::isMarkedOrNewlyAllocated(const HeapCell* cell)
{
    return isMarkedOrNewlyAllocated(vm()->heap.objectSpace().version(), cell);
}

void MarkedBlock::Handle::didConsumeFreeList()
{
    if (false)
        dataLog(RawPointer(this), ": MarkedBlock::Handle::didConsumeFreeList!\n");
    ASSERT(isFreeListed());
    m_isFreeListed = false;
    allocator()->setIsAllocated(this, true);
}

size_t MarkedBlock::markCount()
{
    // Stale mark bits from a previous heap version don't count.
    return needsFlip() ? 0 : m_marks.count();
}

bool MarkedBlock::Handle::isEmpty()
{
    return m_allocator->isEmpty(this);
}

void MarkedBlock::clearHasAnyMarked()
{
    m_biasedMarkCount = m_markCountBias;
}

void MarkedBlock::noteMarkedSlow()
{
    handle().allocator()->atomicSetAndCheckIsMarkingRetired(&handle(), true);
}

void MarkedBlock::Handle::removeFromAllocator()
{
    if (!m_allocator)
        return;

    m_allocator->removeBlock(this);
}

void MarkedBlock::Handle::didAddToAllocator(MarkedAllocator* allocator, size_t index)
{
    ASSERT(m_index == std::numeric_limits<size_t>::max());
    ASSERT(!m_allocator);

    m_index = index;
    m_allocator = allocator;

    size_t cellSize = allocator->cellSize();
    m_atomsPerCell = (cellSize + atomSize - 1) / atomSize;
    m_endAtom = atomsPerBlock - m_atomsPerCell + 1;

    m_attributes = allocator->attributes();

    if (m_attributes.cellKind != HeapCell::JSCell)
        RELEASE_ASSERT(m_attributes.destruction == DoesNotNeedDestruction);

    block().m_needsDestruction = needsDestruction();

    unsigned cellsPerBlock = MarkedSpace::blockPayload / cellSize;
    double markCountBias = -(Options::minMarkedBlockUtilization() * cellsPerBlock);

    // The mark count bias should be comfortably within this range.
    RELEASE_ASSERT(markCountBias > static_cast<double>(std::numeric_limits<int16_t>::min()));
    RELEASE_ASSERT(markCountBias < 0);

    // This means we haven't marked anything yet.
    block().m_biasedMarkCount = block().m_markCountBias = static_cast<int16_t>(markCountBias);
}

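// Worked example (illustrative numbers only): if blockPayload allowed 512 32-byte cells,
// then at Options::minMarkedBlockUtilization() = 0.9 the bias would be
// static_cast<int16_t>(-460.8) = -460, and the asserts above pin it to (INT16_MIN, 0).
// m_biasedMarkCount then counts up from -460 as cells are marked; when it reaches zero
// the block has hit the utilization threshold and noteMarkedSlow() retires it, with no
// division or multiplication needed at mark time.
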
void MarkedBlock::Handle::didRemoveFromAllocator()
{
    ASSERT(m_index != std::numeric_limits<size_t>::max());
    ASSERT(m_allocator);

    m_index = std::numeric_limits<size_t>::max();
    m_allocator = nullptr;
}

bool MarkedBlock::Handle::isLive(const HeapCell* cell)
{
    return isLive(vm()->heap.objectSpace().version(), cell);
}

bool MarkedBlock::Handle::isLiveCell(const void* p)
{
    return isLiveCell(vm()->heap.objectSpace().version(), p);
}

} // namespace JSC

namespace WTF {

void printInternal(PrintStream& out, JSC::MarkedBlock::Handle::SweepMode mode)
{
    switch (mode) {
    case JSC::MarkedBlock::Handle::SweepToFreeList:
        out.print("SweepToFreeList");
        return;
    case JSC::MarkedBlock::Handle::SweepOnly:
        out.print("SweepOnly");
        return;
    }
    RELEASE_ASSERT_NOT_REACHED();
}

} // namespace WTF