MarkedBlock should know what objects are live during marking
[WebKit-https.git] / Source / JavaScriptCore / heap / MarkedBlock.cpp
1 /*
2  * Copyright (C) 2011, 2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
14  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
15  * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
17  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
18  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
19  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
20  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
21  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
22  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
23  * THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #include "config.h"
27 #include "MarkedBlock.h"
28
29 #include "JSCell.h"
30 #include "JSDestructibleObject.h"
31 #include "JSCInlines.h"
32 #include "MarkedBlockInlines.h"
33 #include "SuperSampler.h"
34
35 namespace JSC {
36
// Debugging aid: when computeBalance is true, tryCreate() / ~Handle() keep a
// running count of live MarkedBlocks and periodically dataLog() it.
static const bool computeBalance = false;
static size_t balance;
39
40 MarkedBlock::Handle* MarkedBlock::tryCreate(Heap& heap)
41 {
42     if (computeBalance) {
43         balance++;
44         if (!(balance % 10))
45             dataLog("MarkedBlock Balance: ", balance, "\n");
46     }
47     void* blockSpace = tryFastAlignedMalloc(blockSize, blockSize);
48     if (!blockSpace)
49         return nullptr;
50     if (scribbleFreeCells())
51         scribble(blockSpace, blockSize);
52     return new Handle(heap, blockSpace);
53 }
54
// Constructs the Handle and placement-news the MarkedBlock into the
// pre-allocated, block-aligned memory. The WeakSet is first created with an
// empty CellContainer because the MarkedBlock does not exist yet; it is wired
// up immediately after the block is constructed.
MarkedBlock::Handle::Handle(Heap& heap, void* blockSpace)
    : m_weakSet(heap.vm(), CellContainer())
    , m_newlyAllocatedVersion(MarkedSpace::nullVersion)
{
    m_block = new (NotNull, blockSpace) MarkedBlock(*heap.vm(), *this);
    
    m_weakSet.setContainer(*m_block);
    
    heap.didAllocateBlock(blockSize);
}
65
MarkedBlock::Handle::~Handle()
{
    // Cache the Heap reference up front, before any teardown below.
    Heap& heap = *this->heap();
    if (computeBalance) {
        balance--;
        if (!(balance % 10))
            dataLog("MarkedBlock Balance: ", balance, "\n");
    }
    removeFromAllocator();
    // The block was placement-new'd into aligned memory, so destroy it
    // explicitly and release the aligned allocation ourselves.
    m_block->~MarkedBlock();
    fastAlignedFree(m_block);
    heap.didFreeBlock(blockSize);
}
79
// The MarkedBlock itself only records its marking version, owning Handle, and
// VM; the sweeping/allocation bookkeeping lives in the Handle.
MarkedBlock::MarkedBlock(VM& vm, Handle& handle)
    : m_markingVersion(MarkedSpace::nullVersion)
    , m_handle(handle)
    , m_vm(&vm)
{
}
86
// The workhorse sweep. Every combination of the mode template parameters is
// compiled as its own specialization, so all of the mode tests below fold to
// compile-time constants; the sweepHelperSelect* chain picks the right one.
template<MarkedBlock::Handle::EmptyMode emptyMode, MarkedBlock::Handle::SweepMode sweepMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode, MarkedBlock::Handle::NewlyAllocatedMode newlyAllocatedMode, MarkedBlock::Handle::MarksMode marksMode>
FreeList MarkedBlock::Handle::specializedSweep()
{
    // A sweep that neither builds a free list nor runs destructors would be a
    // no-op; the dispatchers never request that combination.
    RELEASE_ASSERT(!(destructionMode == DoesNotNeedDestruction && sweepMode == SweepOnly));
    
    SuperSamplerScope superSamplerScope(false);

    MarkedBlock& block = this->block();
    
    if (false)
        dataLog(RawPointer(this), ": MarkedBlock::Handle::specializedSweep!\n");
    
    // Fast path: a completely empty block with no newlyAllocated bits can hand
    // out its entire payload as one contiguous bump-allocation range instead
    // of threading a cell-by-cell free list.
    if (Options::useBumpAllocator()
        && emptyMode == IsEmpty
        && newlyAllocatedMode == DoesNotHaveNewlyAllocated) {
        ASSERT(marksMode == MarksStale);
        
        // Compute [payloadBegin, payloadEnd): from the first usable atom up to
        // the end of the last cell that fits in the block.
        char* startOfLastCell = static_cast<char*>(cellAlign(block.atoms() + m_endAtom - 1));
        char* payloadEnd = startOfLastCell + cellSize();
        RELEASE_ASSERT(payloadEnd - MarkedBlock::blockSize <= bitwise_cast<char*>(&block));
        char* payloadBegin = bitwise_cast<char*>(block.atoms() + firstAtom());
        if (scribbleMode == Scribble)
            scribble(payloadBegin, payloadEnd - payloadBegin);
        if (sweepMode == SweepToFreeList)
            setIsFreeListed();
        else
            m_allocator->setIsEmpty(this, true);
        FreeList result = FreeList::bump(payloadEnd, payloadEnd - payloadBegin);
        if (false)
            dataLog("Quickly swept block ", RawPointer(this), " with cell size ", cellSize(), " and attributes ", m_attributes, ": ", result, "\n");
        return result;
    }

    // This produces a free list that is ordered in reverse through the block.
    // This is fine, since the allocation code makes no assumptions about the
    // order of the free list.
    FreeCell* head = 0;
    size_t count = 0;
    bool isEmpty = true;
    for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell) {
        // A cell is live if it is marked (and the marks aren't stale) or if it
        // carries a newlyAllocated bit; live cells are skipped.
        if (emptyMode == NotEmpty
            && ((marksMode == MarksNotStale && block.m_marks.get(i))
                || (newlyAllocatedMode == HasNewlyAllocated && m_newlyAllocated.get(i)))) {
            isEmpty = false;
            continue;
        }
        
        HeapCell* cell = reinterpret_cast_ptr<HeapCell*>(&block.atoms()[i]);

        // Dead cells in a destructible block get their destructor run before
        // being recycled. (In the IsEmpty case nothing needs destruction.)
        if (destructionMode == NeedsDestruction && emptyMode == NotEmpty)
            static_cast<JSCell*>(cell)->callDestructor(*vm());

        if (sweepMode == SweepToFreeList) {
            FreeCell* freeCell = reinterpret_cast<FreeCell*>(cell);
            if (scribbleMode == Scribble)
                scribble(freeCell, cellSize());
            freeCell->next = head;
            head = freeCell;
            ++count;
        }
    }

    // We only want to discard the newlyAllocated bits if we're creating a FreeList,
    // otherwise we would lose information on what's currently alive.
    if (sweepMode == SweepToFreeList && newlyAllocatedMode == HasNewlyAllocated)
        m_newlyAllocatedVersion = MarkedSpace::nullVersion;

    FreeList result = FreeList::list(head, count * cellSize());
    if (sweepMode == SweepToFreeList)
        setIsFreeListed();
    else if (isEmpty)
        m_allocator->setIsEmpty(this, true);
    if (false)
        dataLog("Slowly swept block ", RawPointer(&block), " with cell size ", cellSize(), " and attributes ", m_attributes, ": ", result, "\n");
    return result;
}
163
// Entry point for sweeping this block. Returns a FreeList only for
// SweepToFreeList; a SweepOnly pass returns an empty list.
FreeList MarkedBlock::Handle::sweep(SweepMode sweepMode)
{
    m_allocator->setIsUnswept(this, false);
    
    m_weakSet.sweep();

    // With no destructors to run, a SweepOnly pass has no work left beyond
    // the weak set sweep above.
    if (sweepMode == SweepOnly && m_attributes.destruction == DoesNotNeedDestruction)
        return FreeList();

    // An already free-listed block has nothing to sweep; only a
    // SweepToFreeList request should ever see this state.
    if (UNLIKELY(m_isFreeListed)) {
        RELEASE_ASSERT(sweepMode == SweepToFreeList);
        return FreeList();
    }
    
    ASSERT(!m_allocator->isAllocated(this));
    
    // Fan out through the sweepHelperSelect* chain, which converts each
    // runtime property into a template argument of specializedSweep().
    if (m_attributes.destruction == NeedsDestruction)
        return sweepHelperSelectScribbleMode<NeedsDestruction>(sweepMode);
    return sweepHelperSelectScribbleMode<DoesNotNeedDestruction>(sweepMode);
}
184
185 template<DestructionMode destructionMode>
186 FreeList MarkedBlock::Handle::sweepHelperSelectScribbleMode(SweepMode sweepMode)
187 {
188     if (scribbleFreeCells())
189         return sweepHelperSelectEmptyMode<destructionMode, Scribble>(sweepMode);
190     return sweepHelperSelectEmptyMode<destructionMode, DontScribble>(sweepMode);
191 }
192
193 template<DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode>
194 FreeList MarkedBlock::Handle::sweepHelperSelectEmptyMode(SweepMode sweepMode)
195 {
196     // It's not obvious, but this is the only way to know if the block is empty. It's the only
197     // bit that captures these caveats:
198     // - It's true when the block is freshly allocated.
199     // - It's true if the block had been swept in the past, all destructors were called, and that
200     //   sweep proved that the block is empty.
201     // - It's false if there are any destructors that need to be called, even if the block has no
202     //   live objects.
203     if (m_allocator->isEmpty(this))
204         return sweepHelperSelectHasNewlyAllocated<IsEmpty, destructionMode, scribbleMode>(sweepMode);
205     return sweepHelperSelectHasNewlyAllocated<NotEmpty, destructionMode, scribbleMode>(sweepMode);
206 }
207
208 template<MarkedBlock::Handle::EmptyMode emptyMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode>
209 FreeList MarkedBlock::Handle::sweepHelperSelectHasNewlyAllocated(SweepMode sweepMode)
210 {
211     if (hasAnyNewlyAllocated())
212         return sweepHelperSelectSweepMode<emptyMode, destructionMode, scribbleMode, HasNewlyAllocated>(sweepMode);
213     return sweepHelperSelectSweepMode<emptyMode, destructionMode, scribbleMode, DoesNotHaveNewlyAllocated>(sweepMode);
214 }
215
216 template<MarkedBlock::Handle::EmptyMode emptyMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode, MarkedBlock::Handle::NewlyAllocatedMode newlyAllocatedMode>
217 FreeList MarkedBlock::Handle::sweepHelperSelectSweepMode(SweepMode sweepMode)
218 {
219     if (sweepMode == SweepToFreeList)
220         return sweepHelperSelectMarksMode<emptyMode, SweepToFreeList, destructionMode, scribbleMode, newlyAllocatedMode>();
221     return sweepHelperSelectMarksMode<emptyMode, SweepOnly, destructionMode, scribbleMode, newlyAllocatedMode>();
222 }
223
224 template<MarkedBlock::Handle::EmptyMode emptyMode, MarkedBlock::Handle::SweepMode sweepMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode, MarkedBlock::Handle::NewlyAllocatedMode newlyAllocatedMode>
225 FreeList MarkedBlock::Handle::sweepHelperSelectMarksMode()
226 {
227     if (areMarksStale())
228         return specializedSweep<emptyMode, sweepMode, destructionMode, scribbleMode, newlyAllocatedMode, MarksStale>();
229     return specializedSweep<emptyMode, sweepMode, destructionMode, scribbleMode, newlyAllocatedMode, MarksNotStale>();
230 }
231
// Reverts the free-listed state without rebuilding any newlyAllocated bits.
// Only valid while the block is actually free-listed.
void MarkedBlock::Handle::unsweepWithNoNewlyAllocated()
{
    RELEASE_ASSERT(m_isFreeListed);
    m_isFreeListed = false;
}
237
void MarkedBlock::Handle::setIsFreeListed()
{
    // Once we start handing out cells from a free list, the allocator may no
    // longer consider this block empty.
    m_allocator->setIsEmpty(this, false);
    m_isFreeListed = true;
}
243
244 class SetNewlyAllocatedFunctor : public MarkedBlock::VoidFunctor {
245 public:
246     SetNewlyAllocatedFunctor(MarkedBlock::Handle* block)
247         : m_block(block)
248     {
249     }
250
251     IterationStatus operator()(HeapCell* cell, HeapCell::Kind) const
252     {
253         ASSERT(MarkedBlock::blockFor(cell) == &m_block->block());
254         m_block->setNewlyAllocated(cell);
255         return IterationStatus::Continue;
256     }
257
258 private:
259     MarkedBlock::Handle* m_block;
260 };
261
// Called when allocation in this block is suspended (e.g. at the start of a
// GC). Converts the implicit "everything not on the free list is live" state
// into explicit newlyAllocated bits.
void MarkedBlock::Handle::stopAllocating(const FreeList& freeList)
{
    if (false)
        dataLog(RawPointer(this), ": MarkedBlock::Handle::stopAllocating!\n");
    ASSERT(!allocator()->isAllocated(this));

    if (!isFreeListed()) {
        // This means that we either didn't use this block at all for allocation since last GC,
        // or someone had already done stopAllocating() before.
        ASSERT(freeList.allocationWillFail());
        return;
    }
    
    // Roll back to a coherent state for Heap introspection. Cells newly
    // allocated from our free list are not currently marked, so we need another
    // way to tell what's live vs dead. 
    
    // First set the newlyAllocated bit for every cell in the block...
    m_newlyAllocated.clearAll();
    m_newlyAllocatedVersion = heap()->objectSpace().newlyAllocatedVersion();

    SetNewlyAllocatedFunctor functor(this);
    forEachCell(functor);

    // ...then clear the bit again for each cell still sitting on the free
    // list, since those were never handed out. Destructible blocks also zap
    // those free cells — presumably so a later destructive sweep can tell
    // they hold no object (confirm against callDestructor).
    forEachFreeCell(
        freeList,
        [&] (HeapCell* cell) {
            if (m_attributes.destruction == NeedsDestruction)
                cell->zap();
            clearNewlyAllocated(cell);
        });
    
    m_isFreeListed = false;
}
295
// Forces a final destructive sweep of the block, e.g. at VM shutdown: clear
// every liveness indicator (allocated bit, marks, newlyAllocated) and bring
// the versions up to date so the sweep below treats every cell as dead.
void MarkedBlock::Handle::lastChanceToFinalize()
{
    allocator()->setIsAllocated(this, false);
    m_block->m_marks.clearAll();
    m_block->clearHasAnyMarked();
    m_block->m_markingVersion = heap()->objectSpace().markingVersion();
    m_weakSet.lastChanceToFinalize();
    m_newlyAllocated.clearAll();
    m_newlyAllocatedVersion = heap()->objectSpace().newlyAllocatedVersion();
    sweep();
}
307
// Inverse of stopAllocating(): rebuilds the free list that was abandoned when
// allocation was suspended.
FreeList MarkedBlock::Handle::resumeAllocating()
{
    ASSERT(!allocator()->isAllocated(this));
    ASSERT(!isFreeListed());

    if (!hasAnyNewlyAllocated()) {
        // This means we had already exhausted the block when we stopped allocation.
        return FreeList();
    }

    // Re-create our free list from before stopping allocation. Note that this may return an empty
    // freelist, in which case the block will still be Marked!
    return sweep(SweepToFreeList);
}
322
323 void MarkedBlock::Handle::zap(const FreeList& freeList)
324 {
325     forEachFreeCell(
326         freeList,
327         [&] (HeapCell* cell) {
328             if (m_attributes.destruction == NeedsDestruction)
329                 cell->zap();
330         });
331 }
332
333 template<typename Func>
334 void MarkedBlock::Handle::forEachFreeCell(const FreeList& freeList, const Func& func)
335 {
336     if (freeList.remaining) {
337         for (unsigned remaining = freeList.remaining; remaining; remaining -= cellSize())
338             func(bitwise_cast<HeapCell*>(freeList.payloadEnd - remaining));
339     } else {
340         for (FreeCell* current = freeList.head; current;) {
341             FreeCell* next = current->next;
342             func(bitwise_cast<HeapCell*>(current));
343             current = next;
344         }
345     }
346 }
347
// Slow path taken the first time an object in this block is marked during a
// new marking cycle: refreshes the stale mark bits, preserving the liveness
// information they encode when necessary.
void MarkedBlock::aboutToMarkSlow(HeapVersion markingVersion)
{
    ASSERT(vm()->heap.objectSpace().isMarking());
    LockHolder locker(m_lock);
    // Another thread may have freshened the marks while we waited on the lock.
    if (!areMarksStale(markingVersion))
        return;
    
    if (handle().allocator()->isAllocated(&handle())
        || !marksConveyLivenessDuringMarking(markingVersion)) {
        // We already know that the block is full and is already recognized as such, or that the
        // block did not survive the previous GC. So, we can clear mark bits the old fashioned
        // way. Note that it's possible for such a block to have newlyAllocated with an up-to-
        // date version! If it does, then we want to leave the newlyAllocated alone, since that
        // means that we had allocated in this previously empty block but did not fill it up, so
        // we created a newlyAllocated.
        m_marks.clearAll();
    } else {
        // The stale marks encode liveness, so fold them into newlyAllocated
        // before clearing them.
        HeapVersion newlyAllocatedVersion = space()->newlyAllocatedVersion();
        if (handle().m_newlyAllocatedVersion == newlyAllocatedVersion) {
            // Merge the contents of marked into newlyAllocated. If we get the full set of bits
            // then invalidate newlyAllocated and set allocated.
            handle().m_newlyAllocated.mergeAndClear(m_marks);
        } else {
            // Replace the contents of newlyAllocated with marked. If we get the full set of
            // bits then invalidate newlyAllocated and set allocated.
            handle().m_newlyAllocated.setAndClear(m_marks);
        }
        handle().m_newlyAllocatedVersion = newlyAllocatedVersion;
    }
    clearHasAnyMarked();
    // Publish the cleared marks before the fresh version: a concurrent reader
    // must never observe the new version paired with the old mark bits.
    WTF::storeStoreFence();
    m_markingVersion = markingVersion;
    
    // This means we're the first ones to mark any object in this block.
    handle().allocator()->atomicSetAndCheckIsMarkingNotEmpty(&handle(), true);
}
384
// Discards all newlyAllocated information and invalidates its version.
void MarkedBlock::Handle::resetAllocated()
{
    m_newlyAllocated.clearAll();
    m_newlyAllocatedVersion = MarkedSpace::nullVersion;
}
390
void MarkedBlock::resetMarks()
{
    // We want aboutToMarkSlow() to see what the mark bits were after the last collection. It uses
    // the version number to distinguish between the marks having already been stale before
    // beginMarking(), or just stale now that beginMarking() bumped the version. If we have a version
    // wraparound, then we will call this method before resetting the version to null. When the
    // version is null, aboutToMarkSlow() will assume that the marks were not stale as of before
    // beginMarking(). Hence the need to whip the marks into shape.
    if (areMarksStale())
        m_marks.clearAll();
    m_markingVersion = MarkedSpace::nullVersion;
}
403
#if !ASSERT_DISABLED
// Debug-only check that this block's marks are current for the ongoing cycle.
void MarkedBlock::assertMarksNotStale()
{
    ASSERT(m_markingVersion == vm()->heap.objectSpace().markingVersion());
}
#endif // !ASSERT_DISABLED
410
411 bool MarkedBlock::areMarksStale()
412 {
413     return areMarksStale(vm()->heap.objectSpace().markingVersion());
414 }
415
// Forwarder to the owning MarkedBlock.
bool MarkedBlock::Handle::areMarksStale()
{
    return m_block->areMarksStale();
}
420
421 bool MarkedBlock::isMarked(const void* p)
422 {
423     return isMarked(vm()->heap.objectSpace().markingVersion(), p);
424 }
425
// Called when the allocator exhausts this block's free list: the block is now
// fully allocated.
void MarkedBlock::Handle::didConsumeFreeList()
{
    if (false)
        dataLog(RawPointer(this), ": MarkedBlock::Handle::didConsumeFreeList!\n");
    ASSERT(isFreeListed());
    m_isFreeListed = false;
    allocator()->setIsAllocated(this, true);
}
434
435 size_t MarkedBlock::markCount()
436 {
437     return areMarksStale() ? 0 : m_marks.count();
438 }
439
// The allocator owns the emptiness bit; see the comment in
// sweepHelperSelectEmptyMode() for exactly what it captures.
bool MarkedBlock::Handle::isEmpty()
{
    return m_allocator->isEmpty(this);
}
444
void MarkedBlock::clearHasAnyMarked()
{
    // The mark count is stored biased by m_markCountBias (a negative value
    // chosen in didAddToAllocator()), so resetting the biased count to the
    // bias itself encodes "zero cells marked".
    m_biasedMarkCount = m_markCountBias;
}
449
// Slow path of noteMarked(): flags this block as retired in its allocator.
void MarkedBlock::noteMarkedSlow()
{
    handle().allocator()->atomicSetAndCheckIsMarkingRetired(&handle(), true);
}
454
455 void MarkedBlock::Handle::removeFromAllocator()
456 {
457     if (!m_allocator)
458         return;
459     
460     m_allocator->removeBlock(this);
461 }
462
// Caches the Handle's needsDestruction() answer on the block itself.
void MarkedBlock::updateNeedsDestruction()
{
    m_needsDestruction = handle().needsDestruction();
}
467
// Wires this block into an allocator and derives all of the per-block
// geometry (cell size in atoms, last usable atom) and marking bookkeeping
// from the allocator's configuration.
void MarkedBlock::Handle::didAddToAllocator(MarkedAllocator* allocator, size_t index)
{
    ASSERT(m_index == std::numeric_limits<size_t>::max());
    ASSERT(!m_allocator);
    
    m_index = index;
    m_allocator = allocator;
    
    // Cells occupy a whole number of atoms; m_endAtom is one past the last
    // atom index at which a full cell can still start.
    size_t cellSize = allocator->cellSize();
    m_atomsPerCell = (cellSize + atomSize - 1) / atomSize;
    m_endAtom = atomsPerBlock - m_atomsPerCell + 1;
    
    m_attributes = allocator->attributes();

    // Only JSCell-kind blocks may require destruction.
    if (m_attributes.cellKind != HeapCell::JSCell)
        RELEASE_ASSERT(m_attributes.destruction == DoesNotNeedDestruction);
    
    block().updateNeedsDestruction();
    
    // Negative bias applied to the mark count — presumably so that a
    // threshold test (reaching minMarkedBlockUtilization of the cells)
    // reduces to a simple comparison on the biased count; see
    // clearHasAnyMarked() and noteMarkedSlow().
    double markCountBias = -(Options::minMarkedBlockUtilization() * cellsPerBlock());
    
    // The mark count bias should be comfortably within this range.
    RELEASE_ASSERT(markCountBias > static_cast<double>(std::numeric_limits<int16_t>::min()));
    RELEASE_ASSERT(markCountBias < 0);
    
    // This means we haven't marked anything yet.
    block().m_biasedMarkCount = block().m_markCountBias = static_cast<int16_t>(markCountBias);
}
496
497 void MarkedBlock::Handle::didRemoveFromAllocator()
498 {
499     ASSERT(m_index != std::numeric_limits<size_t>::max());
500     ASSERT(m_allocator);
501     
502     m_index = std::numeric_limits<size_t>::max();
503     m_allocator = nullptr;
504 }
505
// Convenience overload that samples the space's current marking version and
// marking state.
bool MarkedBlock::Handle::isLive(const HeapCell* cell)
{
    return isLive(space()->markingVersion(), space()->isMarking(), cell);
}
510
// Convenience overload that samples the space's current marking version and
// marking state.
bool MarkedBlock::Handle::isLiveCell(const void* p)
{
    return isLiveCell(space()->markingVersion(), space()->isMarking(), p);
}
515
516 } // namespace JSC
517
518 namespace WTF {
519
520 void printInternal(PrintStream& out, JSC::MarkedBlock::Handle::SweepMode mode)
521 {
522     switch (mode) {
523     case JSC::MarkedBlock::Handle::SweepToFreeList:
524         out.print("SweepToFreeList");
525         return;
526     case JSC::MarkedBlock::Handle::SweepOnly:
527         out.print("SweepOnly");
528         return;
529     }
530     RELEASE_ASSERT_NOT_REACHED();
531 }
532
533 } // namespace WTF
534