83d035b36924af289bbfa3aacc1d0ab7533a45f5
[WebKit-https.git] / Source / JavaScriptCore / heap / MarkedBlock.cpp
1 /*
2  * Copyright (C) 2011, 2016 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
14  * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
15  * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
17  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
18  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
19  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
20  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
21  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
22  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
23  * THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #include "config.h"
27 #include "MarkedBlock.h"
28
29 #include "JSCell.h"
30 #include "JSDestructibleObject.h"
31 #include "JSCInlines.h"
32 #include "MarkedBlockInlines.h"
33 #include "SuperSampler.h"
34
35 namespace JSC {
36
37 static const bool computeBalance = false;
38 static size_t balance;
39
40 MarkedBlock::Handle* MarkedBlock::tryCreate(Heap& heap)
41 {
42     if (computeBalance) {
43         balance++;
44         if (!(balance % 10))
45             dataLog("MarkedBlock Balance: ", balance, "\n");
46     }
47     void* blockSpace = tryFastAlignedMalloc(blockSize, blockSize);
48     if (!blockSpace)
49         return nullptr;
50     if (scribbleFreeCells())
51         scribble(blockSpace, blockSize);
52     return new Handle(heap, blockSpace);
53 }
54
// Constructs the out-of-line handle and placement-news the MarkedBlock header
// into the aligned memory obtained by tryCreate().
MarkedBlock::Handle::Handle(Heap& heap, void* blockSpace)
    : m_weakSet(heap.vm(), CellContainer())
{
    m_block = new (NotNull, blockSpace) MarkedBlock(*heap.vm(), *this);
    
    // The weak set was constructed above with an empty container because the
    // block did not exist yet; point it at the block now.
    m_weakSet.setContainer(*m_block);
    
    heap.didAllocateBlock(blockSize);
}
64
MarkedBlock::Handle::~Handle()
{
    // Grab the heap up front; we still need it after the block is destroyed.
    Heap& heap = *this->heap();
    if (computeBalance) {
        balance--;
        if (!(balance % 10))
            dataLog("MarkedBlock Balance: ", balance, "\n");
    }
    // Detach from the allocator before the block memory goes away.
    removeFromAllocator();
    m_block->~MarkedBlock();
    fastAlignedFree(m_block);
    heap.didFreeBlock(blockSize);
}
78
// A new block starts at the null version, so any mark bits it carries are
// treated as stale until clearMarks() stamps a real heap version.
MarkedBlock::MarkedBlock(VM& vm, Handle& handle)
    : m_version(MarkedSpace::nullVersion)
    , m_handle(handle)
    , m_vm(&vm)
{
}
85
// Core sweep implementation. Every runtime decision (emptiness, sweep kind,
// destructibility, scribbling, newlyAllocated presence, flip state) is baked
// into a template parameter so each specialization's per-cell loop is
// branch-free.
template<MarkedBlock::Handle::EmptyMode emptyMode, MarkedBlock::Handle::SweepMode sweepMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode, MarkedBlock::Handle::NewlyAllocatedMode newlyAllocatedMode, MarkedBlock::Handle::FlipMode flipMode>
FreeList MarkedBlock::Handle::specializedSweep()
{
    // A SweepOnly pass that also has no destructors to run would do nothing;
    // sweep() filters that combination out before we get here.
    RELEASE_ASSERT(!(destructionMode == DoesNotNeedDestruction && sweepMode == SweepOnly));
    
    SuperSamplerScope superSamplerScope(false);

    MarkedBlock& block = this->block();
    
    if (false)
        dataLog(RawPointer(this), ": MarkedBlock::Handle::specializedSweep!\n");
    
    // Fast path: a known-empty block with no newlyAllocated bits can hand out
    // its entire payload as one bump-allocation range without visiting cells.
    if (Options::useBumpAllocator()
        && emptyMode == IsEmpty
        && newlyAllocatedMode == DoesNotHaveNewlyAllocated) {
        ASSERT(flipMode == NeedsFlip);
        
        // Align down to the start of the last whole cell that fits, then the
        // payload ends one cell past that. Sanity-check it stays in the block.
        char* startOfLastCell = static_cast<char*>(cellAlign(block.atoms() + m_endAtom - 1));
        char* payloadEnd = startOfLastCell + cellSize();
        RELEASE_ASSERT(payloadEnd - MarkedBlock::blockSize <= bitwise_cast<char*>(&block));
        char* payloadBegin = bitwise_cast<char*>(block.atoms() + firstAtom());
        if (scribbleMode == Scribble)
            scribble(payloadBegin, payloadEnd - payloadBegin);
        if (sweepMode == SweepToFreeList)
            setIsFreeListed();
        else
            m_allocator->setIsEmpty(this, true);
        FreeList result = FreeList::bump(payloadEnd, payloadEnd - payloadBegin);
        if (false)
            dataLog("Quickly swept block ", RawPointer(this), " with cell size ", cellSize(), " and attributes ", m_attributes, ": ", result, "\n");
        return result;
    }

    // This produces a free list that is ordered in reverse through the block.
    // This is fine, since the allocation code makes no assumptions about the
    // order of the free list.
    FreeCell* head = 0;
    size_t count = 0;
    bool isEmpty = true;
    for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell) {
        // Skip live cells: those with a valid (unflipped) mark bit, or those
        // recorded as newly allocated since the last collection.
        if (emptyMode == NotEmpty
            && ((flipMode == DoesNotNeedFlip && block.m_marks.get(i))
                || (newlyAllocatedMode == HasNewlyAllocated && m_newlyAllocated->get(i)))) {
            isEmpty = false;
            continue;
        }
        
        HeapCell* cell = reinterpret_cast_ptr<HeapCell*>(&block.atoms()[i]);

        // Dead cell: run its destructor if this block's cells have one. A
        // known-empty block has no constructed cells, so skip in that case.
        if (destructionMode == NeedsDestruction && emptyMode == NotEmpty)
            static_cast<JSCell*>(cell)->callDestructor(*vm());

        if (sweepMode == SweepToFreeList) {
            FreeCell* freeCell = reinterpret_cast<FreeCell*>(cell);
            if (scribbleMode == Scribble)
                scribble(freeCell, cellSize());
            freeCell->next = head;
            head = freeCell;
            ++count;
        }
    }

    // We only want to discard the newlyAllocated bits if we're creating a FreeList,
    // otherwise we would lose information on what's currently alive.
    if (sweepMode == SweepToFreeList && newlyAllocatedMode == HasNewlyAllocated)
        m_newlyAllocated = nullptr;

    FreeList result = FreeList::list(head, count * cellSize());
    if (sweepMode == SweepToFreeList)
        setIsFreeListed();
    else if (isEmpty)
        m_allocator->setIsEmpty(this, true);
    if (false)
        dataLog("Slowly swept block ", RawPointer(&block), " with cell size ", cellSize(), " and attributes ", m_attributes, ": ", result, "\n");
    return result;
}
162
// Public sweep entry point. Filters out no-op sweeps, then dispatches down the
// sweepHelperSelect* chain, which turns each runtime property into a template
// argument of specializedSweep().
FreeList MarkedBlock::Handle::sweep(SweepMode sweepMode)
{
    m_allocator->setIsUnswept(this, false);
    
    m_weakSet.sweep();

    // A sweep that neither builds a free list nor runs destructors would
    // accomplish nothing, so bail out.
    if (sweepMode == SweepOnly && m_attributes.destruction == DoesNotNeedDestruction)
        return FreeList();

    // Already swept to a free list: only tolerated when the caller wanted a
    // free list anyway; hand back an empty one.
    if (UNLIKELY(m_isFreeListed)) {
        RELEASE_ASSERT(sweepMode == SweepToFreeList);
        return FreeList();
    }
    
    ASSERT(!m_allocator->isAllocated(this));
    
    if (m_attributes.destruction == NeedsDestruction)
        return sweepHelperSelectScribbleMode<NeedsDestruction>(sweepMode);
    return sweepHelperSelectScribbleMode<DoesNotNeedDestruction>(sweepMode);
}
183
184 template<DestructionMode destructionMode>
185 FreeList MarkedBlock::Handle::sweepHelperSelectScribbleMode(SweepMode sweepMode)
186 {
187     if (scribbleFreeCells())
188         return sweepHelperSelectEmptyMode<destructionMode, Scribble>(sweepMode);
189     return sweepHelperSelectEmptyMode<destructionMode, DontScribble>(sweepMode);
190 }
191
192 template<DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode>
193 FreeList MarkedBlock::Handle::sweepHelperSelectEmptyMode(SweepMode sweepMode)
194 {
195     // It's not obvious, but this is the only way to know if the block is empty. It's the only
196     // bit that captures these caveats:
197     // - It's true when the block is freshly allocated.
198     // - It's true if the block had been swept in the past, all destructors were called, and that
199     //   sweep proved that the block is empty.
200     // - It's false if there are any destructors that need to be called, even if the block has no
201     //   live objects.
202     if (m_allocator->isEmpty(this))
203         return sweepHelperSelectHasNewlyAllocated<IsEmpty, destructionMode, scribbleMode>(sweepMode);
204     return sweepHelperSelectHasNewlyAllocated<NotEmpty, destructionMode, scribbleMode>(sweepMode);
205 }
206
207 template<MarkedBlock::Handle::EmptyMode emptyMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode>
208 FreeList MarkedBlock::Handle::sweepHelperSelectHasNewlyAllocated(SweepMode sweepMode)
209 {
210     if (m_newlyAllocated)
211         return sweepHelperSelectSweepMode<emptyMode, destructionMode, scribbleMode, HasNewlyAllocated>(sweepMode);
212     return sweepHelperSelectSweepMode<emptyMode, destructionMode, scribbleMode, DoesNotHaveNewlyAllocated>(sweepMode);
213 }
214
215 template<MarkedBlock::Handle::EmptyMode emptyMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode, MarkedBlock::Handle::NewlyAllocatedMode newlyAllocatedMode>
216 FreeList MarkedBlock::Handle::sweepHelperSelectSweepMode(SweepMode sweepMode)
217 {
218     if (sweepMode == SweepToFreeList)
219         return sweepHelperSelectFlipMode<emptyMode, SweepToFreeList, destructionMode, scribbleMode, newlyAllocatedMode>();
220     return sweepHelperSelectFlipMode<emptyMode, SweepOnly, destructionMode, scribbleMode, newlyAllocatedMode>();
221 }
222
223 template<MarkedBlock::Handle::EmptyMode emptyMode, MarkedBlock::Handle::SweepMode sweepMode, DestructionMode destructionMode, MarkedBlock::Handle::ScribbleMode scribbleMode, MarkedBlock::Handle::NewlyAllocatedMode newlyAllocatedMode>
224 FreeList MarkedBlock::Handle::sweepHelperSelectFlipMode()
225 {
226     if (needsFlip())
227         return specializedSweep<emptyMode, sweepMode, destructionMode, scribbleMode, newlyAllocatedMode, NeedsFlip>();
228     return specializedSweep<emptyMode, sweepMode, destructionMode, scribbleMode, newlyAllocatedMode, DoesNotNeedFlip>();
229 }
230
// Undoes a SweepToFreeList without reconstructing any newlyAllocated bits.
// Only legal on a block that is currently free-listed; the caller is expected
// to discard the free list it got from that sweep.
void MarkedBlock::Handle::unsweepWithNoNewlyAllocated()
{
    RELEASE_ASSERT(m_isFreeListed);
    m_isFreeListed = false;
}
236
// Transitions the block into the free-listed state. A block we are about to
// allocate out of can no longer be considered empty by its allocator.
void MarkedBlock::Handle::setIsFreeListed()
{
    m_allocator->setIsEmpty(this, false);
    m_isFreeListed = true;
}
242
243 class SetNewlyAllocatedFunctor : public MarkedBlock::VoidFunctor {
244 public:
245     SetNewlyAllocatedFunctor(MarkedBlock::Handle* block)
246         : m_block(block)
247     {
248     }
249
250     IterationStatus operator()(HeapCell* cell, HeapCell::Kind) const
251     {
252         ASSERT(MarkedBlock::blockFor(cell) == &m_block->block());
253         m_block->setNewlyAllocated(cell);
254         return IterationStatus::Continue;
255     }
256
257 private:
258     MarkedBlock::Handle* m_block;
259 };
260
// Called when allocation stops (e.g. at the start of a collection): converts
// the block's remaining free list back into per-cell liveness information.
void MarkedBlock::Handle::stopAllocating(const FreeList& freeList)
{
    if (false)
        dataLog(RawPointer(this), ": MarkedBlock::Handle::stopAllocating!\n");
    ASSERT(!allocator()->isAllocated(this));

    if (!isFreeListed()) {
        // This means that we either didn't use this block at all for allocation since last GC,
        // or someone had already done stopAllocating() before.
        ASSERT(freeList.allocationWillFail());
        return;
    }
    
    // Roll back to a coherent state for Heap introspection. Cells newly
    // allocated from our free list are not currently marked, so we need another
    // way to tell what's live vs dead. 
    
    ASSERT(!m_newlyAllocated);
    m_newlyAllocated = std::make_unique<WTF::Bitmap<atomsPerBlock>>();

    // First, treat every cell in the block as newly allocated...
    SetNewlyAllocatedFunctor functor(this);
    forEachCell(functor);

    // ...then take the bit back for each cell still sitting on the free list,
    // zapping it first when cells of this block carry destructors.
    forEachFreeCell(
        freeList,
        [&] (HeapCell* cell) {
            if (m_attributes.destruction == NeedsDestruction)
                cell->zap();
            clearNewlyAllocated(cell);
        });
    
    m_isFreeListed = false;
}
294
// Final teardown pass: forgets all liveness state so the sweep below treats
// every cell as dead and runs any pending destructors. Presumably invoked
// during heap/VM teardown — TODO confirm against callers.
void MarkedBlock::Handle::lastChanceToFinalize()
{
    allocator()->setIsAllocated(this, false);
    // With marks cleared and newlyAllocated dropped, no cell looks live.
    m_block->clearMarks();
    m_weakSet.lastChanceToFinalize();

    clearNewlyAllocated();
    sweep();
}
304
305 FreeList MarkedBlock::Handle::resumeAllocating()
306 {
307     ASSERT(!allocator()->isAllocated(this));
308     ASSERT(!isFreeListed());
309
310     if (!m_newlyAllocated) {
311         // This means we had already exhausted the block when we stopped allocation.
312         return FreeList();
313     }
314
315     // Re-create our free list from before stopping allocation. Note that this may return an empty
316     // freelist, in which case the block will still be Marked!
317     return sweep(SweepToFreeList);
318 }
319
320 void MarkedBlock::Handle::zap(const FreeList& freeList)
321 {
322     forEachFreeCell(
323         freeList,
324         [&] (HeapCell* cell) {
325             if (m_attributes.destruction == NeedsDestruction)
326                 cell->zap();
327         });
328 }
329
330 template<typename Func>
331 void MarkedBlock::Handle::forEachFreeCell(const FreeList& freeList, const Func& func)
332 {
333     if (freeList.remaining) {
334         for (unsigned remaining = freeList.remaining; remaining; remaining -= cellSize())
335             func(bitwise_cast<HeapCell*>(freeList.payloadEnd - remaining));
336     } else {
337         for (FreeCell* current = freeList.head; current;) {
338             FreeCell* next = current->next;
339             func(bitwise_cast<HeapCell*>(current));
340             current = next;
341         }
342     }
343 }
344
// Slow path taken before marking a cell in a block whose version is stale.
// m_lock serializes racing markers; only the first one to get past the
// needsFlip() re-check actually clears the stale bits.
void MarkedBlock::aboutToMarkSlow(HeapVersion heapVersion)
{
    ASSERT(vm()->heap.objectSpace().isMarking());
    LockHolder locker(m_lock);
    if (needsFlip(heapVersion)) {
        clearMarks(heapVersion);
        // This means we're the first ones to mark any object in this block.
        handle().allocator()->atomicSetAndCheckIsMarkingNotEmpty(&handle(), true);
    }
}
355
// Convenience overload: clears marks against the current heap version.
void MarkedBlock::clearMarks()
{
    clearMarks(vm()->heap.objectSpace().version());
}
360
void MarkedBlock::clearMarks(HeapVersion heapVersion)
{
    m_marks.clearAll();
    clearHasAnyMarked();
    // Publish the cleared bits before stamping the version: a concurrent
    // reader that observes the new version must not see stale mark bits.
    WTF::storeStoreFence();
    m_version = heapVersion;
}
368
#if !ASSERT_DISABLED
// Debug check that this block's version is current, i.e. its mark bits are
// valid for the ongoing collection.
void MarkedBlock::assertFlipped()
{
    ASSERT(m_version == vm()->heap.objectSpace().version());
}
#endif // !ASSERT_DISABLED
375
// Returns true if the block's mark bits are stale relative to the current
// heap version and must be cleared before use.
bool MarkedBlock::needsFlip()
{
    return needsFlip(vm()->heap.objectSpace().version());
}
380
// Forwards to the block's version check.
bool MarkedBlock::Handle::needsFlip()
{
    return m_block->needsFlip();
}
385
// Version-aware mark query against the current heap version.
bool MarkedBlock::isMarked(const void* p)
{
    return isMarked(vm()->heap.objectSpace().version(), p);
}
390
// Liveness query (handle side) against the current heap version.
bool MarkedBlock::Handle::isMarkedOrNewlyAllocated(const HeapCell* cell)
{
    return isMarkedOrNewlyAllocated(vm()->heap.objectSpace().version(), cell);
}
395
// Liveness query (block side) against the current heap version.
bool MarkedBlock::isMarkedOrNewlyAllocated(const HeapCell* cell)
{
    return isMarkedOrNewlyAllocated(vm()->heap.objectSpace().version(), cell);
}
400
401 void MarkedBlock::Handle::didConsumeFreeList()
402 {
403     if (false)
404         dataLog(RawPointer(this), ": MarkedBlock::Handle::didConsumeFreeList!\n");
405     ASSERT(isFreeListed());
406     m_isFreeListed = false;
407     allocator()->setIsAllocated(this, true);
408 }
409
410 size_t MarkedBlock::markCount()
411 {
412     return needsFlip() ? 0 : m_marks.count();
413 }
414
// Delegates to the allocator's per-block empty bit, the authoritative
// emptiness signal for this block.
bool MarkedBlock::Handle::isEmpty()
{
    return m_allocator->isEmpty(this);
}
419
void MarkedBlock::clearHasAnyMarked()
{
    // Resetting the biased count back to the bias encodes "zero cells marked".
    m_biasedMarkCount = m_markCountBias;
}
424
// Slow path of noting a mark: tells the allocator this block is now retired.
// Presumably reached when the biased mark count crosses zero — see the bias
// setup in didAddToAllocator().
void MarkedBlock::noteMarkedSlow()
{
    handle().allocator()->atomicSetAndCheckIsMarkingRetired(&handle(), true);
}
429
430 void MarkedBlock::Handle::removeFromAllocator()
431 {
432     if (!m_allocator)
433         return;
434     
435     m_allocator->removeBlock(this);
436 }
437
// Wires this block into an allocator and caches that allocator's size class
// and attributes on the handle.
void MarkedBlock::Handle::didAddToAllocator(MarkedAllocator* allocator, size_t index)
{
    // The handle must currently be in the "unowned" sentinel state.
    ASSERT(m_index == std::numeric_limits<size_t>::max());
    ASSERT(!m_allocator);
    
    m_index = index;
    m_allocator = allocator;
    
    // Cache the size class: atoms per cell (rounded up), and the last atom
    // index at which a whole cell still fits in the block.
    size_t cellSize = allocator->cellSize();
    m_atomsPerCell = (cellSize + atomSize - 1) / atomSize;
    m_endAtom = atomsPerBlock - m_atomsPerCell + 1;
    
    m_attributes = allocator->attributes();

    // Only JSCell-kind blocks may carry destructors.
    if (m_attributes.cellKind != HeapCell::JSCell)
        RELEASE_ASSERT(m_attributes.destruction == DoesNotNeedDestruction);
    
    block().m_needsDestruction = needsDestruction();
    
    // Bias the mark count by -minMarkedBlockUtilization * cellsPerBlock,
    // presumably so that the counter crossing zero signals the retirement
    // threshold (see noteMarkedSlow()).
    unsigned cellsPerBlock = MarkedSpace::blockPayload / cellSize;
    double markCountBias = -(Options::minMarkedBlockUtilization() * cellsPerBlock);
    
    // The mark count bias should be comfortably within this range.
    RELEASE_ASSERT(markCountBias > static_cast<double>(std::numeric_limits<int16_t>::min()));
    RELEASE_ASSERT(markCountBias < 0);
    
    // This means we haven't marked anything yet.
    block().m_biasedMarkCount = block().m_markCountBias = static_cast<int16_t>(markCountBias);
}
467
468 void MarkedBlock::Handle::didRemoveFromAllocator()
469 {
470     ASSERT(m_index != std::numeric_limits<size_t>::max());
471     ASSERT(m_allocator);
472     
473     m_index = std::numeric_limits<size_t>::max();
474     m_allocator = nullptr;
475 }
476
// Liveness query against the current heap version.
bool MarkedBlock::Handle::isLive(const HeapCell* cell)
{
    return isLive(vm()->heap.objectSpace().version(), cell);
}
481
// Pointer-based liveness query against the current heap version.
bool MarkedBlock::Handle::isLiveCell(const void* p)
{
    return isLiveCell(vm()->heap.objectSpace().version(), p);
}
486
487 } // namespace JSC
488
489 namespace WTF {
490
491 void printInternal(PrintStream& out, JSC::MarkedBlock::Handle::SweepMode mode)
492 {
493     switch (mode) {
494     case JSC::MarkedBlock::Handle::SweepToFreeList:
495         out.print("SweepToFreeList");
496         return;
497     case JSC::MarkedBlock::Handle::SweepOnly:
498         out.print("SweepOnly");
499         return;
500     }
501     RELEASE_ASSERT_NOT_REACHED();
502 }
503
504 } // namespace WTF
505