/*
 * Copyright (C) 2012, 2015-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "SlotVisitor.h"

#include "AbstractMacroAssembler.h"
#include "ConservativeRoots.h"
#include "GCSegmentedArrayInlines.h"
#include "HeapCellInlines.h"
#include "HeapProfiler.h"
#include "HeapSnapshotBuilder.h"
#include "JSArray.h"
#include "JSDestructibleObject.h"
#include "JSObject.h"
#include "JSString.h"
#include "JSCInlines.h"
#include "SlotVisitorInlines.h"
#include "SuperSampler.h"

namespace JSC {

#if ENABLE(GC_VALIDATION)
static void validate(JSCell* cell)
{
    RELEASE_ASSERT(cell);

    if (!cell->structure()) {
        dataLogF("cell at %p has a null structure\n", cell);
        CRASH();
    }

    // Both the cell's structure, and the cell's structure's structure should be the Structure Structure.
    // I hate this sentence.
    if (cell->structure()->structure()->JSCell::classInfo() != cell->structure()->JSCell::classInfo()) {
        const char* parentClassName = 0;
        const char* ourClassName = 0;
        if (cell->structure()->structure() && cell->structure()->structure()->JSCell::classInfo())
            parentClassName = cell->structure()->structure()->JSCell::classInfo()->className;
        if (cell->structure()->JSCell::classInfo())
            ourClassName = cell->structure()->JSCell::classInfo()->className;
        dataLogF("parent structure (%p <%s>) of cell at %p doesn't match cell's structure (%p <%s>)\n",
            cell->structure()->structure(), parentClassName, cell, cell->structure(), ourClassName);
        CRASH();
    }

    // Make sure we can walk the ClassInfo chain. The empty loop body is deliberate:
    // if the parentClass chain is corrupt, dereferencing it crashes here, inside
    // validation, rather than at some harder-to-diagnose point later.
    const ClassInfo* info = cell->classInfo();
    do { } while ((info = info->parentClass));
}
#endif

SlotVisitor::SlotVisitor(Heap& heap)
    : m_stack()
    , m_bytesVisited(0)
    , m_visitCount(0)
    , m_isInParallelMode(false)
    , m_markingVersion(MarkedSpace::initialVersion)
    , m_heap(heap)
    , m_isCheckingForDefaultMarkViolation(false)
{
}

SlotVisitor::~SlotVisitor()
{
    clearMarkStack();
}

void SlotVisitor::didStartMarking()
{
    if (heap()->operationInProgress() == FullCollection)
        ASSERT(m_opaqueRoots.isEmpty()); // Should have merged by now.

    if (HeapProfiler* heapProfiler = vm().heapProfiler())
        m_heapSnapshotBuilder = heapProfiler->activeSnapshotBuilder();

    m_markingVersion = heap()->objectSpace().markingVersion();
}

void SlotVisitor::reset()
{
    m_bytesVisited = 0;
    m_visitCount = 0;
    m_heapSnapshotBuilder = nullptr;
    ASSERT(!m_currentCell);
}

void SlotVisitor::clearMarkStack()
{
    m_stack.clear();
}

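// Conservative roots come from scanning memory that might contain cell pointers
// (such as the machine stack and registers). A candidate can be either a proper
// JSCell or a raw auxiliary allocation, so each one is routed through
// appendJSCellOrAuxiliary() below.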
void SlotVisitor::append(ConservativeRoots& conservativeRoots)
{
    HeapCell** roots = conservativeRoots.roots();
    size_t size = conservativeRoots.size();
    for (size_t i = 0; i < size; ++i)
        appendJSCellOrAuxiliary(roots[i]);
}

void SlotVisitor::appendJSCellOrAuxiliary(HeapCell* heapCell)
{
    if (!heapCell)
        return;

    ASSERT(!m_isCheckingForDefaultMarkViolation);

    if (Heap::testAndSetMarked(m_markingVersion, heapCell))
        return;

    switch (heapCell->cellKind()) {
    case HeapCell::JSCell: {
        JSCell* jsCell = static_cast<JSCell*>(heapCell);

        if (!jsCell->structure()) {
            ASSERT_NOT_REACHED();
            return;
        }

        jsCell->setCellState(CellState::NewGrey);

        appendToMarkStack(jsCell);
        return;
    }

    case HeapCell::Auxiliary: {
        noteLiveAuxiliaryCell(heapCell);
        return;
    }
    }
}

void SlotVisitor::append(JSValue value)
{
    if (!value || !value.isCell())
        return;

    if (UNLIKELY(m_heapSnapshotBuilder))
        m_heapSnapshotBuilder->appendEdge(m_currentCell, value.asCell());

    setMarkedAndAppendToMarkStack(value.asCell());
}

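// Illustrative sketch (not part of this file) of how a visitChildren
// implementation typically feeds values to this visitor; MyObject and m_field
// are hypothetical names:
//
//     void MyObject::visitChildren(JSCell* cell, SlotVisitor& visitor)
//     {
//         MyObject* thisObject = jsCast<MyObject*>(cell);
//         Base::visitChildren(thisObject, visitor);
//         visitor.append(thisObject->m_field.get()); // m_field: WriteBarrier<Unknown>
//     }
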
void SlotVisitor::appendHidden(JSValue value)
{
    if (!value || !value.isCell())
        return;

    setMarkedAndAppendToMarkStack(value.asCell());
}

void SlotVisitor::setMarkedAndAppendToMarkStack(JSCell* cell)
{
    SuperSamplerScope superSamplerScope(false);

    ASSERT(!m_isCheckingForDefaultMarkViolation);
    if (!cell)
        return;

#if ENABLE(GC_VALIDATION)
    validate(cell);
#endif

    //dataLog("    Marking ", RawPointer(cell), "\n");

    if (cell->isLargeAllocation())
        setMarkedAndAppendToMarkStack(cell->largeAllocation(), cell);
    else
        setMarkedAndAppendToMarkStack(cell->markedBlock(), cell);
}

template<typename ContainerType>
ALWAYS_INLINE void SlotVisitor::setMarkedAndAppendToMarkStack(ContainerType& container, JSCell* cell)
{
    container.aboutToMark(m_markingVersion);

    if (container.testAndSetMarked(cell))
        return;

    ASSERT(cell->structure());

    // Indicate that the object is grey and that:
    // In case of concurrent GC: it's the first time it is grey in this GC cycle.
    // In case of eden collection: it's a new object that became grey rather than an old remembered object.
    cell->setCellState(CellState::NewGrey);

    appendToMarkStack(container, cell);
}

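// Note: the NewGrey state set above pairs with the blacken() call in
// visitChildren() below: a cell is grey while it waits on a mark stack and is
// blackened once its children have been appended.
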
void SlotVisitor::appendToMarkStack(JSCell* cell)
{
    if (cell->isLargeAllocation())
        appendToMarkStack(cell->largeAllocation(), cell);
    else
        appendToMarkStack(cell->markedBlock(), cell);
}

template<typename ContainerType>
ALWAYS_INLINE void SlotVisitor::appendToMarkStack(ContainerType& container, JSCell* cell)
{
    ASSERT(Heap::isMarkedConcurrently(cell));
    ASSERT(!cell->isZapped());

    container.noteMarked();

    // FIXME: These "just work" because the GC resets these fields before doing anything else. But
    // that won't be the case when we do concurrent GC.
    m_visitCount++;
    m_bytesVisited += container.cellSize();

    m_stack.append(cell);

    if (UNLIKELY(m_heapSnapshotBuilder))
        m_heapSnapshotBuilder->appendNode(cell);
}

void SlotVisitor::markAuxiliary(const void* base)
{
    HeapCell* cell = bitwise_cast<HeapCell*>(base);

    ASSERT(cell->heap() == heap());

    if (Heap::testAndSetMarked(m_markingVersion, cell))
        return;

    noteLiveAuxiliaryCell(cell);
}

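// Illustrative sketch (not part of this file): an object with out-of-line
// backing storage keeps that storage alive by reporting the allocation's base
// pointer here during visiting, along the lines of
// visitor.markAuxiliary(thisObject->butterfly()); the caller shown is an
// assumption for illustration.
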
void SlotVisitor::noteLiveAuxiliaryCell(HeapCell* cell)
{
    // We get here once per GC under these circumstances:
    //
    // Eden collection: if the cell was allocated since the last collection and is live somehow.
    //
    // Full collection: if the cell is live somehow.

    CellContainer container = cell->cellContainer();

    container.noteMarked();

    m_visitCount++;
    m_bytesVisited += container.cellSize();
}

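// RAII helper that records the cell currently being visited, so that append()
// above can attribute heap-snapshot edges to the correct source node.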
class SetCurrentCellScope {
public:
    SetCurrentCellScope(SlotVisitor& visitor, const JSCell* cell)
        : m_visitor(visitor)
    {
        ASSERT(!m_visitor.m_currentCell);
        m_visitor.m_currentCell = const_cast<JSCell*>(cell);
    }

    ~SetCurrentCellScope()
    {
        ASSERT(m_visitor.m_currentCell);
        m_visitor.m_currentCell = nullptr;
    }

private:
    SlotVisitor& m_visitor;
};

ALWAYS_INLINE void SlotVisitor::visitChildren(const JSCell* cell)
{
    ASSERT(Heap::isMarkedConcurrently(cell));

    SetCurrentCellScope currentCellScope(*this, cell);

    cell->setCellState(blacken(cell->cellState()));

    // FIXME: Make this work on ARM also.
    // https://bugs.webkit.org/show_bug.cgi?id=162461
    if (isX86())
        WTF::storeLoadFence();

    switch (cell->type()) {
    case StringType:
        JSString::visitChildren(const_cast<JSCell*>(cell), *this);
        break;

    case FinalObjectType:
        JSFinalObject::visitChildren(const_cast<JSCell*>(cell), *this);
        break;

    case ArrayType:
        JSArray::visitChildren(const_cast<JSCell*>(cell), *this);
        break;

    default:
        // FIXME: This could be so much better.
        // https://bugs.webkit.org/show_bug.cgi?id=162462
        cell->methodTable()->visitChildren(const_cast<JSCell*>(cell), *this);
        break;
    }
}

void SlotVisitor::donateKnownParallel()
{
    // NOTE: Because we re-try often, we can afford to be conservative, and
    // assume that donating is not profitable.

    // Avoid locking when a thread reaches a dead end in the object graph.
    if (m_stack.size() < 2)
        return;

    // If there's already some shared work queued up, be conservative and assume
    // that donating more is not profitable.
    if (m_heap.m_sharedMarkStack.size())
        return;

    // If we're contending on the lock, be conservative and assume that another
    // thread is already donating.
    std::unique_lock<Lock> lock(m_heap.m_markingMutex, std::try_to_lock);
    if (!lock.owns_lock())
        return;

    // Otherwise, assume that a thread will go idle soon, and donate.
    m_stack.donateSomeCellsTo(m_heap.m_sharedMarkStack);

    m_heap.m_markingConditionVariable.notifyAll();
}

void SlotVisitor::drain()
{
    ASSERT(m_isInParallelMode);

    while (!m_stack.isEmpty()) {
        m_stack.refill();
        for (unsigned countdown = Options::minimumNumberOfScansBetweenRebalance(); m_stack.canRemoveLast() && countdown--;)
            visitChildren(m_stack.removeLast());
        donateKnownParallel();
    }

    mergeOpaqueRootsIfNecessary();
}

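// Parallel drain protocol, as implemented below: each marker counts itself
// active while it has work, and parks as waiting otherwise. Termination is the
// state in which no marker is active and the shared stack is empty; whichever
// thread observes that state wakes the others. The master then returns to its
// caller, while slaves keep servicing the shared stack until
// m_parallelMarkersShouldExit is set.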
void SlotVisitor::drainFromShared(SharedDrainMode sharedDrainMode)
{
    ASSERT(m_isInParallelMode);

    ASSERT(Options::numberOfGCMarkers());

    {
        std::lock_guard<Lock> lock(m_heap.m_markingMutex);
        m_heap.m_numberOfActiveParallelMarkers++;
    }
    while (true) {
        {
            std::unique_lock<Lock> lock(m_heap.m_markingMutex);
            m_heap.m_numberOfActiveParallelMarkers--;
            m_heap.m_numberOfWaitingParallelMarkers++;

            // How we wait differs depending on drain mode.
            if (sharedDrainMode == MasterDrain) {
                // Wait until either termination is reached, or until there is some work
                // for us to do.
                while (true) {
                    // Did we reach termination?
                    if (!m_heap.m_numberOfActiveParallelMarkers
                        && m_heap.m_sharedMarkStack.isEmpty()) {
                        // Let any sleeping slaves know it's time for them to return;
                        m_heap.m_markingConditionVariable.notifyAll();
                        return;
                    }

                    // Is there work to be done?
                    if (!m_heap.m_sharedMarkStack.isEmpty())
                        break;

                    // Otherwise wait.
                    m_heap.m_markingConditionVariable.wait(lock);
                }
            } else {
                ASSERT(sharedDrainMode == SlaveDrain);

                // Did we detect termination? If so, let the master know.
                if (!m_heap.m_numberOfActiveParallelMarkers
                    && m_heap.m_sharedMarkStack.isEmpty())
                    m_heap.m_markingConditionVariable.notifyAll();

                m_heap.m_markingConditionVariable.wait(
                    lock,
                    [this] {
                        return !m_heap.m_sharedMarkStack.isEmpty()
                            || m_heap.m_parallelMarkersShouldExit;
                    });

                // Is the current phase done? If so, return from this function.
                if (m_heap.m_parallelMarkersShouldExit)
                    return;
            }

            m_stack.stealSomeCellsFrom(
                m_heap.m_sharedMarkStack, m_heap.m_numberOfWaitingParallelMarkers);
            m_heap.m_numberOfActiveParallelMarkers++;
            m_heap.m_numberOfWaitingParallelMarkers--;
        }

        drain();
    }
}

void SlotVisitor::addOpaqueRoot(void* root)
{
    if (Options::numberOfGCMarkers() == 1) {
        // Put directly into the shared HashSet.
        m_heap.m_opaqueRoots.add(root);
        return;
    }
    // Put into the local set, but merge with the shared one every once in
    // a while to make sure that the local sets don't grow too large.
    mergeOpaqueRootsIfProfitable();
    m_opaqueRoots.add(root);
}

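// Illustrative sketch (not part of this file): opaque roots let the GC reason
// about liveness of things that are not JSCells. A wrapper can record its
// native owner while marking and later ask whether that owner was reachable;
// nativeOwner is a hypothetical pointer used only for illustration:
//
//     visitor.addOpaqueRoot(nativeOwner);                    // during marking
//     bool live = visitor.containsOpaqueRoot(nativeOwner);   // after marking
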
bool SlotVisitor::containsOpaqueRoot(void* root) const
{
    ASSERT(!m_isInParallelMode);
    ASSERT(m_opaqueRoots.isEmpty());
    return m_heap.m_opaqueRoots.contains(root);
}

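// Unlike containsOpaqueRoot(), the tri-state query below may run while other
// markers still hold unmerged local sets, so a miss in both this visitor's set
// and the shared set means "unknown" (MixedTriState), not "false".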
TriState SlotVisitor::containsOpaqueRootTriState(void* root) const
{
    if (m_opaqueRoots.contains(root))
        return TrueTriState;
    std::lock_guard<Lock> lock(m_heap.m_opaqueRootsMutex);
    if (m_heap.m_opaqueRoots.contains(root))
        return TrueTriState;
    return MixedTriState;
}

int SlotVisitor::opaqueRootCount()
{
    ASSERT(!m_isInParallelMode);
    ASSERT(m_opaqueRoots.isEmpty());
    return m_heap.m_opaqueRoots.size();
}

void SlotVisitor::mergeOpaqueRootsIfNecessary()
{
    if (m_opaqueRoots.isEmpty())
        return;
    mergeOpaqueRoots();
}

void SlotVisitor::mergeOpaqueRootsIfProfitable()
{
    if (static_cast<unsigned>(m_opaqueRoots.size()) < Options::opaqueRootMergeThreshold())
        return;
    mergeOpaqueRoots();
}

void SlotVisitor::donate()
{
    ASSERT(m_isInParallelMode);
    if (Options::numberOfGCMarkers() == 1)
        return;

    donateKnownParallel();
}

void SlotVisitor::donateAndDrain()
{
    donate();
    drain();
}

void SlotVisitor::mergeOpaqueRoots()
{
    ASSERT(!m_opaqueRoots.isEmpty()); // Should only be called when opaque roots are non-empty.
    {
        std::lock_guard<Lock> lock(m_heap.m_opaqueRootsMutex);
        for (auto* root : m_opaqueRoots)
            m_heap.m_opaqueRoots.add(root);
    }
    m_opaqueRoots.clear();
}

void SlotVisitor::harvestWeakReferences()
{
    for (WeakReferenceHarvester* current = m_heap.m_weakReferenceHarvesters.head(); current; current = current->next())
        current->visitWeakReferences(*this);
}

void SlotVisitor::finalizeUnconditionalFinalizers()
{
    while (m_heap.m_unconditionalFinalizers.hasNext())
        m_heap.m_unconditionalFinalizers.removeNext()->finalizeUnconditionally();
}

void SlotVisitor::dump(PrintStream&) const
{
    for (const JSCell* cell : markStack())
        dataLog(*cell, "\n");
}

} // namespace JSC