MarkedBlock should know what objects are live during marking
Source/JavaScriptCore/heap/SlotVisitor.cpp
/*
 * Copyright (C) 2012, 2015-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "SlotVisitor.h"

#include "CPU.h"
#include "ConservativeRoots.h"
#include "GCSegmentedArrayInlines.h"
#include "HeapCellInlines.h"
#include "HeapProfiler.h"
#include "HeapSnapshotBuilder.h"
#include "JSArray.h"
#include "JSDestructibleObject.h"
#include "JSObject.h"
#include "JSString.h"
#include "JSCInlines.h"
#include "SlotVisitorInlines.h"
#include "SuperSampler.h"
#include "VM.h"
#include <wtf/Lock.h>
namespace JSC {

#if ENABLE(GC_VALIDATION)
static void validate(JSCell* cell)
{
    RELEASE_ASSERT(cell);

    if (!cell->structure()) {
        dataLogF("cell at %p has a null structure\n", cell);
        CRASH();
    }

    // Both the cell's structure, and the cell's structure's structure should be the Structure Structure.
    // I hate this sentence.
    if (cell->structure()->structure()->JSCell::classInfo() != cell->structure()->JSCell::classInfo()) {
        const char* parentClassName = 0;
        const char* ourClassName = 0;
        if (cell->structure()->structure() && cell->structure()->structure()->JSCell::classInfo())
            parentClassName = cell->structure()->structure()->JSCell::classInfo()->className;
        if (cell->structure()->JSCell::classInfo())
            ourClassName = cell->structure()->JSCell::classInfo()->className;
        dataLogF("parent structure (%p <%s>) of cell at %p doesn't match cell's structure (%p <%s>)\n",
            cell->structure()->structure(), parentClassName, cell, cell->structure(), ourClassName);
        CRASH();
    }

    // Make sure we can walk the ClassInfo chain.
    const ClassInfo* info = cell->classInfo();
    do { } while ((info = info->parentClass));
}
#endif

SlotVisitor::SlotVisitor(Heap& heap)
    : m_stack()
    , m_bytesVisited(0)
    , m_bytesCopied(0)
    , m_visitCount(0)
    , m_isInParallelMode(false)
    , m_markingVersion(MarkedSpace::initialVersion)
    , m_heap(heap)
#if !ASSERT_DISABLED
    , m_isCheckingForDefaultMarkViolation(false)
    , m_isDraining(false)
#endif
{
}

SlotVisitor::~SlotVisitor()
{
    clearMarkStack();
}

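// Called at the start of a marking phase. For a full collection, this
// visitor's opaque roots should already have been merged into the shared
// set; otherwise (an eden collection) the per-visitor state is reset.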
void SlotVisitor::didStartMarking()
{
    if (heap()->operationInProgress() == FullCollection)
        ASSERT(m_opaqueRoots.isEmpty()); // Should have merged by now.
    else
        reset();

    if (HeapProfiler* heapProfiler = vm().heapProfiler())
        m_heapSnapshotBuilder = heapProfiler->activeSnapshotBuilder();

    m_markingVersion = heap()->objectSpace().markingVersion();
}

void SlotVisitor::reset()
{
    m_bytesVisited = 0;
    m_bytesCopied = 0;
    m_visitCount = 0;
    m_heapSnapshotBuilder = nullptr;
    ASSERT(!m_currentCell);
}

void SlotVisitor::clearMarkStack()
{
    m_stack.clear();
}

void SlotVisitor::append(ConservativeRoots& conservativeRoots)
{
    HeapCell** roots = conservativeRoots.roots();
    size_t size = conservativeRoots.size();
    for (size_t i = 0; i < size; ++i)
        appendJSCellOrAuxiliary(roots[i]);
}

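// A conservative root may point at either kind of HeapCell: a JSCell or an
// auxiliary cell (an out-of-line backing store such as a butterfly).
// Auxiliaries have no outgoing references for the GC to scan, so they are
// merely noted as live, while JSCells are pushed onto the mark stack to
// have their children visited.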
void SlotVisitor::appendJSCellOrAuxiliary(HeapCell* heapCell)
{
    if (!heapCell)
        return;

    ASSERT(!m_isCheckingForDefaultMarkViolation);

    if (Heap::testAndSetMarked(m_markingVersion, heapCell))
        return;

    switch (heapCell->cellKind()) {
    case HeapCell::JSCell: {
        JSCell* jsCell = static_cast<JSCell*>(heapCell);

        if (!jsCell->structure()) {
            ASSERT_NOT_REACHED();
            return;
        }

        jsCell->setCellState(CellState::NewGrey);

        appendToMarkStack(jsCell);
        return;
    }

    case HeapCell::Auxiliary: {
        noteLiveAuxiliaryCell(heapCell);
        return;
    } }
}

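// append() and appendHidden() differ only in profiling behavior: the
// normal form records an edge from the cell currently being visited when a
// heap snapshot is being built, while the hidden form marks the target
// without exposing that edge to the profiler.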
void SlotVisitor::append(JSValue value)
{
    if (!value || !value.isCell())
        return;

    if (UNLIKELY(m_heapSnapshotBuilder))
        m_heapSnapshotBuilder->appendEdge(m_currentCell, value.asCell());

    setMarkedAndAppendToMarkStack(value.asCell());
}

void SlotVisitor::appendHidden(JSValue value)
{
    if (!value || !value.isCell())
        return;

    setMarkedAndAppendToMarkStack(value.asCell());
}

void SlotVisitor::setMarkedAndAppendToMarkStack(JSCell* cell)
{
    SuperSamplerScope superSamplerScope(false);

    ASSERT(!m_isCheckingForDefaultMarkViolation);
    if (!cell)
        return;

#if ENABLE(GC_VALIDATION)
    validate(cell);
#endif

    if (cell->isLargeAllocation())
        setMarkedAndAppendToMarkStack(cell->largeAllocation(), cell);
    else
        setMarkedAndAppendToMarkStack(cell->markedBlock(), cell);
}

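// The container (a MarkedBlock or a LargeAllocation) holds versioned mark
// bits: aboutToMark() brings those bits up to date with the current
// marking version before testAndSetMarked() is consulted. This is
// presumably the mechanism behind the patch title: during marking, the
// container itself can answer which of its objects are live.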
template<typename ContainerType>
ALWAYS_INLINE void SlotVisitor::setMarkedAndAppendToMarkStack(ContainerType& container, JSCell* cell)
{
    container.aboutToMark(m_markingVersion);

    if (container.testAndSetMarked(cell))
        return;

    ASSERT(cell->structure());

    // Indicate that the object is grey and that:
    // In case of concurrent GC: it's the first time it is grey in this GC cycle.
    // In case of eden collection: it's a new object that became grey rather than an old remembered object.
    cell->setCellState(CellState::NewGrey);

    appendToMarkStack(container, cell);
}

void SlotVisitor::appendToMarkStack(JSCell* cell)
{
    if (cell->isLargeAllocation())
        appendToMarkStack(cell->largeAllocation(), cell);
    else
        appendToMarkStack(cell->markedBlock(), cell);
}

template<typename ContainerType>
ALWAYS_INLINE void SlotVisitor::appendToMarkStack(ContainerType& container, JSCell* cell)
{
    ASSERT(Heap::isMarkedConcurrently(cell));
    ASSERT(!cell->isZapped());

    container.noteMarked();

    // FIXME: These "just work" because the GC resets these fields before doing anything else. But
    // that won't be the case when we do concurrent GC.
    m_visitCount++;
    m_bytesVisited += container.cellSize();

    m_stack.append(cell);

    if (UNLIKELY(m_heapSnapshotBuilder))
        m_heapSnapshotBuilder->appendNode(cell);
}

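// Marks a raw auxiliary allocation. A hedged sketch of a typical caller,
// using hypothetical names (MyObject and m_backingStore are illustrative,
// not real JSC identifiers):
//
//     void MyObject::visitChildren(JSCell* cell, SlotVisitor& visitor)
//     {
//         MyObject* thisObject = jsCast<MyObject*>(cell);
//         Base::visitChildren(cell, visitor);
//         visitor.markAuxiliary(thisObject->m_backingStore);
//     }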
void SlotVisitor::markAuxiliary(const void* base)
{
    HeapCell* cell = bitwise_cast<HeapCell*>(base);

    ASSERT(cell->heap() == heap());

    if (Heap::testAndSetMarked(m_markingVersion, cell))
        return;

    noteLiveAuxiliaryCell(cell);
}

void SlotVisitor::noteLiveAuxiliaryCell(HeapCell* cell)
{
    // We get here once per GC under these circumstances:
    //
    // Eden collection: if the cell was allocated since the last collection and is live somehow.
    //
    // Full collection: if the cell is live somehow.

    CellContainer container = cell->cellContainer();

    container.noteMarked();

    m_visitCount++;
    m_bytesVisited += container.cellSize();
}

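// RAII helper that records which cell is currently being visited, so that
// append() can attribute outgoing edges to it when building a heap
// snapshot.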
class SetCurrentCellScope {
public:
    SetCurrentCellScope(SlotVisitor& visitor, const JSCell* cell)
        : m_visitor(visitor)
    {
        ASSERT(!m_visitor.m_currentCell);
        m_visitor.m_currentCell = const_cast<JSCell*>(cell);
    }

    ~SetCurrentCellScope()
    {
        ASSERT(m_visitor.m_currentCell);
        m_visitor.m_currentCell = nullptr;
    }

private:
    SlotVisitor& m_visitor;
};

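// The cell is blackened before its children are scanned. The
// storeLoadFence() orders the cell-state store before the subsequent loads
// of the cell's fields; presumably this pairs with the mutator's write
// barrier so that an update racing with the visit cannot be missed by both
// sides.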
ALWAYS_INLINE void SlotVisitor::visitChildren(const JSCell* cell)
{
    ASSERT(Heap::isMarkedConcurrently(cell));

    SetCurrentCellScope currentCellScope(*this, cell);

    cell->setCellState(blacken(cell->cellState()));

    WTF::storeLoadFence();

    switch (cell->type()) {
    case StringType:
        JSString::visitChildren(const_cast<JSCell*>(cell), *this);
        break;

    case FinalObjectType:
        JSFinalObject::visitChildren(const_cast<JSCell*>(cell), *this);
        break;

    case ArrayType:
        JSArray::visitChildren(const_cast<JSCell*>(cell), *this);
        break;

    default:
        // FIXME: This could be so much better.
        // https://bugs.webkit.org/show_bug.cgi?id=162462
        cell->methodTable()->visitChildren(const_cast<JSCell*>(cell), *this);
        break;
    }
}

void SlotVisitor::donateKnownParallel()
{
    // NOTE: Because we re-try often, we can afford to be conservative, and
    // assume that donating is not profitable.

    // Avoid locking when a thread reaches a dead end in the object graph.
    if (m_stack.size() < 2)
        return;

    // If there's already some shared work queued up, be conservative and assume
    // that donating more is not profitable.
    if (m_heap.m_sharedMarkStack.size())
        return;

    // If we're contending on the lock, be conservative and assume that another
    // thread is already donating.
    std::unique_lock<Lock> lock(m_heap.m_markingMutex, std::try_to_lock);
    if (!lock.owns_lock())
        return;

    // Otherwise, assume that a thread will go idle soon, and donate.
    m_stack.donateSomeCellsTo(m_heap.m_sharedMarkStack);

    m_heap.m_markingConditionVariable.notifyAll();
}

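// Drains the local mark stack, pausing to offer work to other markers
// every Options::minimumNumberOfScansBetweenRebalance() visits.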
void SlotVisitor::drain()
{
    ASSERT(m_isInParallelMode);

    while (!m_stack.isEmpty()) {
        m_stack.refill();
        for (unsigned countdown = Options::minimumNumberOfScansBetweenRebalance(); m_stack.canRemoveLast() && countdown--;)
            visitChildren(m_stack.removeLast());
        donateKnownParallel();
    }

    mergeOpaqueRootsIfNecessary();
}

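// The parallel draining protocol: a marker goes idle, then waits for
// either work on the shared stack or termination. Termination is detected
// when no marker is active and the shared stack is empty. The master
// returns on termination; slaves additionally watch
// m_parallelMarkersShouldExit so the master can release them at the end of
// the phase. Each iteration steals some cells from the shared stack and
// drains them locally.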
void SlotVisitor::drainFromShared(SharedDrainMode sharedDrainMode)
{
    ASSERT(m_isInParallelMode);

    ASSERT(Options::numberOfGCMarkers());

    {
        std::lock_guard<Lock> lock(m_heap.m_markingMutex);
        m_heap.m_numberOfActiveParallelMarkers++;
    }
    while (true) {
        {
            std::unique_lock<Lock> lock(m_heap.m_markingMutex);
            m_heap.m_numberOfActiveParallelMarkers--;
            m_heap.m_numberOfWaitingParallelMarkers++;

            // How we wait differs depending on drain mode.
            if (sharedDrainMode == MasterDrain) {
                // Wait until either termination is reached, or until there is some work
                // for us to do.
                while (true) {
                    // Did we reach termination?
                    if (!m_heap.m_numberOfActiveParallelMarkers
                        && m_heap.m_sharedMarkStack.isEmpty()) {
                        // Let any sleeping slaves know it's time for them to return.
                        m_heap.m_markingConditionVariable.notifyAll();
                        return;
                    }

                    // Is there work to be done?
                    if (!m_heap.m_sharedMarkStack.isEmpty())
                        break;

                    // Otherwise wait.
                    m_heap.m_markingConditionVariable.wait(lock);
                }
            } else {
                ASSERT(sharedDrainMode == SlaveDrain);

                // Did we detect termination? If so, let the master know.
                if (!m_heap.m_numberOfActiveParallelMarkers
                    && m_heap.m_sharedMarkStack.isEmpty())
                    m_heap.m_markingConditionVariable.notifyAll();

                m_heap.m_markingConditionVariable.wait(
                    lock,
                    [this] {
                        return !m_heap.m_sharedMarkStack.isEmpty()
                            || m_heap.m_parallelMarkersShouldExit;
                    });

                // Is the current phase done? If so, return from this function.
                if (m_heap.m_parallelMarkersShouldExit)
                    return;
            }

            m_stack.stealSomeCellsFrom(
                m_heap.m_sharedMarkStack, m_heap.m_numberOfWaitingParallelMarkers);
            m_heap.m_numberOfActiveParallelMarkers++;
            m_heap.m_numberOfWaitingParallelMarkers--;
        }

        drain();
    }
}

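// Opaque roots let non-cell objects participate in reachability: a cell's
// visitChildren can register a native object it keeps alive, and a weak
// handle's owner can later ask whether that object was reached. A hedged
// sketch with hypothetical class names (MyWrapper, MyWeakHandleOwner):
//
//     void MyWrapper::visitChildren(JSCell* cell, SlotVisitor& visitor)
//     {
//         Base::visitChildren(cell, visitor);
//         visitor.addOpaqueRoot(jsCast<MyWrapper*>(cell)->impl());
//     }
//
//     bool MyWeakHandleOwner::isReachableFromOpaqueRoots(
//         Handle<Unknown> handle, void* context, SlotVisitor& visitor)
//     {
//         return visitor.containsOpaqueRoot(context);
//     }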
void SlotVisitor::addOpaqueRoot(void* root)
{
    if (Options::numberOfGCMarkers() == 1) {
        // Put directly into the shared HashSet.
        m_heap.m_opaqueRoots.add(root);
        return;
    }
    // Put into the local set, but merge with the shared one every once in
    // a while to make sure that the local sets don't grow too large.
    mergeOpaqueRootsIfProfitable();
    m_opaqueRoots.add(root);
}

bool SlotVisitor::containsOpaqueRoot(void* root) const
{
    ASSERT(!m_isInParallelMode);
    ASSERT(m_opaqueRoots.isEmpty());
    return m_heap.m_opaqueRoots.contains(root);
}

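// Unlike containsOpaqueRoot(), this can be called while marking is still
// in progress. Finding the root in the local or shared set is a definite
// yes; not finding it is inconclusive, since another marker's local set
// may contain it. Hence MixedTriState rather than FalseTriState.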
TriState SlotVisitor::containsOpaqueRootTriState(void* root) const
{
    if (m_opaqueRoots.contains(root))
        return TrueTriState;
    std::lock_guard<Lock> lock(m_heap.m_opaqueRootsMutex);
    if (m_heap.m_opaqueRoots.contains(root))
        return TrueTriState;
    return MixedTriState;
}

int SlotVisitor::opaqueRootCount()
{
    ASSERT(!m_isInParallelMode);
    ASSERT(m_opaqueRoots.isEmpty());
    return m_heap.m_opaqueRoots.size();
}

void SlotVisitor::mergeOpaqueRootsIfNecessary()
{
    if (m_opaqueRoots.isEmpty())
        return;
    mergeOpaqueRoots();
}

void SlotVisitor::mergeOpaqueRootsIfProfitable()
{
    if (static_cast<unsigned>(m_opaqueRoots.size()) < Options::opaqueRootMergeThreshold())
        return;
    mergeOpaqueRoots();
}

void SlotVisitor::donate()
{
    ASSERT(m_isInParallelMode);
    if (Options::numberOfGCMarkers() == 1)
        return;

    donateKnownParallel();
}

void SlotVisitor::donateAndDrain()
{
    donate();
    drain();
}

void SlotVisitor::mergeOpaqueRoots()
{
    ASSERT(!m_opaqueRoots.isEmpty()); // Should only be called when opaque roots are non-empty.
    {
        std::lock_guard<Lock> lock(m_heap.m_opaqueRootsMutex);
        for (auto* root : m_opaqueRoots)
            m_heap.m_opaqueRoots.add(root);
    }
    m_opaqueRoots.clear();
}

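// Invoked by the Heap once marking has converged, so that each registered
// WeakReferenceHarvester can visit the weak references it manages.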
void SlotVisitor::harvestWeakReferences()
{
    for (WeakReferenceHarvester* current = m_heap.m_weakReferenceHarvesters.head(); current; current = current->next())
        current->visitWeakReferences(*this);
}

void SlotVisitor::finalizeUnconditionalFinalizers()
{
    while (m_heap.m_unconditionalFinalizers.hasNext())
        m_heap.m_unconditionalFinalizers.removeNext()->finalizeUnconditionally();
}

void SlotVisitor::dump(PrintStream&) const
{
    for (const JSCell* cell : markStack())
        dataLog(*cell, "\n");
}

} // namespace JSC