/*
 * Copyright (C) 2014-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef HeapInlines_h
#define HeapInlines_h

#include "Heap.h"
#include "HeapCellInlines.h"
#include "IndexingHeader.h"
#include "JSCallee.h"
#include "JSCell.h"
#include "Structure.h"
#include <type_traits>
#include <wtf/Assertions.h>
#include <wtf/MainThread.h>
#include <wtf/RandomNumber.h>

namespace JSC {

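// Returns true when enough has been allocated this cycle that a collection should be
// triggered: collection must not be deferred, must be safe, and no GC operation may
// already be in progress. When Options::gcMaxHeapSize() is set, it overrides the normal
// eden size limit.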
inline bool Heap::shouldCollect()
{
    if (isDeferred())
        return false;
    if (!m_isSafeToCollect)
        return false;
    if (m_operationInProgress != NoOperation)
        return false;
    if (Options::gcMaxHeapSize())
        return m_bytesAllocatedThisCycle > Options::gcMaxHeapSize();
    return m_bytesAllocatedThisCycle > m_maxEdenSize;
}

inline bool Heap::isBusy()
{
    return m_operationInProgress != NoOperation;
}

inline bool Heap::isCollecting()
{
    return m_operationInProgress == FullCollection || m_operationInProgress == EdenCollection;
}

ALWAYS_INLINE Heap* Heap::heap(const HeapCell* cell)
{
    return cell->heap();
}

inline Heap* Heap::heap(const JSValue v)
{
    if (!v.isCell())
        return 0;
    return heap(v.asCell());
}

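// Mark-bit queries. Mark bits live either in the cell's MarkedBlock or, for large
// allocations, in the LargeAllocation itself. A block whose version is stale (it still
// "needs flip" relative to the current MarkedSpace version) has no valid mark bits for
// this cycle, so its cells are treated as unmarked. isMarked() asserts that it is not
// called from a GC thread; isMarkedConcurrently() omits that assertion and adds a
// load-load fence so the version check and the mark-bit read are ordered.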
ALWAYS_INLINE bool Heap::isMarked(const void* rawCell)
{
    ASSERT(!mayBeGCThread());
    HeapCell* cell = bitwise_cast<HeapCell*>(rawCell);
    if (cell->isLargeAllocation())
        return cell->largeAllocation().isMarked();
    MarkedBlock& block = cell->markedBlock();
    if (block.needsFlip(block.vm()->heap.objectSpace().version()))
        return false;
    return block.isMarked(cell);
}

ALWAYS_INLINE bool Heap::isMarkedConcurrently(const void* rawCell)
{
    HeapCell* cell = bitwise_cast<HeapCell*>(rawCell);
    if (cell->isLargeAllocation())
        return cell->largeAllocation().isMarked();
    MarkedBlock& block = cell->markedBlock();
    if (block.needsFlip(block.vm()->heap.objectSpace().version()))
        return false;
    WTF::loadLoadFence();
    return block.isMarked(cell);
}

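// Atomically sets the mark bit for a cell during marking, flipping the block's mark bits
// first if its version is stale. Returns whether the bit was already set, so callers can
// tell if the cell was already marked.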
ALWAYS_INLINE bool Heap::testAndSetMarked(HeapVersion version, const void* rawCell)
{
    HeapCell* cell = bitwise_cast<HeapCell*>(rawCell);
    if (cell->isLargeAllocation())
        return cell->largeAllocation().testAndSetMarked();
    MarkedBlock& block = cell->markedBlock();
    block.flipIfNecessaryDuringMarking(version);
    return block.testAndSetMarked(cell);
}

ALWAYS_INLINE size_t Heap::cellSize(const void* rawCell)
{
    return bitwise_cast<HeapCell*>(rawCell)->cellSize();
}

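// Generational write barrier. When an old, already-blackened object ("from") is made to
// point at a newly allocated, still-white object ("to"), "from" is added to the remembered
// set so the next eden collection re-visits it. The JSValue overload just filters out
// non-cell values; the single-argument overload remembers "from" regardless of what it
// now points to.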
inline void Heap::writeBarrier(const JSCell* from, JSValue to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
    if (!to.isCell())
        return;
    writeBarrier(from, to.asCell());
}

inline void Heap::writeBarrier(const JSCell* from, JSCell* to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
    if (!from || from->cellState() != CellState::OldBlack)
        return;
    if (!to || to->cellState() != CellState::NewWhite)
        return;
    addToRememberedSet(from);
}

inline void Heap::writeBarrier(const JSCell* from)
{
    ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
    if (!from || from->cellState() != CellState::OldBlack)
        return;
    addToRememberedSet(from);
}

inline void Heap::reportExtraMemoryAllocated(size_t size)
{
    if (size > minExtraMemory)
        reportExtraMemoryAllocatedSlowCase(size);
}

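// Accumulates extra (non-cell) memory discovered while visiting cells. The counter is
// updated with a compare-and-swap loop so concurrent visitors do not lose updates. Cells
// that were already old and grey are skipped during eden collections so that their extra
// memory is not counted twice.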
inline void Heap::reportExtraMemoryVisited(CellState dataBeforeVisiting, size_t size)
{
    // We don't want to double-count the extra memory that was reported in previous collections.
    if (operationInProgress() == EdenCollection && dataBeforeVisiting == CellState::OldGrey)
        return;

    size_t* counter = &m_extraMemorySize;

    for (;;) {
        size_t oldSize = *counter;
        if (WTF::weakCompareAndSwap(counter, oldSize, oldSize + size))
            return;
    }
}

#if ENABLE(RESOURCE_USAGE)
inline void Heap::reportExternalMemoryVisited(CellState dataBeforeVisiting, size_t size)
{
    // We don't want to double-count the external memory that was reported in previous collections.
    if (operationInProgress() == EdenCollection && dataBeforeVisiting == CellState::OldGrey)
        return;

    size_t* counter = &m_externalMemorySize;

    for (;;) {
        size_t oldSize = *counter;
        if (WTF::weakCompareAndSwap(counter, oldSize, oldSize + size))
            return;
    }
}
#endif

inline void Heap::deprecatedReportExtraMemory(size_t size)
{
    if (size > minExtraMemory)
        deprecatedReportExtraMemorySlowCase(size);
}

template<typename Functor> inline void Heap::forEachCodeBlock(const Functor& func)
{
    forEachCodeBlockImpl(scopedLambdaRef<bool(CodeBlock*)>(func));
}

template<typename Functor> inline void Heap::forEachProtectedCell(const Functor& functor)
{
    for (auto& pair : m_protectedValues)
        functor(pair.key);
    m_handleSet.forEachStrongHandle(functor, m_protectedValues);
}

inline void* Heap::allocateWithDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes with normal destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithDestructor(bytes);
}

inline void* Heap::allocateWithoutDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes without destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithoutDestructor(bytes);
}

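// Typed allocation helpers. Classes that need their destructor run are allocated from the
// destructor subspace; everything else goes to the non-destructor subspace. The assertion
// enforces that destructible, non-immortal classes derive from JSDestructibleObject, which
// is what JSCell::classInfo() relies on.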
template<typename ClassType>
inline void* Heap::allocateObjectOfType(size_t bytes)
{
    // JSCell::classInfo() expects objects allocated with normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return allocateWithDestructor(bytes);
    return allocateWithoutDestructor(bytes);
}

template<typename ClassType>
inline MarkedSpace::Subspace& Heap::subspaceForObjectOfType()
{
    // JSCell::classInfo() expects objects allocated with normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return subspaceForObjectDestructor();
    return subspaceForObjectWithoutDestructor();
}

template<typename ClassType>
inline MarkedAllocator* Heap::allocatorForObjectOfType(size_t bytes)
{
    // JSCell::classInfo() expects objects allocated with normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    MarkedAllocator* result;
    if (ClassType::needsDestruction)
        result = allocatorForObjectWithDestructor(bytes);
    else
        result = allocatorForObjectWithoutDestructor(bytes);

    ASSERT(result || !ClassType::info()->isSubClassOf(JSCallee::info()));
    return result;
}

inline void* Heap::allocateAuxiliary(JSCell* intendedOwner, size_t bytes)
{
    void* result = m_objectSpace.allocateAuxiliary(bytes);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes of auxiliary for %p: %p.\n", bytes, intendedOwner, result);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

inline void* Heap::tryAllocateAuxiliary(JSCell* intendedOwner, size_t bytes)
{
    void* result = m_objectSpace.tryAllocateAuxiliary(bytes);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes of auxiliary for %p: %p.\n", bytes, intendedOwner, result);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

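// Grows an auxiliary allocation by allocating a new chunk and copying the old contents.
// Returns nullptr if the new allocation fails. The old storage is not freed here; it
// simply becomes unreferenced and is reclaimed by the collector.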
inline void* Heap::tryReallocateAuxiliary(JSCell* intendedOwner, void* oldBase, size_t oldSize, size_t newSize)
{
    void* newBase = tryAllocateAuxiliary(intendedOwner, newSize);
    if (!newBase)
        return nullptr;
    memcpy(newBase, oldBase, oldSize);
    return newBase;
}

inline void Heap::ascribeOwner(JSCell* intendedOwner, void* storage)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC ascribing %p as owner of storage %p.\n", intendedOwner, storage);
#else
    UNUSED_PARAM(intendedOwner);
    UNUSED_PARAM(storage);
#endif
}

#if USE(FOUNDATION)
template <typename T>
inline void Heap::releaseSoon(RetainPtr<T>&& object)
{
    m_delayedReleaseObjects.append(WTFMove(object));
}
#endif

inline void Heap::incrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth < 100); // Sanity check to make sure this doesn't get ridiculous.
    m_deferralDepth++;
}

inline void Heap::decrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth >= 1);
    m_deferralDepth--;
}

inline bool Heap::collectIfNecessaryOrDefer()
{
    if (!shouldCollect())
        return false;

    collect();
    return true;
}

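// Used (via decrementDeferralDepthAndGCIfNeeded()) when
// Options::deferGCShouldCollectWithProbability() is enabled: with probability
// Options::deferGCProbability() a collection is forced as soon as it is safe, otherwise
// the normal collectIfNecessaryOrDefer() heuristic applies.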
inline void Heap::collectAccordingToDeferGCProbability()
{
    if (isDeferred() || !m_isSafeToCollect || m_operationInProgress != NoOperation)
        return;

    if (randomNumber() < Options::deferGCProbability()) {
        collect();
        return;
    }

    // If our coin flip told us not to GC, we still might GC,
    // but we GC according to our memory pressure markers.
    collectIfNecessaryOrDefer();
}

inline void Heap::decrementDeferralDepthAndGCIfNeeded()
{
    decrementDeferralDepth();
    if (UNLIKELY(Options::deferGCShouldCollectWithProbability()))
        collectAccordingToDeferGCProbability();
    else
        collectIfNecessaryOrDefer();
}

inline HashSet<MarkedArgumentBuffer*>& Heap::markListSet()
{
    if (!m_markListSet)
        m_markListSet = std::make_unique<HashSet<MarkedArgumentBuffer*>>();
    return *m_markListSet;
}

inline void Heap::registerWeakGCMap(void* weakGCMap, std::function<void()> pruningCallback)
{
    m_weakGCMaps.add(weakGCMap, WTFMove(pruningCallback));
}

inline void Heap::unregisterWeakGCMap(void* weakGCMap)
{
    m_weakGCMaps.remove(weakGCMap);
}

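// Block-level accounting, only meaningful when ENABLE(RESOURCE_USAGE): tracks the total
// capacity of blocks currently owned by the heap for resource-usage reporting.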
inline void Heap::didAllocateBlock(size_t capacity)
{
#if ENABLE(RESOURCE_USAGE)
    m_blockBytesAllocated += capacity;
#else
    UNUSED_PARAM(capacity);
#endif
}

inline void Heap::didFreeBlock(size_t capacity)
{
#if ENABLE(RESOURCE_USAGE)
    m_blockBytesAllocated -= capacity;
#else
    UNUSED_PARAM(capacity);
#endif
}

} // namespace JSC

#endif // HeapInlines_h