Source/JavaScriptCore/heap/HeapInlines.h
/*
 * Copyright (C) 2014-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef HeapInlines_h
#define HeapInlines_h

#include "Heap.h"
#include "HeapCellInlines.h"
#include "IndexingHeader.h"
#include "JSCallee.h"
#include "JSCell.h"
#include "Structure.h"
#include <type_traits>
#include <wtf/Assertions.h>
#include <wtf/RandomNumber.h>

namespace JSC {

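// A collection is requested only when it is currently permitted (not deferred, safe to
// collect, and no GC operation already in progress) and the bytes allocated this cycle
// exceed either the gcMaxHeapSize option, if set, or the current eden budget.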
inline bool Heap::shouldCollect()
{
    if (isDeferred())
        return false;
    if (!m_isSafeToCollect)
        return false;
    if (m_operationInProgress != NoOperation)
        return false;
    if (Options::gcMaxHeapSize())
        return m_bytesAllocatedThisCycle > Options::gcMaxHeapSize();
    return m_bytesAllocatedThisCycle > m_maxEdenSize;
}

inline bool Heap::isBusy()
{
    return m_operationInProgress != NoOperation;
}

inline bool Heap::isCollecting()
{
    return m_operationInProgress == FullCollection || m_operationInProgress == EdenCollection;
}

ALWAYS_INLINE Heap* Heap::heap(const HeapCell* cell)
{
    return cell->heap();
}

inline Heap* Heap::heap(const JSValue v)
{
    if (!v.isCell())
        return nullptr;
    return heap(v.asCell());
}

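// The liveness and marking queries below share one shape: a cell lives either in a
// LargeAllocation, which tracks its own state, or in a MarkedBlock, whose mark bits must
// first be brought up to date with the current heap version (the flipIfNecessary calls)
// before they can be consulted.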
inline bool Heap::isLive(const void* rawCell)
{
    HeapCell* cell = bitwise_cast<HeapCell*>(rawCell);
    if (cell->isLargeAllocation())
        return cell->largeAllocation().isLive();
    MarkedBlock& block = cell->markedBlock();
    block.flipIfNecessaryConcurrently(block.vm()->heap.objectSpace().version());
    return block.handle().isLiveCell(cell);
}

ALWAYS_INLINE bool Heap::isMarked(const void* rawCell)
{
    HeapCell* cell = bitwise_cast<HeapCell*>(rawCell);
    if (cell->isLargeAllocation())
        return cell->largeAllocation().isMarked();
    MarkedBlock& block = cell->markedBlock();
    block.flipIfNecessaryConcurrently(block.vm()->heap.objectSpace().version());
    return block.isMarked(cell);
}

ALWAYS_INLINE bool Heap::testAndSetMarked(HeapVersion version, const void* rawCell)
{
    HeapCell* cell = bitwise_cast<HeapCell*>(rawCell);
    if (cell->isLargeAllocation())
        return cell->largeAllocation().testAndSetMarked();
    MarkedBlock& block = cell->markedBlock();
    block.flipIfNecessaryConcurrently(version);
    return block.testAndSetMarked(cell);
}

inline void Heap::setMarked(const void* rawCell)
{
    HeapCell* cell = bitwise_cast<HeapCell*>(rawCell);
    if (cell->isLargeAllocation()) {
        cell->largeAllocation().setMarked();
        return;
    }
    MarkedBlock& block = cell->markedBlock();
    block.flipIfNecessary(block.vm()->heap.objectSpace().version());
    block.setMarked(cell);
}

ALWAYS_INLINE size_t Heap::cellSize(const void* rawCell)
{
    return bitwise_cast<HeapCell*>(rawCell)->cellSize();
}

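// Generational write barriers: storing a pointer from an already-scanned old object
// (OldBlack) to an unscanned new object (NewWhite) adds the source cell to the
// remembered set so that the next eden collection revisits it.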
inline void Heap::writeBarrier(const JSCell* from, JSValue to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
    if (!to.isCell())
        return;
    writeBarrier(from, to.asCell());
}

inline void Heap::writeBarrier(const JSCell* from, JSCell* to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
    if (!from || from->cellState() != CellState::OldBlack)
        return;
    if (!to || to->cellState() != CellState::NewWhite)
        return;
    addToRememberedSet(from);
}

inline void Heap::writeBarrier(const JSCell* from)
{
    ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
    if (!from || from->cellState() != CellState::OldBlack)
        return;
    addToRememberedSet(from);
}

inline void Heap::reportExtraMemoryAllocated(size_t size)
{
    if (size > minExtraMemory)
        reportExtraMemoryAllocatedSlowCase(size);
}

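// Extra memory discovered while visiting a cell is accumulated with a CAS loop so that
// concurrent visitors can update the shared counter without locking. Memory hanging off
// cells that were already OldGrey was counted by a previous collection, so an eden
// collection skips it to avoid double-counting.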
inline void Heap::reportExtraMemoryVisited(CellState dataBeforeVisiting, size_t size)
{
    // We don't want to double-count the extra memory that was reported in previous collections.
    if (operationInProgress() == EdenCollection && dataBeforeVisiting == CellState::OldGrey)
        return;

    size_t* counter = &m_extraMemorySize;

    for (;;) {
        size_t oldSize = *counter;
        if (WTF::weakCompareAndSwap(counter, oldSize, oldSize + size))
            return;
    }
}

#if ENABLE(RESOURCE_USAGE)
inline void Heap::reportExternalMemoryVisited(CellState dataBeforeVisiting, size_t size)
{
    // We don't want to double-count the external memory that was reported in previous collections.
    if (operationInProgress() == EdenCollection && dataBeforeVisiting == CellState::OldGrey)
        return;

    size_t* counter = &m_externalMemorySize;

    for (;;) {
        size_t oldSize = *counter;
        if (WTF::weakCompareAndSwap(counter, oldSize, oldSize + size))
            return;
    }
}
#endif

inline void Heap::deprecatedReportExtraMemory(size_t size)
{
    if (size > minExtraMemory)
        deprecatedReportExtraMemorySlowCase(size);
}

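// forEachCodeBlock() wraps the functor in a scopedLambda so that forEachCodeBlockImpl()
// can accept it without being templated on the functor type; forEachProtectedCell()
// visits every protected value and every strong handle.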
template<typename Functor> inline void Heap::forEachCodeBlock(const Functor& func)
{
    forEachCodeBlockImpl(scopedLambdaRef<bool(CodeBlock*)>(func));
}

template<typename Functor> inline void Heap::forEachProtectedCell(const Functor& functor)
{
    for (auto& pair : m_protectedValues)
        functor(pair.key);
    m_handleSet.forEachStrongHandle(functor, m_protectedValues);
}

inline void* Heap::allocateWithDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes with normal destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithDestructor(bytes);
}

inline void* Heap::allocateWithoutDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes without destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithoutDestructor(bytes);
}

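// Typed allocation helpers: classes that need destruction are routed to the destructor
// subspace/allocator, everything else to the destructor-free one. The assertion enforces
// the invariant JSCell::classInfo() relies on: a destructible class is either immortal
// or derives from JSDestructibleObject.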
template<typename ClassType>
inline void* Heap::allocateObjectOfType(size_t bytes)
{
    // JSCell::classInfo() expects objects allocated with a normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return allocateWithDestructor(bytes);
    return allocateWithoutDestructor(bytes);
}

template<typename ClassType>
inline MarkedSpace::Subspace& Heap::subspaceForObjectOfType()
{
    // JSCell::classInfo() expects objects allocated with a normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return subspaceForObjectDestructor();
    return subspaceForObjectWithoutDestructor();
}

template<typename ClassType>
inline MarkedAllocator* Heap::allocatorForObjectOfType(size_t bytes)
{
    // JSCell::classInfo() expects objects allocated with a normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    MarkedAllocator* result;
    if (ClassType::needsDestruction)
        result = allocatorForObjectWithDestructor(bytes);
    else
        result = allocatorForObjectWithoutDestructor(bytes);

    ASSERT(result || !ClassType::info()->isSubClassOf(JSCallee::info()));
    return result;
}

inline void* Heap::allocateAuxiliary(JSCell* intendedOwner, size_t bytes)
{
    void* result = m_objectSpace.allocateAuxiliary(bytes);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes of auxiliary for %p: %p.\n", bytes, intendedOwner, result);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

inline void* Heap::tryAllocateAuxiliary(JSCell* intendedOwner, size_t bytes)
{
    void* result = m_objectSpace.tryAllocateAuxiliary(bytes);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes of auxiliary for %p: %p.\n", bytes, intendedOwner, result);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

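// "Reallocating" auxiliary storage is allocate-and-copy: the old storage is not freed
// here, it simply becomes unreferenced and is left for the collector to reclaim.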
inline void* Heap::tryReallocateAuxiliary(JSCell* intendedOwner, void* oldBase, size_t oldSize, size_t newSize)
{
    void* newBase = tryAllocateAuxiliary(intendedOwner, newSize);
    if (!newBase)
        return nullptr;
    memcpy(newBase, oldBase, oldSize);
    return newBase;
}

inline void Heap::ascribeOwner(JSCell* intendedOwner, void* storage)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC ascribing %p as owner of storage %p.\n", intendedOwner, storage);
#else
    UNUSED_PARAM(intendedOwner);
    UNUSED_PARAM(storage);
#endif
}

#if USE(FOUNDATION)
template <typename T>
inline void Heap::releaseSoon(RetainPtr<T>&& object)
{
    m_delayedReleaseObjects.append(WTFMove(object));
}
#endif

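// The deferral depth counts how many nested scopes (for example DeferGC-style RAII
// helpers) have asked to postpone collection; shouldCollect() declines to run while
// isDeferred() is true, which presumably reflects a non-zero depth.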
inline void Heap::incrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth < 100); // Sanity check to make sure this doesn't get ridiculous.
    m_deferralDepth++;
}

inline void Heap::decrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth >= 1);
    m_deferralDepth--;
}

inline bool Heap::collectIfNecessaryOrDefer()
{
    if (!shouldCollect())
        return false;

    collect();
    return true;
}

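// Used by decrementDeferralDepthAndGCIfNeeded() when deferGCShouldCollectWithProbability()
// is enabled: with probability deferGCProbability a collection is forced at this point,
// which looks intended to stress-test code that assumes GC cannot happen across a
// deferral boundary; otherwise the usual heuristics decide.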
inline void Heap::collectAccordingToDeferGCProbability()
{
    if (isDeferred() || !m_isSafeToCollect || m_operationInProgress != NoOperation)
        return;

    if (randomNumber() < Options::deferGCProbability()) {
        collect();
        return;
    }

    // Even if the coin flip told us not to GC, we may still GC
    // according to our memory pressure markers.
    collectIfNecessaryOrDefer();
}

inline void Heap::decrementDeferralDepthAndGCIfNeeded()
{
    decrementDeferralDepth();
    if (UNLIKELY(Options::deferGCShouldCollectWithProbability()))
        collectAccordingToDeferGCProbability();
    else
        collectIfNecessaryOrDefer();
}

inline HashSet<MarkedArgumentBuffer*>& Heap::markListSet()
{
    if (!m_markListSet)
        m_markListSet = std::make_unique<HashSet<MarkedArgumentBuffer*>>();
    return *m_markListSet;
}

inline void Heap::registerWeakGCMap(void* weakGCMap, std::function<void()> pruningCallback)
{
    m_weakGCMaps.add(weakGCMap, WTFMove(pruningCallback));
}

inline void Heap::unregisterWeakGCMap(void* weakGCMap)
{
    m_weakGCMaps.remove(weakGCMap);
}

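// Block-level accounting, meaningful only with ENABLE(RESOURCE_USAGE): tracks the total
// block capacity the heap currently holds, presumably for resource-usage reporting.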
inline void Heap::didAllocateBlock(size_t capacity)
{
#if ENABLE(RESOURCE_USAGE)
    m_blockBytesAllocated += capacity;
#else
    UNUSED_PARAM(capacity);
#endif
}

inline void Heap::didFreeBlock(size_t capacity)
{
#if ENABLE(RESOURCE_USAGE)
    m_blockBytesAllocated -= capacity;
#else
    UNUSED_PARAM(capacity);
#endif
}

} // namespace JSC

#endif // HeapInlines_h