[JSC] Weak should only accept cell pointees.
Source/JavaScriptCore/heap/MarkedSpace.h
/*
 *  Copyright (C) 1999-2000 Harri Porten (porten@kde.org)
 *  Copyright (C) 2001 Peter Kelly (pmk@post.com)
 *  Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2011 Apple Inc. All rights reserved.
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 *
 */

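// MarkedSpace is the part of the JSC heap that holds garbage-collected cells.
// It owns a set of MarkedBlocks, hands out cell storage through per-size-class
// MarkedAllocators, and keeps two parallel Subspaces: one for cells that need
// a destructor run and one for cells that don't.
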
#ifndef MarkedSpace_h
#define MarkedSpace_h

#include "MachineStackMarker.h"
#include "MarkedAllocator.h"
#include "MarkedBlock.h"
#include "MarkedBlockSet.h"
#include <array>
#include <wtf/Bitmap.h>
#include <wtf/DoublyLinkedList.h>
#include <wtf/HashSet.h>
#include <wtf/Noncopyable.h>
#include <wtf/RetainPtr.h>
#include <wtf/Vector.h>

namespace JSC {

class Heap;
class HeapIterationScope;
class JSCell;
class LiveObjectIterator;
class LLIntOffsetsExtractor;
class WeakGCHandle;
class SlotVisitor;

struct ClearMarks : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block)
    {
        block->clearMarks();
    }
};

struct Sweep : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->sweep(); }
};

struct ZombifySweep : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block)
    {
        if (block->needsSweeping())
            block->sweep();
    }
};

struct MarkCount : MarkedBlock::CountFunctor {
    void operator()(MarkedBlock* block) { count(block->markCount()); }
};

struct Size : MarkedBlock::CountFunctor {
    void operator()(MarkedBlock* block) { count(block->markCount() * block->cellSize()); }
};
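
// The structs above follow the forEachBlock functor protocol: VoidFunctor for
// side effects, CountFunctor to accumulate a size_t via count(). A new
// whole-heap query can be written the same way. For example (a hypothetical
// functor, not part of this file), total block capacity could be summed with:
//
//     struct Capacity : MarkedBlock::CountFunctor {
//         void operator()(MarkedBlock* block) { count(block->capacity()); }
//     };
//
//     size_t totalCapacity = markedSpace.forEachBlock<Capacity>();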

class MarkedSpace {
    WTF_MAKE_NONCOPYABLE(MarkedSpace);
public:
    // Sizes up to preciseCutoff bytes, one size class per atomSize step.
    static const size_t preciseStep = MarkedBlock::atomSize;
    static const size_t preciseCutoff = 128;
    static const size_t preciseCount = preciseCutoff / preciseStep;

    // Sizes up to impreciseCutoff (half a block), one size class per impreciseStep bytes.
    static const size_t impreciseStep = 2 * preciseCutoff;
    static const size_t impreciseCutoff = MarkedBlock::blockSize / 2;
    static const size_t impreciseCount = impreciseCutoff / impreciseStep;
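
    // Worked example, assuming MarkedBlock::atomSize is 16 bytes and
    // MarkedBlock::blockSize is 16KB (both are defined in MarkedBlock.h, so
    // the concrete numbers here are illustrative): preciseStep = 16 and
    // preciseCount = 128 / 16 = 8 precise size classes; impreciseStep = 256,
    // impreciseCutoff = 8192, and impreciseCount = 8192 / 256 = 32 imprecise
    // size classes. Requests larger than impreciseCutoff fall through to the
    // largeAllocator. Each Subspace below owns one allocator per size class.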

    struct Subspace {
        std::array<MarkedAllocator, preciseCount> preciseAllocators;
        std::array<MarkedAllocator, impreciseCount> impreciseAllocators;
        MarkedAllocator largeAllocator;
    };

    MarkedSpace(Heap*);
    ~MarkedSpace();
    void lastChanceToFinalize();

    MarkedAllocator& allocatorFor(size_t);
    MarkedAllocator& destructorAllocatorFor(size_t);
    void* allocateWithDestructor(size_t);
    void* allocateWithoutDestructor(size_t);

    Subspace& subspaceForObjectsWithDestructor() { return m_destructorSpace; }
    Subspace& subspaceForObjectsWithoutDestructor() { return m_normalSpace; }

    void resetAllocators();

    void visitWeakSets(HeapRootVisitor&);
    void reapWeakSets();

    MarkedBlockSet& blocks() { return m_blocks; }

    void willStartIterating();
    bool isIterating() { return m_isIterating; }
    void didFinishIterating();

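    // Roughly: stopping allocation makes each allocator hand its in-flight
    // free list back to its block, leaving every block in a coherent,
    // iterable state. This happens when the world is stopped (e.g. at the
    // start of a collection); resumeAllocating() is the escape hatch for the
    // case where we stopped but no collection actually ran.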
    void stopAllocating();
    void resumeAllocating(); // If we just stopped allocation but we didn't do a collection, we need to resume allocation.

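    // Whole-heap cell iteration requires an active HeapIterationScope: the
    // forEachLiveCell / forEachDeadCell definitions below ASSERT(isIterating()),
    // which willStartIterating() / didFinishIterating() toggle.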
    typedef HashSet<MarkedBlock*>::iterator BlockIterator;

    template<typename Functor> typename Functor::ReturnType forEachLiveCell(HeapIterationScope&, Functor&);
    template<typename Functor> typename Functor::ReturnType forEachLiveCell(HeapIterationScope&);
    template<typename Functor> typename Functor::ReturnType forEachDeadCell(HeapIterationScope&, Functor&);
    template<typename Functor> typename Functor::ReturnType forEachDeadCell(HeapIterationScope&);
    template<typename Functor> typename Functor::ReturnType forEachBlock(Functor&);
    template<typename Functor> typename Functor::ReturnType forEachBlock();

    void shrink();
    void freeBlock(MarkedBlock*);
    void freeOrShrinkBlock(MarkedBlock*);

    void didAddBlock(MarkedBlock*);
    void didConsumeFreeList(MarkedBlock*);
    void didAllocateInBlock(MarkedBlock*);

    void clearMarks();
    void clearNewlyAllocated();
    void sweep();
    void zombifySweep();
    size_t objectCount();
    size_t size();
    size_t capacity();

    bool isPagedOut(double deadline);

#if USE(CF)
    template<typename T> void releaseSoon(RetainPtr<T>&&);
#endif

    const Vector<MarkedBlock*>& blocksWithNewObjects() const { return m_blocksWithNewObjects; }

private:
    friend class LLIntOffsetsExtractor;
    friend class JIT;

    template<typename Functor> void forEachAllocator(Functor&);
    template<typename Functor> void forEachAllocator();

    Subspace m_destructorSpace;
    Subspace m_normalSpace;

    Heap* m_heap;
    size_t m_capacity;
    bool m_isIterating;
    MarkedBlockSet m_blocks;
    Vector<MarkedBlock*> m_blocksWithNewObjects;
};

template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachLiveCell(HeapIterationScope&, Functor& functor)
{
    ASSERT(isIterating());
    BlockIterator end = m_blocks.set().end();
    for (BlockIterator it = m_blocks.set().begin(); it != end; ++it) {
        if ((*it)->forEachLiveCell(functor) == IterationStatus::Done)
            break;
    }
    return functor.returnValue();
}

template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachLiveCell(HeapIterationScope& scope)
{
    Functor functor;
    return forEachLiveCell(scope, functor);
}

template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachDeadCell(HeapIterationScope&, Functor& functor)
{
    ASSERT(isIterating());
    BlockIterator end = m_blocks.set().end();
    for (BlockIterator it = m_blocks.set().begin(); it != end; ++it) {
        if ((*it)->forEachDeadCell(functor) == IterationStatus::Done)
            break;
    }
    return functor.returnValue();
}

template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachDeadCell(HeapIterationScope& scope)
{
    Functor functor;
    return forEachDeadCell(scope, functor);
}

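// Size-class routing. The (bytes - 1) / step arithmetic rounds a request up
// to the next size-class boundary. For illustration, if MarkedBlock::atomSize
// is 16 (the value lives in MarkedBlock.h, so the numbers are an assumption
// here):
//
//     allocatorFor(16)  -> preciseAllocators[0]    ((16 - 1) / 16 == 0)
//     allocatorFor(24)  -> preciseAllocators[1]    ((24 - 1) / 16 == 1, i.e. a 32-byte cell)
//     allocatorFor(200) -> impreciseAllocators[0]  ((200 - 1) / 256 == 0)
//
// Anything above impreciseCutoff goes to the largeAllocator.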
inline MarkedAllocator& MarkedSpace::allocatorFor(size_t bytes)
{
    ASSERT(bytes);
    if (bytes <= preciseCutoff)
        return m_normalSpace.preciseAllocators[(bytes - 1) / preciseStep];
    if (bytes <= impreciseCutoff)
        return m_normalSpace.impreciseAllocators[(bytes - 1) / impreciseStep];
    return m_normalSpace.largeAllocator;
}

inline MarkedAllocator& MarkedSpace::destructorAllocatorFor(size_t bytes)
{
    ASSERT(bytes);
    if (bytes <= preciseCutoff)
        return m_destructorSpace.preciseAllocators[(bytes - 1) / preciseStep];
    if (bytes <= impreciseCutoff)
        return m_destructorSpace.impreciseAllocators[(bytes - 1) / impreciseStep];
    return m_destructorSpace.largeAllocator;
}

inline void* MarkedSpace::allocateWithoutDestructor(size_t bytes)
{
    return allocatorFor(bytes).allocate(bytes);
}

inline void* MarkedSpace::allocateWithDestructor(size_t bytes)
{
    return destructorAllocatorFor(bytes).allocate(bytes);
}
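
// Both entry points return raw cell storage; the caller constructs the cell
// into it with placement new. A sketch (MyCell and this call site are
// hypothetical; real allocations go through the Heap's entry points):
//
//     void* storage = markedSpace.allocateWithDestructor(sizeof(MyCell));
//     MyCell* cell = new (storage) MyCell(vm, structure);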

template <typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachBlock(Functor& functor)
{
    for (size_t i = 0; i < preciseCount; ++i)
        m_normalSpace.preciseAllocators[i].forEachBlock(functor);
    for (size_t i = 0; i < impreciseCount; ++i)
        m_normalSpace.impreciseAllocators[i].forEachBlock(functor);
    m_normalSpace.largeAllocator.forEachBlock(functor);

    for (size_t i = 0; i < preciseCount; ++i)
        m_destructorSpace.preciseAllocators[i].forEachBlock(functor);
    for (size_t i = 0; i < impreciseCount; ++i)
        m_destructorSpace.impreciseAllocators[i].forEachBlock(functor);
    m_destructorSpace.largeAllocator.forEachBlock(functor);

    return functor.returnValue();
}

template <typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachBlock()
{
    Functor functor;
    return forEachBlock(functor);
}

inline void MarkedSpace::didAddBlock(MarkedBlock* block)
{
    m_capacity += block->capacity();
    m_blocks.add(block);
}

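// When generational GC is enabled, every block that hands out new objects is
// recorded in m_blocksWithNewObjects so that a later pass (e.g. clearing
// newly-allocated bits after a collection) can visit just those blocks
// instead of the whole heap. Without GGC the bookkeeping is skipped.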
inline void MarkedSpace::didAllocateInBlock(MarkedBlock* block)
{
#if ENABLE(GGC)
    m_blocksWithNewObjects.append(block);
#else
    UNUSED_PARAM(block);
#endif
}

inline size_t MarkedSpace::objectCount()
{
    return forEachBlock<MarkCount>();
}

inline size_t MarkedSpace::size()
{
    return forEachBlock<Size>();
}

inline size_t MarkedSpace::capacity()
{
    return m_capacity;
}

} // namespace JSC

#endif // MarkedSpace_h