/*
 * Copyright (C) 1999-2000 Harri Porten (porten@kde.org)
 * Copyright (C) 2001 Peter Kelly (pmk@post.com)
 * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 */
#ifndef Collector_h
#define Collector_h

#include "AlignedMemoryAllocator.h"
#include "GCHandle.h"
#include "JSValue.h"
#include "MarkStack.h"
#include <stddef.h>
#include <stdint.h>
#include <string.h>
#include <wtf/Bitmap.h>
#include <wtf/FixedArray.h>
#include <wtf/HashCountedSet.h>
#include <wtf/HashSet.h>
#include <wtf/Noncopyable.h>
#include <wtf/OwnPtr.h>
#include <wtf/PageAllocation.h>
#include <wtf/PassOwnPtr.h>
#include <wtf/StdLibExtras.h>
#include <wtf/Threading.h>
40 #if ENABLE(JSC_MULTIPLE_THREADS)
44 #define ASSERT_CLASS_FITS_IN_CELL(class) COMPILE_ASSERT(sizeof(class) <= CELL_SIZE, class_fits_in_cell)
49 class GCActivityCallback;
53 class MarkedArgumentBuffer;
56 enum OperationInProgress { NoOperation, Allocation, Collection };
58 class LiveObjectIterator;
60 #if OS(WINCE) || OS(SYMBIAN) || PLATFORM(BREWMP)
61 const size_t BLOCK_SIZE = 64 * 1024; // 64k
63 const size_t BLOCK_SIZE = 256 * 1024; // 256k
66 typedef AlignedMemoryAllocator<BLOCK_SIZE> CollectorBlockAllocator;
67 typedef AlignedMemory<BLOCK_SIZE> AlignedCollectorBlock;
69 struct CollectorHeap {
72 AlignedCollectorBlock* blocks;
82 OperationInProgress operationInProgress;
84 CollectorBlock* collectorBlock(size_t index) const
86 return static_cast<CollectorBlock*>(blocks[index].base());
90 class Heap : public Noncopyable {
96 void* allocateNumber(size_t);
97 void* allocate(size_t);
99 bool isBusy(); // true if an allocation or collection is in progress
100 void collectAllGarbage();
102 GCActivityCallback* activityCallback();
103 void setActivityCallback(PassOwnPtr<GCActivityCallback>);
105 static const size_t minExtraCost = 256;
106 static const size_t maxExtraCost = 1024 * 1024;
108 void reportExtraMemoryCost(size_t cost);
110 size_t objectCount() const;
115 Statistics statistics() const;
118 void protect(JSValue);
119 // Returns true if the value is no longer protected by any protect pointers
120 // (though it may still be alive due to heap/stack references).
121 bool unprotect(JSValue);
123 static Heap* heap(JSValue); // 0 for immediate values
124 static Heap* heap(JSCell*);
126 size_t globalObjectCount();
127 size_t protectedObjectCount();
128 size_t protectedGlobalObjectCount();
129 HashCountedSet<const char*>* protectedObjectTypeCounts();
130 HashCountedSet<const char*>* objectTypeCounts();
132 void registerThread(); // Only needs to be called by clients that can use the same heap from multiple threads.
134 static bool isCellMarked(const JSCell*);
135 static bool checkMarkCell(const JSCell*);
136 static void markCell(JSCell*);
138 WeakGCHandle* addWeakGCHandle(JSCell*);
140 void markConservatively(MarkStack&, void* start, void* end);
142 HashSet<MarkedArgumentBuffer*>& markListSet() { if (!m_markListSet) m_markListSet = new HashSet<MarkedArgumentBuffer*>; return *m_markListSet; }
144 JSGlobalData* globalData() const { return m_globalData; }
145 static bool isNumber(JSCell*);
147 LiveObjectIterator primaryHeapBegin();
148 LiveObjectIterator primaryHeapEnd();
153 static CollectorBlock* cellBlock(const JSCell*);
154 static size_t cellOffset(const JSCell*);
156 friend class JSGlobalData;
160 NEVER_INLINE CollectorBlock* allocateBlock();
161 NEVER_INLINE void freeBlock(size_t);
164 void growBlocks(size_t neededBlocks);
165 void shrinkBlocks(size_t neededBlocks);
166 void clearMarkBits();
167 void clearMarkBits(CollectorBlock*);
168 size_t markedCells(size_t startBlock = 0, size_t startCell = 0) const;
170 void recordExtraCost(size_t);
172 void addToStatistics(Statistics&) const;
175 void markProtectedObjects(MarkStack&);
176 void markCurrentThreadConservatively(MarkStack&);
177 void markCurrentThreadConservativelyInternal(MarkStack&);
178 void markOtherThreadConservatively(MarkStack&, Thread*);
179 void markStackObjectsConservatively(MarkStack&);
181 void updateWeakGCHandles();
182 WeakGCHandlePool* weakGCHandlePool(size_t index);
184 typedef HashCountedSet<JSCell*> ProtectCountSet;
186 CollectorHeap m_heap;
188 ProtectCountSet m_protectedValues;
189 WTF::Vector<AlignedMemory<WeakGCHandlePool::poolSize> > m_weakGCHandlePools;
191 HashSet<MarkedArgumentBuffer*>* m_markListSet;
193 OwnPtr<GCActivityCallback> m_activityCallback;
195 #if ENABLE(JSC_MULTIPLE_THREADS)
196 void makeUsableFromMultipleThreads();
198 static void unregisterThread(void*);
199 void unregisterThread();
201 Mutex m_registeredThreadsMutex;
202 Thread* m_registeredThreads;
203 pthread_key_t m_currentThreadRegistrar;
206 // Allocates collector blocks with correct alignment
207 CollectorBlockAllocator m_blockallocator;
208 WeakGCHandlePool::Allocator m_weakGCHandlePoolAllocator;
210 JSGlobalData* m_globalData;
213 // tunable parameters
215 const size_t BLOCK_OFFSET_MASK = BLOCK_SIZE - 1;
216 const size_t BLOCK_MASK = ~BLOCK_OFFSET_MASK;
217 const size_t MINIMUM_CELL_SIZE = 64;
218 const size_t CELL_ARRAY_LENGTH = (MINIMUM_CELL_SIZE / sizeof(double)) + (MINIMUM_CELL_SIZE % sizeof(double) != 0 ? sizeof(double) : 0);
219 const size_t CELL_SIZE = CELL_ARRAY_LENGTH * sizeof(double);
220 const size_t SMALL_CELL_SIZE = CELL_SIZE / 2;
221 const size_t CELL_MASK = CELL_SIZE - 1;
222 const size_t CELL_ALIGN_MASK = ~CELL_MASK;
223 const size_t CELLS_PER_BLOCK = (BLOCK_SIZE - sizeof(Heap*)) * 8 * CELL_SIZE / (8 * CELL_SIZE + 1) / CELL_SIZE; // one bitmap byte can represent 8 cells.
225 const size_t BITMAP_SIZE = (CELLS_PER_BLOCK + 7) / 8;
226 const size_t BITMAP_WORDS = (BITMAP_SIZE + 3) / sizeof(uint32_t);
228 struct CollectorBitmap {
229 FixedArray<uint32_t, BITMAP_WORDS> bits;
230 bool get(size_t n) const { return !!(bits[n >> 5] & (1 << (n & 0x1F))); }
231 void set(size_t n) { bits[n >> 5] |= (1 << (n & 0x1F)); }
232 bool getset(size_t n)
234 unsigned i = (1 << (n & 0x1F));
235 uint32_t& b = bits[n >> 5];
240 void clear(size_t n) { bits[n >> 5] &= ~(1 << (n & 0x1F)); }
241 void clearAll() { memset(bits.data(), 0, sizeof(bits)); }
242 ALWAYS_INLINE void advanceToNextPossibleFreeCell(size_t& startCell)
244 if (!~bits[startCell >> 5])
245 startCell = (startCell & (~0x1F)) + 32;
249 size_t count(size_t startCell = 0)
252 for ( ; (startCell & 0x1F) != 0; ++startCell) {
256 for (size_t i = startCell >> 5; i < BITMAP_WORDS; ++i)
257 result += WTF::bitCount(bits[i]);
260 size_t isEmpty() // Much more efficient than testing count() == 0.
262 for (size_t i = 0; i < BITMAP_WORDS; ++i)
269 struct CollectorCell {
270 FixedArray<double, CELL_ARRAY_LENGTH> memory;
273 class CollectorBlock {
275 FixedArray<CollectorCell, CELLS_PER_BLOCK> cells;
276 CollectorBitmap marked;
280 struct HeapConstants {
281 static const size_t cellSize = CELL_SIZE;
282 static const size_t cellsPerBlock = CELLS_PER_BLOCK;
283 typedef CollectorCell Cell;
284 typedef CollectorBlock Block;
287 inline CollectorBlock* Heap::cellBlock(const JSCell* cell)
289 return reinterpret_cast<CollectorBlock*>(reinterpret_cast<uintptr_t>(cell) & BLOCK_MASK);
292 inline size_t Heap::cellOffset(const JSCell* cell)
294 return (reinterpret_cast<uintptr_t>(cell) & BLOCK_OFFSET_MASK) / CELL_SIZE;
297 inline bool Heap::isCellMarked(const JSCell* cell)
299 return cellBlock(cell)->marked.get(cellOffset(cell));
302 inline bool Heap::checkMarkCell(const JSCell* cell)
304 return cellBlock(cell)->marked.getset(cellOffset(cell));
307 inline void Heap::markCell(JSCell* cell)
309 cellBlock(cell)->marked.set(cellOffset(cell));
312 inline void Heap::reportExtraMemoryCost(size_t cost)
314 if (cost > minExtraCost)
315 recordExtraCost(cost);
318 inline void* Heap::allocateNumber(size_t s)
320 if (void* result = m_heap.nextNumber) {
321 m_heap.nextNumber = 0;
325 void* result = allocate(s);
326 m_heap.nextNumber = static_cast<char*>(result) + (CELL_SIZE / 2);
331 inline WeakGCHandlePool* Heap::weakGCHandlePool(size_t index)
333 return static_cast<WeakGCHandlePool*>(m_weakGCHandlePools[index].base());
337 #endif /* Collector_h */