/*
 * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2007 Eric Seidel <eric@webkit.org>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
22 #include "MarkedSpace.h"
24 #include "CollectorHeapIterator.h"
26 #include "JSGlobalData.h"
// Tuning constants for the collector-block vector and allocation cadence.
const size_t GROWTH_FACTOR = 2;                 // geometric growth factor for m_heap.blocks
const size_t LOW_WATER_FACTOR = 4;              // shrink vector when usage < capacity / 4
const size_t ALLOCATIONS_PER_COLLECTION = 3600; // headroom target used by resizeBlocks()

// This value has to be a macro to be used in max() without introducing
// a PIC branch in Mach-O binaries, see <rdar://problem/5971391>.
#define MIN_ARRAY_SIZE (static_cast<size_t>(14))
44 MarkedSpace::MarkedSpace(JSGlobalData* globalData)
45 : m_globalData(globalData)
47 memset(&m_heap, 0, sizeof(CollectorHeap));
51 void MarkedSpace::destroy()
53 clearMarkBits(); // Make sure weak pointers appear dead during destruction.
55 while (m_heap.usedBlocks)
57 fastFree(m_heap.blocks);
59 memset(&m_heap, 0, sizeof(CollectorHeap));
62 NEVER_INLINE MarkedBlock* MarkedSpace::allocateBlock()
64 PageAllocationAligned allocation = PageAllocationAligned::allocate(BLOCK_SIZE, BLOCK_SIZE, OSAllocator::JSGCHeapPages);
65 MarkedBlock* block = static_cast<MarkedBlock*>(allocation.base());
71 block->heap = &globalData()->heap;
74 Structure* dummyMarkableCellStructure = globalData()->dummyMarkableCellStructure.get();
75 for (size_t i = 0; i < HeapConstants::cellsPerBlock; ++i)
76 new (&block->cells[i]) JSCell(dummyMarkableCellStructure);
78 // Add block to blocks vector.
80 size_t numBlocks = m_heap.numBlocks;
81 if (m_heap.usedBlocks == numBlocks) {
82 static const size_t maxNumBlocks = ULONG_MAX / sizeof(PageAllocationAligned) / GROWTH_FACTOR;
83 if (numBlocks > maxNumBlocks)
85 numBlocks = max(MIN_ARRAY_SIZE, numBlocks * GROWTH_FACTOR);
86 m_heap.numBlocks = numBlocks;
87 m_heap.blocks = static_cast<PageAllocationAligned*>(fastRealloc(m_heap.blocks, numBlocks * sizeof(PageAllocationAligned)));
89 m_heap.blocks[m_heap.usedBlocks++] = allocation;
94 NEVER_INLINE void MarkedSpace::freeBlock(size_t block)
96 ObjectIterator it(m_heap, block, 0);
97 ObjectIterator end(m_heap, block + 1, 0);
98 for ( ; it != end; ++it)
100 m_heap.blocks[block].deallocate();
102 // swap with the last block so we compact as we go
103 m_heap.blocks[block] = m_heap.blocks[m_heap.usedBlocks - 1];
106 if (m_heap.numBlocks > MIN_ARRAY_SIZE && m_heap.usedBlocks < m_heap.numBlocks / LOW_WATER_FACTOR) {
107 m_heap.numBlocks = m_heap.numBlocks / GROWTH_FACTOR;
108 m_heap.blocks = static_cast<PageAllocationAligned*>(fastRealloc(m_heap.blocks, m_heap.numBlocks * sizeof(PageAllocationAligned)));
// Allocates one fixed-size cell, recycling the next unmarked ("garbage")
// cell if one exists, otherwise growing the heap by a block while under
// the high water mark.
// NOTE(review): this block is a garbled extraction — each line still
// carries a stray original-file line number, and several statements
// (the do-loop openings, the fast-path destructor call and return, the
// nextCell reset, and the final slow-path return) appear to be elided.
// Left byte-identical; reconstruct from upstream before compiling.
112 void* MarkedSpace::allocate(size_t s)
// Must be called from the thread that owns this global data's identifier table.
114 ASSERT(globalData()->identifierTable == wtfThreadData().currentIdentifierTable());
// Local aliases for the collector block/cell layout types.
115 typedef HeapConstants::Block Block;
116 typedef HeapConstants::Cell Cell;
// The JS lock must be held while mutating the heap.
118 ASSERT(JSLock::lockCount() > 0);
119 ASSERT(JSLock::currentThreadIsHoldingLock());
// Cells are a single fixed size; s is only sanity-checked in debug builds.
120 ASSERT_UNUSED(s, s <= HeapConstants::cellSize);
122 // Fast case: find the next garbage cell and recycle it.
125 ASSERT(m_heap.nextBlock < m_heap.usedBlocks);
126 Block* block = m_heap.collectorBlock(m_heap.nextBlock);
128 ASSERT(m_heap.nextCell < HeapConstants::cellsPerBlock);
129 if (!block->marked.get(m_heap.nextCell)) { // Always false for the last cell in the block
130 Cell* cell = &block->cells[m_heap.nextCell];
132 JSCell* imp = reinterpret_cast<JSCell*>(cell);
// presumably imp->~JSCell() ran here and the cell was returned — TODO confirm against upstream.
// Skip ahead to the next possibly-free cell in this block's mark bitmap.
138 m_heap.nextCell = block->marked.nextPossiblyUnset(m_heap.nextCell);
139 } while (m_heap.nextCell != HeapConstants::cellsPerBlock);
// A fully scanned block counts toward the allocation water mark.
141 m_heap.waterMark += BLOCK_SIZE;
142 } while (++m_heap.nextBlock != m_heap.usedBlocks);
// Slow case: below the high water mark, grow by one block and hand out a
// cell from it; otherwise the caller is expected to trigger a collection.
144 if (m_heap.waterMark < m_heap.highWaterMark)
145 return &allocateBlock()->cells[m_heap.nextCell++];
150 void MarkedSpace::resizeBlocks()
152 size_t usedCellCount = markedCells();
153 size_t minCellCount = usedCellCount + max(ALLOCATIONS_PER_COLLECTION, usedCellCount);
154 size_t minBlockCount = (minCellCount + HeapConstants::cellsPerBlock - 1) / HeapConstants::cellsPerBlock;
156 size_t maxCellCount = 1.25f * minCellCount;
157 size_t maxBlockCount = (maxCellCount + HeapConstants::cellsPerBlock - 1) / HeapConstants::cellsPerBlock;
159 if (m_heap.usedBlocks < minBlockCount)
160 growBlocks(minBlockCount);
161 else if (m_heap.usedBlocks > maxBlockCount)
162 shrinkBlocks(maxBlockCount);
165 void MarkedSpace::growBlocks(size_t neededBlocks)
167 ASSERT(m_heap.usedBlocks < neededBlocks);
168 while (m_heap.usedBlocks < neededBlocks)
172 void MarkedSpace::shrinkBlocks(size_t neededBlocks)
174 ASSERT(m_heap.usedBlocks > neededBlocks);
176 // Clear the always-on last bit, so isEmpty() isn't fooled by it.
177 for (size_t i = 0; i < m_heap.usedBlocks; ++i)
178 m_heap.collectorBlock(i)->marked.clear(HeapConstants::cellsPerBlock - 1);
180 for (size_t i = 0; i != m_heap.usedBlocks && m_heap.usedBlocks != neededBlocks; ) {
181 if (m_heap.collectorBlock(i)->marked.isEmpty()) {
187 // Reset the always-on last bit.
188 for (size_t i = 0; i < m_heap.usedBlocks; ++i)
189 m_heap.collectorBlock(i)->marked.set(HeapConstants::cellsPerBlock - 1);
192 bool MarkedSpace::containsSlowCase(const void* x)
194 uintptr_t xAsBits = reinterpret_cast<uintptr_t>(x);
195 xAsBits &= CELL_ALIGN_MASK;
197 uintptr_t offset = xAsBits & BLOCK_OFFSET_MASK;
198 const size_t lastCellOffset = sizeof(CollectorCell) * (CELLS_PER_BLOCK - 1);
199 if (offset > lastCellOffset)
202 MarkedBlock* blockAddr = reinterpret_cast<MarkedBlock*>(xAsBits - offset);
203 size_t usedBlocks = m_heap.usedBlocks;
204 for (size_t block = 0; block < usedBlocks; block++) {
205 if (m_heap.collectorBlock(block) != blockAddr)
208 // x is a pointer into the heap. Now, verify that the cell it
209 // points to is live. (If the cell is dead, we must not mark it,
210 // since that would revive it in a zombie state.)
211 if (block < m_heap.nextBlock)
214 size_t cellOffset = offset / CELL_SIZE;
216 if (block == m_heap.nextBlock && cellOffset < m_heap.nextCell)
219 return blockAddr->marked.get(cellOffset);
225 void MarkedSpace::clearMarkBits()
227 for (size_t i = 0; i < m_heap.usedBlocks; ++i)
228 clearMarkBits(m_heap.collectorBlock(i));
231 void MarkedSpace::clearMarkBits(MarkedBlock* block)
233 // allocate assumes that the last cell in every block is marked.
234 block->marked.clearAll();
235 block->marked.set(HeapConstants::cellsPerBlock - 1);
238 size_t MarkedSpace::markedCells(size_t startBlock, size_t startCell) const
240 ASSERT(startBlock <= m_heap.usedBlocks);
241 ASSERT(startCell < HeapConstants::cellsPerBlock);
243 if (startBlock >= m_heap.usedBlocks)
247 result += m_heap.collectorBlock(startBlock)->marked.count(startCell);
248 for (size_t i = startBlock + 1; i < m_heap.usedBlocks; ++i)
249 result += m_heap.collectorBlock(i)->marked.count();
// Destroys every dead (unmarked) cell, replacing each with either a dummy
// markable JSCell or, in zombie builds, a JSZombie that stays marked so
// use-after-free is detectable.
// NOTE(review): garbled extraction — lines carry stray original line
// numbers, and the `JSCell* cell = *it;` binding, the destructor calls,
// and the matching #else/#endif of both preprocessor conditionals appear
// to be elided. Left byte-identical; reconstruct from upstream.
254 void MarkedSpace::sweep()
// Outside zombie builds, dead cells are re-seeded with this dummy structure.
256 #if !ENABLE(JSC_ZOMBIES)
257 Structure* dummyMarkableCellStructure = globalData()->dummyMarkableCellStructure.get();
// Iterate only the dead cells across all used blocks.
260 DeadObjectIterator it(m_heap, 0, 0);
261 DeadObjectIterator end(m_heap, m_heap.usedBlocks, 0);
262 for ( ; it != end; ++it) {
264 #if ENABLE(JSC_ZOMBIES)
// Don't re-zombify a cell that is already a zombie.
265 if (!cell->isZombie()) {
266 const ClassInfo* info = cell->classInfo();
// Replace the dead cell with a zombie that remembers its original class.
268 new (cell) JSZombie(info, JSZombie::leakedZombieStructure());
// Keep the zombie marked so it is never recycled by allocate().
269 Heap::markCell(cell);
273 // Callers of sweep assume it's safe to mark any cell in the heap.
274 new (cell) JSCell(dummyMarkableCellStructure);
281 size_t MarkedSpace::objectCount() const
283 return m_heap.nextBlock * HeapConstants::cellsPerBlock // allocated full blocks
284 + m_heap.nextCell // allocated cells in current block
285 + markedCells(m_heap.nextBlock, m_heap.nextCell) // marked cells in remainder of m_heap
286 - m_heap.usedBlocks; // 1 cell per block is a dummy sentinel
289 size_t MarkedSpace::size() const
291 return objectCount() * HeapConstants::cellSize;
294 size_t MarkedSpace::capacity() const
296 return m_heap.usedBlocks * BLOCK_SIZE;
// Resets the allocation cursor and water mark after a collection so the
// next allocate() rescans from the first block.
// NOTE(review): garbled extraction — stray line numbers remain, and the
// body is truncated: the `m_heap.nextCell = 0;` reset, the #endif (and
// whatever the zombie build runs, presumably sweep()), and any trailing
// statements are elided. Left byte-identical; reconstruct from upstream.
299 void MarkedSpace::reset()
302 m_heap.nextBlock = 0;
303 m_heap.waterMark = 0;
304 #if ENABLE(JSC_ZOMBIES)
309 LiveObjectIterator MarkedSpace::primaryHeapBegin()
311 return LiveObjectIterator(m_heap, 0, 0);
314 LiveObjectIterator MarkedSpace::primaryHeapEnd()
316 return LiveObjectIterator(m_heap, m_heap.usedBlocks, 0);