/*
 *  Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 *  Copyright (C) 2007 Eric Seidel <eric@webkit.org>
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#include "config.h"
#include "MarkedSpace.h"

#include "CollectorHeapIterator.h"
#include "JSCell.h"
#include "JSGlobalData.h"
#include "JSLock.h"
#include "JSZombie.h"

using std::max;

namespace JSC {

const size_t GROWTH_FACTOR = 2;
const size_t LOW_WATER_FACTOR = 4;
const size_t ALLOCATIONS_PER_COLLECTION = 3600;
// This value has to be a macro to be used in max() without introducing
// a PIC branch in Mach-O binaries, see <rdar://problem/5971391>.
#define MIN_ARRAY_SIZE (static_cast<size_t>(14))

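// Taken together, these constants define the resizing policy for the blocks
// array: it starts at MIN_ARRAY_SIZE entries, doubles (GROWTH_FACTOR) whenever
// it fills up, and is halved again once fewer than 1 in LOW_WATER_FACTOR of
// its entries are in use. ALLOCATIONS_PER_COLLECTION is the minimum headroom,
// in cells, that resizeBlocks() leaves after a collection.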
MarkedSpace::MarkedSpace(JSGlobalData* globalData)
    : m_globalData(globalData)
{
    memset(&m_heap, 0, sizeof(CollectorHeap));
    allocateBlock();
}

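// Tear-down happens in a fixed order: protected cells are marked so the
// DeadObjectIterator pass below skips them, every unprotected cell is
// destructed, and only then are the protected cells themselves destructed;
// presumably this ordering lets their destructors still reach other
// protected objects.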
void MarkedSpace::destroy(ProtectCountSet& protectedValuesCopy)
{
    clearMarkBits();
    ProtectCountSet::iterator protectedValuesEnd = protectedValuesCopy.end();
    for (ProtectCountSet::iterator it = protectedValuesCopy.begin(); it != protectedValuesEnd; ++it)
        markCell(it->first);

    m_heap.nextCell = 0;
    m_heap.nextBlock = 0;
    DeadObjectIterator it(m_heap, m_heap.nextBlock, m_heap.nextCell);
    DeadObjectIterator end(m_heap, m_heap.usedBlocks);
    for ( ; it != end; ++it)
        (*it)->~JSCell();

    protectedValuesEnd = protectedValuesCopy.end();
    for (ProtectCountSet::iterator it = protectedValuesCopy.begin(); it != protectedValuesEnd; ++it)
        it->first->~JSCell();

    for (size_t block = 0; block < m_heap.usedBlocks; ++block)
        m_heap.blocks[block].deallocate();

    fastFree(m_heap.blocks);

    memset(&m_heap, 0, sizeof(CollectorHeap));
}

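// Blocks are allocated BLOCK_SIZE-aligned so that a cell pointer can be mapped
// back to its enclosing block by masking low bits (see containsSlowCase below).
// Every cell is seeded with a dummy markable JSCell, which keeps conservative
// marking safe: any cell in the heap is always a valid object to mark.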
NEVER_INLINE CollectorBlock* MarkedSpace::allocateBlock()
{
    PageAllocationAligned allocation = PageAllocationAligned::allocate(BLOCK_SIZE, BLOCK_SIZE, OSAllocator::JSGCHeapPages);
    CollectorBlock* block = static_cast<CollectorBlock*>(allocation.base());
    if (!block)
        CRASH();

    // Initialize block.

    block->heap = &globalData()->heap;
    clearMarkBits(block);

    Structure* dummyMarkableCellStructure = globalData()->dummyMarkableCellStructure.get();
    for (size_t i = 0; i < HeapConstants::cellsPerBlock; ++i)
        new (&block->cells[i]) JSCell(dummyMarkableCellStructure);

    // Add block to blocks vector.

    size_t numBlocks = m_heap.numBlocks;
    if (m_heap.usedBlocks == numBlocks) {
        // Cap keeps numBlocks * GROWTH_FACTOR * sizeof(PageAllocationAligned) from overflowing.
        static const size_t maxNumBlocks = ULONG_MAX / sizeof(PageAllocationAligned) / GROWTH_FACTOR;
        if (numBlocks > maxNumBlocks)
            CRASH();
        numBlocks = max(MIN_ARRAY_SIZE, numBlocks * GROWTH_FACTOR);
        m_heap.numBlocks = numBlocks;
        m_heap.blocks = static_cast<PageAllocationAligned*>(fastRealloc(m_heap.blocks, numBlocks * sizeof(PageAllocationAligned)));
    }
    m_heap.blocks[m_heap.usedBlocks++] = allocation;

    return block;
}

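// Within this file, freeBlock is reached only from shrinkBlocks, and only for
// blocks whose mark bitmap is empty; the ObjectIterator pass below still runs
// destructors so the dummy cells left behind by sweep and allocateBlock are
// disposed of properly.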
NEVER_INLINE void MarkedSpace::freeBlock(size_t block)
{
    ObjectIterator it(m_heap, block);
    ObjectIterator end(m_heap, block + 1);
    for ( ; it != end; ++it)
        (*it)->~JSCell();
    m_heap.blocks[block].deallocate();

    // swap with the last block so we compact as we go
    m_heap.blocks[block] = m_heap.blocks[m_heap.usedBlocks - 1];
    m_heap.usedBlocks--;

    if (m_heap.numBlocks > MIN_ARRAY_SIZE && m_heap.usedBlocks < m_heap.numBlocks / LOW_WATER_FACTOR) {
        m_heap.numBlocks = m_heap.numBlocks / GROWTH_FACTOR;
        m_heap.blocks = static_cast<PageAllocationAligned*>(fastRealloc(m_heap.blocks, m_heap.numBlocks * sizeof(PageAllocationAligned)));
    }
}

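// allocate() implements lazy sweeping: rather than maintaining a free list, it
// scans the mark bitmap from the cursor (nextBlock, nextCell) for an unmarked
// cell, runs the dead occupant's destructor, and recycles the memory in place.
// The always-marked sentinel at the end of each block (see clearMarkBits)
// guarantees the last cell is never handed out. A hypothetical call site, for
// illustration only (the real caller lives in Heap, outside this file):
//
//     void* cell = markedSpace.allocate(sizeof(SomeCell));
//     if (!cell) {
//         // Fast path exhausted: collect garbage or allocateBlock(), then retry.
//     }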
void* MarkedSpace::allocate(size_t s)
{
    ASSERT(globalData()->identifierTable == wtfThreadData().currentIdentifierTable());
    typedef HeapConstants::Block Block;
    typedef HeapConstants::Cell Cell;

    ASSERT(JSLock::lockCount() > 0);
    ASSERT(JSLock::currentThreadIsHoldingLock());
    ASSERT_UNUSED(s, s <= HeapConstants::cellSize);

    // Fast case: find the next garbage cell and recycle it.

    do {
        ASSERT(m_heap.nextBlock < m_heap.usedBlocks);
        Block* block = m_heap.collectorBlock(m_heap.nextBlock);
        do {
            ASSERT(m_heap.nextCell < HeapConstants::cellsPerBlock);
            if (!block->marked.get(m_heap.nextCell)) { // Always false for the last cell: its mark bit is the always-on sentinel.
                Cell* cell = &block->cells[m_heap.nextCell];

                JSCell* imp = reinterpret_cast<JSCell*>(cell);
                imp->~JSCell();

                ++m_heap.nextCell;
                return cell;
            }
            block->marked.advanceToNextPossibleFreeCell(m_heap.nextCell);
        } while (m_heap.nextCell != HeapConstants::cellsPerBlock);
        m_heap.nextCell = 0;
    } while (++m_heap.nextBlock != m_heap.usedBlocks);

    return 0;
}

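// resizeBlocks keeps capacity inside a window derived from the live size.
// Worked example (pure arithmetic, not measured data): with 10,000 marked
// cells, minCellCount = 10,000 + max(3600, 10,000) = 20,000 and maxCellCount
// = 1.25 * 20,000 = 25,000, so the heap is sized to hold roughly 2x to 2.5x
// the surviving cells, rounded up to whole blocks.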
void MarkedSpace::resizeBlocks()
{
    size_t usedCellCount = markedCells();
    size_t minCellCount = usedCellCount + max(ALLOCATIONS_PER_COLLECTION, usedCellCount);
    size_t minBlockCount = (minCellCount + HeapConstants::cellsPerBlock - 1) / HeapConstants::cellsPerBlock;

    size_t maxCellCount = 1.25f * minCellCount;
    size_t maxBlockCount = (maxCellCount + HeapConstants::cellsPerBlock - 1) / HeapConstants::cellsPerBlock;

    if (m_heap.usedBlocks < minBlockCount)
        growBlocks(minBlockCount);
    else if (m_heap.usedBlocks > maxBlockCount)
        shrinkBlocks(maxBlockCount);
}

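// growBlocks and shrinkBlocks move capacity toward the bounds computed above.
// Note that shrinkBlocks can only release blocks containing no marked cells,
// so usage may legitimately remain above maxBlockCount when survivors are
// spread across many blocks.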
void MarkedSpace::growBlocks(size_t neededBlocks)
{
    ASSERT(m_heap.usedBlocks < neededBlocks);
    while (m_heap.usedBlocks < neededBlocks)
        allocateBlock();
}

void MarkedSpace::shrinkBlocks(size_t neededBlocks)
{
    ASSERT(m_heap.usedBlocks > neededBlocks);

    // Clear the always-on last bit, so isEmpty() isn't fooled by it.
    for (size_t i = 0; i < m_heap.usedBlocks; ++i)
        m_heap.collectorBlock(i)->marked.clear(HeapConstants::cellsPerBlock - 1);

    for (size_t i = 0; i != m_heap.usedBlocks && m_heap.usedBlocks != neededBlocks; ) {
        if (m_heap.collectorBlock(i)->marked.isEmpty())
            freeBlock(i); // freeBlock swapped the last block into slot i, so don't advance i.
        else
            ++i;
    }

    // Reset the always-on last bit.
    for (size_t i = 0; i < m_heap.usedBlocks; ++i)
        m_heap.collectorBlock(i)->marked.set(HeapConstants::cellsPerBlock - 1);
}

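// containsSlowCase services conservative marking: given an arbitrary bit
// pattern found on the stack or in registers, it decides whether that value
// points into a live cell of this space. A false positive would resurrect a
// dead object, so the liveness checks below are deliberately strict.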
bool MarkedSpace::containsSlowCase(void* x)
{
    uintptr_t xAsBits = reinterpret_cast<uintptr_t>(x);
    xAsBits &= CELL_ALIGN_MASK;

    uintptr_t offset = xAsBits & BLOCK_OFFSET_MASK;
    const size_t lastCellOffset = sizeof(CollectorCell) * (CELLS_PER_BLOCK - 1);
    if (offset > lastCellOffset)
        return false;

    CollectorBlock* blockAddr = reinterpret_cast<CollectorBlock*>(xAsBits - offset);
    size_t usedBlocks = m_heap.usedBlocks;
    for (size_t block = 0; block < usedBlocks; block++) {
        if (m_heap.collectorBlock(block) != blockAddr)
            continue;

        // x is a pointer into the heap. Now, verify that the cell it
        // points to is live. (If the cell is dead, we must not mark it,
        // since that would revive it in a zombie state.)
        if (block < m_heap.nextBlock)
            return true;

        size_t cellOffset = offset / CELL_SIZE;

        if (block == m_heap.nextBlock && cellOffset < m_heap.nextCell)
            return true;

        return blockAddr->marked.get(cellOffset);
    }

    return false;
}

void MarkedSpace::clearMarkBits()
{
    for (size_t i = 0; i < m_heap.usedBlocks; ++i)
        clearMarkBits(m_heap.collectorBlock(i));
}

void MarkedSpace::clearMarkBits(CollectorBlock* block)
{
    // allocate() assumes that the last cell in every block is marked.
    block->marked.clearAll();
    block->marked.set(HeapConstants::cellsPerBlock - 1);
}

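// markedCells counts surviving cells from (startBlock, startCell) onward; its
// default arguments (declared in MarkedSpace.h) make it measure the whole
// space, which is how resizeBlocks uses it. The count includes each block's
// always-marked sentinel cell; objectCount() subtracts those back out.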
size_t MarkedSpace::markedCells(size_t startBlock, size_t startCell) const
{
    ASSERT(startBlock <= m_heap.usedBlocks);
    ASSERT(startCell < HeapConstants::cellsPerBlock);

    if (startBlock >= m_heap.usedBlocks)
        return 0;

    size_t result = 0;
    result += m_heap.collectorBlock(startBlock)->marked.count(startCell);
    for (size_t i = startBlock + 1; i < m_heap.usedBlocks; ++i)
        result += m_heap.collectorBlock(i)->marked.count();

    return result;
}

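// sweep destructs every dead cell eagerly. In normal builds each corpse is
// replaced with a dummy markable cell, preserving the invariant that every
// cell in the heap is a valid object to mark; in JSC_ZOMBIES builds it becomes
// a marked JSZombie instead, so stale references trap loudly during debugging
// rather than silently landing on recycled memory.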
void MarkedSpace::sweep()
{
#if !ENABLE(JSC_ZOMBIES)
    Structure* dummyMarkableCellStructure = globalData()->dummyMarkableCellStructure.get();
#endif

    DeadObjectIterator it(m_heap, m_heap.nextBlock, m_heap.nextCell);
    DeadObjectIterator end(m_heap, m_heap.usedBlocks);
    for ( ; it != end; ++it) {
        JSCell* cell = *it;
#if ENABLE(JSC_ZOMBIES)
        if (!cell->isZombie()) {
            const ClassInfo* info = cell->classInfo();
            cell->~JSCell();
            new (cell) JSZombie(info, JSZombie::leakedZombieStructure());
            Heap::markCell(cell);
        }
#else
        cell->~JSCell();
        // Callers of sweep assume it's safe to mark any cell in the heap.
        new (cell) JSCell(dummyMarkableCellStructure);
#endif
    }
}

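// objectCount is exact only relative to the allocation cursor: every cell
// before (nextBlock, nextCell) counts as allocated whether or not it is still
// marked, while cells past the cursor count only if their mark bit is set.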
size_t MarkedSpace::objectCount() const
{
    return m_heap.nextBlock * HeapConstants::cellsPerBlock // allocated full blocks
        + m_heap.nextCell // allocated cells in current block
        + markedCells(m_heap.nextBlock, m_heap.nextCell) // marked cells in remainder of m_heap
        - m_heap.usedBlocks; // 1 cell per block is a dummy sentinel
}

size_t MarkedSpace::size() const
{
    return objectCount() * HeapConstants::cellSize;
}

size_t MarkedSpace::capacity() const
{
    return m_heap.usedBlocks * BLOCK_SIZE;
}

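// reset rewinds the allocation cursor after a collection so that allocate()
// can lazily recycle dead cells. Only zombie builds sweep eagerly here,
// presumably because zombification has to happen before a dead cell's memory
// can be handed back out.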
void MarkedSpace::reset()
{
    m_heap.nextCell = 0;
    m_heap.nextBlock = 0;
#if ENABLE(JSC_ZOMBIES)
    sweep();
#endif
    resizeBlocks();
}

LiveObjectIterator MarkedSpace::primaryHeapBegin()
{
    return LiveObjectIterator(m_heap, 0);
}

LiveObjectIterator MarkedSpace::primaryHeapEnd()
{
    return LiveObjectIterator(m_heap, m_heap.usedBlocks);
}

} // namespace JSC