/*
 *  Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 *  Copyright (C) 2007 Eric Seidel <eric@webkit.org>
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 *
 */

#include "config.h"
#include "MarkedSpace.h"

#include "CollectorHeapIterator.h"
#include "JSCell.h"
#include "JSGlobalData.h"
#include "JSLock.h"

using std::max;

namespace JSC {

class Structure;

// Tunable parameters.

const size_t GROWTH_FACTOR = 2;
const size_t LOW_WATER_FACTOR = 4;
const size_t ALLOCATIONS_PER_COLLECTION = 3600;
// This value has to be a macro to be used in max() without introducing
// a PIC branch in Mach-O binaries, see <rdar://problem/5971391>.
#define MIN_ARRAY_SIZE (static_cast<size_t>(14))

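// MarkedSpace zeroes its bookkeeping in the constructor and then eagerly
// allocates one block, so allocate() can assume at least one block exists.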
MarkedSpace::MarkedSpace(JSGlobalData* globalData)
    : m_globalData(globalData)
{
    memset(&m_heap, 0, sizeof(CollectorHeap));
    allocateBlock();
}

void MarkedSpace::destroy()
{
    clearMarkBits(); // Make sure weak pointers appear dead during destruction.

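    // freeBlock() moves the last block into the freed slot, so repeatedly
    // freeing block 0 drains the whole vector.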
    while (m_heap.usedBlocks)
        freeBlock(0);
    fastFree(m_heap.blocks);

    memset(&m_heap, 0, sizeof(CollectorHeap));
}

NEVER_INLINE MarkedBlock* MarkedSpace::allocateBlock()
{
    PageAllocationAligned allocation = PageAllocationAligned::allocate(BLOCK_SIZE, BLOCK_SIZE, OSAllocator::JSGCHeapPages);
    MarkedBlock* block = static_cast<MarkedBlock*>(allocation.base());
    if (!block)
        CRASH();

    // Initialize block.

    block->heap = &globalData()->heap;
    clearMarkBits(block);

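    // Construct a dummy markable cell in every slot, so it's always safe to
    // mark or sweep any cell in the block, even before its first real
    // allocation.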
    Structure* dummyMarkableCellStructure = globalData()->dummyMarkableCellStructure.get();
    for (size_t i = 0; i < HeapConstants::cellsPerBlock; ++i)
        new (&block->cells[i]) JSCell(dummyMarkableCellStructure);

    // Add block to blocks vector.

    size_t numBlocks = m_heap.numBlocks;
    if (m_heap.usedBlocks == numBlocks) {
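        // Guard against overflow when doubling the vector's capacity and
        // computing the realloc size below.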
        static const size_t maxNumBlocks = ULONG_MAX / sizeof(PageAllocationAligned) / GROWTH_FACTOR;
        if (numBlocks > maxNumBlocks)
            CRASH();
        numBlocks = max(MIN_ARRAY_SIZE, numBlocks * GROWTH_FACTOR);
        m_heap.numBlocks = numBlocks;
        m_heap.blocks = static_cast<PageAllocationAligned*>(fastRealloc(m_heap.blocks, numBlocks * sizeof(PageAllocationAligned)));
    }
    m_heap.blocks[m_heap.usedBlocks++] = allocation;

    return block;
}

NEVER_INLINE void MarkedSpace::freeBlock(size_t block)
{
    ObjectIterator it(m_heap, block, 0);
    ObjectIterator end(m_heap, block + 1, 0);
    for ( ; it != end; ++it)
        (*it)->~JSCell();
    m_heap.blocks[block].deallocate();

    // Swap with the last block so we compact as we go.
    m_heap.blocks[block] = m_heap.blocks[m_heap.usedBlocks - 1];
    m_heap.usedBlocks--;

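    // Shrink the blocks vector once it drops below the low-water mark, to
    // return bookkeeping memory to the allocator.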
    if (m_heap.numBlocks > MIN_ARRAY_SIZE && m_heap.usedBlocks < m_heap.numBlocks / LOW_WATER_FACTOR) {
        m_heap.numBlocks = m_heap.numBlocks / GROWTH_FACTOR;
        m_heap.blocks = static_cast<PageAllocationAligned*>(fastRealloc(m_heap.blocks, m_heap.numBlocks * sizeof(PageAllocationAligned)));
    }
}

void* MarkedSpace::allocate(size_t s)
{
    ASSERT(globalData()->identifierTable == wtfThreadData().currentIdentifierTable());
    typedef HeapConstants::Block Block;
    typedef HeapConstants::Cell Cell;

    ASSERT(JSLock::lockCount() > 0);
    ASSERT(JSLock::currentThreadIsHoldingLock());
    ASSERT_UNUSED(s, s <= HeapConstants::cellSize);

    // Fast case: find the next garbage cell and recycle it.

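    // Dead cells are destructed lazily, here at allocation time, just before
    // their memory is reused.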
    do {
        ASSERT(m_heap.nextBlock < m_heap.usedBlocks);
        Block* block = m_heap.collectorBlock(m_heap.nextBlock);
        do {
            ASSERT(m_heap.nextCell < HeapConstants::cellsPerBlock);
            if (!block->marked.get(m_heap.nextCell)) { // Always false for the last cell in the block, whose mark bit is always set.
                Cell* cell = &block->cells[m_heap.nextCell];

                JSCell* imp = reinterpret_cast<JSCell*>(cell);
                imp->~JSCell();

                ++m_heap.nextCell;
                return cell;
            }
            m_heap.nextCell = block->marked.nextPossiblyUnset(m_heap.nextCell);
        } while (m_heap.nextCell != HeapConstants::cellsPerBlock);
        m_heap.nextCell = 0;
        m_heap.waterMark += BLOCK_SIZE;
    } while (++m_heap.nextBlock != m_heap.usedBlocks);

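    // Slow case: every block is full of live cells. Grow the heap if we're
    // still below the high-water mark; otherwise return 0 to signal that no
    // room is left at the current high-water mark.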
    if (m_heap.waterMark < m_heap.highWaterMark)
        return &allocateBlock()->cells[m_heap.nextCell++];

    return 0;
}

void MarkedSpace::resizeBlocks()
{
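    // Size the heap to hold the live cells plus headroom for new allocations
    // (at least ALLOCATIONS_PER_COLLECTION cells, or one live-set's worth,
    // whichever is larger), with 25% slack above that before shrinking.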
    size_t usedCellCount = markedCells();
    size_t minCellCount = usedCellCount + max(ALLOCATIONS_PER_COLLECTION, usedCellCount);
    size_t minBlockCount = (minCellCount + HeapConstants::cellsPerBlock - 1) / HeapConstants::cellsPerBlock;

    size_t maxCellCount = 1.25f * minCellCount;
    size_t maxBlockCount = (maxCellCount + HeapConstants::cellsPerBlock - 1) / HeapConstants::cellsPerBlock;

    if (m_heap.usedBlocks < minBlockCount)
        growBlocks(minBlockCount);
    else if (m_heap.usedBlocks > maxBlockCount)
        shrinkBlocks(maxBlockCount);
}

void MarkedSpace::growBlocks(size_t neededBlocks)
{
    ASSERT(m_heap.usedBlocks < neededBlocks);
    while (m_heap.usedBlocks < neededBlocks)
        allocateBlock();
}

void MarkedSpace::shrinkBlocks(size_t neededBlocks)
{
    ASSERT(m_heap.usedBlocks > neededBlocks);

    // Clear the always-on last bit, so isEmpty() isn't fooled by it.
    for (size_t i = 0; i < m_heap.usedBlocks; ++i)
        m_heap.collectorBlock(i)->marked.clear(HeapConstants::cellsPerBlock - 1);

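    // freeBlock() swaps the last block into slot i, so i only advances when
    // the current block is kept.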
    for (size_t i = 0; i != m_heap.usedBlocks && m_heap.usedBlocks != neededBlocks; ) {
        if (m_heap.collectorBlock(i)->marked.isEmpty())
            freeBlock(i);
        else
            ++i;
    }

    // Reset the always-on last bit.
    for (size_t i = 0; i < m_heap.usedBlocks; ++i)
        m_heap.collectorBlock(i)->marked.set(HeapConstants::cellsPerBlock - 1);
}

bool MarkedSpace::containsSlowCase(const void* x)
{
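    // Round x down to cell alignment, reject pointers past the last cell in
    // a block, then search the block vector for the enclosing block.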
    uintptr_t xAsBits = reinterpret_cast<uintptr_t>(x);
    xAsBits &= CELL_ALIGN_MASK;

    uintptr_t offset = xAsBits & BLOCK_OFFSET_MASK;
    const size_t lastCellOffset = sizeof(CollectorCell) * (CELLS_PER_BLOCK - 1);
    if (offset > lastCellOffset)
        return false;

    MarkedBlock* blockAddr = reinterpret_cast<MarkedBlock*>(xAsBits - offset);
    size_t usedBlocks = m_heap.usedBlocks;
    for (size_t block = 0; block < usedBlocks; block++) {
        if (m_heap.collectorBlock(block) != blockAddr)
            continue;

        // x is a pointer into the heap. Now, verify that the cell it
        // points to is live. (If the cell is dead, we must not mark it,
        // since that would revive it in a zombie state.)
        if (block < m_heap.nextBlock)
            return true;

        size_t cellOffset = offset / CELL_SIZE;

        if (block == m_heap.nextBlock && cellOffset < m_heap.nextCell)
            return true;

        return blockAddr->marked.get(cellOffset);
    }

    return false;
}

void MarkedSpace::clearMarkBits()
{
    for (size_t i = 0; i < m_heap.usedBlocks; ++i)
        clearMarkBits(m_heap.collectorBlock(i));
}

void MarkedSpace::clearMarkBits(MarkedBlock* block)
{
    // allocate() assumes that the last cell in every block is marked.
    block->marked.clearAll();
    block->marked.set(HeapConstants::cellsPerBlock - 1);
}

size_t MarkedSpace::markedCells(size_t startBlock, size_t startCell) const
{
    ASSERT(startBlock <= m_heap.usedBlocks);
    ASSERT(startCell < HeapConstants::cellsPerBlock);

    if (startBlock >= m_heap.usedBlocks)
        return 0;

    size_t result = 0;
    result += m_heap.collectorBlock(startBlock)->marked.count(startCell);
    for (size_t i = startBlock + 1; i < m_heap.usedBlocks; ++i)
        result += m_heap.collectorBlock(i)->marked.count();

    return result;
}

void MarkedSpace::sweep()
{
#if !ENABLE(JSC_ZOMBIES)
    Structure* dummyMarkableCellStructure = globalData()->dummyMarkableCellStructure.get();
#endif

    DeadObjectIterator it(m_heap, 0, 0);
    DeadObjectIterator end(m_heap, m_heap.usedBlocks, 0);
    for ( ; it != end; ++it) {
        JSCell* cell = *it;
#if ENABLE(JSC_ZOMBIES)
        if (!cell->isZombie()) {
            const ClassInfo* info = cell->classInfo();
            cell->~JSCell();
            new (cell) JSZombie(info, JSZombie::leakedZombieStructure());
            Heap::markCell(cell);
        }
#else
        cell->~JSCell();
        // Callers of sweep assume it's safe to mark any cell in the heap.
        new (cell) JSCell(dummyMarkableCellStructure);
#endif
    }

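    // Free any blocks that are now entirely empty, keeping at least one.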
    if (m_heap.usedBlocks > 1)
        shrinkBlocks(1);
}

size_t MarkedSpace::objectCount() const
{
    return m_heap.nextBlock * HeapConstants::cellsPerBlock // allocated full blocks
           + m_heap.nextCell // allocated cells in current block
           + markedCells(m_heap.nextBlock, m_heap.nextCell) // marked cells in remainder of m_heap
           - m_heap.usedBlocks; // 1 cell per block is a dummy sentinel
}

size_t MarkedSpace::size() const
{
    return objectCount() * HeapConstants::cellSize;
}

size_t MarkedSpace::capacity() const
{
    return m_heap.usedBlocks * BLOCK_SIZE;
}
void MarkedSpace::reset()
{
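    // Rewind the allocation cursor and water mark so the next allocation
    // scan starts from the first block.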
    m_heap.nextCell = 0;
    m_heap.nextBlock = 0;
    m_heap.waterMark = 0;
#if ENABLE(JSC_ZOMBIES)
    sweep();
#endif
}

LiveObjectIterator MarkedSpace::primaryHeapBegin()
{
    return LiveObjectIterator(m_heap, 0, 0);
}

LiveObjectIterator MarkedSpace::primaryHeapEnd()
{
    return LiveObjectIterator(m_heap, m_heap.usedBlocks, 0);
}

} // namespace JSC