[JSC] Rename LargeAllocation to PreciseAllocation
[WebKit-https.git] / Source / JavaScriptCore / heap / PreciseAllocation.cpp
1 /*
2  * Copyright (C) 2016-2019 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #include "config.h"
27 #include "PreciseAllocation.h"
28
29 #include "AlignedMemoryAllocator.h"
30 #include "Heap.h"
31 #include "JSCInlines.h"
32 #include "Operations.h"
33 #include "SubspaceInlines.h"
34
35 namespace JSC {
36
37 static inline bool isAlignedForPreciseAllocation(void* memory)
38 {
39     uintptr_t allocatedPointer = bitwise_cast<uintptr_t>(memory);
40     return !(allocatedPointer & (PreciseAllocation::alignment - 1));
41 }
42
43 PreciseAllocation* PreciseAllocation::tryCreate(Heap& heap, size_t size, Subspace* subspace, unsigned indexInSpace)
44 {
45     if (validateDFGDoesGC)
46         RELEASE_ASSERT(heap.expectDoesGC());
47
48     size_t adjustedAlignmentAllocationSize = headerSize() + size + halfAlignment;
49     static_assert(halfAlignment == 8, "We assume that memory returned by malloc has alignment >= 8.");
50     
51     // We must use tryAllocateMemory instead of tryAllocateAlignedMemory since we want to use "realloc" feature.
52     void* space = subspace->alignedMemoryAllocator()->tryAllocateMemory(adjustedAlignmentAllocationSize);
53     if (!space)
54         return nullptr;
55
56     bool adjustedAlignment = false;
57     if (!isAlignedForPreciseAllocation(space)) {
58         space = bitwise_cast<void*>(bitwise_cast<uintptr_t>(space) + halfAlignment);
59         adjustedAlignment = true;
60         ASSERT(isAlignedForPreciseAllocation(space));
61     }
62     
63     if (scribbleFreeCells())
64         scribble(space, size);
65     return new (NotNull, space) PreciseAllocation(heap, size, subspace, indexInSpace, adjustedAlignment);
66 }
67
// Resizes this allocation in place via the allocator's realloc support.
// Returns the (possibly moved) allocation header, or nullptr on failure.
// Note: `this` is invalid after a successful call; use the returned pointer.
PreciseAllocation* PreciseAllocation::tryReallocate(size_t size, Subspace* subspace)
{
    ASSERT(!isLowerTier());
    // Same slack scheme as tryCreate(): halfAlignment extra bytes so we can
    // re-adjust alignment on whatever pointer realloc returns.
    size_t adjustedAlignmentAllocationSize = headerSize() + size + halfAlignment;
    static_assert(halfAlignment == 8, "We assume that memory returned by malloc has alignment >= 8.");

    ASSERT(subspace == m_subspace);

    // Capture pre-realloc state now; the realloc below may move (and thereby
    // invalidate) the memory that `this` lives in.
    unsigned oldCellSize = m_cellSize;
    bool oldAdjustedAlignment = m_adjustedAlignment;
    void* oldBasePointer = basePointer();

    void* newBasePointer = subspace->alignedMemoryAllocator()->tryReallocateMemory(oldBasePointer, adjustedAlignmentAllocationSize);
    if (!newBasePointer)
        return nullptr;

    // Decide where the header must live relative to the new base pointer,
    // exactly as tryCreate() does for a fresh allocation.
    PreciseAllocation* newAllocation = bitwise_cast<PreciseAllocation*>(newBasePointer);
    bool newAdjustedAlignment = false;
    if (!isAlignedForPreciseAllocation(newBasePointer)) {
        newAdjustedAlignment = true;
        newAllocation = bitwise_cast<PreciseAllocation*>(bitwise_cast<uintptr_t>(newBasePointer) + halfAlignment);
        ASSERT(isAlignedForPreciseAllocation(static_cast<void*>(newAllocation)));
    }

    // We have 4 patterns.
    // oldAdjustedAlignment = true  newAdjustedAlignment = true  => Do nothing.
    // oldAdjustedAlignment = true  newAdjustedAlignment = false => Shift forward by halfAlignment
    // oldAdjustedAlignment = false newAdjustedAlignment = true  => Shift backward by halfAlignment
    // oldAdjustedAlignment = false newAdjustedAlignment = false => Do nothing.

    if (oldAdjustedAlignment != newAdjustedAlignment) {
        if (oldAdjustedAlignment) {
            ASSERT(!newAdjustedAlignment);
            ASSERT(newAllocation == newBasePointer);
            // Old   [ 8 ][  content  ]
            // Now   [   ][  content  ]
            // New   [  content  ]...
            // memmove (not memcpy): source and destination overlap by design.
            memmove(newBasePointer, bitwise_cast<char*>(newBasePointer) + halfAlignment, oldCellSize + PreciseAllocation::headerSize());
        } else {
            ASSERT(newAdjustedAlignment);
            ASSERT(newAllocation != newBasePointer);
            ASSERT(newAllocation == bitwise_cast<void*>(bitwise_cast<char*>(newBasePointer) + halfAlignment));
            // Old   [  content  ]
            // Now   [  content  ][   ]
            // New   [ 8 ][  content  ]
            memmove(bitwise_cast<char*>(newBasePointer) + halfAlignment, newBasePointer, oldCellSize + PreciseAllocation::headerSize());
        }
    }

    // The header was copied bit-for-bit by realloc/memmove; patch the two
    // fields that legitimately changed.
    newAllocation->m_cellSize = size;
    newAllocation->m_adjustedAlignment = newAdjustedAlignment;
    return newAllocation;
}
121
122
123 PreciseAllocation* PreciseAllocation::createForLowerTier(Heap& heap, size_t size, Subspace* subspace, uint8_t lowerTierIndex)
124 {
125     if (validateDFGDoesGC)
126         RELEASE_ASSERT(heap.expectDoesGC());
127
128     size_t adjustedAlignmentAllocationSize = headerSize() + size + halfAlignment;
129     static_assert(halfAlignment == 8, "We assume that memory returned by malloc has alignment >= 8.");
130
131     void* space = subspace->alignedMemoryAllocator()->tryAllocateMemory(adjustedAlignmentAllocationSize);
132     RELEASE_ASSERT(space);
133
134     bool adjustedAlignment = false;
135     if (!isAlignedForPreciseAllocation(space)) {
136         space = bitwise_cast<void*>(bitwise_cast<uintptr_t>(space) + halfAlignment);
137         adjustedAlignment = true;
138         ASSERT(isAlignedForPreciseAllocation(space));
139     }
140
141     if (scribbleFreeCells())
142         scribble(space, size);
143     PreciseAllocation* preciseAllocation = new (NotNull, space) PreciseAllocation(heap, size, subspace, 0, adjustedAlignment);
144     preciseAllocation->m_lowerTierIndex = lowerTierIndex;
145     return preciseAllocation;
146 }
147
148 PreciseAllocation* PreciseAllocation::reuseForLowerTier()
149 {
150     Heap& heap = *this->heap();
151     size_t size = m_cellSize;
152     Subspace* subspace = m_subspace;
153     bool adjustedAlignment = m_adjustedAlignment;
154     uint8_t lowerTierIndex = m_lowerTierIndex;
155
156     void* space = this->basePointer();
157     this->~PreciseAllocation();
158
159     PreciseAllocation* preciseAllocation = new (NotNull, space) PreciseAllocation(heap, size, subspace, 0, adjustedAlignment);
160     preciseAllocation->m_lowerTierIndex = lowerTierIndex;
161     preciseAllocation->m_hasValidCell = false;
162     return preciseAllocation;
163 }
164
// Constructs the allocation header. |adjustedAlignment| records whether this
// header sits halfAlignment past the raw malloc pointer (see tryCreate()),
// which basePointer() must undo when freeing.
PreciseAllocation::PreciseAllocation(Heap& heap, size_t size, Subspace* subspace, unsigned indexInSpace, bool adjustedAlignment)
    : m_indexInSpace(indexInSpace)
    , m_cellSize(size)
    , m_isNewlyAllocated(true) // Fresh allocations start out live.
    , m_hasValidCell(true)
    , m_adjustedAlignment(adjustedAlignment)
    , m_attributes(subspace->attributes())
    , m_subspace(subspace)
    , m_weakSet(heap.vm())
{
    // Start with the mark bit cleared.
    m_isMarked.store(0);
}
177
PreciseAllocation::~PreciseAllocation()
{
    // Unlink from any intrusive list this allocation may still be on.
    if (isOnList())
        remove();
}
183
// Final teardown: finalize weak references, then clear the mark and
// newly-allocated bits before running a last sweep. The order matters —
// sweep() consults liveness, so the bits must be cleared first.
void PreciseAllocation::lastChanceToFinalize()
{
    m_weakSet.lastChanceToFinalize();
    clearMarked();
    clearNewlyAllocated();
    sweep();
}
191
// Forwards to WeakSet::shrink() for this allocation's weak set.
void PreciseAllocation::shrink()
{
    m_weakSet.shrink();
}
196
// Lets |visitor| visit this allocation's weak set.
void PreciseAllocation::visitWeakSet(SlotVisitor& visitor)
{
    m_weakSet.visit(visitor);
}
201
202 void PreciseAllocation::reapWeakSet()
203 {
204     return m_weakSet.reap();
205 }
206
// At the start of a full collection, discard the previous cycle's mark bit.
void PreciseAllocation::flip()
{
    ASSERT(heap()->collectionScope() == CollectionScope::Full);
    clearMarked();
}
212
213 bool PreciseAllocation::isEmpty()
214 {
215     return !isMarked() && m_weakSet.isEmpty() && !isNewlyAllocated();
216 }
217
// Sweeps this allocation: sweeps the weak set, and if the cell is dead,
// runs its destructor (when the type requires one) and records that the
// allocation no longer holds a valid cell.
void PreciseAllocation::sweep()
{
    m_weakSet.sweep();
    
    if (m_hasValidCell && !isLive()) {
        if (m_attributes.destruction == NeedsDestruction)
            m_subspace->destroy(vm(), static_cast<JSCell*>(cell()));
        // Mark the cell as gone so we never destroy it twice.
        m_hasValidCell = false;
    }
}
228
// Frees this allocation's memory. `this` lives inside that memory, so the
// allocator and the raw base pointer must be captured before the destructor
// runs, and freeMemory() must be the very last step.
void PreciseAllocation::destroy()
{
    AlignedMemoryAllocator* allocator = m_subspace->alignedMemoryAllocator();
    void* basePointer = this->basePointer();
    this->~PreciseAllocation();
    allocator->freeMemory(basePointer);
}
236
// Prints a one-line description of this allocation (header address, cell
// address, size, attributes) for debugging.
void PreciseAllocation::dump(PrintStream& out) const
{
    out.print(RawPointer(this), ":(cell at ", RawPointer(cell()), " with size ", m_cellSize, " and attributes ", m_attributes, ")");
}
241
#if !ASSERT_DISABLED
// Debug-only sanity check: |cell| must be the cell this allocation manages,
// belong to the same VM, and not have been destroyed by a prior sweep.
void PreciseAllocation::assertValidCell(VM& vm, HeapCell* cell) const
{
    ASSERT(&vm == &this->vm());
    ASSERT(cell == this->cell());
    ASSERT(m_hasValidCell);
}
#endif
250
251 } // namespace JSC
252