/*
 * Copyright (C) 2017-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
32 class CompleteSubspace : public Subspace {
34 JS_EXPORT_PRIVATE CompleteSubspace(CString name, Heap&, HeapCellType*, AlignedMemoryAllocator*);
35 JS_EXPORT_PRIVATE ~CompleteSubspace();
37 // In some code paths, we need it to be a compile error to call the virtual version of one of
38 // these functions. That's why we do final methods the old school way.
40 // FIXME: Currently subspaces speak of BlockDirectories as "allocators", but that's temporary.
41 // https://bugs.webkit.org/show_bug.cgi?id=181559
42 Allocator allocatorFor(size_t, AllocatorForMode) override;
43 Allocator allocatorForNonVirtual(size_t, AllocatorForMode);
45 void* allocate(VM&, size_t, GCDeferralContext*, AllocationFailureMode) override;
46 void* allocateNonVirtual(VM&, size_t, GCDeferralContext*, AllocationFailureMode);
47 void* reallocateLargeAllocationNonVirtual(VM&, HeapCell*, size_t, GCDeferralContext*, AllocationFailureMode);
49 static ptrdiff_t offsetOfAllocatorForSizeStep() { return OBJECT_OFFSETOF(CompleteSubspace, m_allocatorForSizeStep); }
51 Allocator* allocatorForSizeStep() { return &m_allocatorForSizeStep[0]; }
54 JS_EXPORT_PRIVATE Allocator allocatorForSlow(size_t);
56 // These slow paths are concerned with large allocations and allocator creation.
57 JS_EXPORT_PRIVATE void* allocateSlow(VM&, size_t, GCDeferralContext*, AllocationFailureMode);
58 void* tryAllocateSlow(VM&, size_t, GCDeferralContext*);
60 std::array<Allocator, MarkedSpace::numSizeClasses> m_allocatorForSizeStep;
61 Vector<std::unique_ptr<BlockDirectory>> m_directories;
62 Vector<std::unique_ptr<LocalAllocator>> m_localAllocators;
65 ALWAYS_INLINE Allocator CompleteSubspace::allocatorForNonVirtual(size_t size, AllocatorForMode mode)
67 if (size <= MarkedSpace::largeCutoff) {
68 Allocator result = m_allocatorForSizeStep[MarkedSpace::sizeClassToIndex(size)];
70 case AllocatorForMode::MustAlreadyHaveAllocator:
71 RELEASE_ASSERT(result);
73 case AllocatorForMode::EnsureAllocator:
75 return allocatorForSlow(size);
77 case AllocatorForMode::AllocatorIfExists:
82 RELEASE_ASSERT(mode != AllocatorForMode::MustAlreadyHaveAllocator);