/*
 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 *
 */
#ifndef WTF_Vector_h
#define WTF_Vector_h

#include "Assertions.h"
#include "FastMalloc.h"
#include "Noncopyable.h"
#include "NotFound.h"
#include "VectorTraits.h"
#include <algorithm>
#include <limits>
#include <string.h>
#include <utility>

namespace WTF {

    using std::min;
    using std::max;
    // WTF_ALIGN_OF / WTF_ALIGNED
    #if COMPILER(GCC) || COMPILER(MINGW) || COMPILER(RVCT) || COMPILER(WINSCW)
        #define WTF_ALIGN_OF(type) __alignof__(type)
        #define WTF_ALIGNED(variable_type, variable, n) variable_type variable __attribute__((__aligned__(n)))
    #elif COMPILER(MSVC)
        #define WTF_ALIGN_OF(type) __alignof(type)
        #define WTF_ALIGNED(variable_type, variable, n) __declspec(align(n)) variable_type variable
    #else
        #error WTF_ALIGN macros need alignment control.
    #endif

    #if COMPILER(GCC) && (((__GNUC__ * 100) + __GNUC_MINOR__) >= 303)
        typedef char __attribute__((__may_alias__)) AlignedBufferChar;
    #else
        typedef char AlignedBufferChar;
    #endif
    template <size_t size, size_t alignment> struct AlignedBuffer;
    template <size_t size> struct AlignedBuffer<size, 1> { AlignedBufferChar buffer[size]; };
    template <size_t size> struct AlignedBuffer<size, 2> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 2); };
    template <size_t size> struct AlignedBuffer<size, 4> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 4); };
    template <size_t size> struct AlignedBuffer<size, 8> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 8); };
    template <size_t size> struct AlignedBuffer<size, 16> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 16); };
    template <size_t size> struct AlignedBuffer<size, 32> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 32); };
    template <size_t size> struct AlignedBuffer<size, 64> { WTF_ALIGNED(AlignedBufferChar, buffer[size], 64); };
    template <bool needsDestruction, typename T>
    class VectorDestructor;

    template<typename T>
    struct VectorDestructor<false, T>
    {
        static void destruct(T*, T*) {}
    };

    template<typename T>
    struct VectorDestructor<true, T>
    {
        static void destruct(T* begin, T* end)
        {
            for (T* cur = begin; cur != end; ++cur)
                cur->~T();
        }
    };

    template <bool needsInitialization, bool canInitializeWithMemset, typename T>
    class VectorInitializer;

    template<bool ignore, typename T>
    struct VectorInitializer<false, ignore, T>
    {
        static void initialize(T*, T*) {}
    };

    template<typename T>
    struct VectorInitializer<true, false, T>
    {
        static void initialize(T* begin, T* end)
        {
            for (T* cur = begin; cur != end; ++cur)
                new (cur) T;
        }
    };

    template<typename T>
    struct VectorInitializer<true, true, T>
    {
        static void initialize(T* begin, T* end)
        {
            memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
        }
    };
    template <bool canMoveWithMemcpy, typename T>
    class VectorMover;

    template<typename T>
    struct VectorMover<false, T>
    {
        static void move(const T* src, const T* srcEnd, T* dst)
        {
            while (src != srcEnd) {
                new (dst) T(*src);
                src->~T();
                ++dst;
                ++src;
            }
        }
        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            if (src > dst)
                move(src, srcEnd, dst);
            else {
                // Moving to a higher address: copy backwards so sources are not
                // overwritten before they are moved.
                T* dstEnd = dst + (srcEnd - src);
                while (src != srcEnd) {
                    --srcEnd;
                    --dstEnd;
                    new (dstEnd) T(*srcEnd);
                    srcEnd->~T();
                }
            }
        }
    };

    template<typename T>
    struct VectorMover<true, T>
    {
        static void move(const T* src, const T* srcEnd, T* dst)
        {
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
    };

    template <bool canCopyWithMemcpy, typename T>
    class VectorCopier;

    template<typename T>
    struct VectorCopier<false, T>
    {
        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            while (src != srcEnd) {
                new (dst) T(*src);
                ++dst;
                ++src;
            }
        }
    };

    template<typename T>
    struct VectorCopier<true, T>
    {
        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
    };
    template <bool canFillWithMemset, typename T>
    class VectorFiller;

    template<typename T>
    struct VectorFiller<false, T>
    {
        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            while (dst != dstEnd) {
                new (dst) T(val);
                ++dst;
            }
        }
    };

    template<typename T>
    struct VectorFiller<true, T>
    {
        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            ASSERT(sizeof(T) == sizeof(char));
            memset(dst, val, dstEnd - dst);
        }
    };

    template<bool canCompareWithMemcmp, typename T>
    class VectorComparer;

    template<typename T>
    struct VectorComparer<false, T>
    {
        static bool compare(const T* a, const T* b, size_t size)
        {
            for (size_t i = 0; i < size; ++i)
                if (a[i] != b[i])
                    return false;
            return true;
        }
    };

    template<typename T>
    struct VectorComparer<true, T>
    {
        static bool compare(const T* a, const T* b, size_t size)
        {
            return memcmp(a, b, sizeof(T) * size) == 0;
        }
    };
    template<typename T>
    struct VectorTypeOperations
    {
        static void destruct(T* begin, T* end)
        {
            VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
        }

        static void initialize(T* begin, T* end)
        {
            VectorInitializer<VectorTraits<T>::needsInitialization, VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
        }

        static void move(const T* src, const T* srcEnd, T* dst)
        {
            VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
        }

        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
        }

        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
        }

        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
        }

        static bool compare(const T* a, const T* b, size_t size)
        {
            return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
        }
    };
    template<typename T>
    class VectorBufferBase : Noncopyable {
    public:
        void allocateBuffer(size_t newCapacity)
        {
            m_capacity = newCapacity;
            if (newCapacity > std::numeric_limits<size_t>::max() / sizeof(T))
                CRASH();
            m_buffer = static_cast<T*>(fastMalloc(newCapacity * sizeof(T)));
        }

        void deallocateBuffer(T* bufferToDeallocate)
        {
            if (m_buffer == bufferToDeallocate) {
                m_buffer = 0;
                m_capacity = 0;
            }
            fastFree(bufferToDeallocate);
        }

        T* buffer() { return m_buffer; }
        const T* buffer() const { return m_buffer; }
        T** bufferSlot() { return &m_buffer; }
        size_t capacity() const { return m_capacity; }

        T* releaseBuffer()
        {
            T* buffer = m_buffer;
            m_buffer = 0;
            m_capacity = 0;
            return buffer;
        }

    protected:
        VectorBufferBase()
            : m_buffer(0)
            , m_capacity(0)
        {
        }

        VectorBufferBase(T* buffer, size_t capacity)
            : m_buffer(buffer)
            , m_capacity(capacity)
        {
        }

        ~VectorBufferBase()
        {
            // FIXME: It would be nice to find a way to ASSERT that m_buffer hasn't leaked here.
        }

        T* m_buffer;
        size_t m_capacity;
    };
    template<typename T, size_t inlineCapacity>
    class VectorBuffer;

    template<typename T>
    class VectorBuffer<T, 0> : private VectorBufferBase<T> {
    private:
        typedef VectorBufferBase<T> Base;
    public:
        VectorBuffer()
        {
        }

        VectorBuffer(size_t capacity)
        {
            allocateBuffer(capacity);
        }

        ~VectorBuffer()
        {
            deallocateBuffer(buffer());
        }

        void swap(VectorBuffer<T, 0>& other)
        {
            std::swap(m_buffer, other.m_buffer);
            std::swap(m_capacity, other.m_capacity);
        }

        void restoreInlineBufferIfNeeded() { }

        using Base::allocateBuffer;
        using Base::deallocateBuffer;

        using Base::buffer;
        using Base::bufferSlot;
        using Base::capacity;

        using Base::releaseBuffer;
    private:
        using Base::m_buffer;
        using Base::m_capacity;
    };
    template<typename T, size_t inlineCapacity>
    class VectorBuffer : private VectorBufferBase<T> {
    private:
        typedef VectorBufferBase<T> Base;
    public:
        VectorBuffer()
            : Base(inlineBuffer(), inlineCapacity)
        {
        }

        VectorBuffer(size_t capacity)
            : Base(inlineBuffer(), inlineCapacity)
        {
            if (capacity > inlineCapacity)
                Base::allocateBuffer(capacity);
        }

        ~VectorBuffer()
        {
            deallocateBuffer(buffer());
        }

        void allocateBuffer(size_t newCapacity)
        {
            if (newCapacity > inlineCapacity)
                Base::allocateBuffer(newCapacity);
            else {
                m_buffer = inlineBuffer();
                m_capacity = inlineCapacity;
            }
        }

        void deallocateBuffer(T* bufferToDeallocate)
        {
            if (bufferToDeallocate == inlineBuffer())
                return;
            Base::deallocateBuffer(bufferToDeallocate);
        }

        void restoreInlineBufferIfNeeded()
        {
            if (m_buffer)
                return;
            m_buffer = inlineBuffer();
            m_capacity = inlineCapacity;
        }

        using Base::buffer;
        using Base::bufferSlot;
        using Base::capacity;

        T* releaseBuffer()
        {
            if (buffer() == inlineBuffer())
                return 0;
            return Base::releaseBuffer();
        }

    private:
        using Base::m_buffer;
        using Base::m_capacity;

        static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
        T* inlineBuffer() { return reinterpret_cast<T*>(m_inlineBuffer.buffer); }

        AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
    };
    template<typename T, size_t inlineCapacity = 0>
    class Vector {
    private:
        typedef VectorBuffer<T, inlineCapacity> Buffer;
        typedef VectorTypeOperations<T> TypeOperations;

    public:
        typedef T ValueType;

        typedef T* iterator;
        typedef const T* const_iterator;

        Vector()
            : m_size(0)
        {
        }

        explicit Vector(size_t size)
            : m_size(size)
            , m_buffer(size)
        {
            if (begin())
                TypeOperations::initialize(begin(), end());
        }

        ~Vector()
        {
            if (m_size) shrink(0);
        }
        Vector(const Vector&);
        template<size_t otherCapacity>
        Vector(const Vector<T, otherCapacity>&);

        Vector& operator=(const Vector&);
        template<size_t otherCapacity>
        Vector& operator=(const Vector<T, otherCapacity>&);

        size_t size() const { return m_size; }
        size_t capacity() const { return m_buffer.capacity(); }
        bool isEmpty() const { return !size(); }

        T& at(size_t i)
        {
            ASSERT(i < size());
            return m_buffer.buffer()[i];
        }
        const T& at(size_t i) const
        {
            ASSERT(i < size());
            return m_buffer.buffer()[i];
        }

        T& operator[](size_t i) { return at(i); }
        const T& operator[](size_t i) const { return at(i); }

        T* data() { return m_buffer.buffer(); }
        const T* data() const { return m_buffer.buffer(); }
        T** dataSlot() { return m_buffer.bufferSlot(); }

        iterator begin() { return data(); }
        iterator end() { return begin() + m_size; }
        const_iterator begin() const { return data(); }
        const_iterator end() const { return begin() + m_size; }

        T& first() { return at(0); }
        const T& first() const { return at(0); }
        T& last() { return at(size() - 1); }
        const T& last() const { return at(size() - 1); }
        template<typename U> size_t find(const U&) const;

        void shrink(size_t size);
        void grow(size_t size);
        void resize(size_t size);
        void reserveCapacity(size_t newCapacity);
        void reserveInitialCapacity(size_t initialCapacity);
        void shrinkCapacity(size_t newCapacity);
        void shrinkToFit() { shrinkCapacity(size()); }

        void clear() { shrinkCapacity(0); }

        template<typename U> void append(const U*, size_t);
        template<typename U> void append(const U&);
        template<typename U> void uncheckedAppend(const U& val);
        template<size_t otherCapacity> void append(const Vector<T, otherCapacity>&);

        template<typename U> void insert(size_t position, const U*, size_t);
        template<typename U> void insert(size_t position, const U&);
        template<typename U, size_t c> void insert(size_t position, const Vector<U, c>&);

        template<typename U> void prepend(const U*, size_t);
        template<typename U> void prepend(const U&);
        template<typename U, size_t c> void prepend(const Vector<U, c>&);

        void remove(size_t position);
        void remove(size_t position, size_t length);
        void removeLast()
        {
            ASSERT(!isEmpty());
            shrink(size() - 1);
        }

        Vector(size_t size, const T& val)
            : m_size(size)
            , m_buffer(size)
        {
            if (begin())
                TypeOperations::uninitializedFill(begin(), end(), val);
        }

        void fill(const T&, size_t);
        void fill(const T& val) { fill(val, size()); }

        template<typename Iterator> void appendRange(Iterator start, Iterator end);

        T* releaseBuffer();

        void swap(Vector<T, inlineCapacity>& other)
        {
            std::swap(m_size, other.m_size);
            m_buffer.swap(other.m_buffer);
        }

    private:
        void expandCapacity(size_t newMinCapacity);
        const T* expandCapacity(size_t newMinCapacity, const T*);
        template<typename U> U* expandCapacity(size_t newMinCapacity, U*);

        size_t m_size;
        Buffer m_buffer;
    };
    template<typename T, size_t inlineCapacity>
    Vector<T, inlineCapacity>::Vector(const Vector& other)
        : m_size(other.size())
        , m_buffer(other.capacity())
    {
        if (begin())
            TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    }

    template<typename T, size_t inlineCapacity>
    template<size_t otherCapacity>
    Vector<T, inlineCapacity>::Vector(const Vector<T, otherCapacity>& other)
        : m_size(other.size())
        , m_buffer(other.capacity())
    {
        if (begin())
            TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    }
    template<typename T, size_t inlineCapacity>
    Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, inlineCapacity>& other)
    {
        if (&other == this)
            return *this;

        if (size() > other.size())
            shrink(other.size());
        else if (other.size() > capacity()) {
            clear();
            reserveCapacity(other.size());
            if (!begin())
                return *this;
        }

        std::copy(other.begin(), other.begin() + size(), begin());
        TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
        m_size = other.size();

        return *this;
    }
    template<typename T, size_t inlineCapacity>
    template<size_t otherCapacity>
    Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, otherCapacity>& other)
    {
        // Compare addresses as void* because the operand types differ whenever
        // otherCapacity != inlineCapacity.
        if (static_cast<const void*>(&other) == static_cast<const void*>(this))
            return *this;

        if (size() > other.size())
            shrink(other.size());
        else if (other.size() > capacity()) {
            clear();
            reserveCapacity(other.size());
            if (!begin())
                return *this;
        }

        std::copy(other.begin(), other.begin() + size(), begin());
        TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
        m_size = other.size();

        return *this;
    }
    template<typename T, size_t inlineCapacity>
    template<typename U>
    size_t Vector<T, inlineCapacity>::find(const U& value) const
    {
        for (size_t i = 0; i < size(); ++i) {
            if (at(i) == value)
                return i;
        }
        return notFound;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::fill(const T& val, size_t newSize)
    {
        if (size() > newSize)
            shrink(newSize);
        else if (newSize > capacity()) {
            clear();
            reserveCapacity(newSize);
            if (!begin())
                return;
        }

        std::fill(begin(), end(), val);
        TypeOperations::uninitializedFill(end(), begin() + newSize, val);
        m_size = newSize;
    }

    template<typename T, size_t inlineCapacity>
    template<typename Iterator>
    void Vector<T, inlineCapacity>::appendRange(Iterator start, Iterator end)
    {
        for (Iterator it = start; it != end; ++it)
            append(*it);
    }
    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity)
    {
        // Grow by roughly 25%, but never below 16 elements or the requested minimum.
        reserveCapacity(max(newMinCapacity, max(static_cast<size_t>(16), capacity() + capacity() / 4 + 1)));
    }

    template<typename T, size_t inlineCapacity>
    const T* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, const T* ptr)
    {
        if (ptr < begin() || ptr >= end()) {
            expandCapacity(newMinCapacity);
            return ptr;
        }
        // ptr points into this vector; recompute it in case the buffer moved.
        size_t index = ptr - begin();
        expandCapacity(newMinCapacity);
        return begin() + index;
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline U* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, U* ptr)
    {
        expandCapacity(newMinCapacity);
        return ptr;
    }
    template<typename T, size_t inlineCapacity>
    inline void Vector<T, inlineCapacity>::resize(size_t size)
    {
        if (size <= m_size)
            TypeOperations::destruct(begin() + size, end());
        else {
            if (size > capacity())
                expandCapacity(size);
            if (begin())
                TypeOperations::initialize(end(), begin() + size);
        }

        m_size = size;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::shrink(size_t size)
    {
        ASSERT(size <= m_size);
        TypeOperations::destruct(begin() + size, end());
        m_size = size;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::grow(size_t size)
    {
        ASSERT(size >= m_size);
        if (size > capacity())
            expandCapacity(size);
        if (begin())
            TypeOperations::initialize(end(), begin() + size);
        m_size = size;
    }
    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::reserveCapacity(size_t newCapacity)
    {
        if (newCapacity <= capacity())
            return;
        T* oldBuffer = begin();
        T* oldEnd = end();
        m_buffer.allocateBuffer(newCapacity);
        if (begin())
            TypeOperations::move(oldBuffer, oldEnd, begin());
        m_buffer.deallocateBuffer(oldBuffer);
    }

    template<typename T, size_t inlineCapacity>
    inline void Vector<T, inlineCapacity>::reserveInitialCapacity(size_t initialCapacity)
    {
        ASSERT(!m_size);
        ASSERT(capacity() == inlineCapacity);
        if (initialCapacity > inlineCapacity)
            m_buffer.allocateBuffer(initialCapacity);
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::shrinkCapacity(size_t newCapacity)
    {
        if (newCapacity >= capacity())
            return;

        if (newCapacity < size())
            shrink(newCapacity);

        T* oldBuffer = begin();
        if (newCapacity > 0) {
            T* oldEnd = end();
            m_buffer.allocateBuffer(newCapacity);
            if (begin() != oldBuffer)
                TypeOperations::move(oldBuffer, oldEnd, begin());
        }

        m_buffer.deallocateBuffer(oldBuffer);
        m_buffer.restoreInlineBufferIfNeeded();
    }
    // Templatizing these is better than just letting the conversion happen implicitly,
    // because for instance it allows a PassRefPtr to be appended to a RefPtr vector
    // without refcount thrash.
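    //
    // Illustrative sketch (not part of the original header): assuming the usual WTF
    // RefPtr/PassRefPtr smart pointers and a hypothetical createNode() factory that
    // returns PassRefPtr<Node>, the element is constructed directly from the
    // PassRefPtr, so ownership is transferred once instead of ref()/deref()-ing an
    // intermediate RefPtr:
    //
    //     Vector<RefPtr<Node> > nodes;
    //     nodes.append(createNode()); // RefPtr<Node> placement-constructed from PassRefPtr<Node>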
    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::append(const U* data, size_t dataSize)
    {
        size_t newSize = m_size + dataSize;
        if (newSize > capacity()) {
            data = expandCapacity(newSize, data);
            if (!begin())
                return;
        }
        if (newSize < m_size)
            CRASH();
        T* dest = end();
        for (size_t i = 0; i < dataSize; ++i)
            new (&dest[i]) T(data[i]);
        m_size = newSize;
    }
    template<typename T, size_t inlineCapacity> template<typename U>
    ALWAYS_INLINE void Vector<T, inlineCapacity>::append(const U& val)
    {
        const U* ptr = &val;
        if (size() == capacity()) {
            ptr = expandCapacity(size() + 1, ptr);
            if (!begin())
                return;
        }

#if COMPILER(MSVC7)
        // FIXME: MSVC7 generates compilation errors when trying to assign
        // a pointer to a Vector of its base class (i.e. can't downcast). So far
        // I've been unable to determine any logical reason for this, so I can
        // only assume it is a bug with the compiler. Casting is a bad solution,
        // however, because it subverts implicit conversions, so a better
        // one is needed.
        new (end()) T(static_cast<T>(*ptr));
#else
        new (end()) T(*ptr);
#endif
        ++m_size;
    }
    // This version of append saves a branch in the case where you know that the
    // vector's capacity is large enough for the append to succeed.
    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::uncheckedAppend(const U& val)
    {
        ASSERT(size() < capacity());
        const U* ptr = &val;
        new (end()) T(*ptr);
        ++m_size;
    }
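    // Illustrative usage sketch (not from the original header): uncheckedAppend is only
    // safe after enough capacity has been reserved, where count and source stand in for
    // whatever the caller is copying from:
    //
    //     Vector<int> values;
    //     values.reserveCapacity(count);          // one capacity check up front...
    //     for (size_t i = 0; i < count; ++i)
    //         values.uncheckedAppend(source[i]);  // ...and none per element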
    // This method should not be called append, a better name would be appendElements.
    // It could also be eliminated entirely, and call sites could just use
    // appendRange(val.begin(), val.end()).
    template<typename T, size_t inlineCapacity> template<size_t otherCapacity>
    inline void Vector<T, inlineCapacity>::append(const Vector<T, otherCapacity>& val)
    {
        append(val.begin(), val.size());
    }
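    // For illustration only: given two vectors of the same element type, this overload
    // makes the following two lines equivalent:
    //
    //     v.append(other);                           // this overload
    //     v.appendRange(other.begin(), other.end()); // the more general spelling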
    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::insert(size_t position, const U* data, size_t dataSize)
    {
        ASSERT(position <= size());
        size_t newSize = m_size + dataSize;
        if (newSize > capacity()) {
            data = expandCapacity(newSize, data);
            if (!begin())
                return;
        }
        if (newSize < m_size)
            CRASH();
        T* spot = begin() + position;
        TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
        for (size_t i = 0; i < dataSize; ++i)
            new (&spot[i]) T(data[i]);
        m_size = newSize;
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::insert(size_t position, const U& val)
    {
        ASSERT(position <= size());
        const U* data = &val;
        if (size() == capacity()) {
            data = expandCapacity(size() + 1, data);
            if (!begin())
                return;
        }
        T* spot = begin() + position;
        TypeOperations::moveOverlapping(spot, end(), spot + 1);
        new (spot) T(*data);
        ++m_size;
    }

    template<typename T, size_t inlineCapacity> template<typename U, size_t c>
    inline void Vector<T, inlineCapacity>::insert(size_t position, const Vector<U, c>& val)
    {
        insert(position, val.begin(), val.size());
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::prepend(const U* data, size_t dataSize)
    {
        insert(0, data, dataSize);
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::prepend(const U& val)
    {
        insert(0, val);
    }

    template<typename T, size_t inlineCapacity> template<typename U, size_t c>
    inline void Vector<T, inlineCapacity>::prepend(const Vector<U, c>& val)
    {
        insert(0, val.begin(), val.size());
    }
    template<typename T, size_t inlineCapacity>
    inline void Vector<T, inlineCapacity>::remove(size_t position)
    {
        ASSERT(position < size());
        T* spot = begin() + position;
        spot->~T();
        TypeOperations::moveOverlapping(spot + 1, end(), spot);
        --m_size;
    }

    template<typename T, size_t inlineCapacity>
    inline void Vector<T, inlineCapacity>::remove(size_t position, size_t length)
    {
        ASSERT(position < size());
        ASSERT(position + length <= size());
        T* beginSpot = begin() + position;
        T* endSpot = beginSpot + length;
        TypeOperations::destruct(beginSpot, endSpot);
        TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
        m_size -= length;
    }
    template<typename T, size_t inlineCapacity>
    inline T* Vector<T, inlineCapacity>::releaseBuffer()
    {
        T* buffer = m_buffer.releaseBuffer();
        if (inlineCapacity && !buffer && m_size) {
            // If the vector had some data, but no buffer to release,
            // that means it was using the inline buffer. In that case,
            // we create a brand new buffer so the caller always gets one.
            size_t bytes = m_size * sizeof(T);
            buffer = static_cast<T*>(fastMalloc(bytes));
            memcpy(buffer, data(), bytes);
        }
        m_size = 0;
        return buffer;
    }
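    // Illustrative note (not part of the original header): the caller takes ownership of
    // the returned memory, which in this header always comes from fastMalloc, so it must
    // eventually be released with fastFree (or adopted by something that will do so):
    //
    //     Vector<char> scratch;
    //     // ... fill scratch ...
    //     char* bytes = scratch.releaseBuffer(); // scratch is now empty
    //     // ... use bytes ...
    //     fastFree(bytes);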
    template<typename T, size_t inlineCapacity>
    void deleteAllValues(const Vector<T, inlineCapacity>& collection)
    {
        typedef typename Vector<T, inlineCapacity>::const_iterator iterator;
        iterator end = collection.end();
        for (iterator it = collection.begin(); it != end; ++it)
            delete *it;
    }

    template<typename T, size_t inlineCapacity>
    inline void swap(Vector<T, inlineCapacity>& a, Vector<T, inlineCapacity>& b)
    {
        a.swap(b);
    }

    template<typename T, size_t inlineCapacity>
    bool operator==(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
    {
        if (a.size() != b.size())
            return false;

        return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
    }

    template<typename T, size_t inlineCapacity>
    inline bool operator!=(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
    {
        return !(a == b);
    }
} // namespace WTF

using WTF::Vector;

#endif // WTF_Vector_h