// -*- mode: c++; c-basic-offset: 4 -*-
/*
 * This file is part of the KDE libraries
 * Copyright (C) 2005, 2006 Apple Computer, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef WTF_Vector_h
#define WTF_Vector_h

#include "Assertions.h"
#include "FastMalloc.h"
#include "VectorTraits.h"
#include <algorithm>
#include <limits>
#include <new>
#include <string.h>

namespace WTF {

// Trait-dispatched helpers: each operation below has a safe element-by-element
// form and a fast memset/memcpy/memcmp form, selected via VectorTraits<T>.

template <bool needsDestruction, typename T>
class VectorDestructor;

template<typename T>
struct VectorDestructor<false, T>
{
    static void destruct(T*, T*) {}
};

template<typename T>
struct VectorDestructor<true, T>
{
    static void destruct(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            cur->~T();
    }
};

template <bool needsInitialization, bool canInitializeWithMemset, typename T>
class VectorInitializer;

template<bool ignore, typename T>
struct VectorInitializer<false, ignore, T>
{
    static void initialize(T*, T*) {}
};

template<typename T>
struct VectorInitializer<true, false, T>
{
    static void initialize(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            new (cur) T;
    }
};

template<typename T>
struct VectorInitializer<true, true, T>
{
    static void initialize(T* begin, T* end)
    {
        memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
    }
};

template <bool canMoveWithMemcpy, typename T>
class VectorMover;

template<typename T>
struct VectorMover<false, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            new (dst) T(*src);
            src->~T();
            ++dst;
            ++src;
        }
    }
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        if (src > dst)
            move(src, srcEnd, dst);
        else {
            T* dstEnd = dst + (srcEnd - src);
            while (src != srcEnd) {
                --srcEnd;
                --dstEnd;
                new (dstEnd) T(*srcEnd);
                srcEnd->~T();
            }
        }
    }
};

template<typename T>
struct VectorMover<true, T>
{
    static void move(const T* src, const T* srcEnd, T* dst)
    {
        memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
};

template <bool canCopyWithMemcpy, typename T>
class VectorCopier;

template<typename T>
struct VectorCopier<false, T>
{
    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        while (src != srcEnd) {
            new (dst) T(*src);
            ++dst;
            ++src;
        }
    }
};

template<typename T>
struct VectorCopier<true, T>
{
    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
    }
};

template <bool canFillWithMemset, typename T>
class VectorFiller;

template<typename T>
struct VectorFiller<false, T>
{
    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        while (dst != dstEnd) {
            new (dst) T(val);
            ++dst;
        }
    }
};

template<typename T>
struct VectorFiller<true, T>
{
    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        ASSERT(sizeof(T) == sizeof(char));
        memset(dst, val, dstEnd - dst);
    }
};

template<bool canCompareWithMemcmp, typename T>
class VectorComparer;

template<typename T>
struct VectorComparer<false, T>
{
    static bool compare(const T* a, const T* b, size_t size)
    {
        for (size_t i = 0; i < size; ++i)
            if (a[i] != b[i])
                return false;
        return true;
    }
};

template<typename T>
struct VectorComparer<true, T>
{
    static bool compare(const T* a, const T* b, size_t size)
    {
        return memcmp(a, b, sizeof(T) * size) == 0;
    }
};

// Dispatches each operation to the specialization selected by VectorTraits<T>.
template<typename T>
struct VectorTypeOperations
{
    static void destruct(T* begin, T* end)
    {
        VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
    }

    static void initialize(T* begin, T* end)
    {
        VectorInitializer<VectorTraits<T>::needsInitialization, VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
    }

    static void move(const T* src, const T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
    }

    static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
    {
        VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
    }

    static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
    {
        VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
    }

    static void uninitializedFill(T* dst, T* dstEnd, const T& val)
    {
        VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
    }

    static bool compare(const T* a, const T* b, size_t size)
    {
        return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
    }
};

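// Illustrative sketch, not part of the original header: VectorTypeOperations
// just forwards to whichever specialization the VectorTraits<T> flags select.
// For a plain POD element type the copy/compare flags are expected to be true,
// so the calls below should reduce to memcpy/memcmp, while a type with a
// nontrivial copy constructor takes the element-by-element loops above.
//
//     int src[4] = { 1, 2, 3, 4 };
//     int dst[4];
//     VectorTypeOperations<int>::uninitializedCopy(src, src + 4, dst);  // memcpy for PODs
//     bool equal = VectorTypeOperations<int>::compare(src, dst, 4);     // memcmp; true here
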
template<typename T>
class VectorBufferBase {
public:
    void allocateBuffer(size_t newCapacity)
    {
        ASSERT(newCapacity >= m_capacity);
        m_capacity = newCapacity;
        if (newCapacity > std::numeric_limits<size_t>::max() / sizeof(T))
            CRASH();
        m_buffer = static_cast<T*>(fastMalloc(newCapacity * sizeof(T)));
    }

    void deallocateBuffer(T* bufferToDeallocate)
    {
        fastFree(bufferToDeallocate);
    }

    T* buffer() { return m_buffer; }
    const T* buffer() const { return m_buffer; }
    size_t capacity() const { return m_capacity; }

    T* releaseBuffer()
    {
        T* buffer = m_buffer;
        m_buffer = 0;
        m_capacity = 0;
        return buffer;
    }

protected:
    VectorBufferBase()
        : m_buffer(0)
        , m_capacity(0)
    {
    }

    VectorBufferBase(T* buffer, size_t capacity)
        : m_buffer(buffer)
        , m_capacity(capacity)
    {
    }

    ~VectorBufferBase()
    {
        // FIXME: It would be nice to find a way to ASSERT that m_buffer hasn't leaked here.
    }

    T* m_buffer;
    size_t m_capacity;
};

template<typename T, size_t inlineCapacity>
class VectorBuffer;

template<typename T>
class VectorBuffer<T, 0> : private VectorBufferBase<T> {
private:
    typedef VectorBufferBase<T> Base;
public:
    VectorBuffer()
    {
    }

    VectorBuffer(size_t capacity)
    {
        allocateBuffer(capacity);
    }

    ~VectorBuffer()
    {
        deallocateBuffer(buffer());
    }

    void swap(VectorBuffer<T, 0>& other)
    {
        std::swap(m_buffer, other.m_buffer);
        std::swap(m_capacity, other.m_capacity);
    }

    using Base::allocateBuffer;
    using Base::deallocateBuffer;

    using Base::buffer;
    using Base::capacity;

    using Base::releaseBuffer;
private:
    using Base::m_buffer;
    using Base::m_capacity;
};

template<typename T, size_t inlineCapacity>
class VectorBuffer : private VectorBufferBase<T> {
private:
    typedef VectorBufferBase<T> Base;
public:
    VectorBuffer()
        : Base(inlineBuffer(), inlineCapacity)
    {
    }

    VectorBuffer(size_t capacity)
        : Base(inlineBuffer(), inlineCapacity)
    {
        if (capacity > inlineCapacity)
            allocateBuffer(capacity);
    }

    ~VectorBuffer()
    {
        deallocateBuffer(buffer());
    }

    using Base::allocateBuffer;

    void deallocateBuffer(T* bufferToDeallocate)
    {
        if (bufferToDeallocate == inlineBuffer())
            return;
        Base::deallocateBuffer(bufferToDeallocate);
    }

    using Base::buffer;
    using Base::capacity;

    T* releaseBuffer()
    {
        if (buffer() == inlineBuffer())
            return 0;
        return Base::releaseBuffer();
    }

protected:
    using Base::m_buffer;
    using Base::m_capacity;

private:
    static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
    T* inlineBuffer() { return reinterpret_cast<T*>(&m_inlineBuffer); }

    // FIXME: Nothing guarantees this buffer is appropriately aligned to hold objects of type T.
    char m_inlineBuffer[m_inlineBufferSize];
};

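// Illustrative usage sketch, not part of the original header: a nonzero
// inlineCapacity places the initial storage inside the Vector object itself,
// so small vectors never touch the heap; allocateBuffer() is only reached
// once the inline capacity is exceeded.
//
//     Vector<int, 16> ids;           // elements 0..15 live in m_inlineBuffer
//     for (int i = 0; i < 16; ++i)
//         ids.append(i);             // no heap allocation yet
//     ids.append(16);                // spills to a fastMalloc'd buffer
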
template<typename T, size_t inlineCapacity = 0>
class Vector {
private:
    typedef VectorBuffer<T, inlineCapacity> Impl;
    typedef VectorTypeOperations<T> TypeOperations;

public:
    typedef T* iterator;
    typedef const T* const_iterator;

    Vector()
        : m_size(0)
    {
    }

    explicit Vector(size_t size)
        : m_size(size)
        , m_impl(size)
    {
        TypeOperations::initialize(begin(), end());
    }

    ~Vector()
    {
        clear();
    }

    Vector(const Vector&);
    template<size_t otherCapacity>
    Vector(const Vector<T, otherCapacity>&);

    Vector& operator=(const Vector&);
    template<size_t otherCapacity>
    Vector& operator=(const Vector<T, otherCapacity>&);

    size_t size() const { return m_size; }
    size_t capacity() const { return m_impl.capacity(); }
    bool isEmpty() const { return !size(); }

    T& at(size_t i)
    {
        ASSERT(i < size());
        return m_impl.buffer()[i];
    }
    const T& at(size_t i) const
    {
        ASSERT(i < size());
        return m_impl.buffer()[i];
    }

    T& operator[](size_t i) { return at(i); }
    const T& operator[](size_t i) const { return at(i); }

    T* data() { return m_impl.buffer(); }
    const T* data() const { return m_impl.buffer(); }

    iterator begin() { return data(); }
    iterator end() { return begin() + m_size; }
    const_iterator begin() const { return data(); }
    const_iterator end() const { return begin() + m_size; }

    T& first() { return at(0); }
    const T& first() const { return at(0); }
    T& last() { return at(size() - 1); }
    const T& last() const { return at(size() - 1); }

    void shrink(size_t size);
    void resize(size_t size);
    void reserveCapacity(size_t newCapacity);

    void clear() { shrink(0); }

    template<typename U> void append(const U*, size_t);
    template<typename U> void append(const U&);
    template<typename U> void uncheckedAppend(const U& val);
    template<typename U, size_t c> void append(const Vector<U, c>&);

    template<typename U> void insert(size_t position, const U*, size_t);
    template<typename U> void insert(size_t position, const U&);
    template<typename U, size_t c> void insert(size_t position, const Vector<U, c>&);

    template<typename U> void prepend(const U*, size_t);
    template<typename U> void prepend(const U&);
    template<typename U, size_t c> void prepend(const Vector<U, c>&);

    void remove(size_t position);

    void removeLast()
    {
        ASSERT(!isEmpty());
        shrink(size() - 1);
    }

    Vector(size_t size, const T& val)
        : m_size(size)
        , m_impl(size)
    {
        TypeOperations::uninitializedFill(begin(), end(), val);
    }

    void fill(const T&, size_t);
    void fill(const T& val) { fill(val, size()); }

    template<typename Iterator> void appendRange(Iterator start, Iterator end);

    T* releaseBuffer();

    void swap(Vector<T, inlineCapacity>& other)
    {
        std::swap(m_size, other.m_size);
        m_impl.swap(other.m_impl);
    }

private:
    void expandCapacity(size_t newMinCapacity);
    const T* expandCapacity(size_t newMinCapacity, const T*);
    template<typename U> U* expandCapacity(size_t newMinCapacity, U*);

    size_t m_size;
    Impl m_impl;
};

template<typename T, size_t inlineCapacity>
Vector<T, inlineCapacity>::Vector(const Vector& other)
    : m_size(other.size())
    , m_impl(other.capacity())
{
    TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
}

template<typename T, size_t inlineCapacity>
template<size_t otherCapacity>
Vector<T, inlineCapacity>::Vector(const Vector<T, otherCapacity>& other)
    : m_size(other.size())
    , m_impl(other.capacity())
{
    TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
}

template<typename T, size_t inlineCapacity>
Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, inlineCapacity>& other)
{
    if (&other == this)
        return *this;

    if (size() > other.size())
        shrink(other.size());
    else if (other.size() > capacity()) {
        clear();
        reserveCapacity(other.size());
    }

    std::copy(other.begin(), other.begin() + size(), begin());
    TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    m_size = other.size();

    return *this;
}

template<typename T, size_t inlineCapacity>
template<size_t otherCapacity>
Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, otherCapacity>& other)
{
    // Compare as raw addresses so the self-assignment check stays well-formed
    // even though the two Vector instantiations are distinct types.
    if (static_cast<const void*>(&other) == static_cast<const void*>(this))
        return *this;

    if (size() > other.size())
        shrink(other.size());
    else if (other.size() > capacity()) {
        clear();
        reserveCapacity(other.size());
    }

    std::copy(other.begin(), other.begin() + size(), begin());
    TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
    m_size = other.size();

    return *this;
}

template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::fill(const T& val, size_t newSize)
{
    if (size() > newSize)
        shrink(newSize);
    else if (newSize > capacity()) {
        clear();
        reserveCapacity(newSize);
    }

    std::fill(begin(), end(), val);
    TypeOperations::uninitializedFill(end(), begin() + newSize, val);
    m_size = newSize;
}

template<typename T, size_t inlineCapacity>
template<typename Iterator>
void Vector<T, inlineCapacity>::appendRange(Iterator start, Iterator end)
{
    for (Iterator it = start; it != end; ++it)
        append(*it);
}

template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity)
{
    reserveCapacity(std::max(newMinCapacity, std::max(static_cast<size_t>(16), capacity() + capacity() / 4 + 1)));
}

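// Worked example of the growth policy above (illustrative, not part of the
// original header): capacity is at least 16 and otherwise grows by about 25%
// plus one, so a long run of append() calls triggers only O(log n)
// reallocations. Starting from an empty heap-backed vector, successive
// expansions request capacities 16, 21, 27, 34, 43, ... until newMinCapacity
// is reached.
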
template<typename T, size_t inlineCapacity>
const T* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, const T* ptr)
{
    // If ptr points into the vector's own storage, reallocation would
    // invalidate it, so return the equivalent pointer into the new buffer.
    if (ptr < begin() || ptr >= end()) {
        expandCapacity(newMinCapacity);
        return ptr;
    }
    size_t index = ptr - begin();
    expandCapacity(newMinCapacity);
    return begin() + index;
}

template<typename T, size_t inlineCapacity> template<typename U>
inline U* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, U* ptr)
{
    expandCapacity(newMinCapacity);
    return ptr;
}

template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::resize(size_t size)
{
    if (size <= m_size)
        TypeOperations::destruct(begin() + size, end());
    else {
        if (size > capacity())
            expandCapacity(size);
        TypeOperations::initialize(end(), begin() + size);
    }

    m_size = size;
}

template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::shrink(size_t size)
{
    ASSERT(size <= m_size);
    TypeOperations::destruct(begin() + size, end());
    m_size = size;
}

template<typename T, size_t inlineCapacity>
void Vector<T, inlineCapacity>::reserveCapacity(size_t newCapacity)
{
    if (newCapacity < capacity())
        return;
    T* oldBuffer = begin();
    T* oldEnd = end();
    m_impl.allocateBuffer(newCapacity);
    TypeOperations::move(oldBuffer, oldEnd, begin());
    m_impl.deallocateBuffer(oldBuffer);
}

// Templatizing these is better than just letting the conversion happen implicitly,
// because for instance it allows a PassRefPtr to be appended to a RefPtr vector
// without refcount thrash.

template<typename T, size_t inlineCapacity> template<typename U>
void Vector<T, inlineCapacity>::append(const U* data, size_t dataSize)
{
    size_t newSize = m_size + dataSize;
    if (newSize > capacity())
        data = expandCapacity(newSize, data);
    T* dest = end();
    for (size_t i = 0; i < dataSize; ++i)
        new (&dest[i]) T(data[i]);
    m_size = newSize;
}

template<typename T, size_t inlineCapacity> template<typename U>
inline void Vector<T, inlineCapacity>::append(const U& val)
{
    const U* ptr = &val;
    if (size() == capacity())
        ptr = expandCapacity(size() + 1, ptr);
    new (end()) T(*ptr);
    ++m_size;
}

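// Illustrative sketch of the "refcount thrash" comment above, assuming WTF's
// RefPtr/PassRefPtr smart pointers (declared elsewhere); Widget and
// Widget::create() are hypothetical. Because append() is templatized on U,
// the stored RefPtr is constructed directly from the PassRefPtr argument,
// transferring the reference rather than ref'ing and deref'ing.
//
//     Vector<RefPtr<Widget> > widgets;
//     PassRefPtr<Widget> w = Widget::create();
//     widgets.append(w);    // RefPtr<Widget> adopts the reference; no extra ref/deref pair
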
// This version of append saves a branch in the case where you know that the
// vector's capacity is large enough for the append to succeed.

template<typename T, size_t inlineCapacity> template<typename U>
inline void Vector<T, inlineCapacity>::uncheckedAppend(const U& val)
{
    ASSERT(size() < capacity());
    const U* ptr = &val;
    new (end()) T(*ptr);
    ++m_size;
}

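// Illustrative usage sketch, not part of the original header: when the final
// size is known, reserveCapacity() plus uncheckedAppend() hoists the capacity
// check out of the loop (count and nextSample() are hypothetical).
//
//     Vector<double> samples;
//     samples.reserveCapacity(count);
//     for (size_t i = 0; i < count; ++i)
//         samples.uncheckedAppend(nextSample(i));
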
template<typename T, size_t inlineCapacity> template<typename U, size_t c>
inline void Vector<T, inlineCapacity>::append(const Vector<U, c>& val)
{
    append(val.begin(), val.size());
}

template<typename T, size_t inlineCapacity> template<typename U>
void Vector<T, inlineCapacity>::insert(size_t position, const U* data, size_t dataSize)
{
    ASSERT(position <= size());
    size_t newSize = m_size + dataSize;
    if (newSize > capacity())
        data = expandCapacity(newSize, data);
    T* spot = begin() + position;
    TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
    for (size_t i = 0; i < dataSize; ++i)
        new (&spot[i]) T(data[i]);
    m_size = newSize;
}

template<typename T, size_t inlineCapacity> template<typename U>
inline void Vector<T, inlineCapacity>::insert(size_t position, const U& val)
{
    ASSERT(position <= size());
    const U* data = &val;
    if (size() == capacity())
        data = expandCapacity(size() + 1, data);
    T* spot = begin() + position;
    TypeOperations::moveOverlapping(spot, end(), spot + 1);
    new (spot) T(*data);
    ++m_size;
}

template<typename T, size_t inlineCapacity> template<typename U, size_t c>
inline void Vector<T, inlineCapacity>::insert(size_t position, const Vector<U, c>& val)
{
    insert(position, val.begin(), val.size());
}

template<typename T, size_t inlineCapacity> template<typename U>
void Vector<T, inlineCapacity>::prepend(const U* data, size_t dataSize)
{
    insert(0, data, dataSize);
}

template<typename T, size_t inlineCapacity> template<typename U>
inline void Vector<T, inlineCapacity>::prepend(const U& val)
{
    insert(0, val);
}

template<typename T, size_t inlineCapacity> template<typename U, size_t c>
inline void Vector<T, inlineCapacity>::prepend(const Vector<U, c>& val)
{
    insert(0, val.begin(), val.size());
}

template<typename T, size_t inlineCapacity>
inline void Vector<T, inlineCapacity>::remove(size_t position)
{
    ASSERT(position < size());
    T* spot = begin() + position;
    spot->~T();
    TypeOperations::moveOverlapping(spot + 1, end(), spot);
    --m_size;
}

template<typename T, size_t inlineCapacity>
T* Vector<T, inlineCapacity>::releaseBuffer()
{
    T* buffer = m_impl.releaseBuffer();
    if (!buffer && m_size) {
        // If the vector had some data, but no buffer to release,
        // that means it was using the inline buffer. In that case,
        // we create a brand new buffer so the caller always gets one.
        size_t bytes = m_size * sizeof(T);
        buffer = static_cast<T*>(fastMalloc(bytes));
        memcpy(buffer, data(), bytes);
    }
    m_size = 0;
    return buffer;
}

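// Illustrative usage sketch, not part of the original header: releaseBuffer()
// hands the caller ownership of the storage (copying out of the inline buffer
// when necessary) and leaves the vector empty; the caller must eventually
// release the returned pointer with fastFree(). bytes and byteCount are
// hypothetical inputs here.
//
//     Vector<char> encoded;
//     encoded.append(bytes, byteCount);
//     char* raw = encoded.releaseBuffer();   // encoded is now empty
//     // ... use raw ..., then: fastFree(raw);
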
template<typename T, size_t inlineCapacity>
void deleteAllValues(const Vector<T, inlineCapacity>& collection)
{
    typedef typename Vector<T, inlineCapacity>::const_iterator iterator;
    iterator end = collection.end();
    for (iterator it = collection.begin(); it != end; ++it)
        delete *it;
}

template<typename T, size_t inlineCapacity>
inline void swap(Vector<T, inlineCapacity>& a, Vector<T, inlineCapacity>& b)
{
    a.swap(b);
}

template<typename T, size_t inlineCapacity>
bool operator==(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
{
    if (a.size() != b.size())
        return false;

    return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
}

template<typename T, size_t inlineCapacity>
inline bool operator!=(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
{
    return !(a == b);
}

} // namespace WTF

using WTF::Vector;

#endif // WTF_Vector_h