// -*- mode: c++; c-basic-offset: 4 -*-
/*
 * This file is part of the KDE libraries
 * Copyright (C) 2005, 2006 Apple Computer, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef WTF_Vector_h
#define WTF_Vector_h

#include "Assertions.h"
#include "FastMalloc.h"
#include "VectorTraits.h"
#include <algorithm>
#include <limits>
#include <new>
#include <stdlib.h>
#include <string.h>
#include <utility>

namespace WTF {

    using std::max;

    template <bool needsDestruction, typename T>
    class VectorDestructor;

    template<typename T>
    struct VectorDestructor<false, T>
    {
        static void destruct(T*, T*) {}
    };

    template<typename T>
    struct VectorDestructor<true, T>
    {
        static void destruct(T* begin, T* end)
        {
            for (T* cur = begin; cur != end; ++cur)
                cur->~T();
        }
    };

    template <bool needsInitialization, bool canInitializeWithMemset, typename T>
    class VectorInitializer;

    template<bool ignore, typename T>
    struct VectorInitializer<false, ignore, T>
    {
        static void initialize(T*, T*) {}
    };

    template<typename T>
    struct VectorInitializer<true, false, T>
    {
        static void initialize(T* begin, T* end)
        {
            for (T* cur = begin; cur != end; ++cur)
                new (cur) T;
        }
    };

    template<typename T>
    struct VectorInitializer<true, true, T>
    {
        static void initialize(T* begin, T* end)
        {
            memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
        }
    };

    template <bool canMoveWithMemcpy, typename T>
    class VectorMover;

    template<typename T>
    struct VectorMover<false, T>
    {
        static void move(const T* src, const T* srcEnd, T* dst)
        {
            while (src != srcEnd) {
                new (dst) T(*src);
                src->~T();
                ++dst;
                ++src;
            }
        }
        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            if (src > dst)
                move(src, srcEnd, dst);
            else {
                T* dstEnd = dst + (srcEnd - src);
                while (src != srcEnd) {
                    // Move backwards so source elements are not overwritten before they are copied.
                    --srcEnd;
                    --dstEnd;
                    new (dstEnd) T(*srcEnd);
                    srcEnd->~T();
                }
            }
        }
    };

    template<typename T>
    struct VectorMover<true, T>
    {
        static void move(const T* src, const T* srcEnd, T* dst)
        {
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
    };

    template <bool canCopyWithMemcpy, typename T>
    class VectorCopier;

    template<typename T>
    struct VectorCopier<false, T>
    {
        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            while (src != srcEnd) {
                new (dst) T(*src);
                ++dst;
                ++src;
            }
        }
    };

    template<typename T>
    struct VectorCopier<true, T>
    {
        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
        }
    };

    template <bool canFillWithMemset, typename T>
    class VectorFiller;

    template<typename T>
    struct VectorFiller<false, T>
    {
        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            while (dst != dstEnd) {
                new (dst) T(val);
                ++dst;
            }
        }
    };

    template<typename T>
    struct VectorFiller<true, T>
    {
        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            // memset can only be used to fill single-byte element types.
            ASSERT(sizeof(T) == sizeof(char));
            memset(dst, val, dstEnd - dst);
        }
    };

    template<bool canCompareWithMemcmp, typename T>
    class VectorComparer;

    template<typename T>
    struct VectorComparer<false, T>
    {
        static bool compare(const T* a, const T* b, size_t size)
        {
            for (size_t i = 0; i < size; ++i)
                if (a[i] != b[i])
                    return false;
            return true;
        }
    };

    template<typename T>
    struct VectorComparer<true, T>
    {
        static bool compare(const T* a, const T* b, size_t size)
        {
            return memcmp(a, b, sizeof(T) * size) == 0;
        }
    };

    // Dispatches each operation to either the memcpy/memset/memcmp-based or the
    // element-wise implementation, as selected by VectorTraits<T>.
    template<typename T>
    struct VectorTypeOperations
    {
        static void destruct(T* begin, T* end)
        {
            VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
        }

        static void initialize(T* begin, T* end)
        {
            VectorInitializer<VectorTraits<T>::needsInitialization, VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
        }

        static void move(const T* src, const T* srcEnd, T* dst)
        {
            VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
        }

        static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
        {
            VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
        }

        static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
        {
            VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
        }

        static void uninitializedFill(T* dst, T* dstEnd, const T& val)
        {
            VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
        }

        static bool compare(const T* a, const T* b, size_t size)
        {
            return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
        }
    };

    template<typename T>
    class VectorBufferBase {
    public:
        void allocateBuffer(size_t newCapacity)
        {
            ASSERT(newCapacity >= m_capacity);
            m_capacity = newCapacity;
            if (newCapacity > std::numeric_limits<size_t>::max() / sizeof(T))
                CRASH();
            m_buffer = static_cast<T*>(fastMalloc(newCapacity * sizeof(T)));
        }

        void deallocateBuffer(T* bufferToDeallocate)
        {
            fastFree(bufferToDeallocate);
        }

        T* buffer() { return m_buffer; }
        const T* buffer() const { return m_buffer; }
        size_t capacity() const { return m_capacity; }

        T* releaseBuffer()
        {
            T* buffer = m_buffer;
            m_buffer = 0;
            m_capacity = 0;
            return buffer;
        }

    protected:
        VectorBufferBase()
            : m_buffer(0)
            , m_capacity(0)
        {
        }

        VectorBufferBase(T* buffer, size_t capacity)
            : m_buffer(buffer)
            , m_capacity(capacity)
        {
        }

        ~VectorBufferBase()
        {
            // FIXME: It would be nice to find a way to ASSERT that m_buffer hasn't leaked here.
        }

        T* m_buffer;
        size_t m_capacity;
    };

    template<typename T, size_t inlineCapacity>
    class VectorBuffer;

    template<typename T>
    class VectorBuffer<T, 0> : private VectorBufferBase<T> {
    public:
        typedef VectorBufferBase<T> Base;

        VectorBuffer()
        {
        }

        VectorBuffer(size_t capacity)
        {
            allocateBuffer(capacity);
        }

        ~VectorBuffer()
        {
            deallocateBuffer(buffer());
        }

        void swap(VectorBuffer<T, 0>& other)
        {
            std::swap(m_buffer, other.m_buffer);
            std::swap(m_capacity, other.m_capacity);
        }

        using Base::allocateBuffer;
        using Base::deallocateBuffer;

        using Base::buffer;
        using Base::capacity;

        using Base::releaseBuffer;

    protected:
        using Base::m_buffer;
        using Base::m_capacity;
    };

    template<typename T, size_t inlineCapacity>
    class VectorBuffer : private VectorBufferBase<T> {
    public:
        typedef VectorBufferBase<T> Base;

        VectorBuffer()
            : Base(inlineBuffer(), inlineCapacity)
        {
        }

        VectorBuffer(size_t capacity)
            : Base(inlineBuffer(), inlineCapacity)
        {
            if (capacity > inlineCapacity)
                allocateBuffer(capacity);
        }

        ~VectorBuffer()
        {
            deallocateBuffer(buffer());
        }

        using Base::allocateBuffer;

        void deallocateBuffer(T* bufferToDeallocate)
        {
            if (bufferToDeallocate == inlineBuffer())
                return;
            Base::deallocateBuffer(bufferToDeallocate);
        }

        using Base::buffer;
        using Base::capacity;

        T* releaseBuffer()
        {
            // The inline buffer cannot be given away; callers get 0 instead.
            if (buffer() == inlineBuffer())
                return 0;
            return Base::releaseBuffer();
        }

    protected:
        using Base::m_buffer;
        using Base::m_capacity;

    private:
        static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
        T* inlineBuffer() { return reinterpret_cast<T*>(&m_inlineBuffer); }

        // FIXME: Nothing guarantees this buffer is appropriately aligned to hold objects of type T.
        char m_inlineBuffer[m_inlineBufferSize];
    };

    template<typename T, size_t inlineCapacity = 0>
    class Vector {
    private:
        typedef VectorBuffer<T, inlineCapacity> Impl;
        typedef VectorTypeOperations<T> TypeOperations;

    public:
        typedef T* iterator;
        typedef const T* const_iterator;

        Vector()
            : m_size(0)
        {
        }

        explicit Vector(size_t size)
            : m_size(size)
            , m_impl(size)
        {
            TypeOperations::initialize(begin(), end());
        }

        ~Vector()
        {
            clear();
        }

        Vector(const Vector&);
        template<size_t otherCapacity>
        Vector(const Vector<T, otherCapacity>&);

        Vector& operator=(const Vector&);
        template<size_t otherCapacity>
        Vector& operator=(const Vector<T, otherCapacity>&);

        size_t size() const { return m_size; }
        size_t capacity() const { return m_impl.capacity(); }
        bool isEmpty() const { return !size(); }

        T& at(size_t i)
        {
            ASSERT(i < size());
            return m_impl.buffer()[i];
        }
        const T& at(size_t i) const
        {
            ASSERT(i < size());
            return m_impl.buffer()[i];
        }

        T& operator[](size_t i) { return at(i); }
        const T& operator[](size_t i) const { return at(i); }

        T* data() { return m_impl.buffer(); }
        const T* data() const { return m_impl.buffer(); }

        iterator begin() { return data(); }
        iterator end() { return begin() + m_size; }
        const_iterator begin() const { return data(); }
        const_iterator end() const { return begin() + m_size; }

        T& first() { return at(0); }
        const T& first() const { return at(0); }
        T& last() { return at(size() - 1); }
        const T& last() const { return at(size() - 1); }

        void shrink(size_t size);
        void resize(size_t size);
        void reserveCapacity(size_t newCapacity);

        void clear() { shrink(0); }

        template<typename U> void append(const U*, size_t);
        template<typename U> void append(const U&);
        template<typename U> void uncheckedAppend(const U& val);
        template<typename U, size_t c> void append(const Vector<U, c>&);

        template<typename U> void insert(size_t position, const U*, size_t);
        template<typename U> void insert(size_t position, const U&);
        template<typename U, size_t c> void insert(size_t position, const Vector<U, c>&);

        template<typename U> void prepend(const U*, size_t);
        template<typename U> void prepend(const U&);
        template<typename U, size_t c> void prepend(const Vector<U, c>&);

        void remove(size_t position);

        Vector(size_t size, const T& val)
            : m_size(size)
            , m_impl(size)
        {
            TypeOperations::uninitializedFill(begin(), end(), val);
        }

        void fill(const T&, size_t);
        void fill(const T& val) { fill(val, size()); }

        template<typename Iterator> void appendRange(Iterator start, Iterator end);

        T* releaseBuffer();

        void swap(Vector<T, inlineCapacity>& other)
        {
            std::swap(m_size, other.m_size);
            m_impl.swap(other.m_impl);
        }

    private:
        void expandCapacity(size_t newMinCapacity);
        const T* expandCapacity(size_t newMinCapacity, const T*);
        template<typename U> U* expandCapacity(size_t newMinCapacity, U*);

        size_t m_size;
        Impl m_impl;
    };

    template<typename T, size_t inlineCapacity>
    Vector<T, inlineCapacity>::Vector(const Vector& other)
        : m_size(other.size())
        , m_impl(other.capacity())
    {
        TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    }

    template<typename T, size_t inlineCapacity>
    template<size_t otherCapacity>
    Vector<T, inlineCapacity>::Vector(const Vector<T, otherCapacity>& other)
        : m_size(other.size())
        , m_impl(other.capacity())
    {
        TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
    }

    template<typename T, size_t inlineCapacity>
    Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, inlineCapacity>& other)
    {
        if (&other == this)
            return *this;

        if (size() > other.size())
            shrink(other.size());
        else if (other.size() > capacity()) {
            clear();
            reserveCapacity(other.size());
        }

        std::copy(other.begin(), other.begin() + size(), begin());
        TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
        m_size = other.size();

        return *this;
    }

    template<typename T, size_t inlineCapacity>
    template<size_t otherCapacity>
    Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, otherCapacity>& other)
    {
        if (size() > other.size())
            shrink(other.size());
        else if (other.size() > capacity()) {
            clear();
            reserveCapacity(other.size());
        }

        std::copy(other.begin(), other.begin() + size(), begin());
        TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
        m_size = other.size();

        return *this;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::fill(const T& val, size_t newSize)
    {
        if (size() > newSize)
            shrink(newSize);
        else if (newSize > capacity()) {
            clear();
            reserveCapacity(newSize);
        }

        std::fill(begin(), end(), val);
        TypeOperations::uninitializedFill(end(), begin() + newSize, val);
        m_size = newSize;
    }

    template<typename T, size_t inlineCapacity>
    template<typename Iterator>
    void Vector<T, inlineCapacity>::appendRange(Iterator start, Iterator end)
    {
        for (Iterator it = start; it != end; ++it)
            append(*it);
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity)
    {
        // Grow by at least 25%, and to at least a capacity of 16.
        reserveCapacity(max(newMinCapacity, max(static_cast<size_t>(16), capacity() + capacity() / 4 + 1)));
    }

    template<typename T, size_t inlineCapacity>
    const T* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, const T* ptr)
    {
        if (ptr < begin() || ptr >= end()) {
            expandCapacity(newMinCapacity);
            return ptr;
        }
        // ptr points into this vector; recompute it against the reallocated buffer.
        size_t index = ptr - begin();
        expandCapacity(newMinCapacity);
        return begin() + index;
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline U* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, U* ptr)
    {
        expandCapacity(newMinCapacity);
        return ptr;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::resize(size_t size)
    {
        if (size <= m_size)
            TypeOperations::destruct(begin() + size, end());
        else {
            if (size > capacity())
                expandCapacity(size);
            TypeOperations::initialize(end(), begin() + size);
        }

        m_size = size;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::shrink(size_t size)
    {
        ASSERT(size <= m_size);
        TypeOperations::destruct(begin() + size, end());
        m_size = size;
    }

    template<typename T, size_t inlineCapacity>
    void Vector<T, inlineCapacity>::reserveCapacity(size_t newCapacity)
    {
        if (newCapacity < capacity())
            return;
        T* oldBuffer = begin();
        T* oldEnd = end();
        m_impl.allocateBuffer(newCapacity);
        TypeOperations::move(oldBuffer, oldEnd, begin());
        m_impl.deallocateBuffer(oldBuffer);
    }

    // Templatizing these is better than just letting the conversion happen implicitly,
    // because for instance it allows a PassRefPtr to be appended to a RefPtr vector
    // without refcount thrash.
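    //
    // For illustration only (RefPtr and PassRefPtr come from the surrounding WTF code,
    // and Node/createChildNode below are hypothetical, not defined in this header):
    //
    //     Vector<RefPtr<Node> > children;
    //     children.append(createChildNode());   // createChildNode() returns PassRefPtr<Node>
    //
    // Because append() is templatized on U, the RefPtr element is constructed directly
    // from the PassRefPtr argument, transferring ownership instead of churning the refcount.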

    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::append(const U* data, size_t dataSize)
    {
        size_t newSize = m_size + dataSize;
        if (newSize > capacity())
            data = expandCapacity(newSize, data);
        T* dest = end();
        for (size_t i = 0; i < dataSize; ++i)
            new (&dest[i]) T(data[i]);
        m_size = newSize;
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::append(const U& val)
    {
        const U* ptr = &val;
        if (size() == capacity())
            ptr = expandCapacity(size() + 1, ptr);
        new (end()) T(*ptr);
        ++m_size;
    }

    // This version of append saves a branch in the case where you know that the
    // vector's capacity is large enough for the append to succeed.
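    //
    // A typical pattern, sketched here with an assumed element count n, is to reserve
    // the full capacity up front and then fill without per-append capacity checks:
    //
    //     Vector<size_t> squares;
    //     squares.reserveCapacity(n);
    //     for (size_t i = 0; i < n; ++i)
    //         squares.uncheckedAppend(i * i);
    //
    // The ASSERT below enforces the caller's side of that bargain in debug builds.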

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::uncheckedAppend(const U& val)
    {
        ASSERT(size() < capacity());
        const U* ptr = &val;
        new (end()) T(*ptr);
        ++m_size;
    }

    template<typename T, size_t inlineCapacity> template<typename U, size_t c>
    inline void Vector<T, inlineCapacity>::append(const Vector<U, c>& val)
    {
        append(val.begin(), val.size());
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::insert(size_t position, const U* data, size_t dataSize)
    {
        ASSERT(position <= size());
        size_t newSize = m_size + dataSize;
        if (newSize > capacity())
            data = expandCapacity(newSize, data);
        T* spot = begin() + position;
        TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
        for (size_t i = 0; i < dataSize; ++i)
            new (&spot[i]) T(data[i]);
        m_size = newSize;
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::insert(size_t position, const U& val)
    {
        ASSERT(position <= size());
        const U* data = &val;
        if (size() == capacity())
            data = expandCapacity(size() + 1, data);
        T* spot = begin() + position;
        TypeOperations::moveOverlapping(spot, end(), spot + 1);
        new (spot) T(*data);
        ++m_size;
    }

    template<typename T, size_t inlineCapacity> template<typename U, size_t c>
    inline void Vector<T, inlineCapacity>::insert(size_t position, const Vector<U, c>& val)
    {
        insert(position, val.begin(), val.size());
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    void Vector<T, inlineCapacity>::prepend(const U* data, size_t dataSize)
    {
        insert(0, data, dataSize);
    }

    template<typename T, size_t inlineCapacity> template<typename U>
    inline void Vector<T, inlineCapacity>::prepend(const U& val)
    {
        insert(0, val);
    }

    template<typename T, size_t inlineCapacity> template<typename U, size_t c>
    inline void Vector<T, inlineCapacity>::prepend(const Vector<U, c>& val)
    {
        insert(0, val.begin(), val.size());
    }

    template<typename T, size_t inlineCapacity>
    inline void Vector<T, inlineCapacity>::remove(size_t position)
    {
        ASSERT(position < size());
        T* spot = begin() + position;
        spot->~T();
        TypeOperations::moveOverlapping(spot + 1, end(), spot);
        --m_size;
    }

    template<typename T, size_t inlineCapacity>
    T* Vector<T, inlineCapacity>::releaseBuffer()
    {
        T* buffer = m_impl.releaseBuffer();
        if (!buffer && m_size) {
            // If the vector had some data, but no buffer to release,
            // that means it was using the inline buffer. In that case,
            // we create a brand new buffer so the caller always gets one.
            size_t bytes = m_size * sizeof(T);
            buffer = static_cast<T*>(fastMalloc(bytes));
            memcpy(buffer, data(), bytes);
        }
        m_size = 0;
        return buffer;
    }

    template<typename T, size_t inlineCapacity>
    void deleteAllValues(const Vector<T, inlineCapacity>& collection)
    {
        typedef typename Vector<T, inlineCapacity>::const_iterator iterator;
        iterator end = collection.end();
        for (iterator it = collection.begin(); it != end; ++it)
            delete *it;
    }

    template<typename T, size_t inlineCapacity>
    inline void swap(Vector<T, inlineCapacity>& a, Vector<T, inlineCapacity>& b)
    {
        a.swap(b);
    }

    template<typename T, size_t inlineCapacity>
    bool operator==(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
    {
        if (a.size() != b.size())
            return false;

        return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
    }

    template<typename T, size_t inlineCapacity>
    inline bool operator!=(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
    {
        return !(a == b);
    }

} // namespace WTF

using WTF::Vector;

#endif // WTF_Vector_h