2 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public License
15 * along with this library; see the file COPYING.LIB. If not, write to
16 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
24 #include "Alignment.h"
25 #include "FastAllocBase.h"
26 #include "Noncopyable.h"
28 #include "StdLibExtras.h"
29 #include "ValueCheck.h"
30 #include "VectorTraits.h"
35 #include <QDataStream>
// VectorDestructor: compile-time dispatch on whether T needs its destructor run
// when a range of elements is torn down.
// NOTE(review): this extract is missing lines (template<typename T> headers,
// braces, and the per-element destructor call) — verify against the full file.
43 template <bool needsDestruction, typename T>
44 struct VectorDestructor;
// Trivially destructible T: destroying a range is a no-op.
47 struct VectorDestructor<false, T>
49 static void destruct(T*, T*) {}
// Nontrivially destructible T: walk [begin, end) and destroy each element.
53 struct VectorDestructor<true, T>
55 static void destruct(T* begin, T* end)
57 for (T* cur = begin; cur != end; ++cur)
// VectorInitializer: dispatch on whether newly exposed elements must be
// initialized, and whether zero-initialization can be a single memset.
62 template <bool needsInitialization, bool canInitializeWithMemset, typename T>
63 struct VectorInitializer;
// No initialization required: leave the raw storage untouched.
65 template<bool ignore, typename T>
66 struct VectorInitializer<false, ignore, T>
68 static void initialize(T*, T*) {}
// Needs initialization but memset is not valid for T: construct each element.
72 struct VectorInitializer<true, false, T>
74 static void initialize(T* begin, T* end)
76 for (T* cur = begin; cur != end; ++cur)
// Needs initialization and memset is valid: zero the whole byte range at once.
82 struct VectorInitializer<true, true, T>
84 static void initialize(T* begin, T* end)
86 memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
// VectorMover: relocates a range of elements into a new buffer. Uses raw byte
// copies when VectorTraits<T>::canMoveWithMemcpy allows it; otherwise
// copy-constructs into the destination and destroys the source element.
90 template <bool canMoveWithMemcpy, typename T>
// Element-wise move (non-trivially movable T).
94 struct VectorMover<false, T>
96 static void move(const T* src, const T* srcEnd, T* dst)
98 while (src != srcEnd) {
100 #if COMPILER(SUNCC) && __SUNPRO_CC <= 0x590
101 const_cast<T*>(src)->~T(); // Work around obscure SunCC 12 compiler bug.
// Overlapping ranges: one branch delegates to the forward move(); the other
// constructs from the back of the range so not-yet-moved source elements are
// not overwritten first. (The direction test is elided in this extract.)
109 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
112 move(src, srcEnd, dst);
114 T* dstEnd = dst + (srcEnd - src);
115 while (src != srcEnd) {
118 new (dstEnd) T(*srcEnd);
// Trivially movable T: plain byte copies.
126 struct VectorMover<true, T>
128 static void move(const T* src, const T* srcEnd, T* dst)
130 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
// memmove is overlap-safe in either direction.
132 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
134 memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
// VectorCopier: copies [src, srcEnd) into uninitialized storage at dst.
// memcpy when VectorTraits<T>::canCopyWithMemcpy permits, else per element.
138 template <bool canCopyWithMemcpy, typename T>
// Non-trivially copyable T: placement-construct each element (loop body
// partially elided in this extract).
142 struct VectorCopier<false, T>
144 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
146 while (src != srcEnd) {
// Trivially copyable T: a single byte copy of the whole range.
155 struct VectorCopier<true, T>
157 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
159 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
// VectorFiller: fills uninitialized storage [dst, dstEnd) with copies of val.
163 template <bool canFillWithMemset, typename T>
// General case: construct each element from val (loop body partially elided).
167 struct VectorFiller<false, T>
169 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
171 while (dst != dstEnd) {
// memset path: only valid for single-byte element types, which the ASSERT
// below enforces; fills the range with the byte value of val.
179 struct VectorFiller<true, T>
181 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
183 ASSERT(sizeof(T) == sizeof(char));
184 memset(dst, val, dstEnd - dst);
// VectorComparer: equality-compares two element arrays of the same length.
188 template<bool canCompareWithMemcmp, typename T>
189 struct VectorComparer;
// General case: compare element by element with operator== (the early-return
// on mismatch is elided in this extract).
192 struct VectorComparer<false, T>
194 static bool compare(const T* a, const T* b, size_t size)
196 for (size_t i = 0; i < size; ++i)
// Trivially comparable T: one memcmp over the whole byte range.
204 struct VectorComparer<true, T>
206 static bool compare(const T* a, const T* b, size_t size)
208 return memcmp(a, b, sizeof(T) * size) == 0;
// VectorTypeOperations<T>: facade that selects the right helper specialization
// (destructor/initializer/mover/copier/filler/comparer) from VectorTraits<T>.
// Vector itself only ever calls through this type.
213 struct VectorTypeOperations
215 static void destruct(T* begin, T* end)
217 VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
220 static void initialize(T* begin, T* end)
222 VectorInitializer<VectorTraits<T>::needsInitialization, VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
225 static void move(const T* src, const T* srcEnd, T* dst)
227 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
230 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
232 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
235 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
237 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
240 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
242 VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
245 static bool compare(const T* a, const T* b, size_t size)
247 return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
// VectorBufferBase: owns the heap storage (m_buffer / m_capacity) shared by
// both VectorBuffer specializations. Noncopyable; destruction does NOT free
// the buffer — the subclass is responsible for calling deallocateBuffer.
252 class VectorBufferBase {
253 WTF_MAKE_NONCOPYABLE(VectorBufferBase);
// Allocates storage for newCapacity elements via fastMalloc. The division
// guard detects size_t overflow of newCapacity * sizeof(T); the failure
// action (CRASH in the full file — verify) is elided in this extract.
255 void allocateBuffer(size_t newCapacity)
258 m_capacity = newCapacity;
259 if (newCapacity > std::numeric_limits<size_t>::max() / sizeof(T))
261 m_buffer = static_cast<T*>(fastMalloc(newCapacity * sizeof(T)));
// Fallible variant: returns false instead of aborting; only installs the new
// buffer and capacity when tryFastMalloc succeeds.
264 bool tryAllocateBuffer(size_t newCapacity)
267 if (newCapacity > std::numeric_limits<size_t>::max() / sizeof(T))
271 if (tryFastMalloc(newCapacity * sizeof(T)).getValue(newBuffer)) {
272 m_capacity = newCapacity;
273 m_buffer = newBuffer;
// Frees the given buffer; when it is the currently installed buffer, the
// members are reset first (reset statements elided in this extract).
279 void deallocateBuffer(T* bufferToDeallocate)
281 if (m_buffer == bufferToDeallocate) {
285 fastFree(bufferToDeallocate);
// Raw accessors used by Vector.
288 T* buffer() { return m_buffer; }
289 const T* buffer() const { return m_buffer; }
290 T** bufferSlot() { return &m_buffer; }
291 size_t capacity() const { return m_capacity; }
// Part of releaseBuffer(): snapshots m_buffer so ownership can be handed to
// the caller (remaining statements elided in this extract).
295 T* buffer = m_buffer;
// Protected constructor: adopts an existing buffer/capacity pair. The
// inline-capacity subclass uses this to start pointing at inline storage.
308 VectorBufferBase(T* buffer, size_t capacity)
310 , m_capacity(capacity)
316 // FIXME: It would be nice to find a way to ASSERT that m_buffer hasn't leaked here.
323 template<typename T, size_t inlineCapacity>
// Specialization for inlineCapacity == 0: no inline storage at all; every
// operation forwards straight to the heap-based VectorBufferBase.
327 class VectorBuffer<T, 0> : private VectorBufferBase<T> {
329 typedef VectorBufferBase<T> Base;
335 VectorBuffer(size_t capacity)
337 // Calling malloc(0) might take a lock and may actually do an
338 // allocation on some systems.
// Hence allocation is presumably skipped for capacity == 0 — the guard line
// is elided in this extract; confirm against the full file.
340 allocateBuffer(capacity);
// Destructor (header elided): releases whatever heap buffer is installed.
345 deallocateBuffer(buffer());
// Swap is trivial here: both buffers are always heap pointers.
348 void swap(VectorBuffer<T, 0>& other)
350 std::swap(m_buffer, other.m_buffer);
351 std::swap(m_capacity, other.m_capacity);
// No inline buffer exists, so there is nothing to restore.
354 void restoreInlineBufferIfNeeded() { }
356 using Base::allocateBuffer;
357 using Base::tryAllocateBuffer;
358 using Base::deallocateBuffer;
361 using Base::bufferSlot;
362 using Base::capacity;
364 using Base::releaseBuffer;
366 using Base::m_buffer;
367 using Base::m_capacity;
370 template<typename T, size_t inlineCapacity>
// General VectorBuffer: carries inlineCapacity elements of properly aligned
// inline storage (m_inlineBuffer) and falls back to the heap only when a
// larger capacity is requested.
371 class VectorBuffer : private VectorBufferBase<T> {
372 WTF_MAKE_NONCOPYABLE(VectorBuffer);
374 typedef VectorBufferBase<T> Base;
// Default state: buffer points at the inline storage.
377 : Base(inlineBuffer(), inlineCapacity)
381 VectorBuffer(size_t capacity)
382 : Base(inlineBuffer(), inlineCapacity)
// Heap-allocate only when the requested capacity exceeds the inline one.
384 if (capacity > inlineCapacity)
385 Base::allocateBuffer(capacity);
// Destructor (header elided): frees the heap buffer if one is installed;
// deallocateBuffer below is a no-op for the inline buffer.
390 deallocateBuffer(buffer());
393 void allocateBuffer(size_t newCapacity)
395 // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks.
396 if (newCapacity > inlineCapacity)
397 Base::allocateBuffer(newCapacity);
// Small request: (re)install the inline storage instead of allocating.
399 m_buffer = inlineBuffer();
400 m_capacity = inlineCapacity;
404 bool tryAllocateBuffer(size_t newCapacity)
406 if (newCapacity > inlineCapacity)
407 return Base::tryAllocateBuffer(newCapacity);
408 m_buffer = inlineBuffer();
409 m_capacity = inlineCapacity;
// Never frees the inline storage — only genuine heap buffers.
413 void deallocateBuffer(T* bufferToDeallocate)
415 if (bufferToDeallocate == inlineBuffer())
417 Base::deallocateBuffer(bufferToDeallocate);
// Swap must handle four cases depending on which side currently uses its
// inline storage, because inline pointers cannot be exchanged directly:
// the inline bytes are swapped and the pointers re-anchored per object.
420 void swap(VectorBuffer<T, inlineCapacity>& other)
422 if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()) {
423 WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
424 std::swap(m_capacity, other.m_capacity);
425 } else if (buffer() == inlineBuffer()) {
426 m_buffer = other.m_buffer;
427 other.m_buffer = other.inlineBuffer();
428 WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
429 std::swap(m_capacity, other.m_capacity);
430 } else if (other.buffer() == other.inlineBuffer()) {
431 other.m_buffer = m_buffer;
432 m_buffer = inlineBuffer();
433 WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
434 std::swap(m_capacity, other.m_capacity);
// Final case: both sides are heap-backed; plain pointer swap suffices.
436 std::swap(m_buffer, other.m_buffer);
437 std::swap(m_capacity, other.m_capacity);
// After the heap buffer is released, point back at the inline storage so the
// object remains usable (guard condition elided in this extract).
441 void restoreInlineBufferIfNeeded()
445 m_buffer = inlineBuffer();
446 m_capacity = inlineCapacity;
450 using Base::bufferSlot;
451 using Base::capacity;
// releaseBuffer: inline storage cannot be handed out (it dies with this
// object); only a heap buffer is released to the caller.
455 if (buffer() == inlineBuffer())
457 return Base::releaseBuffer();
461 using Base::m_buffer;
462 using Base::m_capacity;
// Raw, suitably aligned inline storage for inlineCapacity elements.
464 static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
465 T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); }
467 AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
// Vector<T, inlineCapacity>: WTF's growable array. The first inlineCapacity
// elements live inside the object itself (no heap allocation); beyond that
// the buffer is heap-allocated. All element lifetime management goes through
// VectorTypeOperations<T> so trivially-copyable types use memcpy/memset.
470 template<typename T, size_t inlineCapacity = 0>
472 WTF_MAKE_FAST_ALLOCATED;
474 typedef VectorBuffer<T, inlineCapacity> Buffer;
475 typedef VectorTypeOperations<T> TypeOperations;
481 typedef const T* const_iterator;
// Constructs a vector of `size` default-initialized elements.
488 explicit Vector(size_t size)
493 TypeOperations::initialize(begin(), end());
// Destructor body (header elided): destroy elements only when non-empty.
498 if (m_size) shrink(0);
// Copy construction/assignment, including from a Vector with a different
// inline capacity (definitions appear out-of-line below).
501 Vector(const Vector&);
502 template<size_t otherCapacity>
503 Vector(const Vector<T, otherCapacity>&);
505 Vector& operator=(const Vector&);
506 template<size_t otherCapacity>
507 Vector& operator=(const Vector<T, otherCapacity>&);
509 size_t size() const { return m_size; }
510 size_t capacity() const { return m_buffer.capacity(); }
511 bool isEmpty() const { return !size(); }
// at(i): bounds-checked element access (the ASSERT line is elided here).
516 return m_buffer.buffer()[i];
518 const T& at(size_t i) const
521 return m_buffer.buffer()[i];
524 T& operator[](size_t i) { return at(i); }
525 const T& operator[](size_t i) const { return at(i); }
527 T* data() { return m_buffer.buffer(); }
528 const T* data() const { return m_buffer.buffer(); }
529 T** dataSlot() { return m_buffer.bufferSlot(); }
// Iterators are plain pointers into the contiguous buffer.
531 iterator begin() { return data(); }
532 iterator end() { return begin() + m_size; }
533 const_iterator begin() const { return data(); }
534 const_iterator end() const { return begin() + m_size; }
536 T& first() { return at(0); }
537 const T& first() const { return at(0); }
538 T& last() { return at(size() - 1); }
539 const T& last() const { return at(size() - 1); }
// Linear searches; find/reverseFind return an index (notFound on miss).
541 template<typename U> bool contains(const U&) const;
542 template<typename U> size_t find(const U&) const;
543 template<typename U> size_t reverseFind(const U&) const;
// Size management. shrink/grow assert the direction; resize handles both.
545 void shrink(size_t size);
546 void grow(size_t size);
547 void resize(size_t size);
548 void reserveCapacity(size_t newCapacity);
549 bool tryReserveCapacity(size_t newCapacity);
550 void reserveInitialCapacity(size_t initialCapacity);
551 void shrinkCapacity(size_t newCapacity);
552 void shrinkToFit() { shrinkCapacity(size()); }
554 void clear() { shrinkCapacity(0); }
// Appending. U is templated (rather than implicit conversion at the call
// site) to avoid, e.g., refcount churn when appending PassRefPtr to a
// RefPtr vector. tryAppend reports allocation failure instead of crashing.
556 template<typename U> void append(const U*, size_t);
557 template<typename U> void append(const U&);
558 template<typename U> void uncheckedAppend(const U& val);
559 template<size_t otherCapacity> void append(const Vector<T, otherCapacity>&);
560 template<typename U> bool tryAppend(const U*, size_t);
562 template<typename U> void insert(size_t position, const U*, size_t);
563 template<typename U> void insert(size_t position, const U&);
564 template<typename U, size_t c> void insert(size_t position, const Vector<U, c>&);
566 template<typename U> void prepend(const U*, size_t);
567 template<typename U> void prepend(const U&);
568 template<typename U, size_t c> void prepend(const Vector<U, c>&);
570 void remove(size_t position);
571 void remove(size_t position, size_t length);
// Constructs `size` copies of val.
579 Vector(size_t size, const T& val)
584 TypeOperations::uninitializedFill(begin(), end(), val);
587 void fill(const T&, size_t);
588 void fill(const T& val) { fill(val, size()); }
590 template<typename Iterator> void appendRange(Iterator start, Iterator end);
// O(1) swap: exchanges sizes and lets the buffer handle inline storage.
594 void swap(Vector<T, inlineCapacity>& other)
596 std::swap(m_size, other.m_size);
597 m_buffer.swap(other.m_buffer);
602 void checkConsistency();
// Private growth helpers. The pointer-taking overloads re-anchor a pointer
// that may point into this vector across the reallocation.
605 void expandCapacity(size_t newMinCapacity);
606 const T* expandCapacity(size_t newMinCapacity, const T*);
607 bool tryExpandCapacity(size_t newMinCapacity);
608 const T* tryExpandCapacity(size_t newMinCapacity, const T*);
609 template<typename U> U* expandCapacity(size_t newMinCapacity, U*);
// Qt-only serialization: writes the element count as qint64 followed by each
// element (element write elided in this extract); the matching operator>>
// reads the count, reserves capacity up front, then reads `count` elements.
617 QDataStream& operator<<(QDataStream& stream, const Vector<T>& data)
619 stream << qint64(data.size());
620 foreach (const T& i, data)
626 QDataStream& operator>>(QDataStream& stream, Vector<T>& data)
632 data.reserveCapacity(count);
633 for (qint64 i = 0; i < count; ++i) {
// Copy constructor: sizes the buffer to the source's capacity, then
// copy-constructs every element into the uninitialized storage.
641 template<typename T, size_t inlineCapacity>
642 Vector<T, inlineCapacity>::Vector(const Vector& other)
643 : m_size(other.size())
644 , m_buffer(other.capacity())
647 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
// Same, but accepts a source with a different inline capacity.
650 template<typename T, size_t inlineCapacity>
651 template<size_t otherCapacity>
652 Vector<T, inlineCapacity>::Vector(const Vector<T, otherCapacity>& other)
653 : m_size(other.size())
654 , m_buffer(other.capacity())
657 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
// Copy assignment. Strategy: shrink if we currently hold more elements than
// the source; grow capacity if the source has more than we can hold; then
// std::copy over the already-constructed prefix and copy-construct the rest.
660 template<typename T, size_t inlineCapacity>
661 Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, inlineCapacity>& other)
// (Self-assignment check and return statement elided in this extract.)
666 if (size() > other.size())
667 shrink(other.size());
668 else if (other.size() > capacity()) {
670 reserveCapacity(other.size());
675 // Works around an assert in VS2010. See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
676 #if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
681 std::copy(other.begin(), other.begin() + size(), begin());
682 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
683 m_size = other.size();
// Identity comparison of two pointers with their static types erased.
// Used below to assert that two Vectors with different inline capacities
// are not actually the same object.
inline bool typelessPointersAreEqual(const void* a, const void* b)
{
    return a == b;
}
// Cross-capacity copy assignment; mirrors operator=(const Vector&) above but
// cannot be a true self-assignment, which the ASSERT documents.
690 template<typename T, size_t inlineCapacity>
691 template<size_t otherCapacity>
692 Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, otherCapacity>& other)
694 // If the inline capacities match, we should call the more specific
695 // template. If the inline capacities don't match, the two objects
696 // shouldn't be allocated the same address.
697 ASSERT(!typelessPointersAreEqual(&other, this));
699 if (size() > other.size())
700 shrink(other.size());
701 else if (other.size() > capacity()) {
703 reserveCapacity(other.size());
708 // Works around an assert in VS2010. See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
709 #if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
// Copy over the constructed prefix, then copy-construct the tail.
714 std::copy(other.begin(), other.begin() + size(), begin());
715 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
716 m_size = other.size();
// contains: membership test expressed in terms of find().
721 template<typename T, size_t inlineCapacity>
723 bool Vector<T, inlineCapacity>::contains(const U& value) const
725 return find(value) != notFound;
// find: forward linear scan returning the first matching index
// (the match/return lines are elided in this extract; misses yield notFound).
728 template<typename T, size_t inlineCapacity>
730 size_t Vector<T, inlineCapacity>::find(const U& value) const
732 for (size_t i = 0; i < size(); ++i) {
// reverseFind: scans from the back; i runs 1..size() so index = size() - i
// walks size()-1 down to 0 without underflowing the unsigned type.
739 template<typename T, size_t inlineCapacity>
741 size_t Vector<T, inlineCapacity>::reverseFind(const U& value) const
743 for (size_t i = 1; i <= size(); ++i) {
744 const size_t index = size() - i;
745 if (at(index) == value)
// fill: makes the vector exactly newSize copies of val. Shrinks if currently
// larger; grows capacity if needed; overwrites the constructed prefix with
// std::fill and fill-constructs the uninitialized tail.
751 template<typename T, size_t inlineCapacity>
752 void Vector<T, inlineCapacity>::fill(const T& val, size_t newSize)
754 if (size() > newSize)
756 else if (newSize > capacity()) {
758 reserveCapacity(newSize);
763 std::fill(begin(), end(), val);
764 TypeOperations::uninitializedFill(end(), begin() + newSize, val);
// appendRange: appends each element of [start, end) one at a time
// (the append call in the loop body is elided in this extract).
768 template<typename T, size_t inlineCapacity>
769 template<typename Iterator>
770 void Vector<T, inlineCapacity>::appendRange(Iterator start, Iterator end)
772 for (Iterator it = start; it != end; ++it)
// expandCapacity: grows to at least newMinCapacity using a ~1.25x growth
// policy (capacity + capacity/4 + 1) with a floor of 16 elements.
776 template<typename T, size_t inlineCapacity>
777 void Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity)
779 reserveCapacity(max(newMinCapacity, max(static_cast<size_t>(16), capacity() + capacity() / 4 + 1)));
// Pointer-preserving variant: if ptr points into this vector's storage, its
// offset is captured before reallocation and re-applied afterwards so the
// caller's pointer stays valid (append(&v[i]) style self-references).
782 template<typename T, size_t inlineCapacity>
783 const T* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, const T* ptr)
785 if (ptr < begin() || ptr >= end()) {
786 expandCapacity(newMinCapacity);
789 size_t index = ptr - begin();
790 expandCapacity(newMinCapacity);
791 return begin() + index;
// Fallible growth: same policy, but propagates allocation failure as false.
794 template<typename T, size_t inlineCapacity>
795 bool Vector<T, inlineCapacity>::tryExpandCapacity(size_t newMinCapacity)
797 return tryReserveCapacity(max(newMinCapacity, max(static_cast<size_t>(16), capacity() + capacity() / 4 + 1)));
// Fallible pointer-preserving variant; returns 0 on allocation failure
// (the failure-return lines are elided in this extract).
800 template<typename T, size_t inlineCapacity>
801 const T* Vector<T, inlineCapacity>::tryExpandCapacity(size_t newMinCapacity, const T* ptr)
803 if (ptr < begin() || ptr >= end()) {
804 if (!tryExpandCapacity(newMinCapacity))
808 size_t index = ptr - begin();
809 if (!tryExpandCapacity(newMinCapacity))
811 return begin() + index;
// Non-const overload for arbitrary U*: a U* cannot point into this vector's
// T storage, so no re-anchoring is needed; just grow and return ptr.
814 template<typename T, size_t inlineCapacity> template<typename U>
815 inline U* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, U* ptr)
817 expandCapacity(newMinCapacity);
// resize: destroys trailing elements when shrinking; when growing, expands
// capacity if needed and default-initializes the new tail.
821 template<typename T, size_t inlineCapacity>
822 inline void Vector<T, inlineCapacity>::resize(size_t size)
825 TypeOperations::destruct(begin() + size, end());
827 if (size > capacity())
828 expandCapacity(size);
830 TypeOperations::initialize(end(), begin() + size);
// shrink: size must not exceed the current size; destroys the tail.
// (The m_size update is elided in this extract.)
836 template<typename T, size_t inlineCapacity>
837 void Vector<T, inlineCapacity>::shrink(size_t size)
839 ASSERT(size <= m_size);
840 TypeOperations::destruct(begin() + size, end());
// grow: size must be at least the current size; initializes the new tail.
844 template<typename T, size_t inlineCapacity>
845 void Vector<T, inlineCapacity>::grow(size_t size)
847 ASSERT(size >= m_size);
848 if (size > capacity())
849 expandCapacity(size);
851 TypeOperations::initialize(end(), begin() + size);
// reserveCapacity: allocates a larger buffer, moves existing elements into
// it, then frees the old buffer. No-op when the capacity is already enough.
855 template<typename T, size_t inlineCapacity>
856 void Vector<T, inlineCapacity>::reserveCapacity(size_t newCapacity)
858 if (newCapacity <= capacity())
860 T* oldBuffer = begin();
862 m_buffer.allocateBuffer(newCapacity);
864 TypeOperations::move(oldBuffer, oldEnd, begin());
865 m_buffer.deallocateBuffer(oldBuffer);
// Fallible variant: on allocation failure the old buffer is left intact and
// false is returned (the return statements are elided in this extract).
868 template<typename T, size_t inlineCapacity>
869 bool Vector<T, inlineCapacity>::tryReserveCapacity(size_t newCapacity)
871 if (newCapacity <= capacity())
873 T* oldBuffer = begin();
875 if (!m_buffer.tryAllocateBuffer(newCapacity))
878 TypeOperations::move(oldBuffer, oldEnd, begin());
879 m_buffer.deallocateBuffer(oldBuffer);
// reserveInitialCapacity: fast path valid only on a never-grown vector, as
// the ASSERTs document — no elements need to be moved.
883 template<typename T, size_t inlineCapacity>
884 inline void Vector<T, inlineCapacity>::reserveInitialCapacity(size_t initialCapacity)
887 ASSERT(capacity() == inlineCapacity);
888 if (initialCapacity > inlineCapacity)
889 m_buffer.allocateBuffer(initialCapacity);
// shrinkCapacity: reduces capacity toward newCapacity. Elements beyond the
// new capacity are destroyed first (via shrink — call elided in this
// extract); a smaller buffer is allocated and survivors are moved into it,
// then the old buffer is freed and the inline buffer restored if applicable.
892 template<typename T, size_t inlineCapacity>
893 void Vector<T, inlineCapacity>::shrinkCapacity(size_t newCapacity)
895 if (newCapacity >= capacity())
898 if (newCapacity < size())
901 T* oldBuffer = begin();
902 if (newCapacity > 0) {
904 m_buffer.allocateBuffer(newCapacity);
// allocateBuffer may hand back the inline buffer; only move if it changed.
905 if (begin() != oldBuffer)
906 TypeOperations::move(oldBuffer, oldEnd, begin());
909 m_buffer.deallocateBuffer(oldBuffer);
910 m_buffer.restoreInlineBufferIfNeeded();
913 // Templatizing these is better than just letting the conversion happen implicitly,
914 // because for instance it allows a PassRefPtr to be appended to a RefPtr vector
915 // without refcount thrash.
// append(data, dataSize): appends dataSize elements from data. Uses the
// pointer-preserving expandCapacity so `data` may point into this vector.
917 template<typename T, size_t inlineCapacity> template<typename U>
918 void Vector<T, inlineCapacity>::append(const U* data, size_t dataSize)
920 size_t newSize = m_size + dataSize;
921 if (newSize > capacity()) {
922 data = expandCapacity(newSize, data);
// newSize < m_size can only happen if m_size + dataSize overflowed size_t;
// the failure action is elided in this extract.
926 if (newSize < m_size)
929 for (size_t i = 0; i < dataSize; ++i)
930 new (&dest[i]) T(data[i]);
// tryAppend: identical shape, but allocation failure and overflow return
// false instead of aborting (returns elided in this extract).
934 template<typename T, size_t inlineCapacity> template<typename U>
935 bool Vector<T, inlineCapacity>::tryAppend(const U* data, size_t dataSize)
937 size_t newSize = m_size + dataSize;
938 if (newSize > capacity()) {
939 data = tryExpandCapacity(newSize, data);
944 if (newSize < m_size)
947 for (size_t i = 0; i < dataSize; ++i)
948 new (&dest[i]) T(data[i]);
// append(val): single-element append; ptr is re-anchored across a possible
// reallocation because val may be an element of this very vector.
953 template<typename T, size_t inlineCapacity> template<typename U>
954 ALWAYS_INLINE void Vector<T, inlineCapacity>::append(const U& val)
957 if (size() == capacity()) {
958 ptr = expandCapacity(size() + 1, ptr);
963 #if COMPILER(MSVC7_OR_LOWER)
964 // FIXME: MSVC7 generates compilation errors when trying to assign
965 // a pointer to a Vector of its base class (i.e. can't downcast). So far
966 // I've been unable to determine any logical reason for this, so I can
967 // only assume it is a bug with the compiler. Casting is a bad solution,
968 // however, because it subverts implicit conversions, so a better
970 new (end()) T(static_cast<T>(*ptr));
977 // This version of append saves a branch in the case where you know that the
978 // vector's capacity is large enough for the append to succeed.
980 template<typename T, size_t inlineCapacity> template<typename U>
981 inline void Vector<T, inlineCapacity>::uncheckedAppend(const U& val)
983 ASSERT(size() < capacity());
989 // This method should not be called append, a better name would be appendElements.
990 // It could also be eliminated entirely, and call sites could just use
991 // appendRange(val.begin(), val.end()).
992 template<typename T, size_t inlineCapacity> template<size_t otherCapacity>
993 inline void Vector<T, inlineCapacity>::append(const Vector<T, otherCapacity>& val)
995 append(val.begin(), val.size());
// insert(position, data, dataSize): shifts [position, end) up by dataSize
// with an overlap-safe move, then copy-constructs the new elements into the
// gap. Pointer-preserving expandCapacity allows self-referential data.
998 template<typename T, size_t inlineCapacity> template<typename U>
999 void Vector<T, inlineCapacity>::insert(size_t position, const U* data, size_t dataSize)
1001 ASSERT(position <= size());
1002 size_t newSize = m_size + dataSize;
1003 if (newSize > capacity()) {
1004 data = expandCapacity(newSize, data);
// Overflow guard: newSize < m_size only if the addition wrapped
// (failure action elided in this extract).
1008 if (newSize < m_size)
1010 T* spot = begin() + position;
1011 TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
1012 for (size_t i = 0; i < dataSize; ++i)
1013 new (&spot[i]) T(data[i]);
// Single-element insert; `data` is re-anchored across reallocation because
// val may live inside this vector.
1017 template<typename T, size_t inlineCapacity> template<typename U>
1018 inline void Vector<T, inlineCapacity>::insert(size_t position, const U& val)
1020 ASSERT(position <= size());
1021 const U* data = &val;
1022 if (size() == capacity()) {
1023 data = expandCapacity(size() + 1, data);
1027 T* spot = begin() + position;
1028 TypeOperations::moveOverlapping(spot, end(), spot + 1);
1029 new (spot) T(*data);
// Whole-vector insert: defers to the pointer/length overload.
1033 template<typename T, size_t inlineCapacity> template<typename U, size_t c>
1034 inline void Vector<T, inlineCapacity>::insert(size_t position, const Vector<U, c>& val)
1036 insert(position, val.begin(), val.size());
// prepend variants: insert at position 0.
1039 template<typename T, size_t inlineCapacity> template<typename U>
1040 void Vector<T, inlineCapacity>::prepend(const U* data, size_t dataSize)
1042 insert(0, data, dataSize);
1045 template<typename T, size_t inlineCapacity> template<typename U>
1046 inline void Vector<T, inlineCapacity>::prepend(const U& val)
1051 template<typename T, size_t inlineCapacity> template<typename U, size_t c>
1052 inline void Vector<T, inlineCapacity>::prepend(const Vector<U, c>& val)
1054 insert(0, val.begin(), val.size());
// remove(position): destroys the element at position (destructor call elided
// in this extract), then closes the gap with an overlap-safe move.
1057 template<typename T, size_t inlineCapacity>
1058 inline void Vector<T, inlineCapacity>::remove(size_t position)
1060 ASSERT(position < size());
1061 T* spot = begin() + position;
1063 TypeOperations::moveOverlapping(spot + 1, end(), spot);
// remove(position, length): destroys a whole run, then shifts the tail down.
// (The m_size adjustment is elided in this extract.)
1067 template<typename T, size_t inlineCapacity>
1068 inline void Vector<T, inlineCapacity>::remove(size_t position, size_t length)
1070 ASSERT(position < size());
1071 ASSERT(position + length <= size());
1072 T* beginSpot = begin() + position;
1073 T* endSpot = beginSpot + length;
1074 TypeOperations::destruct(beginSpot, endSpot);
1075 TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
// reverse: in-place reversal by swapping mirrored pairs.
1079 template<typename T, size_t inlineCapacity>
1080 inline void Vector<T, inlineCapacity>::reverse()
1082 for (size_t i = 0; i < m_size / 2; ++i)
1083 std::swap(at(i), at(m_size - 1 - i));
// releaseBuffer: transfers ownership of the storage to the caller. A vector
// living in its inline buffer has no heap buffer to release, so a fresh
// fastMalloc'd copy is produced to honor the contract that callers always
// receive an owned buffer. (The size reset is elided in this extract.)
1086 template<typename T, size_t inlineCapacity>
1087 inline T* Vector<T, inlineCapacity>::releaseBuffer()
1089 T* buffer = m_buffer.releaseBuffer();
1090 if (inlineCapacity && !buffer && m_size) {
1091 // If the vector had some data, but no buffer to release,
1092 // that means it was using the inline buffer. In that case,
1093 // we create a brand new buffer so the caller always gets one.
1094 size_t bytes = m_size * sizeof(T);
1095 buffer = static_cast<T*>(fastMalloc(bytes));
1096 memcpy(buffer, data(), bytes);
// checkConsistency: debug-only per-element validation via ValueCheck<T>;
// compiles to nothing when asserts are disabled.
1102 template<typename T, size_t inlineCapacity>
1103 inline void Vector<T, inlineCapacity>::checkConsistency()
1105 #if !ASSERT_DISABLED
1106 for (size_t i = 0; i < size(); ++i)
1107 ValueCheck<T>::checkConsistency(at(i));
// deleteAllValues: for a vector of owning raw pointers, deletes each pointee
// (the delete expression in the loop body is elided in this extract).
1111 template<typename T, size_t inlineCapacity>
1112 void deleteAllValues(const Vector<T, inlineCapacity>& collection)
1114 typedef typename Vector<T, inlineCapacity>::const_iterator iterator;
1115 iterator end = collection.end();
1116 for (iterator it = collection.begin(); it != end; ++it)
// Free swap so generic code (and WTF::swap) finds the O(1) member swap.
// (The a.swap(b) call is elided in this extract.)
1120 template<typename T, size_t inlineCapacity>
1121 inline void swap(Vector<T, inlineCapacity>& a, Vector<T, inlineCapacity>& b)
// Equality: sizes must match, then elements are compared via the trait-
// selected comparer (memcmp for trivially comparable T).
1126 template<typename T, size_t inlineCapacity>
1127 bool operator==(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
1129 if (a.size() != b.size())
1132 return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
// Inequality defined as the negation of operator== (body elided here).
1135 template<typename T, size_t inlineCapacity>
1136 inline bool operator!=(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
// Debug-only ValueCheck specialization: lets containers of Vectors recurse
// into each Vector's own consistency check.
1141 #if !ASSERT_DISABLED
1142 template<typename T> struct ValueCheck<Vector<T> > {
1143 typedef Vector<T> TraitType;
1144 static void checkConsistency(const Vector<T>& v)
1146 v.checkConsistency();
1155 #endif // WTF_Vector_h