/*
 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB. If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
#include <wtf/Alignment.h>
#include <wtf/FastAllocBase.h>
#include <wtf/Noncopyable.h>
#include <wtf/NotFound.h>
#include <wtf/StdLibExtras.h>
#include <wtf/ValueCheck.h>
#include <wtf/VectorTraits.h>

#include <algorithm>
#include <iterator>
#include <limits>
#include <string.h>
#include <utility>

#include <QDataStream>
// Destroys a range of T. The bool parameter lets VectorTypeOperations pick a
// no-op implementation at compile time when T is trivially destructible.
template <bool needsDestruction, typename T>
struct VectorDestructor;

// Trivially destructible T: nothing to do.
template<typename T>
struct VectorDestructor<false, T>
{
    static void destruct(T*, T*) {}
};

// Non-trivial T: run each destructor explicitly (storage is raw memory owned
// by the vector, so destructors are never run implicitly).
template<typename T>
struct VectorDestructor<true, T>
{
    static void destruct(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            cur->~T();
    }
};
62 template <bool needsInitialization, bool canInitializeWithMemset, typename T>
63 struct VectorInitializer;
65 template<bool ignore, typename T>
66 struct VectorInitializer<false, ignore, T>
68 static void initialize(T*, T*) {}
72 struct VectorInitializer<true, false, T>
74 static void initialize(T* begin, T* end)
76 for (T* cur = begin; cur != end; ++cur)
82 struct VectorInitializer<true, true, T>
84 static void initialize(T* begin, T* end)
86 memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
90 template <bool canMoveWithMemcpy, typename T>
94 struct VectorMover<false, T>
96 static void move(const T* src, const T* srcEnd, T* dst)
98 while (src != srcEnd) {
99 new (NotNull, dst) T(*src);
100 #if COMPILER(SUNCC) && __SUNPRO_CC <= 0x590
101 const_cast<T*>(src)->~T(); // Work around obscure SunCC 12 compiler bug.
109 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
112 move(src, srcEnd, dst);
114 T* dstEnd = dst + (srcEnd - src);
115 while (src != srcEnd) {
118 new (NotNull, dstEnd) T(*srcEnd);
126 struct VectorMover<true, T>
128 static void move(const T* src, const T* srcEnd, T* dst)
130 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
132 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
134 memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
138 template <bool canCopyWithMemcpy, typename T>
142 struct VectorCopier<false, T>
144 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
146 while (src != srcEnd) {
147 new (NotNull, dst) T(*src);
155 struct VectorCopier<true, T>
157 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
159 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
163 template <bool canFillWithMemset, typename T>
167 struct VectorFiller<false, T>
169 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
171 while (dst != dstEnd) {
172 new (NotNull, dst) T(val);
179 struct VectorFiller<true, T>
181 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
183 ASSERT(sizeof(T) == sizeof(char));
184 #if COMPILER(GCC) && defined(_FORTIFY_SOURCE)
185 if (!__builtin_constant_p(dstEnd - dst) || (!(dstEnd - dst)))
187 memset(dst, val, dstEnd - dst);
191 template<bool canCompareWithMemcmp, typename T>
192 struct VectorComparer;
195 struct VectorComparer<false, T>
197 static bool compare(const T* a, const T* b, size_t size)
199 for (size_t i = 0; i < size; ++i)
207 struct VectorComparer<true, T>
209 static bool compare(const T* a, const T* b, size_t size)
211 return memcmp(a, b, sizeof(T) * size) == 0;
216 struct VectorTypeOperations
218 static void destruct(T* begin, T* end)
220 VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
223 static void initialize(T* begin, T* end)
225 VectorInitializer<VectorTraits<T>::needsInitialization, VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
228 static void move(const T* src, const T* srcEnd, T* dst)
230 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
233 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
235 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
238 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
240 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
243 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
245 VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
248 static bool compare(const T* a, const T* b, size_t size)
250 return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
255 class VectorBufferBase {
256 WTF_MAKE_NONCOPYABLE(VectorBufferBase);
258 void allocateBuffer(size_t newCapacity)
261 m_capacity = newCapacity;
262 if (newCapacity > std::numeric_limits<size_t>::max() / sizeof(T))
264 m_buffer = static_cast<T*>(fastMalloc(newCapacity * sizeof(T)));
267 bool tryAllocateBuffer(size_t newCapacity)
270 if (newCapacity > std::numeric_limits<size_t>::max() / sizeof(T))
274 if (tryFastMalloc(newCapacity * sizeof(T)).getValue(newBuffer)) {
275 m_capacity = newCapacity;
276 m_buffer = newBuffer;
282 void deallocateBuffer(T* bufferToDeallocate)
284 if (!bufferToDeallocate)
287 if (m_buffer == bufferToDeallocate) {
292 fastFree(bufferToDeallocate);
295 T* buffer() { return m_buffer; }
296 const T* buffer() const { return m_buffer; }
297 T** bufferSlot() { return &m_buffer; }
298 size_t capacity() const { return m_capacity; }
302 T* buffer = m_buffer;
315 VectorBufferBase(T* buffer, size_t capacity)
317 , m_capacity(capacity)
323 // FIXME: It would be nice to find a way to ASSERT that m_buffer hasn't leaked here.
330 template<typename T, size_t inlineCapacity>
334 class VectorBuffer<T, 0> : private VectorBufferBase<T> {
336 typedef VectorBufferBase<T> Base;
342 VectorBuffer(size_t capacity)
344 // Calling malloc(0) might take a lock and may actually do an
345 // allocation on some systems.
347 allocateBuffer(capacity);
352 deallocateBuffer(buffer());
355 void swap(VectorBuffer<T, 0>& other)
357 std::swap(m_buffer, other.m_buffer);
358 std::swap(m_capacity, other.m_capacity);
361 void restoreInlineBufferIfNeeded() { }
363 using Base::allocateBuffer;
364 using Base::tryAllocateBuffer;
365 using Base::deallocateBuffer;
368 using Base::bufferSlot;
369 using Base::capacity;
371 using Base::releaseBuffer;
373 using Base::m_buffer;
374 using Base::m_capacity;
377 template<typename T, size_t inlineCapacity>
378 class VectorBuffer : private VectorBufferBase<T> {
379 WTF_MAKE_NONCOPYABLE(VectorBuffer);
381 typedef VectorBufferBase<T> Base;
384 : Base(inlineBuffer(), inlineCapacity)
388 VectorBuffer(size_t capacity)
389 : Base(inlineBuffer(), inlineCapacity)
391 if (capacity > inlineCapacity)
392 Base::allocateBuffer(capacity);
397 deallocateBuffer(buffer());
400 void allocateBuffer(size_t newCapacity)
402 // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks.
403 if (newCapacity > inlineCapacity)
404 Base::allocateBuffer(newCapacity);
406 m_buffer = inlineBuffer();
407 m_capacity = inlineCapacity;
411 bool tryAllocateBuffer(size_t newCapacity)
413 if (newCapacity > inlineCapacity)
414 return Base::tryAllocateBuffer(newCapacity);
415 m_buffer = inlineBuffer();
416 m_capacity = inlineCapacity;
420 void deallocateBuffer(T* bufferToDeallocate)
422 if (bufferToDeallocate == inlineBuffer())
424 Base::deallocateBuffer(bufferToDeallocate);
427 void swap(VectorBuffer<T, inlineCapacity>& other)
429 if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()) {
430 WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
431 std::swap(m_capacity, other.m_capacity);
432 } else if (buffer() == inlineBuffer()) {
433 m_buffer = other.m_buffer;
434 other.m_buffer = other.inlineBuffer();
435 WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
436 std::swap(m_capacity, other.m_capacity);
437 } else if (other.buffer() == other.inlineBuffer()) {
438 other.m_buffer = m_buffer;
439 m_buffer = inlineBuffer();
440 WTF::swap(m_inlineBuffer, other.m_inlineBuffer);
441 std::swap(m_capacity, other.m_capacity);
443 std::swap(m_buffer, other.m_buffer);
444 std::swap(m_capacity, other.m_capacity);
448 void restoreInlineBufferIfNeeded()
452 m_buffer = inlineBuffer();
453 m_capacity = inlineCapacity;
457 using Base::bufferSlot;
458 using Base::capacity;
462 if (buffer() == inlineBuffer())
464 return Base::releaseBuffer();
468 using Base::m_buffer;
469 using Base::m_capacity;
471 static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
472 T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); }
474 AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
477 template<typename T, size_t inlineCapacity = 0>
479 WTF_MAKE_FAST_ALLOCATED;
481 typedef VectorBuffer<T, inlineCapacity> Buffer;
482 typedef VectorTypeOperations<T> TypeOperations;
484 class VectorReverseProxy;
490 typedef const T* const_iterator;
491 typedef std::reverse_iterator<iterator> reverse_iterator;
492 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
499 explicit Vector(size_t size)
504 TypeOperations::initialize(begin(), end());
513 Vector(const Vector&);
514 template<size_t otherCapacity>
515 Vector(const Vector<T, otherCapacity>&);
517 Vector& operator=(const Vector&);
518 template<size_t otherCapacity>
519 Vector& operator=(const Vector<T, otherCapacity>&);
521 #if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
523 Vector& operator=(Vector&&);
526 size_t size() const { return m_size; }
527 size_t capacity() const { return m_buffer.capacity(); }
528 bool isEmpty() const { return !size(); }
533 return m_buffer.buffer()[i];
535 const T& at(size_t i) const
538 return m_buffer.buffer()[i];
541 T& operator[](size_t i) { return at(i); }
542 const T& operator[](size_t i) const { return at(i); }
544 T* data() { return m_buffer.buffer(); }
545 const T* data() const { return m_buffer.buffer(); }
546 T** dataSlot() { return m_buffer.bufferSlot(); }
548 iterator begin() { return data(); }
549 iterator end() { return begin() + m_size; }
550 const_iterator begin() const { return data(); }
551 const_iterator end() const { return begin() + m_size; }
553 reverse_iterator rbegin() { return reverse_iterator(end()); }
554 reverse_iterator rend() { return reverse_iterator(begin()); }
555 const_reverse_iterator rbegin() const { return const_reverse_iterator(end()); }
556 const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }
558 VectorReverseProxy& reversed() { return static_cast<VectorReverseProxy&>(*this); }
559 const VectorReverseProxy& reversed() const { return static_cast<const VectorReverseProxy&>(*this); }
561 T& first() { return at(0); }
562 const T& first() const { return at(0); }
563 T& last() { return at(size() - 1); }
564 const T& last() const { return at(size() - 1); }
566 template<typename U> bool contains(const U&) const;
567 template<typename U> size_t find(const U&) const;
568 template<typename U> size_t reverseFind(const U&) const;
570 void shrink(size_t size);
571 void grow(size_t size);
572 void resize(size_t size);
573 void reserveCapacity(size_t newCapacity);
574 bool tryReserveCapacity(size_t newCapacity);
575 void reserveInitialCapacity(size_t initialCapacity);
576 void shrinkCapacity(size_t newCapacity);
577 void shrinkToFit() { shrinkCapacity(size()); }
579 void clear() { shrinkCapacity(0); }
581 template<typename U> void append(const U*, size_t);
582 template<typename U> void append(const U&);
583 template<typename U> void uncheckedAppend(const U& val);
584 template<size_t otherCapacity> void append(const Vector<T, otherCapacity>&);
585 template<typename U> bool tryAppend(const U*, size_t);
587 template<typename U> void insert(size_t position, const U*, size_t);
588 template<typename U> void insert(size_t position, const U&);
589 template<typename U, size_t c> void insert(size_t position, const Vector<U, c>&);
591 template<typename U> void prepend(const U*, size_t);
592 template<typename U> void prepend(const U&);
593 template<typename U, size_t c> void prepend(const Vector<U, c>&);
595 void remove(size_t position);
596 void remove(size_t position, size_t length);
604 Vector(size_t size, const T& val)
609 TypeOperations::uninitializedFill(begin(), end(), val);
612 void fill(const T&, size_t);
613 void fill(const T& val) { fill(val, size()); }
615 template<typename Iterator> void appendRange(Iterator start, Iterator end);
619 void swap(Vector<T, inlineCapacity>& other)
621 std::swap(m_size, other.m_size);
622 m_buffer.swap(other.m_buffer);
627 void checkConsistency();
630 void expandCapacity(size_t newMinCapacity);
631 const T* expandCapacity(size_t newMinCapacity, const T*);
632 bool tryExpandCapacity(size_t newMinCapacity);
633 const T* tryExpandCapacity(size_t newMinCapacity, const T*);
634 template<typename U> U* expandCapacity(size_t newMinCapacity, U*);
635 template<typename U> void appendSlowCase(const U&);
637 class VectorReverseProxy : private Vector {
639 typedef typename Vector::reverse_iterator iterator;
640 typedef typename Vector::const_reverse_iterator const_iterator;
642 iterator begin() { return Vector::rbegin(); }
643 iterator end() { return Vector::rend(); }
644 const_iterator begin() const { return Vector::rbegin(); }
645 const_iterator end() const { return Vector::rend(); }
650 // These are intentionally not implemented.
651 VectorReverseProxy();
652 VectorReverseProxy(const VectorReverseProxy&);
653 VectorReverseProxy& operator=(const VectorReverseProxy&);
654 ~VectorReverseProxy();
663 QDataStream& operator<<(QDataStream& stream, const Vector<T>& data)
665 stream << qint64(data.size());
666 foreach (const T& i, data)
672 QDataStream& operator>>(QDataStream& stream, Vector<T>& data)
678 data.reserveCapacity(count);
679 for (qint64 i = 0; i < count; ++i) {
687 template<typename T, size_t inlineCapacity>
688 Vector<T, inlineCapacity>::Vector(const Vector& other)
689 : m_size(other.size())
690 , m_buffer(other.capacity())
693 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
696 template<typename T, size_t inlineCapacity>
697 template<size_t otherCapacity>
698 Vector<T, inlineCapacity>::Vector(const Vector<T, otherCapacity>& other)
699 : m_size(other.size())
700 , m_buffer(other.capacity())
703 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
706 template<typename T, size_t inlineCapacity>
707 Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, inlineCapacity>& other)
712 if (size() > other.size())
713 shrink(other.size());
714 else if (other.size() > capacity()) {
716 reserveCapacity(other.size());
721 // Works around an assert in VS2010. See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
722 #if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
727 std::copy(other.begin(), other.begin() + size(), begin());
728 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
729 m_size = other.size();
// Compares two pointers for identity after erasing their static types, so
// differently-typed Vector instantiations can be checked for aliasing.
inline bool typelessPointersAreEqual(const void* a, const void* b)
{
    return a == b;
}
736 template<typename T, size_t inlineCapacity>
737 template<size_t otherCapacity>
738 Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(const Vector<T, otherCapacity>& other)
740 // If the inline capacities match, we should call the more specific
741 // template. If the inline capacities don't match, the two objects
742 // shouldn't be allocated the same address.
743 ASSERT(!typelessPointersAreEqual(&other, this));
745 if (size() > other.size())
746 shrink(other.size());
747 else if (other.size() > capacity()) {
749 reserveCapacity(other.size());
754 // Works around an assert in VS2010. See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
755 #if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
760 std::copy(other.begin(), other.begin() + size(), begin());
761 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
762 m_size = other.size();
767 #if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
768 template<typename T, size_t inlineCapacity>
769 Vector<T, inlineCapacity>::Vector(Vector<T, inlineCapacity>&& other)
772 // It's a little weird to implement a move constructor using swap but this way we
773 // don't have to add a move constructor to VectorBuffer.
777 template<typename T, size_t inlineCapacity>
778 Vector<T, inlineCapacity>& Vector<T, inlineCapacity>::operator=(Vector<T, inlineCapacity>&& other)
785 template<typename T, size_t inlineCapacity>
787 bool Vector<T, inlineCapacity>::contains(const U& value) const
789 return find(value) != notFound;
792 template<typename T, size_t inlineCapacity>
794 size_t Vector<T, inlineCapacity>::find(const U& value) const
796 for (size_t i = 0; i < size(); ++i) {
803 template<typename T, size_t inlineCapacity>
805 size_t Vector<T, inlineCapacity>::reverseFind(const U& value) const
807 for (size_t i = 1; i <= size(); ++i) {
808 const size_t index = size() - i;
809 if (at(index) == value)
815 template<typename T, size_t inlineCapacity>
816 void Vector<T, inlineCapacity>::fill(const T& val, size_t newSize)
818 if (size() > newSize)
820 else if (newSize > capacity()) {
822 reserveCapacity(newSize);
827 std::fill(begin(), end(), val);
828 TypeOperations::uninitializedFill(end(), begin() + newSize, val);
832 template<typename T, size_t inlineCapacity>
833 template<typename Iterator>
834 void Vector<T, inlineCapacity>::appendRange(Iterator start, Iterator end)
836 for (Iterator it = start; it != end; ++it)
840 template<typename T, size_t inlineCapacity>
841 void Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity)
843 reserveCapacity(max(newMinCapacity, max(static_cast<size_t>(16), capacity() + capacity() / 4 + 1)));
846 template<typename T, size_t inlineCapacity>
847 const T* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, const T* ptr)
849 if (ptr < begin() || ptr >= end()) {
850 expandCapacity(newMinCapacity);
853 size_t index = ptr - begin();
854 expandCapacity(newMinCapacity);
855 return begin() + index;
858 template<typename T, size_t inlineCapacity>
859 bool Vector<T, inlineCapacity>::tryExpandCapacity(size_t newMinCapacity)
861 return tryReserveCapacity(max(newMinCapacity, max(static_cast<size_t>(16), capacity() + capacity() / 4 + 1)));
864 template<typename T, size_t inlineCapacity>
865 const T* Vector<T, inlineCapacity>::tryExpandCapacity(size_t newMinCapacity, const T* ptr)
867 if (ptr < begin() || ptr >= end()) {
868 if (!tryExpandCapacity(newMinCapacity))
872 size_t index = ptr - begin();
873 if (!tryExpandCapacity(newMinCapacity))
875 return begin() + index;
878 template<typename T, size_t inlineCapacity> template<typename U>
879 inline U* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, U* ptr)
881 expandCapacity(newMinCapacity);
885 template<typename T, size_t inlineCapacity>
886 inline void Vector<T, inlineCapacity>::resize(size_t size)
889 TypeOperations::destruct(begin() + size, end());
891 if (size > capacity())
892 expandCapacity(size);
894 TypeOperations::initialize(end(), begin() + size);
900 template<typename T, size_t inlineCapacity>
901 void Vector<T, inlineCapacity>::shrink(size_t size)
903 ASSERT(size <= m_size);
904 TypeOperations::destruct(begin() + size, end());
908 template<typename T, size_t inlineCapacity>
909 void Vector<T, inlineCapacity>::grow(size_t size)
911 ASSERT(size >= m_size);
912 if (size > capacity())
913 expandCapacity(size);
915 TypeOperations::initialize(end(), begin() + size);
919 template<typename T, size_t inlineCapacity>
920 void Vector<T, inlineCapacity>::reserveCapacity(size_t newCapacity)
922 if (newCapacity <= capacity())
924 T* oldBuffer = begin();
926 m_buffer.allocateBuffer(newCapacity);
928 TypeOperations::move(oldBuffer, oldEnd, begin());
929 m_buffer.deallocateBuffer(oldBuffer);
932 template<typename T, size_t inlineCapacity>
933 bool Vector<T, inlineCapacity>::tryReserveCapacity(size_t newCapacity)
935 if (newCapacity <= capacity())
937 T* oldBuffer = begin();
939 if (!m_buffer.tryAllocateBuffer(newCapacity))
942 TypeOperations::move(oldBuffer, oldEnd, begin());
943 m_buffer.deallocateBuffer(oldBuffer);
947 template<typename T, size_t inlineCapacity>
948 inline void Vector<T, inlineCapacity>::reserveInitialCapacity(size_t initialCapacity)
951 ASSERT(capacity() == inlineCapacity);
952 if (initialCapacity > inlineCapacity)
953 m_buffer.allocateBuffer(initialCapacity);
956 template<typename T, size_t inlineCapacity>
957 void Vector<T, inlineCapacity>::shrinkCapacity(size_t newCapacity)
959 if (newCapacity >= capacity())
962 if (newCapacity < size())
965 T* oldBuffer = begin();
966 if (newCapacity > 0) {
968 m_buffer.allocateBuffer(newCapacity);
969 if (begin() != oldBuffer)
970 TypeOperations::move(oldBuffer, oldEnd, begin());
973 m_buffer.deallocateBuffer(oldBuffer);
974 m_buffer.restoreInlineBufferIfNeeded();
977 // Templatizing these is better than just letting the conversion happen implicitly,
978 // because for instance it allows a PassRefPtr to be appended to a RefPtr vector
979 // without refcount thrash.
981 template<typename T, size_t inlineCapacity> template<typename U>
982 void Vector<T, inlineCapacity>::append(const U* data, size_t dataSize)
984 size_t newSize = m_size + dataSize;
985 if (newSize > capacity()) {
986 data = expandCapacity(newSize, data);
990 if (newSize < m_size)
993 for (size_t i = 0; i < dataSize; ++i)
994 new (NotNull, &dest[i]) T(data[i]);
998 template<typename T, size_t inlineCapacity> template<typename U>
999 bool Vector<T, inlineCapacity>::tryAppend(const U* data, size_t dataSize)
1001 size_t newSize = m_size + dataSize;
1002 if (newSize > capacity()) {
1003 data = tryExpandCapacity(newSize, data);
1008 if (newSize < m_size)
1011 for (size_t i = 0; i < dataSize; ++i)
1012 new (NotNull, &dest[i]) T(data[i]);
1017 template<typename T, size_t inlineCapacity> template<typename U>
1018 ALWAYS_INLINE void Vector<T, inlineCapacity>::append(const U& val)
1020 if (size() != capacity()) {
1021 new (NotNull, end()) T(val);
1026 appendSlowCase(val);
1029 template<typename T, size_t inlineCapacity> template<typename U>
1030 void Vector<T, inlineCapacity>::appendSlowCase(const U& val)
1032 ASSERT(size() == capacity());
1034 const U* ptr = &val;
1035 ptr = expandCapacity(size() + 1, ptr);
1039 new (NotNull, end()) T(*ptr);
1043 // This version of append saves a branch in the case where you know that the
1044 // vector's capacity is large enough for the append to succeed.
1046 template<typename T, size_t inlineCapacity> template<typename U>
1047 inline void Vector<T, inlineCapacity>::uncheckedAppend(const U& val)
1049 ASSERT(size() < capacity());
1050 const U* ptr = &val;
1051 new (NotNull, end()) T(*ptr);
1055 // This method should not be called append, a better name would be appendElements.
1056 // It could also be eliminated entirely, and call sites could just use
1057 // appendRange(val.begin(), val.end()).
1058 template<typename T, size_t inlineCapacity> template<size_t otherCapacity>
1059 inline void Vector<T, inlineCapacity>::append(const Vector<T, otherCapacity>& val)
1061 append(val.begin(), val.size());
1064 template<typename T, size_t inlineCapacity> template<typename U>
1065 void Vector<T, inlineCapacity>::insert(size_t position, const U* data, size_t dataSize)
1067 ASSERT(position <= size());
1068 size_t newSize = m_size + dataSize;
1069 if (newSize > capacity()) {
1070 data = expandCapacity(newSize, data);
1074 if (newSize < m_size)
1076 T* spot = begin() + position;
1077 TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
1078 for (size_t i = 0; i < dataSize; ++i)
1079 new (NotNull, &spot[i]) T(data[i]);
1083 template<typename T, size_t inlineCapacity> template<typename U>
1084 inline void Vector<T, inlineCapacity>::insert(size_t position, const U& val)
1086 ASSERT(position <= size());
1087 const U* data = &val;
1088 if (size() == capacity()) {
1089 data = expandCapacity(size() + 1, data);
1093 T* spot = begin() + position;
1094 TypeOperations::moveOverlapping(spot, end(), spot + 1);
1095 new (NotNull, spot) T(*data);
1099 template<typename T, size_t inlineCapacity> template<typename U, size_t c>
1100 inline void Vector<T, inlineCapacity>::insert(size_t position, const Vector<U, c>& val)
1102 insert(position, val.begin(), val.size());
1105 template<typename T, size_t inlineCapacity> template<typename U>
1106 void Vector<T, inlineCapacity>::prepend(const U* data, size_t dataSize)
1108 insert(0, data, dataSize);
1111 template<typename T, size_t inlineCapacity> template<typename U>
1112 inline void Vector<T, inlineCapacity>::prepend(const U& val)
1117 template<typename T, size_t inlineCapacity> template<typename U, size_t c>
1118 inline void Vector<T, inlineCapacity>::prepend(const Vector<U, c>& val)
1120 insert(0, val.begin(), val.size());
1123 template<typename T, size_t inlineCapacity>
1124 inline void Vector<T, inlineCapacity>::remove(size_t position)
1126 ASSERT(position < size());
1127 T* spot = begin() + position;
1129 TypeOperations::moveOverlapping(spot + 1, end(), spot);
1133 template<typename T, size_t inlineCapacity>
1134 inline void Vector<T, inlineCapacity>::remove(size_t position, size_t length)
1136 ASSERT(position <= size());
1137 ASSERT(position + length <= size());
1138 T* beginSpot = begin() + position;
1139 T* endSpot = beginSpot + length;
1140 TypeOperations::destruct(beginSpot, endSpot);
1141 TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
1145 template<typename T, size_t inlineCapacity>
1146 inline void Vector<T, inlineCapacity>::reverse()
1148 for (size_t i = 0; i < m_size / 2; ++i)
1149 std::swap(at(i), at(m_size - 1 - i));
1152 template<typename T, size_t inlineCapacity>
1153 inline T* Vector<T, inlineCapacity>::releaseBuffer()
1155 T* buffer = m_buffer.releaseBuffer();
1156 if (inlineCapacity && !buffer && m_size) {
1157 // If the vector had some data, but no buffer to release,
1158 // that means it was using the inline buffer. In that case,
1159 // we create a brand new buffer so the caller always gets one.
1160 size_t bytes = m_size * sizeof(T);
1161 buffer = static_cast<T*>(fastMalloc(bytes));
1162 memcpy(buffer, data(), bytes);
1168 template<typename T, size_t inlineCapacity>
1169 inline void Vector<T, inlineCapacity>::checkConsistency()
1171 #if !ASSERT_DISABLED
1172 for (size_t i = 0; i < size(); ++i)
1173 ValueCheck<T>::checkConsistency(at(i));
1177 template<typename T, size_t inlineCapacity>
1178 void deleteAllValues(const Vector<T, inlineCapacity>& collection)
1180 typedef typename Vector<T, inlineCapacity>::const_iterator iterator;
1181 iterator end = collection.end();
1182 for (iterator it = collection.begin(); it != end; ++it)
1186 template<typename T, size_t inlineCapacity>
1187 inline void swap(Vector<T, inlineCapacity>& a, Vector<T, inlineCapacity>& b)
1192 template<typename T, size_t inlineCapacity>
1193 bool operator==(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
1195 if (a.size() != b.size())
1198 return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
1201 template<typename T, size_t inlineCapacity>
1202 inline bool operator!=(const Vector<T, inlineCapacity>& a, const Vector<T, inlineCapacity>& b)
1207 #if !ASSERT_DISABLED
1208 template<typename T> struct ValueCheck<Vector<T> > {
1209 typedef Vector<T> TraitType;
1210 static void checkConsistency(const Vector<T>& v)
1212 v.checkConsistency();
1221 #endif // WTF_Vector_h