/*
 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
#include "wtf/Alignment.h"
#include "wtf/DefaultAllocator.h"
#include "wtf/FastAllocBase.h"
#include "wtf/Noncopyable.h"
#include "wtf/NotFound.h"
#include "wtf/StdLibExtras.h"
#include "wtf/VectorTraits.h"

#include <algorithm>
#include <cstring>
#include <iterator>
#include <new>
#include <utility>
// First heap allocation made by an empty Vector.
// Under MemorySanitizer a minimal initial size is used so that
// out-of-bounds accesses just past the requested elements are caught.
#if defined(MEMORY_SANITIZER_INITIAL_SIZE)
static const size_t kInitialVectorSize = 1;
#else
#ifndef WTF_VECTOR_INITIAL_SIZE
#define WTF_VECTOR_INITIAL_SIZE 4
#endif
static const size_t kInitialVectorSize = WTF_VECTOR_INITIAL_SIZE;
#endif
46 template<typename T, size_t inlineBuffer, typename Allocator>
// Destroys the elements in [begin, end).  The needsDestruction == false
// specialization is a no-op, so vectors of trivially destructible types
// compile to nothing here.
template <bool needsDestruction, typename T>
struct VectorDestructor;

template<typename T>
struct VectorDestructor<false, T>
{
    static void destruct(T*, T*) {}
};

template<typename T>
struct VectorDestructor<true, T>
{
    static void destruct(T* begin, T* end)
    {
        for (T* cur = begin; cur != end; ++cur)
            cur->~T();
    }
};
// Zero-fills slots that are allocated but no longer hold live elements.
// Only needed for garbage-collected backings, where the GC may scan the
// whole backing store.
template <bool unusedSlotsMustBeZeroed, typename T>
struct VectorUnusedSlotClearer;

template<typename T>
struct VectorUnusedSlotClearer<false, T> {
    static void clear(T*, T*) { }
};

template<typename T>
struct VectorUnusedSlotClearer<true, T> {
    static void clear(T* begin, T* end)
    {
        // We clear out unused slots so that the visitor and the finalizer
        // do not visit them (or at least it does not matter if they do).
        memset(begin, 0, sizeof(T) * (end - begin));
    }
};
86 template <bool canInitializeWithMemset, typename T>
87 struct VectorInitializer;
90 struct VectorInitializer<false, T>
92 static void initialize(T* begin, T* end)
94 for (T* cur = begin; cur != end; ++cur)
100 struct VectorInitializer<true, T>
102 static void initialize(T* begin, T* end)
104 memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
108 template <bool canMoveWithMemcpy, typename T>
112 struct VectorMover<false, T>
114 static void move(const T* src, const T* srcEnd, T* dst)
116 while (src != srcEnd) {
117 new (NotNull, dst) T(*src);
123 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
126 move(src, srcEnd, dst);
128 T* dstEnd = dst + (srcEnd - src);
129 while (src != srcEnd) {
132 new (NotNull, dstEnd) T(*srcEnd);
137 static void swap(T* src, T* srcEnd, T* dst)
139 std::swap_ranges(src, srcEnd, dst);
144 struct VectorMover<true, T>
146 static void move(const T* src, const T* srcEnd, T* dst)
148 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
150 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
152 memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
154 static void swap(T* src, T* srcEnd, T* dst)
156 std::swap_ranges(reinterpret_cast<char*>(src), reinterpret_cast<char*>(srcEnd), reinterpret_cast<char*>(dst));
160 template <bool canCopyWithMemcpy, typename T>
164 struct VectorCopier<false, T>
167 static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
169 while (src != srcEnd) {
170 new (NotNull, dst) T(*src);
178 struct VectorCopier<true, T>
180 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
182 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
185 static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
187 VectorCopier<false, T>::uninitializedCopy(src, srcEnd, dst);
191 template <bool canFillWithMemset, typename T>
195 struct VectorFiller<false, T>
197 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
199 while (dst != dstEnd) {
200 new (NotNull, dst) T(val);
207 struct VectorFiller<true, T>
209 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
211 COMPILE_ASSERT(sizeof(T) == sizeof(char), Size_of_type_should_be_equal_to_one);
212 #if COMPILER(GCC) && defined(_FORTIFY_SOURCE)
213 if (!__builtin_constant_p(dstEnd - dst) || (!(dstEnd - dst)))
215 memset(dst, val, dstEnd - dst);
219 template<bool canCompareWithMemcmp, typename T>
220 struct VectorComparer;
223 struct VectorComparer<false, T>
225 static bool compare(const T* a, const T* b, size_t size)
228 return std::equal(a, a + size, b);
234 struct VectorComparer<true, T>
236 static bool compare(const T* a, const T* b, size_t size)
238 return memcmp(a, b, sizeof(T) * size) == 0;
243 struct VectorTypeOperations
245 static void destruct(T* begin, T* end)
247 VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
250 static void initialize(T* begin, T* end)
252 VectorInitializer<VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
255 static void move(const T* src, const T* srcEnd, T* dst)
257 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
260 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
262 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
265 static void swap(T* src, T* srcEnd, T* dst)
267 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::swap(src, srcEnd, dst);
270 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
272 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
275 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
277 VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
280 static bool compare(const T* a, const T* b, size_t size)
282 return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
286 template<typename T, typename Allocator>
287 class VectorBufferBase {
288 WTF_MAKE_NONCOPYABLE(VectorBufferBase);
290 void allocateBuffer(size_t newCapacity)
292 typedef typename Allocator::template VectorBackingHelper<T, VectorTraits<T> >::Type VectorBacking;
294 size_t sizeToAllocate = allocationSize(newCapacity);
295 m_buffer = Allocator::template backingMalloc<T*, VectorBacking>(sizeToAllocate);
296 m_capacity = sizeToAllocate / sizeof(T);
299 size_t allocationSize(size_t capacity) const
301 return Allocator::Quantizer::template quantizedSize<T>(capacity);
304 T* buffer() { return m_buffer; }
305 const T* buffer() const { return m_buffer; }
306 size_t capacity() const { return m_capacity; }
308 void clearUnusedSlots(T* from, T* to)
310 VectorUnusedSlotClearer<Allocator::isGarbageCollected && (VectorTraits<T>::needsDestruction || ShouldBeTraced<VectorTraits<T> >::value), T>::clear(from, to);
320 VectorBufferBase(T* buffer, size_t capacity)
322 , m_capacity(capacity)
331 template<typename T, size_t inlineCapacity, typename Allocator = DefaultAllocator>
334 template<typename T, typename Allocator>
335 class VectorBuffer<T, 0, Allocator> : protected VectorBufferBase<T, Allocator> {
337 typedef VectorBufferBase<T, Allocator> Base;
343 VectorBuffer(size_t capacity)
345 // Calling malloc(0) might take a lock and may actually do an
346 // allocation on some systems.
348 allocateBuffer(capacity);
353 deallocateBuffer(m_buffer);
357 void deallocateBuffer(T* bufferToDeallocate)
359 Allocator::backingFree(bufferToDeallocate);
362 void resetBufferPointer()
368 void swapVectorBuffer(VectorBuffer<T, 0, Allocator>& other)
370 std::swap(m_buffer, other.m_buffer);
371 std::swap(m_capacity, other.m_capacity);
374 using Base::allocateBuffer;
375 using Base::allocationSize;
378 using Base::capacity;
380 using Base::clearUnusedSlots;
382 bool hasOutOfLineBuffer() const
384 // When inlineCapacity is 0 we have an out of line buffer if we have a buffer.
392 using Base::m_buffer;
393 using Base::m_capacity;
396 template<typename T, size_t inlineCapacity, typename Allocator>
397 class VectorBuffer : protected VectorBufferBase<T, Allocator> {
398 WTF_MAKE_NONCOPYABLE(VectorBuffer);
400 typedef VectorBufferBase<T, Allocator> Base;
403 : Base(inlineBuffer(), inlineCapacity)
407 VectorBuffer(size_t capacity)
408 : Base(inlineBuffer(), inlineCapacity)
410 if (capacity > inlineCapacity)
411 Base::allocateBuffer(capacity);
416 deallocateBuffer(m_buffer);
420 NEVER_INLINE void reallyDeallocateBuffer(T* bufferToDeallocate)
422 Allocator::backingFree(bufferToDeallocate);
425 void deallocateBuffer(T* bufferToDeallocate)
427 if (UNLIKELY(bufferToDeallocate != inlineBuffer()))
428 reallyDeallocateBuffer(bufferToDeallocate);
431 void resetBufferPointer()
433 m_buffer = inlineBuffer();
434 m_capacity = inlineCapacity;
437 void allocateBuffer(size_t newCapacity)
439 // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks.
440 if (newCapacity > inlineCapacity)
441 Base::allocateBuffer(newCapacity);
443 resetBufferPointer();
446 size_t allocationSize(size_t capacity) const
448 if (capacity <= inlineCapacity)
449 return m_inlineBufferSize;
450 return Base::allocationSize(capacity);
453 void swapVectorBuffer(VectorBuffer<T, inlineCapacity, Allocator>& other)
455 typedef VectorTypeOperations<T> TypeOperations;
457 if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()) {
458 ASSERT(m_capacity == other.m_capacity);
459 if (m_size > other.m_size) {
460 TypeOperations::swap(inlineBuffer(), inlineBuffer() + other.m_size, other.inlineBuffer());
461 TypeOperations::move(inlineBuffer() + other.m_size, inlineBuffer() + m_size, other.inlineBuffer() + other.m_size);
463 TypeOperations::swap(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
464 TypeOperations::move(other.inlineBuffer() + m_size, other.inlineBuffer() + other.m_size, inlineBuffer() + m_size);
466 } else if (buffer() == inlineBuffer()) {
467 m_buffer = other.m_buffer;
468 other.m_buffer = other.inlineBuffer();
469 TypeOperations::move(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
470 std::swap(m_capacity, other.m_capacity);
471 } else if (other.buffer() == other.inlineBuffer()) {
472 other.m_buffer = m_buffer;
473 m_buffer = inlineBuffer();
474 TypeOperations::move(other.inlineBuffer(), other.inlineBuffer() + other.m_size, inlineBuffer());
475 std::swap(m_capacity, other.m_capacity);
477 std::swap(m_buffer, other.m_buffer);
478 std::swap(m_capacity, other.m_capacity);
483 using Base::capacity;
485 bool hasOutOfLineBuffer() const
487 return buffer() && buffer() != inlineBuffer();
494 using Base::m_buffer;
495 using Base::m_capacity;
497 static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
498 T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); }
499 const T* inlineBuffer() const { return reinterpret_cast_ptr<const T*>(m_inlineBuffer.buffer); }
501 AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
502 template<typename U, size_t inlineBuffer, typename V>
506 template<typename T, size_t inlineCapacity, typename Allocator>
509 // VectorDestructorBase defines the destructor of a vector. This base is used in order to
510 // completely avoid creating a destructor for a vector that does not need to be destructed.
511 // By doing so, the clang compiler will have correct information about whether or not a
512 // vector has a trivial destructor and we use that in a compiler plugin to ensure the
513 // correctness of non-finalized garbage-collected classes and the use of VectorTraits::needsDestruction.
515 // All non-GC managed vectors need a destructor. This destructor will simply call finalize on the actual vector type.
516 template<typename Derived, typename Elements, bool hasInlineCapacity, bool isGarbageCollected>
517 class VectorDestructorBase {
519 ~VectorDestructorBase() { static_cast<Derived*>(this)->finalize(); }
522 // Heap-allocated vectors with no inlineCapacity never need a destructor.
523 template<typename Derived, typename Elements>
524 class VectorDestructorBase<Derived, Elements, false, true> { };
526 // Heap-allocator vectors with inlineCapacity need a destructor if the inline elements do.
527 // The use of VectorTraits<Elements>::needsDestruction is delayed until we know that
528 // inlineCapacity is non-zero to allow classes that recursively refer to themselves in vector
529 // members. If inlineCapacity is non-zero doing so would have undefined meaning, so in this
530 // case we can use HeapVectorWithInlineCapacityDestructorBase to define a destructor
531 // depending on the value of VectorTraits<Elements>::needsDestruction.
532 template<typename Derived, bool elementsNeedsDestruction>
533 class HeapVectorWithInlineCapacityDestructorBase;
535 template<typename Derived>
536 class HeapVectorWithInlineCapacityDestructorBase<Derived, true> {
538 ~HeapVectorWithInlineCapacityDestructorBase() { static_cast<Derived*>(this)->finalize(); }
541 template<typename Derived>
542 class HeapVectorWithInlineCapacityDestructorBase<Derived, false> { };
544 template<typename Derived, typename Elements>
545 class VectorDestructorBase<Derived, Elements, true, true> : public HeapVectorWithInlineCapacityDestructorBase<Derived, VectorTraits<Elements>::needsDestruction> { };
547 template<typename T, size_t inlineCapacity = 0, typename Allocator = DefaultAllocator>
548 class Vector : private VectorBuffer<T, inlineCapacity, Allocator>, public VectorDestructorBase<Vector<T, inlineCapacity, Allocator>, T, (inlineCapacity > 0), Allocator::isGarbageCollected> {
549 WTF_USE_ALLOCATOR(Vector, Allocator);
551 typedef VectorBuffer<T, inlineCapacity, Allocator> Base;
552 typedef VectorTypeOperations<T> TypeOperations;
558 typedef const T* const_iterator;
559 typedef std::reverse_iterator<iterator> reverse_iterator;
560 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
564 // Unused slots are initialized to zero so that the visitor and the
565 // finalizer can visit them safely. canInitializeWithMemset tells us
566 // that the class does not expect matching constructor and
567 // destructor calls as long as the memory is zeroed.
568 COMPILE_ASSERT(!Allocator::isGarbageCollected || !VectorTraits<T>::needsDestruction || VectorTraits<T>::canInitializeWithMemset, ClassHasProblemsWithFinalizersCalledOnClearedMemory);
569 COMPILE_ASSERT(!WTF::IsPolymorphic<T>::value || !VectorTraits<T>::canInitializeWithMemset, CantInitializeWithMemsetIfThereIsAVtable);
573 explicit Vector(size_t size)
576 // Unused slots are initialized to zero so that the visitor and the
577 // finalizer can visit them safely. canInitializeWithMemset tells us
578 // that the class does not expect matching constructor and
579 // destructor calls as long as the memory is zeroed.
580 COMPILE_ASSERT(!Allocator::isGarbageCollected || !VectorTraits<T>::needsDestruction || VectorTraits<T>::canInitializeWithMemset, ClassHasProblemsWithFinalizersCalledOnClearedMemory);
582 TypeOperations::initialize(begin(), end());
585 // Off-GC-heap vectors: Destructor should be called.
586 // On-GC-heap vectors: Destructor should be called for inline buffers
587 // (if any) but destructor shouldn't be called for vector backing since
588 // it is managed by the traced GC heap.
591 if (!inlineCapacity) {
592 if (LIKELY(!Base::buffer()))
595 if (LIKELY(m_size) && !(Allocator::isGarbageCollected && this->hasOutOfLineBuffer())) {
596 TypeOperations::destruct(begin(), end());
597 m_size = 0; // Partial protection against use-after-free.
603 void finalizeGarbageCollectedObject()
608 Vector(const Vector&);
609 template<size_t otherCapacity>
610 explicit Vector(const Vector<T, otherCapacity, Allocator>&);
612 Vector& operator=(const Vector&);
613 template<size_t otherCapacity>
614 Vector& operator=(const Vector<T, otherCapacity, Allocator>&);
617 Vector& operator=(Vector&&);
619 size_t size() const { return m_size; }
620 size_t capacity() const { return Base::capacity(); }
621 bool isEmpty() const { return !size(); }
625 RELEASE_ASSERT(i < size());
626 return Base::buffer()[i];
628 const T& at(size_t i) const
630 RELEASE_ASSERT(i < size());
631 return Base::buffer()[i];
634 T& operator[](size_t i) { return at(i); }
635 const T& operator[](size_t i) const { return at(i); }
637 T* data() { return Base::buffer(); }
638 const T* data() const { return Base::buffer(); }
640 iterator begin() { return data(); }
641 iterator end() { return begin() + m_size; }
642 const_iterator begin() const { return data(); }
643 const_iterator end() const { return begin() + m_size; }
645 reverse_iterator rbegin() { return reverse_iterator(end()); }
646 reverse_iterator rend() { return reverse_iterator(begin()); }
647 const_reverse_iterator rbegin() const { return const_reverse_iterator(end()); }
648 const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }
650 T& first() { return at(0); }
651 const T& first() const { return at(0); }
652 T& last() { return at(size() - 1); }
653 const T& last() const { return at(size() - 1); }
655 template<typename U> bool contains(const U&) const;
656 template<typename U> size_t find(const U&) const;
657 template<typename U> size_t reverseFind(const U&) const;
659 void shrink(size_t size);
660 void grow(size_t size);
661 void resize(size_t size);
662 void reserveCapacity(size_t newCapacity);
663 void reserveInitialCapacity(size_t initialCapacity);
664 void shrinkToFit() { shrinkCapacity(size()); }
665 void shrinkToReasonableCapacity()
667 if (size() * 2 < capacity())
668 shrinkCapacity(size() + size() / 4 + 1);
671 void clear() { shrinkCapacity(0); }
673 template<typename U> void append(const U*, size_t);
674 template<typename U> void append(const U&);
675 template<typename U> void uncheckedAppend(const U& val);
676 template<typename U, size_t otherCapacity, typename V> void appendVector(const Vector<U, otherCapacity, V>&);
678 template<typename U> void insert(size_t position, const U*, size_t);
679 template<typename U> void insert(size_t position, const U&);
680 template<typename U, size_t c, typename V> void insert(size_t position, const Vector<U, c, V>&);
682 template<typename U> void prepend(const U*, size_t);
683 template<typename U> void prepend(const U&);
684 template<typename U, size_t c, typename V> void prepend(const Vector<U, c, V>&);
686 void remove(size_t position);
687 void remove(size_t position, size_t length);
695 Vector(size_t size, const T& val)
699 TypeOperations::uninitializedFill(begin(), end(), val);
702 void fill(const T&, size_t);
703 void fill(const T& val) { fill(val, size()); }
705 template<typename Iterator> void appendRange(Iterator start, Iterator end);
707 void swap(Vector& other)
709 Base::swapVectorBuffer(other);
710 std::swap(m_size, other.m_size);
715 void trace(typename Allocator::Visitor*);
718 void expandCapacity(size_t newMinCapacity);
719 const T* expandCapacity(size_t newMinCapacity, const T*);
720 template<typename U> U* expandCapacity(size_t newMinCapacity, U*);
721 void shrinkCapacity(size_t newCapacity);
722 template<typename U> void appendSlowCase(const U&);
726 using Base::capacity;
727 using Base::swapVectorBuffer;
728 using Base::allocateBuffer;
729 using Base::allocationSize;
730 using Base::clearUnusedSlots;
733 template<typename T, size_t inlineCapacity, typename Allocator>
734 Vector<T, inlineCapacity, Allocator>::Vector(const Vector& other)
735 : Base(other.capacity())
737 m_size = other.size();
738 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
741 template<typename T, size_t inlineCapacity, typename Allocator>
742 template<size_t otherCapacity>
743 Vector<T, inlineCapacity, Allocator>::Vector(const Vector<T, otherCapacity, Allocator>& other)
744 : Base(other.capacity())
746 m_size = other.size();
747 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
750 template<typename T, size_t inlineCapacity, typename Allocator>
751 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, inlineCapacity, Allocator>& other)
753 if (UNLIKELY(&other == this))
756 if (size() > other.size())
757 shrink(other.size());
758 else if (other.size() > capacity()) {
760 reserveCapacity(other.size());
764 std::copy(other.begin(), other.begin() + size(), begin());
765 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
766 m_size = other.size();
771 inline bool typelessPointersAreEqual(const void* a, const void* b) { return a == b; }
773 template<typename T, size_t inlineCapacity, typename Allocator>
774 template<size_t otherCapacity>
775 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, otherCapacity, Allocator>& other)
777 // If the inline capacities match, we should call the more specific
778 // template. If the inline capacities don't match, the two objects
779 // shouldn't be allocated the same address.
780 ASSERT(!typelessPointersAreEqual(&other, this));
782 if (size() > other.size())
783 shrink(other.size());
784 else if (other.size() > capacity()) {
786 reserveCapacity(other.size());
790 std::copy(other.begin(), other.begin() + size(), begin());
791 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
792 m_size = other.size();
797 template<typename T, size_t inlineCapacity, typename Allocator>
798 Vector<T, inlineCapacity, Allocator>::Vector(Vector<T, inlineCapacity, Allocator>&& other)
801 // It's a little weird to implement a move constructor using swap but this way we
802 // don't have to add a move constructor to VectorBuffer.
806 template<typename T, size_t inlineCapacity, typename Allocator>
807 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(Vector<T, inlineCapacity, Allocator>&& other)
813 template<typename T, size_t inlineCapacity, typename Allocator>
815 bool Vector<T, inlineCapacity, Allocator>::contains(const U& value) const
817 return find(value) != kNotFound;
820 template<typename T, size_t inlineCapacity, typename Allocator>
822 size_t Vector<T, inlineCapacity, Allocator>::find(const U& value) const
824 const T* b = begin();
826 for (const T* iter = b; iter < e; ++iter) {
833 template<typename T, size_t inlineCapacity, typename Allocator>
835 size_t Vector<T, inlineCapacity, Allocator>::reverseFind(const U& value) const
837 const T* b = begin();
838 const T* iter = end();
847 template<typename T, size_t inlineCapacity, typename Allocator>
848 void Vector<T, inlineCapacity, Allocator>::fill(const T& val, size_t newSize)
850 if (size() > newSize)
852 else if (newSize > capacity()) {
854 reserveCapacity(newSize);
858 std::fill(begin(), end(), val);
859 TypeOperations::uninitializedFill(end(), begin() + newSize, val);
863 template<typename T, size_t inlineCapacity, typename Allocator>
864 template<typename Iterator>
865 void Vector<T, inlineCapacity, Allocator>::appendRange(Iterator start, Iterator end)
867 for (Iterator it = start; it != end; ++it)
871 template<typename T, size_t inlineCapacity, typename Allocator>
872 void Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity)
874 size_t oldCapacity = capacity();
875 size_t expandedCapacity = oldCapacity;
876 // We use a more aggressive expansion strategy for Vectors with inline storage.
877 // This is because they are more likely to be on the stack, so the risk of heap bloat is minimized.
878 // Furthermore, exceeding the inline capacity limit is not supposed to happen in the common case and may indicate a pathological condition or microbenchmark.
879 if (inlineCapacity) {
880 expandedCapacity *= 2;
881 // Check for integer overflow, which could happen in the 32-bit build.
882 RELEASE_ASSERT(expandedCapacity > oldCapacity);
884 // This cannot integer overflow.
885 // On 64-bit, the "expanded" integer is 32-bit, and any encroachment above 2^32 will fail allocation in allocateBuffer().
886 // On 32-bit, there's not enough address space to hold the old and new buffers.
887 // In addition, our underlying allocator is supposed to always fail on > (2^31 - 1) allocations.
888 expandedCapacity += (expandedCapacity / 4) + 1;
890 reserveCapacity(std::max(newMinCapacity, std::max(static_cast<size_t>(kInitialVectorSize), expandedCapacity)));
893 template<typename T, size_t inlineCapacity, typename Allocator>
894 const T* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, const T* ptr)
896 if (ptr < begin() || ptr >= end()) {
897 expandCapacity(newMinCapacity);
900 size_t index = ptr - begin();
901 expandCapacity(newMinCapacity);
902 return begin() + index;
905 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
906 inline U* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, U* ptr)
908 expandCapacity(newMinCapacity);
912 template<typename T, size_t inlineCapacity, typename Allocator>
913 inline void Vector<T, inlineCapacity, Allocator>::resize(size_t size)
916 TypeOperations::destruct(begin() + size, end());
918 if (size > capacity())
919 expandCapacity(size);
920 TypeOperations::initialize(end(), begin() + size);
926 template<typename T, size_t inlineCapacity, typename Allocator>
927 void Vector<T, inlineCapacity, Allocator>::shrink(size_t size)
929 ASSERT(size <= m_size);
930 TypeOperations::destruct(begin() + size, end());
931 clearUnusedSlots(begin() + size, end());
935 template<typename T, size_t inlineCapacity, typename Allocator>
936 void Vector<T, inlineCapacity, Allocator>::grow(size_t size)
938 ASSERT(size >= m_size);
939 if (size > capacity())
940 expandCapacity(size);
941 TypeOperations::initialize(end(), begin() + size);
945 template<typename T, size_t inlineCapacity, typename Allocator>
946 void Vector<T, inlineCapacity, Allocator>::reserveCapacity(size_t newCapacity)
948 if (UNLIKELY(newCapacity <= capacity()))
950 T* oldBuffer = begin();
952 Base::allocateBuffer(newCapacity);
953 TypeOperations::move(oldBuffer, oldEnd, begin());
954 Base::deallocateBuffer(oldBuffer);
957 template<typename T, size_t inlineCapacity, typename Allocator>
958 inline void Vector<T, inlineCapacity, Allocator>::reserveInitialCapacity(size_t initialCapacity)
961 ASSERT(capacity() == inlineCapacity);
962 if (initialCapacity > inlineCapacity)
963 Base::allocateBuffer(initialCapacity);
966 template<typename T, size_t inlineCapacity, typename Allocator>
967 void Vector<T, inlineCapacity, Allocator>::shrinkCapacity(size_t newCapacity)
969 if (newCapacity >= capacity())
972 if (newCapacity < size())
975 T* oldBuffer = begin();
976 if (newCapacity > 0) {
977 // Optimization: if we're downsizing inside the same allocator bucket, we can exit with no additional work.
978 if (Base::allocationSize(capacity()) == Base::allocationSize(newCapacity))
982 Base::allocateBuffer(newCapacity);
983 if (begin() != oldBuffer)
984 TypeOperations::move(oldBuffer, oldEnd, begin());
986 Base::resetBufferPointer();
989 Base::deallocateBuffer(oldBuffer);
// Templatizing these is better than just letting the conversion happen implicitly,
// because for instance it allows a PassRefPtr to be appended to a RefPtr vector
// without refcount thrash.
996 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
997 void Vector<T, inlineCapacity, Allocator>::append(const U* data, size_t dataSize)
999 ASSERT(Allocator::isAllocationAllowed());
1000 size_t newSize = m_size + dataSize;
1001 if (newSize > capacity()) {
1002 data = expandCapacity(newSize, data);
1005 RELEASE_ASSERT(newSize >= m_size);
1007 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], dest);
1011 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1012 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::append(const U& val)
1014 ASSERT(Allocator::isAllocationAllowed());
1015 if (LIKELY(size() != capacity())) {
1016 new (NotNull, end()) T(val);
1021 appendSlowCase(val);
1024 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1025 NEVER_INLINE void Vector<T, inlineCapacity, Allocator>::appendSlowCase(const U& val)
1027 ASSERT(size() == capacity());
1029 const U* ptr = &val;
1030 ptr = expandCapacity(size() + 1, ptr);
1033 new (NotNull, end()) T(*ptr);
// This version of append saves a branch in the case where you know that the
// vector's capacity is large enough for the append to succeed.
1040 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1041 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::uncheckedAppend(const U& val)
1043 ASSERT(size() < capacity());
1044 const U* ptr = &val;
1045 new (NotNull, end()) T(*ptr);
1049 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t otherCapacity, typename OtherAllocator>
1050 inline void Vector<T, inlineCapacity, Allocator>::appendVector(const Vector<U, otherCapacity, OtherAllocator>& val)
1052 append(val.begin(), val.size());
1055 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1056 void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U* data, size_t dataSize)
1058 ASSERT(Allocator::isAllocationAllowed());
1059 RELEASE_ASSERT(position <= size());
1060 size_t newSize = m_size + dataSize;
1061 if (newSize > capacity()) {
1062 data = expandCapacity(newSize, data);
1065 RELEASE_ASSERT(newSize >= m_size);
1066 T* spot = begin() + position;
1067 TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
1068 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], spot);
1072 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1073 inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U& val)
1075 ASSERT(Allocator::isAllocationAllowed());
1076 RELEASE_ASSERT(position <= size());
1077 const U* data = &val;
1078 if (size() == capacity()) {
1079 data = expandCapacity(size() + 1, data);
1082 T* spot = begin() + position;
1083 TypeOperations::moveOverlapping(spot, end(), spot + 1);
1084 new (NotNull, spot) T(*data);
1088 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename OtherAllocator>
1089 inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const Vector<U, c, OtherAllocator>& val)
1091 insert(position, val.begin(), val.size());
1094 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1095 void Vector<T, inlineCapacity, Allocator>::prepend(const U* data, size_t dataSize)
1097 insert(0, data, dataSize);
1100 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1101 inline void Vector<T, inlineCapacity, Allocator>::prepend(const U& val)
1106 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename V>
1107 inline void Vector<T, inlineCapacity, Allocator>::prepend(const Vector<U, c, V>& val)
1109 insert(0, val.begin(), val.size());
1112 template<typename T, size_t inlineCapacity, typename Allocator>
1113 inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position)
1115 RELEASE_ASSERT(position < size());
1116 T* spot = begin() + position;
1118 TypeOperations::moveOverlapping(spot + 1, end(), spot);
1119 clearUnusedSlots(end() - 1, end());
1123 template<typename T, size_t inlineCapacity, typename Allocator>
1124 inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position, size_t length)
1126 ASSERT_WITH_SECURITY_IMPLICATION(position <= size());
1127 RELEASE_ASSERT(position + length <= size());
1128 T* beginSpot = begin() + position;
1129 T* endSpot = beginSpot + length;
1130 TypeOperations::destruct(beginSpot, endSpot);
1131 TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
1132 clearUnusedSlots(end() - length, end());
1136 template<typename T, size_t inlineCapacity, typename Allocator>
1137 inline void Vector<T, inlineCapacity, Allocator>::reverse()
1139 for (size_t i = 0; i < m_size / 2; ++i)
1140 std::swap(at(i), at(m_size - 1 - i));
1143 template<typename T, size_t inlineCapacity, typename Allocator>
1144 void deleteAllValues(const Vector<T, inlineCapacity, Allocator>& collection)
1146 typedef typename Vector<T, inlineCapacity, Allocator>::const_iterator iterator;
1147 iterator end = collection.end();
1148 for (iterator it = collection.begin(); it != end; ++it)
1152 template<typename T, size_t inlineCapacity, typename Allocator>
1153 inline void swap(Vector<T, inlineCapacity, Allocator>& a, Vector<T, inlineCapacity, Allocator>& b)
1158 template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
1159 bool operator==(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
1161 if (a.size() != b.size())
1164 return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
1167 template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
1168 inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
// This is only called if the allocator is a HeapAllocator. It is used when
// visiting during a tracing GC.
1175 template<typename T, size_t inlineCapacity, typename Allocator>
1176 void Vector<T, inlineCapacity, Allocator>::trace(typename Allocator::Visitor* visitor)
1178 ASSERT(Allocator::isGarbageCollected); // Garbage collector must be enabled.
1179 const T* bufferBegin = buffer();
1180 const T* bufferEnd = buffer() + size();
1181 if (ShouldBeTraced<VectorTraits<T> >::value) {
1182 for (const T* bufferEntry = bufferBegin; bufferEntry != bufferEnd; bufferEntry++)
1183 Allocator::template trace<T, VectorTraits<T> >(visitor, *const_cast<T*>(bufferEntry));
1185 if (this->hasOutOfLineBuffer())
1186 Allocator::markNoTracing(visitor, buffer());
1190 template<typename T, size_t N>
1191 struct NeedsTracing<Vector<T, N> > {
1192 static const bool value = false;
1200 #endif // WTF_Vector_h