2 * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public License
15 * along with this library; see the file COPYING.LIB. If not, write to
16 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
24 #include "wtf/Alignment.h"
25 #include "wtf/DefaultAllocator.h"
26 #include "wtf/FastAllocBase.h"
27 #include "wtf/Noncopyable.h"
28 #include "wtf/NotFound.h"
29 #include "wtf/StdLibExtras.h"
30 #include "wtf/VectorTraits.h"
// Number of elements allocated the first time an empty Vector grows.
// Under MemorySanitizer a minimal size of 1 is used so that reads past the
// requested size land outside the allocation and are detected.
37 #if defined(MEMORY_SANITIZER_INITIAL_SIZE)
38 static const size_t kInitialVectorSize = 1;
// Otherwise the initial size defaults to 4 elements, overridable at build
// time via WTF_VECTOR_INITIAL_SIZE.
40 #ifndef WTF_VECTOR_INITIAL_SIZE
41 #define WTF_VECTOR_INITIAL_SIZE 4
43 static const size_t kInitialVectorSize = WTF_VECTOR_INITIAL_SIZE;
// Forward declaration (presumably of VectorBuffer, defined further below) so
// helper templates can name it before its definition — TODO confirm, the
// declared name is on a line not visible in this chunk.
46 template<typename T, size_t inlineBuffer, typename Allocator>
// VectorDestructor: compile-time dispatch on whether T's destructor must run
// when elements are discarded (driven by VectorTraits<T>::needsDestruction,
// see VectorTypeOperations::destruct below).
49 template <bool needsDestruction, typename T>
50 struct VectorDestructor;
// Trivially-destructible elements: destruction is a no-op.
53 struct VectorDestructor<false, T>
55 static void destruct(T*, T*) {}
// Non-trivial destructor: destroy each element in [begin, end) in order.
59 struct VectorDestructor<true, T>
61 static void destruct(T* begin, T* end)
63 for (T* cur = begin; cur != end; ++cur)
// VectorUnusedSlotClearer: zeroes the slots between size() and capacity()
// when required (used for garbage-collected backings — see clearUnusedSlots
// in VectorBufferBase below); otherwise clearing is a no-op.
68 template <bool unusedSlotsMustBeZeroed, typename T>
69 struct VectorUnusedSlotClearer;
// No zeroing required: do nothing.
72 struct VectorUnusedSlotClearer<false, T> {
73 static void clear(T*, T*) { }
// Zeroing required: memset the whole unused range.
77 struct VectorUnusedSlotClearer<true, T> {
78 static void clear(T* begin, T* end)
80 // We clear out unused slots so that the visitor and the finalizer
81 // do not visit them (or at least it does not matter if they do).
82 memset(begin, 0, sizeof(T) * (end - begin));
// VectorInitializer: default-initializes a range of new elements, using
// memset when VectorTraits<T>::canInitializeWithMemset says zero-filling is
// equivalent to default construction.
86 template <bool canInitializeWithMemset, typename T>
87 struct VectorInitializer;
// General case: construct each element in [begin, end) individually.
90 struct VectorInitializer<false, T>
92 static void initialize(T* begin, T* end)
94 for (T* cur = begin; cur != end; ++cur)
// memset-able case: zero the whole byte range in one call.
100 struct VectorInitializer<true, T>
102 static void initialize(T* begin, T* end)
104 memset(begin, 0, reinterpret_cast<char*>(end) - reinterpret_cast<char*>(begin));
// VectorMover: relocates a range of elements into (possibly uninitialized)
// destination storage, with memcpy/memmove fast paths when
// VectorTraits<T>::canMoveWithMemcpy permits bitwise moves.
108 template <bool canMoveWithMemcpy, typename T>
// General case: placement-new each element into the destination.
112 struct VectorMover<false, T>
114 static void move(const T* src, const T* srcEnd, T* dst)
116 while (src != srcEnd) {
117 new (NotNull, dst) T(*src);
// moveOverlapping must pick a copy direction that does not clobber
// not-yet-moved source elements when the ranges overlap.
123 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
// Non-overlapping-forward case: a plain forward move is safe.
126 move(src, srcEnd, dst);
// Otherwise copy backwards, from the ends of the ranges toward the fronts.
128 T* dstEnd = dst + (srcEnd - src);
129 while (src != srcEnd) {
132 new (NotNull, dstEnd) T(*srcEnd);
// Element-wise swap of two equal-length constructed ranges.
137 static void swap(T* src, T* srcEnd, T* dst)
139 std::swap_ranges(src, srcEnd, dst);
// Bitwise-movable case: raw byte copies replace per-element construction.
144 struct VectorMover<true, T>
146 static void move(const T* src, const T* srcEnd, T* dst)
148 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
// memmove handles overlap in either direction.
150 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
152 memmove(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
// Swap the underlying bytes directly; valid because T is bitwise-movable.
154 static void swap(T* src, T* srcEnd, T* dst)
156 std::swap_ranges(reinterpret_cast<char*>(src), reinterpret_cast<char*>(srcEnd), reinterpret_cast<char*>(dst));
// VectorCopier: copy-constructs a range into uninitialized destination
// storage, using memcpy when VectorTraits<T>::canCopyWithMemcpy allows it.
160 template <bool canCopyWithMemcpy, typename T>
// General case: placement-new a copy of each source element. Templated on U
// so a convertible source type can be copied into a T range.
164 struct VectorCopier<false, T>
167 static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
169 while (src != srcEnd) {
170 new (NotNull, dst) T(*src);
// Bitwise-copyable case.
178 struct VectorCopier<true, T>
// Same-type source: one memcpy covers the whole range.
180 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
182 memcpy(dst, src, reinterpret_cast<const char*>(srcEnd) - reinterpret_cast<const char*>(src));
// Cross-type source: memcpy would be wrong, so fall back to the
// element-wise copier.
185 static void uninitializedCopy(const U* src, const U* srcEnd, T* dst)
187 VectorCopier<false, T>::uninitializedCopy(src, srcEnd, dst);
// VectorFiller: fills uninitialized storage with copies of a value, using
// memset when VectorTraits<T>::canFillWithMemset allows it.
191 template <bool canFillWithMemset, typename T>
// General case: placement-new a copy of val into each slot.
195 struct VectorFiller<false, T>
197 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
199 while (dst != dstEnd) {
200 new (NotNull, dst) T(val);
// memset case: only valid for single-byte element types, enforced below.
207 struct VectorFiller<true, T>
209 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
211 COMPILE_ASSERT(sizeof(T) == sizeof(char), Size_of_type_should_be_equal_to_one);
// Workaround: with _FORTIFY_SOURCE, GCC's fortified memset can reject
// cases the compiler cannot prove safe; guard on constant-size/empty
// ranges — TODO confirm, the guarded alternative lines are not visible
// in this chunk.
212 #if COMPILER(GCC) && defined(_FORTIFY_SOURCE)
213 if (!__builtin_constant_p(dstEnd - dst) || (!(dstEnd - dst)))
215 memset(dst, val, dstEnd - dst);
// VectorComparer: compares two element ranges for equality, using memcmp
// when VectorTraits<T>::canCompareWithMemcmp says bitwise comparison is
// equivalent to operator==.
219 template<bool canCompareWithMemcmp, typename T>
220 struct VectorComparer;
// General case: element-wise comparison via std::equal (uses T's ==).
223 struct VectorComparer<false, T>
225 static bool compare(const T* a, const T* b, size_t size)
228 return std::equal(a, a + size, b);
// Bitwise-comparable case: a single memcmp over the raw bytes.
234 struct VectorComparer<true, T>
236 static bool compare(const T* a, const T* b, size_t size)
238 return memcmp(a, b, sizeof(T) * size) == 0;
// VectorTypeOperations: single facade over all the per-trait helper structs
// above. Each static method selects the memset/memcpy/memcmp fast path or
// the element-wise fallback based on VectorTraits<T>, so Vector itself never
// dispatches on traits directly.
243 struct VectorTypeOperations
// Run destructors for [begin, end) if T needs destruction.
245 static void destruct(T* begin, T* end)
247 VectorDestructor<VectorTraits<T>::needsDestruction, T>::destruct(begin, end);
// Default-initialize [begin, end) (zero-fill when allowed).
250 static void initialize(T* begin, T* end)
252 VectorInitializer<VectorTraits<T>::canInitializeWithMemset, T>::initialize(begin, end);
// Relocate a non-overlapping range into uninitialized storage.
255 static void move(const T* src, const T* srcEnd, T* dst)
257 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::move(src, srcEnd, dst);
// Relocate a possibly-overlapping range (direction-safe).
260 static void moveOverlapping(const T* src, const T* srcEnd, T* dst)
262 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::moveOverlapping(src, srcEnd, dst);
// Swap two equal-length constructed ranges.
265 static void swap(T* src, T* srcEnd, T* dst)
267 VectorMover<VectorTraits<T>::canMoveWithMemcpy, T>::swap(src, srcEnd, dst);
// Copy-construct a range into uninitialized storage.
270 static void uninitializedCopy(const T* src, const T* srcEnd, T* dst)
272 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(src, srcEnd, dst);
// Fill uninitialized storage with copies of val.
275 static void uninitializedFill(T* dst, T* dstEnd, const T& val)
277 VectorFiller<VectorTraits<T>::canFillWithMemset, T>::uninitializedFill(dst, dstEnd, val);
// Compare two ranges of 'size' elements for equality.
280 static bool compare(const T* a, const T* b, size_t size)
282 return VectorComparer<VectorTraits<T>::canCompareWithMemcmp, T>::compare(a, b, size);
// VectorBufferBase: the raw (buffer pointer, capacity) pair shared by both
// VectorBuffer specializations. It allocates backing storage through the
// Allocator but never constructs or destroys elements — that is the job of
// VectorTypeOperations.
286 template<typename T, typename Allocator>
287 class VectorBufferBase {
288 WTF_MAKE_NONCOPYABLE(VectorBufferBase);
// Allocate backing storage for at least newCapacity elements. The request
// is rounded up by the allocator's quantizer, and m_capacity records how
// many elements actually fit in the bytes returned, so no rounding slack
// is wasted.
290 void allocateBuffer(size_t newCapacity)
292 typedef typename Allocator::template VectorBackingHelper<T, VectorTraits<T> >::Type VectorBacking;
294 size_t sizeToAllocate = allocationSize(newCapacity);
295 m_buffer = Allocator::template backingMalloc<T*, VectorBacking>(sizeToAllocate);
296 m_capacity = sizeToAllocate / sizeof(T);
// Bytes the allocator would actually hand out for 'capacity' elements.
299 size_t allocationSize(size_t capacity) const
301 return Allocator::Quantizer::template quantizedSize<T>(capacity);
304 T* buffer() { return m_buffer; }
305 const T* buffer() const { return m_buffer; }
306 size_t capacity() const { return m_capacity; }
// Zero [from, to) when the allocator is garbage collected and T is traced
// or finalized, so the GC never sees stale element bytes; no-op otherwise.
308 void clearUnusedSlots(T* from, T* to)
310 VectorUnusedSlotClearer<Allocator::isGarbageCollected && (VectorTraits<T>::needsDestruction || ShouldBeTraced<VectorTraits<T> >::value), T>::clear(from, to);
// Adopt an existing buffer/capacity (used by the inline-capacity subclass
// to start out pointing at its inline storage).
320 VectorBufferBase(T* buffer, size_t capacity)
322 , m_capacity(capacity)
// Primary VectorBuffer template (defined after this specialization).
331 template<typename T, size_t inlineCapacity, typename Allocator = DefaultAllocator>
// Specialization for inlineCapacity == 0: the buffer is always heap
// allocated (or null), which keeps every operation a plain pointer/size
// manipulation with no inline-storage special cases.
334 template<typename T, typename Allocator>
335 class VectorBuffer<T, 0, Allocator> : protected VectorBufferBase<T, Allocator> {
337 typedef VectorBufferBase<T, Allocator> Base;
// Allocate an initial buffer, skipping the allocator entirely for
// capacity 0 (see comment below).
343 VectorBuffer(size_t capacity)
345 // Calling malloc(0) might take a lock and may actually do an
346 // allocation on some systems.
348 allocateBuffer(capacity);
// Destructor releases whatever buffer is currently held.
353 deallocateBuffer(m_buffer);
// Return a buffer to the allocator; with no inline storage there is
// nothing to special-case.
357 void deallocateBuffer(T* bufferToDeallocate)
359 Allocator::backingFree(bufferToDeallocate);
// Reset to the empty (no-buffer) state.
362 void resetBufferPointer()
// O(1) swap: both buffers are out-of-line, so exchanging the pointer and
// capacity fields is sufficient — no elements move.
368 void swapVectorBuffer(VectorBuffer<T, 0, Allocator>& other)
370 std::swap(m_buffer, other.m_buffer);
371 std::swap(m_capacity, other.m_capacity);
374 using Base::allocateBuffer;
375 using Base::allocationSize;
378 using Base::capacity;
380 using Base::clearUnusedSlots;
382 bool hasOutOfLineBuffer() const
384 // When inlineCapacity is 0 we have an out of line buffer if we have a buffer.
392 using Base::m_buffer;
393 using Base::m_capacity;
// General VectorBuffer: carries inlineCapacity elements of storage inside
// the object itself and only falls back to the allocator when the requested
// capacity exceeds the inline space.
396 template<typename T, size_t inlineCapacity, typename Allocator>
397 class VectorBuffer : protected VectorBufferBase<T, Allocator> {
398 WTF_MAKE_NONCOPYABLE(VectorBuffer);
400 typedef VectorBufferBase<T, Allocator> Base;
// Default state: point at the inline storage.
403 : Base(inlineBuffer(), inlineCapacity)
// Start at the inline storage; allocate out-of-line only if it is too small.
407 VectorBuffer(size_t capacity)
408 : Base(inlineBuffer(), inlineCapacity)
410 if (capacity > inlineCapacity)
411 Base::allocateBuffer(capacity);
// Destructor releases the buffer (a no-op if it is the inline one).
416 deallocateBuffer(m_buffer);
// Out-of-line free path, kept out of the inline fast path below.
420 NEVER_INLINE void reallyDeallocateBuffer(T* bufferToDeallocate)
422 Allocator::backingFree(bufferToDeallocate);
// Free only buffers that are not the inline storage; the common
// (inline) case is a single predicted-not-taken branch.
425 void deallocateBuffer(T* bufferToDeallocate)
427 if (UNLIKELY(bufferToDeallocate != inlineBuffer()))
428 reallyDeallocateBuffer(bufferToDeallocate);
// Fall back to the inline storage as the current buffer.
431 void resetBufferPointer()
433 m_buffer = inlineBuffer();
434 m_capacity = inlineCapacity;
// Allocate out-of-line only when the inline storage is insufficient;
// otherwise reuse it.
437 void allocateBuffer(size_t newCapacity)
439 // FIXME: This should ASSERT(!m_buffer) to catch misuse/leaks.
440 if (newCapacity > inlineCapacity)
441 Base::allocateBuffer(newCapacity);
443 resetBufferPointer();
// Capacities at or below inlineCapacity cost exactly the inline bytes.
446 size_t allocationSize(size_t capacity) const
448 if (capacity <= inlineCapacity)
449 return m_inlineBufferSize;
450 return Base::allocationSize(capacity);
// Swap with another buffer of the same inline capacity. Unlike the
// zero-inline-capacity specialization this cannot always be a pointer
// swap: elements living in inline storage must physically move.
453 void swapVectorBuffer(VectorBuffer<T, inlineCapacity, Allocator>& other)
455 typedef VectorTypeOperations<T> TypeOperations;
// Case 1: both inline — swap the overlapping prefix element-wise and
// move the tail of the longer vector across.
457 if (buffer() == inlineBuffer() && other.buffer() == other.inlineBuffer()) {
458 ASSERT(m_capacity == other.m_capacity);
459 if (m_size > other.m_size) {
460 TypeOperations::swap(inlineBuffer(), inlineBuffer() + other.m_size, other.inlineBuffer());
461 TypeOperations::move(inlineBuffer() + other.m_size, inlineBuffer() + m_size, other.inlineBuffer() + other.m_size);
463 TypeOperations::swap(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
464 TypeOperations::move(other.inlineBuffer() + m_size, other.inlineBuffer() + other.m_size, inlineBuffer() + m_size);
// Case 2: only we are inline — take other's heap buffer and move our
// inline elements into other's inline storage.
466 } else if (buffer() == inlineBuffer()) {
467 m_buffer = other.m_buffer;
468 other.m_buffer = other.inlineBuffer();
469 TypeOperations::move(inlineBuffer(), inlineBuffer() + m_size, other.inlineBuffer());
470 std::swap(m_capacity, other.m_capacity);
// Case 3: only other is inline — mirror image of case 2.
471 } else if (other.buffer() == other.inlineBuffer()) {
472 other.m_buffer = m_buffer;
473 m_buffer = inlineBuffer();
474 TypeOperations::move(other.inlineBuffer(), other.inlineBuffer() + other.m_size, inlineBuffer());
475 std::swap(m_capacity, other.m_capacity);
// Case 4: both out-of-line — plain pointer/capacity swap.
477 std::swap(m_buffer, other.m_buffer);
478 std::swap(m_capacity, other.m_capacity);
483 using Base::capacity;
// True when elements live in a heap allocation rather than inline.
485 bool hasOutOfLineBuffer() const
487 return buffer() && buffer() != inlineBuffer();
494 using Base::m_buffer;
495 using Base::m_capacity;
// Raw inline storage: suitably aligned, uninitialized bytes for
// inlineCapacity elements.
497 static const size_t m_inlineBufferSize = inlineCapacity * sizeof(T);
498 T* inlineBuffer() { return reinterpret_cast_ptr<T*>(m_inlineBuffer.buffer); }
499 const T* inlineBuffer() const { return reinterpret_cast_ptr<const T*>(m_inlineBuffer.buffer); }
501 AlignedBuffer<m_inlineBufferSize, WTF_ALIGN_OF(T)> m_inlineBuffer;
// Friendship with other VectorBuffer instantiations — TODO confirm, the
// friend line itself is not visible in this chunk.
502 template<typename U, size_t inlineBuffer, typename V>
506 template<typename T, size_t inlineCapacity, typename Allocator>
509 // VectorDestructorBase defines the destructor of a vector. This base is used in order to
510 // completely avoid creating a destructor for a vector that does not need to be destructed.
511 // By doing so, the clang compiler will have correct information about whether or not a
512 // vector has a trivial destructor and we use that in a compiler plugin to ensure the
513 // correctness of non-finalized garbage-collected classes and the use of VectorTraits::needsDestruction.
515 // All non-GC managed vectors need a destructor. This destructor will simply call finalize on the actual vector type.
516 template<typename Derived, typename Elements, bool hasInlineCapacity, bool isGarbageCollected>
517 class VectorDestructorBase {
// CRTP: the destructor forwards to Vector::finalize() on the derived type.
519 ~VectorDestructorBase() { static_cast<Derived*>(this)->finalize(); }
522 // Heap-allocated vectors with no inlineCapacity never need a destructor.
523 template<typename Derived, typename Elements>
524 class VectorDestructorBase<Derived, Elements, false, true> { };
526 // Heap-allocator vectors with inlineCapacity need a destructor if the inline elements do.
527 // The use of VectorTraits<Elements>::needsDestruction is delayed until we know that
528 // inlineCapacity is non-zero to allow classes that recursively refer to themselves in vector
529 // members. If inlineCapacity is non-zero doing so would have undefined meaning, so in this
530 // case we can use HeapVectorWithInlineCapacityDestructorBase to define a destructor
531 // depending on the value of VectorTraits<Elements>::needsDestruction.
532 template<typename Derived, bool elementsNeedsDestruction>
533 class HeapVectorWithInlineCapacityDestructorBase;
// Inline elements need destruction: emit a destructor that finalizes.
535 template<typename Derived>
536 class HeapVectorWithInlineCapacityDestructorBase<Derived, true> {
538 ~HeapVectorWithInlineCapacityDestructorBase() { static_cast<Derived*>(this)->finalize(); }
// Inline elements are trivially destructible: stay destructor-free.
541 template<typename Derived>
542 class HeapVectorWithInlineCapacityDestructorBase<Derived, false> { };
// GC-managed vector with inline capacity: delegate the decision to the
// helper above based on the element trait.
544 template<typename Derived, typename Elements>
545 class VectorDestructorBase<Derived, Elements, true, true> : public HeapVectorWithInlineCapacityDestructorBase<Derived, VectorTraits<Elements>::needsDestruction> { };
// Vector: WTF's growable array. Storage management comes from the private
// VectorBuffer base (inline storage for inlineCapacity > 0); whether a
// destructor exists at all is decided by the VectorDestructorBase mixin so
// trivially-destructible GC vectors stay trivially destructible.
547 template<typename T, size_t inlineCapacity = 0, typename Allocator = DefaultAllocator>
548 class Vector : private VectorBuffer<T, inlineCapacity, Allocator>, public VectorDestructorBase<Vector<T, inlineCapacity, Allocator>, T, (inlineCapacity > 0), Allocator::isGarbageCollected> {
549 WTF_USE_ALLOCATOR(Vector, Allocator);
551 typedef VectorBuffer<T, inlineCapacity, Allocator> Base;
552 typedef VectorTypeOperations<T> TypeOperations;
// Standard iterator typedefs; iterators are raw pointers into the buffer.
558 typedef const T* const_iterator;
559 typedef std::reverse_iterator<iterator> reverse_iterator;
560 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
564 // Unused slots are initialized to zero so that the visitor and the
565 // finalizer can visit them safely. canInitializeWithMemset tells us
566 // that the class does not expect matching constructor and
567 // destructor calls as long as the memory is zeroed.
568 COMPILE_ASSERT(!Allocator::isGarbageCollected || !VectorTraits<T>::needsDestruction || VectorTraits<T>::canInitializeWithMemset, ClassHasProblemsWithFinalizersCalledOnClearedMemory);
569 COMPILE_ASSERT(!WTF::IsPolymorphic<T>::value || !VectorTraits<T>::canInitializeWithMemset, CantInitializeWithMemsetIfThereIsAVtable);
// Construct with 'size' default-initialized elements.
573 explicit Vector(size_t size)
576 // Unused slots are initialized to zero so that the visitor and the
577 // finalizer can visit them safely. canInitializeWithMemset tells us
578 // that the class does not expect matching constructor and
579 // destructor calls as long as the memory is zeroed.
580 COMPILE_ASSERT(!Allocator::isGarbageCollected || !VectorTraits<T>::needsDestruction || VectorTraits<T>::canInitializeWithMemset, ClassHasProblemsWithFinalizersCalledOnClearedMemory);
582 TypeOperations::initialize(begin(), end());
585 // Off-GC-heap vectors: Destructor should be called.
586 // On-GC-heap vectors: Destructor should be called for inline buffers
587 // (if any) but destructor shouldn't be called for vector backing since
588 // it is managed by the traced GC heap.
// finalize(): the real teardown invoked by the destructor mixin. The
// fast path bails out when there is provably nothing to destroy.
591 if (!inlineCapacity) {
592 if (LIKELY(!Base::buffer()))
// Destruct elements unless the backing is GC-managed (the GC heap
// finalizes out-of-line backings itself).
595 if (LIKELY(m_size) && !(Allocator::isGarbageCollected && this->hasOutOfLineBuffer())) {
596 TypeOperations::destruct(begin(), end());
597 m_size = 0; // Partial protection against use-after-free.
// Entry point used by the GC to finalize a heap-allocated Vector.
603 void finalizeGarbageCollectedObject()
// Copy construction/assignment, including from a Vector with a different
// inline capacity (declared here, defined out of line below).
608 Vector(const Vector&);
609 template<size_t otherCapacity>
610 explicit Vector(const Vector<T, otherCapacity, Allocator>&);
612 Vector& operator=(const Vector&);
613 template<size_t otherCapacity>
614 Vector& operator=(const Vector<T, otherCapacity, Allocator>&);
// Move operations, only when the toolchain supports rvalue references.
616 #if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
618 Vector& operator=(Vector&&);
621 size_t size() const { return m_size; }
622 size_t capacity() const { return Base::capacity(); }
623 bool isEmpty() const { return !size(); }
// Bounds-checked element access: out-of-range indices crash safely via
// RELEASE_ASSERT (checked in release builds too).
627 RELEASE_ASSERT(i < size());
628 return Base::buffer()[i];
630 const T& at(size_t i) const
632 RELEASE_ASSERT(i < size());
633 return Base::buffer()[i];
// operator[] is also bounds-checked; it simply forwards to at().
636 T& operator[](size_t i) { return at(i); }
637 const T& operator[](size_t i) const { return at(i); }
639 T* data() { return Base::buffer(); }
640 const T* data() const { return Base::buffer(); }
642 iterator begin() { return data(); }
643 iterator end() { return begin() + m_size; }
644 const_iterator begin() const { return data(); }
645 const_iterator end() const { return begin() + m_size; }
647 reverse_iterator rbegin() { return reverse_iterator(end()); }
648 reverse_iterator rend() { return reverse_iterator(begin()); }
649 const_reverse_iterator rbegin() const { return const_reverse_iterator(end()); }
650 const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }
// first()/last() assert non-emptiness via at()'s bounds check.
652 T& first() { return at(0); }
653 const T& first() const { return at(0); }
654 T& last() { return at(size() - 1); }
655 const T& last() const { return at(size() - 1); }
// Linear search helpers; find/reverseFind return kNotFound on miss.
657 template<typename U> bool contains(const U&) const;
658 template<typename U> size_t find(const U&) const;
659 template<typename U> size_t reverseFind(const U&) const;
// Size/capacity management (definitions below).
661 void shrink(size_t size);
662 void grow(size_t size);
663 void resize(size_t size);
664 void reserveCapacity(size_t newCapacity);
665 void reserveInitialCapacity(size_t initialCapacity);
666 void shrinkToFit() { shrinkCapacity(size()); }
// Shrink only when under half full, keeping 25% headroom for growth.
667 void shrinkToReasonableCapacity()
669 if (size() * 2 < capacity())
670 shrinkCapacity(size() + size() / 4 + 1);
673 void clear() { shrinkCapacity(0); }
// Element insertion/removal API (definitions below). U overloads let
// convertible types (e.g. PassRefPtr into a RefPtr vector) append
// without an intermediate conversion.
675 template<typename U> void append(const U*, size_t);
676 template<typename U> void append(const U&);
677 template<typename U> void uncheckedAppend(const U& val);
678 template<typename U, size_t otherCapacity, typename V> void appendVector(const Vector<U, otherCapacity, V>&);
680 template<typename U> void insert(size_t position, const U*, size_t);
681 template<typename U> void insert(size_t position, const U&);
682 template<typename U, size_t c, typename V> void insert(size_t position, const Vector<U, c, V>&);
684 template<typename U> void prepend(const U*, size_t);
685 template<typename U> void prepend(const U&);
686 template<typename U, size_t c, typename V> void prepend(const Vector<U, c, V>&);
688 void remove(size_t position);
689 void remove(size_t position, size_t length);
// Construct with 'size' copies of val.
697 Vector(size_t size, const T& val)
701 TypeOperations::uninitializedFill(begin(), end(), val);
704 void fill(const T&, size_t);
705 void fill(const T& val) { fill(val, size()); }
707 template<typename Iterator> void appendRange(Iterator start, Iterator end);
// Swap storage and size with another vector of identical type.
709 void swap(Vector& other)
711 Base::swapVectorBuffer(other);
712 std::swap(m_size, other.m_size);
// GC tracing hook; only meaningful for garbage-collected allocators.
717 void trace(typename Allocator::Visitor*);
// Private growth helpers (definitions below). The pointer-taking
// expandCapacity overloads re-base a pointer into this vector across
// the reallocation.
720 void expandCapacity(size_t newMinCapacity);
721 const T* expandCapacity(size_t newMinCapacity, const T*);
722 template<typename U> U* expandCapacity(size_t newMinCapacity, U*);
723 void shrinkCapacity(size_t newCapacity);
724 template<typename U> void appendSlowCase(const U&);
728 using Base::capacity;
729 using Base::swapVectorBuffer;
730 using Base::allocateBuffer;
731 using Base::allocationSize;
732 using Base::clearUnusedSlots;
// Copy constructor: allocate to the source's capacity, then copy-construct
// the elements into the fresh storage.
735 template<typename T, size_t inlineCapacity, typename Allocator>
736 Vector<T, inlineCapacity, Allocator>::Vector(const Vector& other)
737 : Base(other.capacity())
739 m_size = other.size();
740 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
// Converting copy constructor from a Vector with a different inline
// capacity; same strategy as above.
743 template<typename T, size_t inlineCapacity, typename Allocator>
744 template<size_t otherCapacity>
745 Vector<T, inlineCapacity, Allocator>::Vector(const Vector<T, otherCapacity, Allocator>& other)
746 : Base(other.capacity())
748 m_size = other.size();
749 TypeOperations::uninitializedCopy(other.begin(), other.end(), begin());
// Copy assignment: self-assignment-safe; shrinks or grows the capacity to
// match, then copies over existing elements and copy-constructs the rest.
752 template<typename T, size_t inlineCapacity, typename Allocator>
753 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, inlineCapacity, Allocator>& other)
755 if (UNLIKELY(&other == this))
// Either destroy our excess tail or make room for other's elements.
758 if (size() > other.size())
759 shrink(other.size());
760 else if (other.size() > capacity()) {
762 reserveCapacity(other.size());
766 // Works around an assert in VS2010. See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
767 #if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
// Assign into already-constructed slots, then copy-construct into the
// uninitialized tail.
772 std::copy(other.begin(), other.begin() + size(), begin());
773 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
774 m_size = other.size();
// Compares two pointers for identity after erasing their types; used below
// to assert that differently-parameterized Vectors are not the same object.
779 inline bool typelessPointersAreEqual(const void* a, const void* b) { return a == b; }
// Cross-inline-capacity copy assignment. Structurally identical to the
// same-capacity operator= above, except self-assignment is impossible (the
// types differ) and is therefore only asserted.
781 template<typename T, size_t inlineCapacity, typename Allocator>
782 template<size_t otherCapacity>
783 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(const Vector<T, otherCapacity, Allocator>& other)
785 // If the inline capacities match, we should call the more specific
786 // template. If the inline capacities don't match, the two objects
787 // shouldn't be allocated the same address.
788 ASSERT(!typelessPointersAreEqual(&other, this));
// Shrink our excess tail or grow capacity to fit the source.
790 if (size() > other.size())
791 shrink(other.size());
792 else if (other.size() > capacity()) {
794 reserveCapacity(other.size());
798 // Works around an assert in VS2010. See https://connect.microsoft.com/VisualStudio/feedback/details/558044/std-copy-should-not-check-dest-when-first-last
799 #if COMPILER(MSVC) && defined(_ITERATOR_DEBUG_LEVEL) && _ITERATOR_DEBUG_LEVEL
// Assign over live slots, copy-construct into the uninitialized tail.
804 std::copy(other.begin(), other.begin() + size(), begin());
805 TypeOperations::uninitializedCopy(other.begin() + size(), other.end(), end());
806 m_size = other.size();
811 #if COMPILER_SUPPORTS(CXX_RVALUE_REFERENCES)
// Move constructor — implemented via swap with the (empty) newly
// constructed vector, per the comment below.
812 template<typename T, size_t inlineCapacity, typename Allocator>
813 Vector<T, inlineCapacity, Allocator>::Vector(Vector<T, inlineCapacity, Allocator>&& other)
816 // It's a little weird to implement a move constructor using swap but this way we
817 // don't have to add a move constructor to VectorBuffer.
// Move assignment; presumably also swap-based — TODO confirm, the body is
// not visible in this chunk.
821 template<typename T, size_t inlineCapacity, typename Allocator>
822 Vector<T, inlineCapacity, Allocator>& Vector<T, inlineCapacity, Allocator>::operator=(Vector<T, inlineCapacity, Allocator>&& other)
// contains(): true iff a linear search locates a value equal to 'value'.
829 template<typename T, size_t inlineCapacity, typename Allocator>
831 bool Vector<T, inlineCapacity, Allocator>::contains(const U& value) const
833 return find(value) != kNotFound;
// find(): index of the first element equal to 'value', or kNotFound.
// Linear scan from the front.
836 template<typename T, size_t inlineCapacity, typename Allocator>
838 size_t Vector<T, inlineCapacity, Allocator>::find(const U& value) const
840 const T* b = begin();
842 for (const T* iter = b; iter < e; ++iter) {
// reverseFind(): index of the last element equal to 'value', or kNotFound.
// Linear scan from the back.
849 template<typename T, size_t inlineCapacity, typename Allocator>
851 size_t Vector<T, inlineCapacity, Allocator>::reverseFind(const U& value) const
853 const T* b = begin();
854 const T* iter = end();
// fill(): resize to newSize and set every element to val. Shrinks first if
// the vector is larger, grows capacity if needed, then overwrites live
// slots with std::fill and fills the uninitialized tail.
863 template<typename T, size_t inlineCapacity, typename Allocator>
864 void Vector<T, inlineCapacity, Allocator>::fill(const T& val, size_t newSize)
866 if (size() > newSize)
868 else if (newSize > capacity()) {
870 reserveCapacity(newSize);
874 std::fill(begin(), end(), val);
875 TypeOperations::uninitializedFill(end(), begin() + newSize, val);
// appendRange(): append every element in the iterator range [start, end).
879 template<typename T, size_t inlineCapacity, typename Allocator>
880 template<typename Iterator>
881 void Vector<T, inlineCapacity, Allocator>::appendRange(Iterator start, Iterator end)
883 for (Iterator it = start; it != end; ++it)
// expandCapacity(): grow to at least newMinCapacity using a geometric
// policy — 2x for inline-capacity vectors, 1.25x (+1) otherwise — with a
// floor of kInitialVectorSize.
887 template<typename T, size_t inlineCapacity, typename Allocator>
888 void Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity)
890 size_t oldCapacity = capacity();
891 size_t expandedCapacity = oldCapacity;
892 // We use a more aggressive expansion strategy for Vectors with inline storage.
893 // This is because they are more likely to be on the stack, so the risk of heap bloat is minimized.
894 // Furthermore, exceeding the inline capacity limit is not supposed to happen in the common case and may indicate a pathological condition or microbenchmark.
895 if (inlineCapacity) {
896 expandedCapacity *= 2;
897 // Check for integer overflow, which could happen in the 32-bit build.
898 RELEASE_ASSERT(expandedCapacity > oldCapacity);
900 // This cannot integer overflow.
901 // On 64-bit, the "expanded" integer is 32-bit, and any encroachment above 2^32 will fail allocation in allocateBuffer().
902 // On 32-bit, there's not enough address space to hold the old and new buffers.
903 // In addition, our underlying allocator is supposed to always fail on > (2^31 - 1) allocations.
904 expandedCapacity += (expandedCapacity / 4) + 1;
906 reserveCapacity(std::max(newMinCapacity, std::max(static_cast<size_t>(kInitialVectorSize), expandedCapacity)));
// Pointer-preserving overload: if ptr points into this vector's storage,
// return the equivalent pointer into the reallocated buffer; callers use
// this so "append an element of myself" stays valid across reallocation.
909 template<typename T, size_t inlineCapacity, typename Allocator>
910 const T* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, const T* ptr)
912 if (ptr < begin() || ptr >= end()) {
913 expandCapacity(newMinCapacity);
// ptr aliases our storage: remember its index and re-derive it after
// the reallocation moves the elements.
916 size_t index = ptr - begin();
917 expandCapacity(newMinCapacity);
918 return begin() + index;
// Generic overload for a U* that cannot alias T storage: no re-basing
// needed, just grow.
921 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
922 inline U* Vector<T, inlineCapacity, Allocator>::expandCapacity(size_t newMinCapacity, U* ptr)
924 expandCapacity(newMinCapacity);
// resize(): set size exactly — destroy the tail when shrinking, or grow
// capacity and default-initialize new elements when growing.
928 template<typename T, size_t inlineCapacity, typename Allocator>
929 inline void Vector<T, inlineCapacity, Allocator>::resize(size_t size)
932 TypeOperations::destruct(begin() + size, end());
934 if (size > capacity())
935 expandCapacity(size);
936 TypeOperations::initialize(end(), begin() + size);
// shrink(): reduce size only (asserted). Destroys the removed elements and
// zeroes the vacated slots when the GC requires it.
942 template<typename T, size_t inlineCapacity, typename Allocator>
943 void Vector<T, inlineCapacity, Allocator>::shrink(size_t size)
945 ASSERT(size <= m_size);
946 TypeOperations::destruct(begin() + size, end());
947 clearUnusedSlots(begin() + size, end());
// grow(): increase size only (asserted), default-initializing the new
// elements. Capacity grows on demand.
951 template<typename T, size_t inlineCapacity, typename Allocator>
952 void Vector<T, inlineCapacity, Allocator>::grow(size_t size)
954 ASSERT(size >= m_size);
955 if (size > capacity())
956 expandCapacity(size);
957 TypeOperations::initialize(end(), begin() + size);
// reserveCapacity(): never shrinks. Allocates a new buffer, moves the
// existing elements across, and frees the old buffer.
961 template<typename T, size_t inlineCapacity, typename Allocator>
962 void Vector<T, inlineCapacity, Allocator>::reserveCapacity(size_t newCapacity)
964 if (UNLIKELY(newCapacity <= capacity()))
966 T* oldBuffer = begin();
968 Base::allocateBuffer(newCapacity);
969 TypeOperations::move(oldBuffer, oldEnd, begin());
970 Base::deallocateBuffer(oldBuffer);
// reserveInitialCapacity(): cheaper variant for a vector that has never
// allocated (asserted still at inline capacity) — no elements to move.
973 template<typename T, size_t inlineCapacity, typename Allocator>
974 inline void Vector<T, inlineCapacity, Allocator>::reserveInitialCapacity(size_t initialCapacity)
977 ASSERT(capacity() == inlineCapacity);
978 if (initialCapacity > inlineCapacity)
979 Base::allocateBuffer(initialCapacity);
// shrinkCapacity(): release unused capacity down to newCapacity (clamped to
// at least size()). Reallocates into a smaller buffer, or resets to the
// inline/empty state when newCapacity is 0.
982 template<typename T, size_t inlineCapacity, typename Allocator>
983 void Vector<T, inlineCapacity, Allocator>::shrinkCapacity(size_t newCapacity)
985 if (newCapacity >= capacity())
988 if (newCapacity < size())
991 T* oldBuffer = begin();
992 if (newCapacity > 0) {
993 // Optimization: if we're downsizing inside the same allocator bucket, we can exit with no additional work.
994 if (Base::allocationSize(capacity()) == Base::allocationSize(newCapacity))
998 Base::allocateBuffer(newCapacity);
// Moving is only needed if allocateBuffer actually changed the buffer
// (it may have fallen back to the same inline storage).
999 if (begin() != oldBuffer)
1000 TypeOperations::move(oldBuffer, oldEnd, begin());
1002 Base::resetBufferPointer();
1005 Base::deallocateBuffer(oldBuffer);
1008 // Templatizing these is better than just letting the conversion happen implicitly,
1009 // because for instance it allows a PassRefPtr to be appended to a RefPtr vector
1010 // without refcount thrash.
// append(data, dataSize): append a range. The pointer-taking
// expandCapacity keeps 'data' valid even when it points into this vector.
1012 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1013 void Vector<T, inlineCapacity, Allocator>::append(const U* data, size_t dataSize)
1015 ASSERT(Allocator::isAllocationAllowed());
1016 size_t newSize = m_size + dataSize;
1017 if (newSize > capacity()) {
1018 data = expandCapacity(newSize, data);
// Guards against m_size + dataSize overflowing size_t.
1021 RELEASE_ASSERT(newSize >= m_size);
1023 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], dest);
// append(val): single-element append. The capacity-available fast path is
// inlined; the reallocation path is pushed out of line (appendSlowCase).
1027 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1028 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::append(const U& val)
1030 ASSERT(Allocator::isAllocationAllowed());
1031 if (LIKELY(size() != capacity())) {
1032 new (NotNull, end()) T(val);
1037 appendSlowCase(val);
// Out-of-line slow path: grow, re-basing &val in case it aliases our own
// storage, then construct the new element.
1040 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1041 NEVER_INLINE void Vector<T, inlineCapacity, Allocator>::appendSlowCase(const U& val)
1043 ASSERT(size() == capacity());
1045 const U* ptr = &val;
1046 ptr = expandCapacity(size() + 1, ptr);
1049 new (NotNull, end()) T(*ptr);
1053 // This version of append saves a branch in the case where you know that the
1054 // vector's capacity is large enough for the append to succeed.
1056 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1057 ALWAYS_INLINE void Vector<T, inlineCapacity, Allocator>::uncheckedAppend(const U& val)
1059 ASSERT(size() < capacity());
1060 const U* ptr = &val;
1061 new (NotNull, end()) T(*ptr);
// appendVector(): append every element of another vector (any compatible
// element type, inline capacity, or allocator).
1065 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t otherCapacity, typename OtherAllocator>
1066 inline void Vector<T, inlineCapacity, Allocator>::appendVector(const Vector<U, otherCapacity, OtherAllocator>& val)
1068 append(val.begin(), val.size());
// insert(position, data, dataSize): insert a range at 'position'
// (release-asserted to be <= size()). Existing elements from 'position'
// onward are shifted up with an overlap-safe move, then the new elements
// are copy-constructed into the gap.
1071 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1072 void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U* data, size_t dataSize)
1074 ASSERT(Allocator::isAllocationAllowed());
1075 RELEASE_ASSERT(position <= size());
1076 size_t newSize = m_size + dataSize;
1077 if (newSize > capacity()) {
// The pointer-taking expandCapacity re-bases 'data' if it aliases us.
1078 data = expandCapacity(newSize, data);
// Guards against m_size + dataSize overflowing size_t.
1081 RELEASE_ASSERT(newSize >= m_size);
1082 T* spot = begin() + position;
1083 TypeOperations::moveOverlapping(spot, end(), spot + dataSize);
1084 VectorCopier<VectorTraits<T>::canCopyWithMemcpy, T>::uninitializedCopy(data, &data[dataSize], spot);
// insert(position, val): single-element variant of the above.
1088 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1089 inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const U& val)
1091 ASSERT(Allocator::isAllocationAllowed());
1092 RELEASE_ASSERT(position <= size());
1093 const U* data = &val;
1094 if (size() == capacity()) {
1095 data = expandCapacity(size() + 1, data);
1098 T* spot = begin() + position;
1099 TypeOperations::moveOverlapping(spot, end(), spot + 1);
1100 new (NotNull, spot) T(*data);
// insert(position, vector): splice in an entire vector.
1104 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename OtherAllocator>
1105 inline void Vector<T, inlineCapacity, Allocator>::insert(size_t position, const Vector<U, c, OtherAllocator>& val)
1107 insert(position, val.begin(), val.size());
// prepend variants: all are insert at position 0.
1110 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1111 void Vector<T, inlineCapacity, Allocator>::prepend(const U* data, size_t dataSize)
1113 insert(0, data, dataSize);
1116 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U>
1117 inline void Vector<T, inlineCapacity, Allocator>::prepend(const U& val)
1122 template<typename T, size_t inlineCapacity, typename Allocator> template<typename U, size_t c, typename V>
1123 inline void Vector<T, inlineCapacity, Allocator>::prepend(const Vector<U, c, V>& val)
1125 insert(0, val.begin(), val.size());
// remove(position): erase one element (release-asserted in range), shift
// the tail down with an overlap-safe move, and zero the vacated slot for
// the GC when required.
1128 template<typename T, size_t inlineCapacity, typename Allocator>
1129 inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position)
1131 RELEASE_ASSERT(position < size());
1132 T* spot = begin() + position;
1134 TypeOperations::moveOverlapping(spot + 1, end(), spot);
1135 clearUnusedSlots(end() - 1, end());
// remove(position, length): erase a contiguous run. Destroys the removed
// elements, shifts the tail down, then zeroes the vacated range.
1139 template<typename T, size_t inlineCapacity, typename Allocator>
1140 inline void Vector<T, inlineCapacity, Allocator>::remove(size_t position, size_t length)
1142 ASSERT_WITH_SECURITY_IMPLICATION(position <= size());
// Also guards against position + length overflowing.
1143 RELEASE_ASSERT(position + length <= size());
1144 T* beginSpot = begin() + position;
1145 T* endSpot = beginSpot + length;
1146 TypeOperations::destruct(beginSpot, endSpot);
1147 TypeOperations::moveOverlapping(endSpot, end(), beginSpot);
1148 clearUnusedSlots(end() - length, end());
// reverse(): in-place reversal by swapping symmetric pairs.
1152 template<typename T, size_t inlineCapacity, typename Allocator>
1153 inline void Vector<T, inlineCapacity, Allocator>::reverse()
1155 for (size_t i = 0; i < m_size / 2; ++i)
1156 std::swap(at(i), at(m_size - 1 - i));
// deleteAllValues(): iterate the vector and delete each element
// (presumably each element is an owning raw pointer — TODO confirm, the
// loop body line is not visible in this chunk).
1159 template<typename T, size_t inlineCapacity, typename Allocator>
1160 void deleteAllValues(const Vector<T, inlineCapacity, Allocator>& collection)
1162 typedef typename Vector<T, inlineCapacity, Allocator>::const_iterator iterator;
1163 iterator end = collection.end();
1164 for (iterator it = collection.begin(); it != end; ++it)
// Free swap() so generic code (and std::swap lookups via ADL) finds the
// efficient member swap.
1168 template<typename T, size_t inlineCapacity, typename Allocator>
1169 inline void swap(Vector<T, inlineCapacity, Allocator>& a, Vector<T, inlineCapacity, Allocator>& b)
// operator== compares sizes first, then delegates to the trait-selected
// element comparison; works across differing inline capacities.
1174 template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
1175 bool operator==(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
1177 if (a.size() != b.size())
1180 return VectorTypeOperations<T>::compare(a.data(), b.data(), a.size());
// operator!= is the negation of operator==.
1183 template<typename T, size_t inlineCapacityA, size_t inlineCapacityB, typename Allocator>
1184 inline bool operator!=(const Vector<T, inlineCapacityA, Allocator>& a, const Vector<T, inlineCapacityB, Allocator>& b)
1189 // This is only called if the allocator is a HeapAllocator. It is used when
1190 // visiting during a tracing GC.
1191 template<typename T, size_t inlineCapacity, typename Allocator>
1192 void Vector<T, inlineCapacity, Allocator>::trace(typename Allocator::Visitor* visitor)
1194 ASSERT(Allocator::isGarbageCollected); // Garbage collector must be enabled.
// Trace each live element when the element type requires tracing.
1195 const T* bufferBegin = buffer();
1196 const T* bufferEnd = buffer() + size();
1197 if (ShouldBeTraced<VectorTraits<T> >::value) {
1198 for (const T* bufferEntry = bufferBegin; bufferEntry != bufferEnd; bufferEntry++)
1199 Allocator::template trace<T, VectorTraits<T> >(visitor, *const_cast<T*>(bufferEntry));
// Keep the out-of-line backing store alive; elements were already traced
// above, so the backing itself is marked without tracing.
1201 if (this->hasOutOfLineBuffer())
1202 Allocator::markNoTracing(visitor, buffer());
// Off-heap (non-GC) Vectors contain no traceable backpointers for the GC
// to follow, so NeedsTracing is false.
1206 template<typename T, size_t N>
1207 struct NeedsTracing<Vector<T, N> > {
1208 static const bool value = false;
1216 #endif // WTF_Vector_h