1 // Protocol Buffers - Google's data interchange format
2 // Copyright 2008 Google Inc. All rights reserved.
3 // https://developers.google.com/protocol-buffers/
5 // Redistribution and use in source and binary forms, with or without
6 // modification, are permitted provided that the following conditions are
9 // * Redistributions of source code must retain the above copyright
10 // notice, this list of conditions and the following disclaimer.
11 // * Redistributions in binary form must reproduce the above
12 // copyright notice, this list of conditions and the following disclaimer
13 // in the documentation and/or other materials provided with the
15 // * Neither the name of Google Inc. nor the names of its
16 // contributors may be used to endorse or promote products derived from
17 // this software without specific prior written permission.
19 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 // Author: kenton@google.com (Kenton Varda)
32 // Based on original Protocol Buffers design by
33 // Sanjay Ghemawat, Jeff Dean, and others.
35 // RepeatedField and RepeatedPtrField are used by generated protocol message
36 // classes to manipulate repeated fields. These classes are very similar to
37 // STL's vector, but include a number of optimizations found to be useful
38 // specifically in the case of Protocol Buffers. RepeatedPtrField is
39 // particularly different from STL vector as it manages ownership of the
40 // pointers that it contains.
42 // Typically, clients should not need to access RepeatedField objects directly,
43 // but should instead use the accessor functions generated automatically by the
46 #ifndef GOOGLE_PROTOBUF_REPEATED_FIELD_H__
47 #define GOOGLE_PROTOBUF_REPEATED_FIELD_H__
51 // This is required for min/max on VS2013 only.
58 #include <type_traits>
59 #include <google/protobuf/stubs/logging.h>
60 #include <google/protobuf/stubs/common.h>
61 #include <google/protobuf/arena.h>
62 #include <google/protobuf/implicit_weak_message.h>
63 #include <google/protobuf/message_lite.h>
64 #include <google/protobuf/port.h>
65 #include <google/protobuf/stubs/casts.h>
66 #include <type_traits>
69 #include <google/protobuf/port_def.inc>
72 #error "You cannot SWIG proto headers"
75 // Forward-declare these so that we can make them friends.
77 namespace google_opensource {
79 } // namespace google_opensource
89 class MergePartialFromCodedStreamHelper;
91 static const int kMinRepeatedFieldAllocationSize = 4;
93 // A utility function for logging that doesn't need any template types.
94 void LogIndexOutOfBounds(int index, int size);
// Computes how many elements a container should reserve before inserting the
// range [begin, end), without consuming the range.
//
// For forward (or stronger) iterators the range can be traversed more than
// once, so the exact count is returned (O(1) for random-access iterators,
// O(n) otherwise).  For pure input iterators the range is single-pass and its
// size cannot be precomputed, so -1 is returned as a "size unknown" sentinel;
// callers must treat a negative result as "do not reserve".
template <typename Iter>
inline int CalculateReserve(Iter begin, Iter end, std::forward_iterator_tag) {
  return static_cast<int>(std::distance(begin, end));
}

template <typename Iter>
inline int CalculateReserve(Iter /*begin*/, Iter /*end*/,
                            std::input_iterator_tag /*unused*/) {
  return -1;  // Single-pass range: size cannot be determined in advance.
}

// Entry point: dispatches on the iterator category via tag dispatch so the
// correct overload above is selected at compile time.
template <typename Iter>
inline int CalculateReserve(Iter begin, Iter end) {
  typedef typename std::iterator_traits<Iter>::iterator_category Category;
  return CalculateReserve(begin, end, Category());
}
112 } // namespace internal
114 // RepeatedField is used to represent repeated fields of a primitive type (in
115 // other words, everything except strings and nested Messages). Most users will
116 // not ever use a RepeatedField directly; they will use the get-by-index,
117 // set-by-index, and add accessors that are generated for all repeated fields.
template <typename Element>
class RepeatedField final {
  // Constructs an empty field whose backing storage will be allocated on
  // |arena| (NULL means heap allocation).
  explicit RepeatedField(Arena* arena);
  RepeatedField(const RepeatedField& other);
  // Constructs from an iterator range; see CalculateReserve() for how the
  // initial capacity is chosen.
  template <typename Iter>
  RepeatedField(Iter begin, const Iter& end);

  RepeatedField& operator=(const RepeatedField& other);

  RepeatedField(RepeatedField&& other) noexcept;
  RepeatedField& operator=(RepeatedField&& other) noexcept;

  // Element access; Get/Mutable bounds-check with GOOGLE_DCHECK in debug
  // builds only.
  const Element& Get(int index) const;
  Element* Mutable(int index);

  // Unchecked convenience operators, forwarding to Get()/Mutable().
  const Element& operator[](int index) const { return Get(index); }
  Element& operator[](int index) { return *Mutable(index); }

  // Bounds-checked accessors (STL vector::at analogue).
  const Element& at(int index) const;
  Element& at(int index);

  void Set(int index, const Element& value);
  void Add(const Element& value);
  // Appends a new element and return a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  // Remove the last element in the array.
  // Extract elements with indices in "[start .. start+num-1]".
  // Copy them into "elements[0 .. num-1]" if "elements" is not NULL.
  // Caution: implementation also moves elements with indices [start+num ..].
  // Calling this routine inside a loop can cause quadratic behavior.
  void ExtractSubrange(int start, int num, Element* elements);

  void MergeFrom(const RepeatedField& other);
  void CopyFrom(const RepeatedField& other);

  // Reserve space to expand the field to at least the given size. If the
  // array is grown, it will always be at least doubled in size.
  void Reserve(int new_size);

  // Resize the RepeatedField to a new, smaller size. This is O(1).
  void Truncate(int new_size);

  // Appends |value| without growing; requires previously reserved capacity.
  void AddAlreadyReserved(const Element& value);
  // Appends a new element and return a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  // Should be called only if Capacity() > Size().
  Element* AddAlreadyReserved();
  Element* AddNAlreadyReserved(int elements);
  int Capacity() const;

  // Like STL resize. Uses value to fill appended elements.
  // Like Truncate() if new_size <= size(), otherwise this is
  // O(new_size - size()).
  void Resize(int new_size, const Element& value);

  // Gets the underlying array. This pointer is possibly invalidated by
  // any add or remove operation.
  Element* mutable_data();
  const Element* data() const;

  // Swap entire contents with "other". If they are separate arenas then, copies
  // data between each other.
  void Swap(RepeatedField* other);

  // Swap entire contents with "other". Should be called only if the caller can
  // guarantee that both repeated fields are on the same arena or are on the
  // heap. Swapping between different arenas is disallowed and caught by a
  // GOOGLE_DCHECK (see API docs for details).
  void UnsafeArenaSwap(RepeatedField* other);

  // Swap two elements.
  void SwapElements(int index1, int index2);

  // STL-like iterator support
  typedef Element* iterator;
  typedef const Element* const_iterator;
  typedef Element value_type;
  typedef value_type& reference;
  typedef const value_type& const_reference;
  typedef value_type* pointer;
  typedef const value_type* const_pointer;
  typedef int size_type;
  typedef ptrdiff_t difference_type;

  const_iterator begin() const;
  const_iterator cbegin() const;
  const_iterator end() const;
  const_iterator cend() const;

  // Reverse iterator support
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
  typedef std::reverse_iterator<iterator> reverse_iterator;
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());

  // Returns the number of bytes used by the repeated field, excluding
  size_t SpaceUsedExcludingSelfLong() const;

  // Narrowing wrapper for callers that want an int; saturates via ToIntSize.
  int SpaceUsedExcludingSelf() const {
    return internal::ToIntSize(SpaceUsedExcludingSelfLong());

  // Removes the element referenced by position.
  // Returns an iterator to the element immediately following the removed
  // Invalidates all iterators at or after the removed element, including end().
  iterator erase(const_iterator position);

  // Removes the elements in the range [first, last).
  // Returns an iterator to the element immediately following the removed range.
  // Invalidates all iterators at or after the removed range, including end().
  iterator erase(const_iterator first, const_iterator last);

  // Get the Arena on which this RepeatedField stores its elements.
  Arena* GetArena() const { return GetArenaNoVirtual(); }

  // For internal use only.
  // This is public due to it being called by generated code.
  inline void InternalSwap(RepeatedField* other);

  // Fields start with zero capacity; the first Add() triggers allocation.
  static const int kInitialSize = 0;
  // A note on the representation here (see also comment below for
  // RepeatedPtrFieldBase's struct Rep):
  // We maintain the same sizeof(RepeatedField) as before we added arena support
  // so that we do not degrade performance by bloating memory usage. Directly
  // adding an arena_ element to RepeatedField is quite costly. By using
  // indirection in this way, we keep the same size when the RepeatedField is
  // empty (common case), and add only an 8-byte header to the elements array
  // when non-empty. We make sure to place the size fields directly in the
  // RepeatedField class to avoid costly cache misses due to the indirection.
  // We can not use sizeof(Rep) - sizeof(Element) due to the trailing padding on
  // the struct. We can not use sizeof(Arena*) as well because there might be
  // a "gap" after the field arena and before the field elements (e.g., when
  // Element is double and pointer is 32bit).
  static const size_t kRepHeaderSize;

  // We reuse the Rep* for an Arena* when total_size == 0, to avoid having to do
  // an allocation in the constructor when we have an Arena.
  Pointer(Arena* a) : arena(a) {}
  Arena* arena;       // When total_size_ == 0.
  Element* elements;  // When total_size_ != 0, this is Rep->elements of Rep.

  // Returns the element array; valid only once storage has been allocated
  // (total_size_ > 0), enforced by the DCHECK.
  Element* elements() const {
    GOOGLE_DCHECK_GT(total_size_, 0);
    return ptr_.elements;

    // Recovers the Rep header address by stepping back from the stored
    // elements pointer by the offset of the elements member.
    GOOGLE_DCHECK_GT(total_size_, 0);
        reinterpret_cast<char*>(ptr_.elements) - offsetof(Rep, elements);
    return reinterpret_cast<Rep*>(addr);

  // Marker consumed by Arena to allow in-arena construction of this type.
  typedef void InternalArenaConstructable_;

  // Move the contents of |from| into |to|, possibly clobbering |from| in the
  // process. For primitive types this is just a memcpy(), but it could be
  // specialized for non-primitive types to, say, swap each element instead.
  void MoveArray(Element* to, Element* from, int size);

  // Copy the elements of |from| into |to|.
  void CopyArray(Element* to, const Element* from, int size);

  // Internal helper expected by Arena methods.
  inline Arena* GetArenaNoVirtual() const {
    return (total_size_ == 0) ? ptr_.arena : rep()->arena;

  // Internal helper to delete all elements and deallocate the storage.
  // If Element has a trivial destructor (for example, if it's a fundamental
  // type, like int32), the loop will be removed by the optimizer.
  void InternalDeallocate(Rep* rep, int size) {
    Element* e = &rep->elements[0];
    Element* limit = &rep->elements[size];
    for (; e < limit; e++) {
    if (rep->arena == NULL) {
      // Prefer sized operator delete when the toolchain supports it; the
      // allocation size is header plus element storage.
#if defined(__GXX_DELETE_WITH_SIZE__) || defined(__cpp_sized_deallocation)
      const size_t bytes = size * sizeof(*e) + kRepHeaderSize;
      ::operator delete(static_cast<void*>(rep), bytes);
      ::operator delete(static_cast<void*>(rep));

  // WireFormatLite reads the raw buffer directly during serialization.
  friend class internal::WireFormatLite;
  const Element* unsafe_data() const;
// Out-of-line definition of kRepHeaderSize: the byte offset at which Rep's
// elements array begins, i.e. the header size including any alignment padding
// between the arena pointer and the (possibly more strictly aligned) Element
// array.  Computed by pretending a Rep lives at address 16 and measuring the
// address of elements[0]; as documented on the declaration, neither
// sizeof(Arena*) nor sizeof(Rep) - sizeof(Element) would give the right
// answer in the presence of padding.
template <typename Element>
const size_t RepeatedField<Element>::kRepHeaderSize =
    reinterpret_cast<size_t>(&reinterpret_cast<Rep*>(16)->elements[0]) - 16;
353 template <typename It>
354 class RepeatedPtrIterator;
355 template <typename It, typename VoidPtr>
356 class RepeatedPtrOverPtrsIterator;
357 } // namespace internal
// This is a helper template to copy an array of elements efficiently when they
// have a trivial copy constructor, and correctly otherwise. This really
// shouldn't be necessary, but our compiler doesn't optimize std::copy very
// well.  The HasTrivialCopy parameter selects (via partial specialization
// elsewhere) a memcpy-based fast path for POD element types.
template <typename Element,
          bool HasTrivialCopy =
              std::is_pod<Element>::value>
struct ElementCopier {
  // Copies array_size elements from |from| to |to|; ranges must not overlap.
  void operator()(Element* to, const Element* from, int array_size);
372 } // namespace internal
376 // type-traits helper for RepeatedPtrFieldBase: we only want to invoke
377 // arena-related "copy if on different arena" behavior if the necessary methods
378 // exist on the contained type. In particular, we rely on MergeFrom() existing
379 // as a general proxy for the fact that a copy will work, and we also provide a
380 // specific override for std::string*.
// Compile-time probe that detects whether T has a usable MergeFrom() member.
// Uses the classic sizeof-based SFINAE trick: Check<U>() overloads taking a
// pointer-to-member-function match only when U::MergeFrom has one of the
// accepted signatures, and the two result typedefs have distinct sizes so the
// chosen overload can be identified with sizeof.
template <typename T>
struct TypeImplementsMergeBehaviorProbeForMergeFrom {
  typedef char HasMerge;    // Returned (by type) when MergeFrom exists.
  typedef long HasNoMerge;  // Returned by the catch-all overload.

  // We accept either of:
  //   - void MergeFrom(const T& other)
  //   - bool MergeFrom(const T& other)
  // We mangle these names a bit to avoid compatibility issues in 'unclean'
  // include environments that may have, e.g., "#define test ..." (yes, this
  template <typename U, typename RetType, RetType (U::*)(const U& arg)>
  template <typename U>
  static HasMerge Check(CheckType<U, void, &U::MergeFrom>*);
  template <typename U>
  static HasMerge Check(CheckType<U, bool, &U::MergeFrom>*);
  template <typename U>
  static HasNoMerge Check(...);

  // Resolves to either std::true_type or std::false_type.
  typedef std::integral_constant<bool,
                                 (sizeof(Check<T>(0)) == sizeof(HasMerge))>
// Primary trait: defers to the MergeFrom() probe above for arbitrary types.
// The unused second parameter leaves room for SFINAE-based specializations.
template <typename T, typename = void>
struct TypeImplementsMergeBehavior
    : TypeImplementsMergeBehaviorProbeForMergeFrom<T> {};

// std::string has no MergeFrom() but is always mergeable (by copy/append),
// so it is special-cased to true.
struct TypeImplementsMergeBehavior<std::string> {
  typedef std::true_type type;

// True when T supports both move construction and move assignment; used by
// type handlers to decide whether Add(T&&) overloads are available.
template <typename T>
    : std::integral_constant<bool, std::is_move_constructible<T>::value &&
                                   std::is_move_assignable<T>::value> {};
423 // This is the common base class for RepeatedPtrFields. It deals only in void*
424 // pointers. Users should not use this interface directly.
426 // The methods of this interface correspond to the methods of RepeatedPtrField,
427 // but may have a template argument called TypeHandler. Its signature is:
428 // class TypeHandler {
430 // typedef MyType Type;
431 // // WeakType is almost always the same as MyType, but we use it in
432 // // ImplicitWeakTypeHandler.
433 // typedef MyType WeakType;
434 // static Type* New();
435 // static WeakType* NewFromPrototype(const WeakType* prototype,
437 // static void Delete(Type*);
438 // static void Clear(Type*);
439 // static void Merge(const Type& from, Type* to);
441 // // Only needs to be implemented if SpaceUsedExcludingSelf() is called.
442 // static int SpaceUsedLong(const Type&);
class PROTOBUF_EXPORT RepeatedPtrFieldBase {
  RepeatedPtrFieldBase();
  explicit RepeatedPtrFieldBase(Arena* arena);
  // Intentionally trivial: derived RepeatedPtrField is responsible for
  // destroying elements (via the templated destroy helper) before this runs.
  ~RepeatedPtrFieldBase() {}

  // Must be called from destructor.
  template <typename TypeHandler>

  // Bounds-checked element accessors.
  template <typename TypeHandler>
  const typename TypeHandler::Type& at(int index) const;
  template <typename TypeHandler>
  typename TypeHandler::Type& at(int index);

  template <typename TypeHandler>
  typename TypeHandler::Type* Mutable(int index);
  template <typename TypeHandler>
  void Delete(int index);
  // Appends a new element, constructed from |prototype| when given.
  template <typename TypeHandler>
  typename TypeHandler::Type* Add(typename TypeHandler::Type* prototype = NULL);

  // The next few methods are public so that they can be called from generated
  // code when implicit weak fields are used, but they should never be called by
  template <typename TypeHandler>
  const typename TypeHandler::WeakType& Get(int index) const;

  // Creates and adds an element using the given prototype, without introducing
  // a link-time dependency on the concrete message type. This method is used to
  // implement implicit weak fields. The prototype may be NULL, in which case an
  // ImplicitWeakMessage will be used as a placeholder.
  MessageLite* AddWeak(const MessageLite* prototype);

  template <typename TypeHandler>

  template <typename TypeHandler>
  void MergeFrom(const RepeatedPtrFieldBase& other);

  inline void InternalSwap(RepeatedPtrFieldBase* other);

      typename TypeHandler,
      typename std::enable_if<TypeHandler::Movable::value>::type* = nullptr>
  void Add(typename TypeHandler::Type&& value);

  template <typename TypeHandler>
  template <typename TypeHandler>
  void CopyFrom(const RepeatedPtrFieldBase& other);

  // Removes |num| pointer slots starting at |start|, shifting later ones down.
  void CloseGap(int start, int num);

  void Reserve(int new_size);

  int Capacity() const;

  // Used for constructing iterators.
  void* const* raw_data() const;
  void** raw_mutable_data() const;

  template <typename TypeHandler>
  typename TypeHandler::Type** mutable_data();
  template <typename TypeHandler>
  const typename TypeHandler::Type* const* data() const;

  template <typename TypeHandler>
  PROTOBUF_ALWAYS_INLINE void Swap(RepeatedPtrFieldBase* other);

  void SwapElements(int index1, int index2);

  template <typename TypeHandler>
  size_t SpaceUsedExcludingSelfLong() const;

  // Advanced memory management --------------------------------------

  // Like Add(), but if there are no cleared objects to use, returns NULL.
  template <typename TypeHandler>
  typename TypeHandler::Type* AddFromCleared();

  // Takes ownership of |value|; dispatches on whether the type is mergeable
  // (see TypeImplementsMergeBehavior) via tag dispatch.
  template <typename TypeHandler>
  void AddAllocated(typename TypeHandler::Type* value) {
    typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
    AddAllocatedInternal<TypeHandler>(value, t);

  template <typename TypeHandler>
  void UnsafeArenaAddAllocated(typename TypeHandler::Type* value);

  // Removes and returns the last element, transferring ownership to the
  // caller; same mergeability tag dispatch as AddAllocated().
  template <typename TypeHandler>
  typename TypeHandler::Type* ReleaseLast() {
    typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
    return ReleaseLastInternal<TypeHandler>(t);

  // Releases last element and returns it, but does not do out-of-arena copy.
  // And just returns the raw pointer to the contained element in the arena.
  template <typename TypeHandler>
  typename TypeHandler::Type* UnsafeArenaReleaseLast();

  int ClearedCount() const;
  template <typename TypeHandler>
  void AddCleared(typename TypeHandler::Type* value);
  template <typename TypeHandler>
  typename TypeHandler::Type* ReleaseCleared();

  // true_type overload: mergeable types may be copied across arenas.
  template <typename TypeHandler>
  void AddAllocatedInternal(typename TypeHandler::Type* value, std::true_type);
  template <typename TypeHandler>
  void AddAllocatedInternal(typename TypeHandler::Type* value, std::false_type);

  // Cold paths kept out of line (PROTOBUF_NOINLINE) so the common
  // same-arena fast path stays small and inlinable.
  template <typename TypeHandler>
  PROTOBUF_NOINLINE void AddAllocatedSlowWithCopy(
      typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena);
  template <typename TypeHandler>
  PROTOBUF_NOINLINE void AddAllocatedSlowWithoutCopy(
      typename TypeHandler::Type* value);

  template <typename TypeHandler>
  typename TypeHandler::Type* ReleaseLastInternal(std::true_type);
  template <typename TypeHandler>
  typename TypeHandler::Type* ReleaseLastInternal(std::false_type);

  template <typename TypeHandler>
  PROTOBUF_NOINLINE void SwapFallback(RepeatedPtrFieldBase* other);

  inline Arena* GetArenaNoVirtual() const { return arena_; }

  // Fields start with zero capacity; first Add() allocates the Rep.
  static const int kInitialSize = 0;
  // A few notes on internal representation:
  // We use an indirected approach, with struct Rep, to keep
  // sizeof(RepeatedPtrFieldBase) equivalent to what it was before arena support
  // was added, namely, 3 8-byte machine words on x86-64. An instance of Rep is
  // allocated only when the repeated field is non-empty, and it is a
  // dynamically-sized struct (the header is directly followed by elements[]).
  // We place arena_ and current_size_ directly in the object to avoid cache
  // misses due to the indirection, because these fields are checked frequently.
  // Placing all fields directly in the RepeatedPtrFieldBase instance costs
  // significant performance for memory-sensitive workloads.
  static const size_t kRepHeaderSize = sizeof(Rep) - sizeof(void*);
  // Contains arena ptr and the elements array. We also keep the invariant that
  // if rep_ is NULL, then arena is NULL.

  // Typed views over the void* element slots.
  template <typename TypeHandler>
  static inline typename TypeHandler::Type* cast(void* element) {
    return reinterpret_cast<typename TypeHandler::Type*>(element);
  template <typename TypeHandler>
  static inline const typename TypeHandler::Type* cast(const void* element) {
    return reinterpret_cast<const typename TypeHandler::Type*>(element);

  // Non-templated inner function to avoid code duplication. Takes a function
  // pointer to the type-specific (templated) inner allocate/merge loop.
  void MergeFromInternal(const RepeatedPtrFieldBase& other,
                         void (RepeatedPtrFieldBase::*inner_loop)(void**,
  template <typename TypeHandler>
  void MergeFromInnerLoop(void** our_elems, void** other_elems, int length,
                          int already_allocated);

  // Internal helper: extend array space if necessary to contain |extend_amount|
  // more elements, and return a pointer to the element immediately following
  // the old list of elements. This interface factors out common behavior from
  // Reserve() and MergeFrom() to reduce code size. |extend_amount| must be > 0.
  void** InternalExtend(int extend_amount);

  // The reflection implementation needs to call protected methods directly,
  // reinterpreting pointers as being to Message instead of a specific Message
  friend class GeneratedMessageReflection;

  // ExtensionSet stores repeated message extensions as
  // RepeatedPtrField<MessageLite>, but non-lite ExtensionSets need to implement
  // SpaceUsedLong(), and thus need to call SpaceUsedExcludingSelfLong()
  // reinterpreting MessageLite as Message. ExtensionSet also needs to make use
  // of AddFromCleared(), which is not part of the public interface.
  friend class ExtensionSet;

  // The MapFieldBase implementation needs to call protected methods directly,
  // reinterpreting pointers as being to Message instead of a specific Message
  friend class MapFieldBase;

  // The table-driven MergePartialFromCodedStream implementation needs to
  // operate on RepeatedPtrField<MessageLite>.
  friend class MergePartialFromCodedStreamHelper;

  // To parse directly into a proto2 generated class, the upb class GMR_Handlers
  // needs to be able to modify a RepeatedPtrFieldBase directly.
  friend class upb::google_opensource::GMR_Handlers;

  friend class AccessorHelper;

  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedPtrFieldBase);
// Default TypeHandler used by RepeatedPtrField<T> for message types: maps the
// handler interface documented above RepeatedPtrFieldBase onto arena-aware
// construction, Clear(), Merge() and space accounting for GenericType.
template <typename GenericType>
class GenericTypeHandler {
  typedef GenericType Type;
  typedef GenericType WeakType;
  using Movable = IsMovable<GenericType>;

  static inline GenericType* New(Arena* arena) {
    return Arena::CreateMaybeMessage<Type>(arena);
  // Move-constructs a new element on the arena from |value|.
  static inline GenericType* New(Arena* arena, GenericType&& value) {
    return Arena::Create<GenericType>(arena, std::move(value));
  static inline GenericType* NewFromPrototype(const GenericType* prototype,
                                              Arena* arena = NULL);
  static inline void Delete(GenericType* value, Arena* arena) {

  static inline Arena* GetArena(GenericType* value) {
    return Arena::GetArena<Type>(value);
  static inline void* GetMaybeArenaPointer(GenericType* value) {
    return Arena::GetArena<Type>(value);

  static inline void Clear(GenericType* value) { value->Clear(); }

  // Defined out of line below; for MessageLite this needs virtual dispatch.
  static void Merge(const GenericType& from, GenericType* to);
  static inline size_t SpaceUsedLong(const GenericType& value) {
    return value.SpaceUsedLong();
// Default NewFromPrototype ignores the prototype: for concrete generated
// types the template parameter already identifies the type to construct.
template <typename GenericType>
GenericType* GenericTypeHandler<GenericType>::NewFromPrototype(
    const GenericType* /* prototype */, Arena* arena) {

template <typename GenericType>
void GenericTypeHandler<GenericType>::Merge(const GenericType& from,

// NewFromPrototype() and Merge() are not defined inline here, as we will need
// to do a virtual function dispatch anyways to go from Message* to call
// them (specializations for MessageLite are declared here, defined in a .cc).
MessageLite* GenericTypeHandler<MessageLite>::NewFromPrototype(
    const MessageLite* prototype, Arena* arena);

inline Arena* GenericTypeHandler<MessageLite>::GetArena(MessageLite* value) {
  return value->GetArena();

inline void* GenericTypeHandler<MessageLite>::GetMaybeArenaPointer(
    MessageLite* value) {
  return value->GetMaybeArenaPointer();

void GenericTypeHandler<MessageLite>::Merge(const MessageLite& from,

// std::string has clear()/assign() instead of Clear()/MergeFrom(), so the
// handler operations are specialized for it.
inline void GenericTypeHandler<std::string>::Clear(std::string* value) {

void GenericTypeHandler<std::string>::Merge(const std::string& from,
732 // Declarations of the specialization as we cannot define them here, as the
733 // header that defines ProtocolMessage depends on types defined in this header.
734 #define DECLARE_SPECIALIZATIONS_FOR_BASE_PROTO_TYPES(TypeName) \
736 PROTOBUF_EXPORT TypeName* GenericTypeHandler<TypeName>::NewFromPrototype( \
737 const TypeName* prototype, Arena* arena); \
739 PROTOBUF_EXPORT Arena* GenericTypeHandler<TypeName>::GetArena( \
742 PROTOBUF_EXPORT void* GenericTypeHandler<TypeName>::GetMaybeArenaPointer( \
745 // Message specialization bodies defined in message.cc. This split is necessary
746 // to allow proto2-lite (which includes this header) to be independent of
748 DECLARE_SPECIALIZATIONS_FOR_BASE_PROTO_TYPES(Message)
751 #undef DECLARE_SPECIALIZATIONS_FOR_BASE_PROTO_TYPES
// TypeHandler used by RepeatedPtrField<std::string>: same interface as
// GenericTypeHandler but implemented directly in terms of std::string
// operations (clear/assign) rather than proto Clear()/MergeFrom().
class StringTypeHandler {
  typedef std::string Type;
  typedef std::string WeakType;
  using Movable = IsMovable<Type>;

  static inline std::string* New(Arena* arena) {
    return Arena::Create<std::string>(arena);
  // Move-constructs a new string on the arena from |value|.
  static inline std::string* New(Arena* arena, std::string&& value) {
    return Arena::Create<std::string>(arena, std::move(value));
  static inline std::string* NewFromPrototype(const std::string*,

  // Strings never report an owning arena to callers.
  static inline Arena* GetArena(std::string*) { return NULL; }
  static inline void* GetMaybeArenaPointer(std::string* /* value */) {

  static inline void Delete(std::string* value, Arena* arena) {

  static inline void Clear(std::string* value) { value->clear(); }
  static inline void Merge(const std::string& from, std::string* to) {

  // sizeof(value) accounts for the string object itself; the helper adds the
  // heap buffer, excluding the object footprint.
  static size_t SpaceUsedLong(const std::string& value) {
    return sizeof(value) + StringSpaceUsedExcludingSelfLong(value);
787 } // namespace internal
789 // RepeatedPtrField is like RepeatedField, but used for repeated strings or
791 template <typename Element>
792 class RepeatedPtrField final : private internal::RepeatedPtrFieldBase {
795 explicit RepeatedPtrField(Arena* arena);
797 RepeatedPtrField(const RepeatedPtrField& other);
798 template <typename Iter>
799 RepeatedPtrField(Iter begin, const Iter& end);
802 RepeatedPtrField& operator=(const RepeatedPtrField& other);
804 RepeatedPtrField(RepeatedPtrField&& other) noexcept;
805 RepeatedPtrField& operator=(RepeatedPtrField&& other) noexcept;
810 const Element& Get(int index) const;
811 Element* Mutable(int index);
813 void Add(Element&& value);
815 const Element& operator[](int index) const { return Get(index); }
816 Element& operator[](int index) { return *Mutable(index); }
818 const Element& at(int index) const;
819 Element& at(int index);
821 // Remove the last element in the array.
822 // Ownership of the element is retained by the array.
825 // Delete elements with indices in the range [start .. start+num-1].
826 // Caution: implementation moves all elements with indices [start+num .. ].
827 // Calling this routine inside a loop can cause quadratic behavior.
828 void DeleteSubrange(int start, int num);
831 void MergeFrom(const RepeatedPtrField& other);
832 void CopyFrom(const RepeatedPtrField& other);
834 // Reserve space to expand the field to at least the given size. This only
835 // resizes the pointer array; it doesn't allocate any objects. If the
836 // array is grown, it will always be at least doubled in size.
837 void Reserve(int new_size);
839 int Capacity() const;
841 // Gets the underlying array. This pointer is possibly invalidated by
842 // any add or remove operation.
843 Element** mutable_data();
844 const Element* const* data() const;
846 // Swap entire contents with "other". If they are on separate arenas, then
848 void Swap(RepeatedPtrField* other);
850 // Swap entire contents with "other". Caller should guarantee that either both
851 // fields are on the same arena or both are on the heap. Swapping between
852 // different arenas with this function is disallowed and is caught via
854 void UnsafeArenaSwap(RepeatedPtrField* other);
856 // Swap two elements.
857 void SwapElements(int index1, int index2);
859 // STL-like iterator support
860 typedef internal::RepeatedPtrIterator<Element> iterator;
861 typedef internal::RepeatedPtrIterator<const Element> const_iterator;
862 typedef Element value_type;
863 typedef value_type& reference;
864 typedef const value_type& const_reference;
865 typedef value_type* pointer;
866 typedef const value_type* const_pointer;
867 typedef int size_type;
868 typedef ptrdiff_t difference_type;
871 const_iterator begin() const;
872 const_iterator cbegin() const;
874 const_iterator end() const;
875 const_iterator cend() const;
877 // Reverse iterator support
878 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
879 typedef std::reverse_iterator<iterator> reverse_iterator;
880 reverse_iterator rbegin() { return reverse_iterator(end()); }
881 const_reverse_iterator rbegin() const {
882 return const_reverse_iterator(end());
884 reverse_iterator rend() { return reverse_iterator(begin()); }
885 const_reverse_iterator rend() const {
886 return const_reverse_iterator(begin());
889 // Custom STL-like iterator that iterates over and returns the underlying
890 // pointers to Element rather than Element itself.
891 typedef internal::RepeatedPtrOverPtrsIterator<Element*, void*>
893 typedef internal::RepeatedPtrOverPtrsIterator<const Element* const,
895 const_pointer_iterator;
896 pointer_iterator pointer_begin();
897 const_pointer_iterator pointer_begin() const;
898 pointer_iterator pointer_end();
899 const_pointer_iterator pointer_end() const;
901 // Returns (an estimate of) the number of bytes used by the repeated field,
902 // excluding sizeof(*this).
903 size_t SpaceUsedExcludingSelfLong() const;
905 int SpaceUsedExcludingSelf() const {
906 return internal::ToIntSize(SpaceUsedExcludingSelfLong());
909 // Advanced memory management --------------------------------------
910 // When hardcore memory management becomes necessary -- as it sometimes
911 // does here at Google -- the following methods may be useful.
913 // Add an already-allocated object, passing ownership to the
916 // Note that some special behavior occurs with respect to arenas:
918 // (i) if this field holds submessages, the new submessage will be copied if
919 // the original is in an arena and this RepeatedPtrField is either in a
920 // different arena, or on the heap.
921 // (ii) if this field holds strings, the passed-in string *must* be
922 // heap-allocated, not arena-allocated. There is no way to dynamically check
923 // this at runtime, so User Beware.
924 void AddAllocated(Element* value);
926 // Remove the last element and return it, passing ownership to the caller.
927 // Requires: size() > 0
929 // If this RepeatedPtrField is on an arena, an object copy is required to pass
930 // ownership back to the user (for compatible semantics). Use
931 // UnsafeArenaReleaseLast() if this behavior is undesired.
932 Element* ReleaseLast();
934 // Add an already-allocated object, skipping arena-ownership checks. The user
935 // must guarantee that the given object is in the same arena as this
937 // It is also useful in legacy code that uses temporary ownership to avoid
939 // RepeatedPtrField<T> temp_field;
940 // temp_field.AddAllocated(new T);
941 // ... // Do something with temp_field
942 // temp_field.ExtractSubrange(0, temp_field.size(), nullptr);
943 // If you put temp_field on the arena this fails, because the ownership
944 // transfers to the arena at the "AddAllocated" call and is not released
945 // anymore causing a double delete. UnsafeArenaAddAllocated prevents this.
946 void UnsafeArenaAddAllocated(Element* value);
948 // Remove the last element and return it. Works only when operating on an
949 // arena. The returned pointer is to the original object in the arena, hence
950 // has the arena's lifetime.
951 // Requires: current_size_ > 0
952 Element* UnsafeArenaReleaseLast();
954 // Extract elements with indices in the range "[start .. start+num-1]".
955 // The caller assumes ownership of the extracted elements and is responsible
956 // for deleting them when they are no longer needed.
957 // If "elements" is non-NULL, then pointers to the extracted elements
958 // are stored in "elements[0 .. num-1]" for the convenience of the caller.
959 // If "elements" is NULL, then the caller must use some other mechanism
960 // to perform any further operations (like deletion) on these elements.
961 // Caution: implementation also moves elements with indices [start+num ..].
962 // Calling this routine inside a loop can cause quadratic behavior.
964 // Memory copying behavior is identical to ReleaseLast(), described above: if
965 // this RepeatedPtrField is on an arena, an object copy is performed for each
966 // returned element, so that all returned element pointers are to
967 // heap-allocated copies. If this copy is not desired, the user should call
968 // UnsafeArenaExtractSubrange().
969 void ExtractSubrange(int start, int num, Element** elements);
971 // Identical to ExtractSubrange() described above, except that when this
972 // repeated field is on an arena, no object copies are performed. Instead, the
973 // raw object pointers are returned. Thus, if on an arena, the returned
974 // objects must not be freed, because they will not be heap-allocated objects.
975 void UnsafeArenaExtractSubrange(int start, int num, Element** elements);
977 // When elements are removed by calls to RemoveLast() or Clear(), they
978 // are not actually freed. Instead, they are cleared and kept so that
979 // they can be reused later. This can save lots of CPU time when
980 // repeatedly reusing a protocol message for similar purposes.
982 // Hardcore programs may choose to manipulate these cleared objects
983 // to better optimize memory management using the following routines.
985 // Get the number of cleared objects that are currently being kept
987 int ClearedCount() const;
988 // Add an element to the pool of cleared objects, passing ownership to
989 // the RepeatedPtrField. The element must be cleared prior to calling
992 // This method cannot be called when the repeated field is on an arena or when
993 // |value| is; both cases will trigger a GOOGLE_DCHECK-failure.
994 void AddCleared(Element* value);
995 // Remove a single element from the cleared pool and return it, passing
996 // ownership to the caller. The element is guaranteed to be cleared.
997 // Requires: ClearedCount() > 0
1000 // This method cannot be called when the repeated field is on an arena; doing
1001 // so will trigger a GOOGLE_DCHECK-failure.
1002 Element* ReleaseCleared();
1004 // Removes the element referenced by position.
1006 // Returns an iterator to the element immediately following the removed
1009 // Invalidates all iterators at or after the removed element, including end().
1010 iterator erase(const_iterator position);
1012 // Removes the elements in the range [first, last).
1014 // Returns an iterator to the element immediately following the removed range.
1016 // Invalidates all iterators at or after the removed range, including end().
1017 iterator erase(const_iterator first, const_iterator last);
1019 // Gets the arena on which this RepeatedPtrField stores its elements.
// Thin forwarder to the non-virtual internal accessor; returns NULL when the
// field is heap-allocated.
1020 Arena* GetArena() const { return GetArenaNoVirtual(); }
1022 // For internal use only.
1024 // This is public due to it being called by generated code.
1025 using RepeatedPtrFieldBase::InternalSwap;
1028 // Note: RepeatedPtrField SHOULD NOT be subclassed by users.
1031 // Internal arena accessor expected by helpers in Arena.
1032 inline Arena* GetArenaNoVirtual() const;
1034 // Implementations for ExtractSubrange(). The copying behavior must be
1035 // included only if the type supports the necessary operations (e.g.,
1036 // MergeFrom()), so we must resolve this at compile time. ExtractSubrange()
1037 // uses SFINAE to choose one of the below implementations.
1038 void ExtractSubrangeInternal(int start, int num, Element** elements,
1040 void ExtractSubrangeInternal(int start, int num, Element** elements,
1044 friend class MessageLite;
1046 typedef void InternalArenaConstructable_;
1050 // implementation ====================================================
// Default constructor: empty field, no storage allocated, heap-backed.
1052 template <typename Element>
1053 inline RepeatedField<Element>::RepeatedField()
1054 : current_size_(0), total_size_(0), ptr_(NULL) {}
// Arena constructor. While total_size_ == 0 the ptr_ union stores the Arena*
// instead of an element pointer (see AddNAlreadyReserved/Reserve, which read
// ptr_.arena when total_size_ == 0).
1056 template <typename Element>
1057 inline RepeatedField<Element>::RepeatedField(Arena* arena)
1058 : current_size_(0), total_size_(0), ptr_(arena) {}
// Copy constructor: reserves exactly other.size() and bulk-copies via
// CopyArray(). Always heap-backed (ptr_ starts NULL).
1060 template <typename Element>
1061 inline RepeatedField<Element>::RepeatedField(const RepeatedField& other)
1062 : current_size_(0), total_size_(0), ptr_(NULL) {
1063 if (other.current_size_ != 0) {
1064 Reserve(other.size());
1065 AddNAlreadyReserved(other.size());
1066 CopyArray(Mutable(0), &other.Get(0), other.size());
// Iterator-range constructor. CalculateReserve() returns -1 for iterators
// whose distance cannot be precomputed; in that case the element-by-element
// fallback loop below is used instead of the bulk std::copy path.
// NOTE(review): this excerpt elides original lines 1076-1080 and 1089-1095
// (the Reserve()/Add() calls and closing braces of the two branches).
1070 template <typename Element>
1071 template <typename Iter>
1072 RepeatedField<Element>::RepeatedField(Iter begin, const Iter& end)
1073 : current_size_(0), total_size_(0), ptr_(NULL) {
1074 int reserve = internal::CalculateReserve(begin, end);
1075 if (reserve != -1) {
1081 // TODO(ckennelly): The compiler loses track of the buffer freshly
1082 // allocated by Reserve() by the time we call elements, so it cannot
1083 // guarantee that elements does not alias [begin(), end()).
1085 // If restrict is available, annotating the pointer obtained from elements()
1086 // causes this to lower to memcpy instead of memmove.
1087 std::copy(begin, end, elements());
1088 current_size_ = reserve;
1090 for (; begin != end; ++begin) {
// Destructor: frees the Rep only if storage was ever allocated
// (total_size_ > 0); arena-backed storage is reclaimed by the arena.
1096 template <typename Element>
1097 RepeatedField<Element>::~RepeatedField() {
1098 if (total_size_ > 0) {
1099 InternalDeallocate(rep(), total_size_);
// Copy assignment: delegates to CopyFrom() with a self-assignment guard.
1103 template <typename Element>
1104 inline RepeatedField<Element>& RepeatedField<Element>::operator=(
1105 const RepeatedField& other) {
1106 if (this != &other) CopyFrom(other);
// Move constructor: steals other's storage via InternalSwap() unless other
// lives on an arena, in which case contents must be copied.
1110 template <typename Element>
1111 inline RepeatedField<Element>::RepeatedField(RepeatedField&& other) noexcept
1113 // We don't just call Swap(&other) here because it would perform 3 copies if
1114 // other is on an arena. This field can't be on an arena because arena
1115 // construction always uses the Arena* accepting constructor.
1116 if (other.GetArenaNoVirtual()) {
1119 InternalSwap(&other);
// Move assignment: swap only when both sides share the same arena (or both
// are heap-backed); otherwise a copy is required for correct ownership.
1123 template <typename Element>
1124 inline RepeatedField<Element>& RepeatedField<Element>::operator=(
1125 RepeatedField&& other) noexcept {
1126 // We don't just call Swap(&other) here because it would perform 3 copies if
1127 // the two fields are on different arenas.
1128 if (this != &other) {
1129 if (this->GetArenaNoVirtual() != other.GetArenaNoVirtual()) {
1132 InternalSwap(&other);
// True when no elements are currently in use.
1138 template <typename Element>
1139 inline bool RepeatedField<Element>::empty() const {
1140 return current_size_ == 0;
// Number of elements currently in use (not the allocated capacity).
1143 template <typename Element>
1144 inline int RepeatedField<Element>::size() const {
1145 return current_size_;
// Allocated capacity; body elided in this excerpt (original line 1150).
1148 template <typename Element>
1149 inline int RepeatedField<Element>::Capacity() const {
// Appends a copy of value; caller must have Reserve()d space beforehand
// (DCHECK enforces current_size_ < total_size_).
1153 template <typename Element>
1154 inline void RepeatedField<Element>::AddAlreadyReserved(const Element& value) {
1155 GOOGLE_DCHECK_LT(current_size_, total_size_);
1156 elements()[current_size_++] = value;
// Appends a default slot and returns a mutable pointer to it; space must
// already be reserved.
1159 template <typename Element>
1160 inline Element* RepeatedField<Element>::AddAlreadyReserved() {
1161 GOOGLE_DCHECK_LT(current_size_, total_size_);
1162 return &elements()[current_size_++];
// Appends n pre-reserved slots and returns a pointer to the first of them.
1165 template <typename Element>
1166 inline Element* RepeatedField<Element>::AddNAlreadyReserved(int n) {
1167 GOOGLE_DCHECK_GE(total_size_ - current_size_, n)
1168 << total_size_ << ", " << current_size_;
1169 // Warning: sometimes people call this when n==0 and total_size_==0. This
1170 // forces us to add this branch, to avoid reading the non-active union member
1171 // (which is UB). Luckily the compiler is smart enough to optimize the branch
// When total_size_ == 0 the active union member is ptr_.arena, so it is
// reinterpreted here only to produce a (never dereferenced) placeholder.
1174 total_size_ == 0 ? reinterpret_cast<Element*>(ptr_.arena) : ptr_.elements;
1175 ret += current_size_;
// Resizes to new_size: grows by filling new slots with value (reserve line
// elided in this excerpt, original 1184), or shrinks by lowering
// current_size_ without destroying elements.
1180 template <typename Element>
1181 inline void RepeatedField<Element>::Resize(int new_size, const Element& value) {
1182 GOOGLE_DCHECK_GE(new_size, 0);
1183 if (new_size > current_size_) {
1185 std::fill(&elements()[current_size_], &elements()[new_size], value);
1187 current_size_ = new_size;
// Bounds-checked (debug only) read access.
1190 template <typename Element>
1191 inline const Element& RepeatedField<Element>::Get(int index) const {
1192 GOOGLE_DCHECK_GE(index, 0);
1193 GOOGLE_DCHECK_LT(index, current_size_);
1194 return elements()[index];
// at(): like Get() but bounds-checked in all builds (GOOGLE_CHECK, not DCHECK).
1197 template <typename Element>
1198 inline const Element& RepeatedField<Element>::at(int index) const {
1199 GOOGLE_CHECK_GE(index, 0);
1200 GOOGLE_CHECK_LT(index, current_size_);
1201 return elements()[index];
// Mutable overload of at(), also always bounds-checked.
1204 template <typename Element>
1205 inline Element& RepeatedField<Element>::at(int index) {
1206 GOOGLE_CHECK_GE(index, 0);
1207 GOOGLE_CHECK_LT(index, current_size_);
1208 return elements()[index];
// Returns a mutable pointer to the element at index (debug-checked).
1211 template <typename Element>
1212 inline Element* RepeatedField<Element>::Mutable(int index) {
1213 GOOGLE_DCHECK_GE(index, 0);
1214 GOOGLE_DCHECK_LT(index, current_size_);
1215 return &elements()[index];
// Overwrites the element at index with value (debug-checked).
1218 template <typename Element>
1219 inline void RepeatedField<Element>::Set(int index, const Element& value) {
1220 GOOGLE_DCHECK_GE(index, 0);
1221 GOOGLE_DCHECK_LT(index, current_size_);
1222 elements()[index] = value;
// Appends a copy of value, growing capacity on demand (Reserve doubles, see
// Reserve() below).
1225 template <typename Element>
1226 inline void RepeatedField<Element>::Add(const Element& value) {
1227 if (current_size_ == total_size_) Reserve(total_size_ + 1);
1228 elements()[current_size_++] = value;
// Appends a default slot and returns a mutable pointer, growing on demand.
1231 template <typename Element>
1232 inline Element* RepeatedField<Element>::Add() {
1233 if (current_size_ == total_size_) Reserve(total_size_ + 1);
1234 return &elements()[current_size_++];
// Drops the last element (body statement elided in this excerpt, original
// line 1240). Requires size() > 0.
1237 template <typename Element>
1238 inline void RepeatedField<Element>::RemoveLast() {
1239 GOOGLE_DCHECK_GT(current_size_, 0);
// Removes [start, start+num) ; optionally copies removed values into
// `elements`, then slides the tail left and truncates. O(size - start),
// so calling in a loop is quadratic.
// NOTE(review): this excerpt elides original line 1256 (the `if (num > 0)`
// guard around the slide/truncate) and the closing braces.
1243 template <typename Element>
1244 void RepeatedField<Element>::ExtractSubrange(int start, int num,
1245 Element* elements) {
1246 GOOGLE_DCHECK_GE(start, 0);
1247 GOOGLE_DCHECK_GE(num, 0);
1248 GOOGLE_DCHECK_LE(start + num, this->current_size_);
1250 // Save the values of the removed elements if requested.
1251 if (elements != NULL) {
1252 for (int i = 0; i < num; ++i) elements[i] = this->Get(i + start);
1255 // Slide remaining elements down to fill the gap.
1257 for (int i = start + num; i < this->current_size_; ++i)
1258 this->Set(i - num, this->Get(i));
1259 this->Truncate(this->current_size_ - num);
// Clear(): body elided in this excerpt (original lines 1265-1266); resets the
// in-use count without releasing capacity.
1263 template <typename Element>
1264 inline void RepeatedField<Element>::Clear() {
// Appends a copy of every element of other; reserves once up front, then
// bulk-copies into the freshly added slots.
1268 template <typename Element>
1269 inline void RepeatedField<Element>::MergeFrom(const RepeatedField& other) {
1270 GOOGLE_DCHECK_NE(&other, this);
1271 if (other.current_size_ != 0) {
1272 int existing_size = size();
1273 Reserve(existing_size + other.size());
1274 AddNAlreadyReserved(other.size());
1275 CopyArray(Mutable(existing_size), &other.Get(0), other.size());
// CopyFrom = Clear + MergeFrom (clear/merge lines elided in this excerpt,
// original lines 1282-1283); self-copy is a no-op.
1279 template <typename Element>
1280 inline void RepeatedField<Element>::CopyFrom(const RepeatedField& other) {
1281 if (&other == this) return;
// Single-element erase, expressed via the range overload.
1286 template <typename Element>
1287 inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
1288 const_iterator position) {
1289 return erase(position, position + 1);
// Range erase: copies the tail [last, cend()) over the erased span, then
// truncates. Returns an iterator to the element after the removed range.
1292 template <typename Element>
1293 inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
1294 const_iterator first, const_iterator last) {
1295 size_type first_offset = first - cbegin();
1296 if (first != last) {
1297 Truncate(std::copy(last, cend(), begin() + first_offset) - cbegin());
1299 return begin() + first_offset;
// Raw mutable pointer to the element array, or NULL before any allocation
// (ptr_.elements is only valid once total_size_ > 0).
1302 template <typename Element>
1303 inline Element* RepeatedField<Element>::mutable_data() {
1304 return total_size_ > 0 ? elements() : NULL;
// Const raw pointer with the same NULL-before-allocation behavior.
1307 template <typename Element>
1308 inline const Element* RepeatedField<Element>::data() const {
1309 return total_size_ > 0 ? elements() : NULL;
// unsafe_data(): body elided in this excerpt (original lines 1314-1315);
// "unsafe" because it skips the total_size_ > 0 guard above.
1312 template <typename Element>
1313 inline const Element* RepeatedField<Element>::unsafe_data() const {
// O(1) member-wise swap; only valid between fields on the same arena
// (DCHECK-enforced), since ownership of the buffers is exchanged raw.
1317 template <typename Element>
1318 inline void RepeatedField<Element>::InternalSwap(RepeatedField* other) {
1319 GOOGLE_DCHECK(this != other);
1320 GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
1322 std::swap(ptr_, other->ptr_);
1323 std::swap(current_size_, other->current_size_);
1324 std::swap(total_size_, other->total_size_);
// Full Swap(): O(1) pointer swap when both fields share an arena; otherwise
// falls back to copying through a temporary allocated on other's arena.
// NOTE(review): this excerpt elides original line 1335 (the CopyFrom into
// *this) and closing braces.
1327 template <typename Element>
1328 void RepeatedField<Element>::Swap(RepeatedField* other) {
1329 if (this == other) return;
1330 if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
1331 InternalSwap(other);
1333 RepeatedField<Element> temp(other->GetArenaNoVirtual());
1334 temp.MergeFrom(*this);
1336 other->UnsafeArenaSwap(&temp);
// Pointer swap with no arena compatibility copying; caller must know the
// arenas match (InternalSwap DCHECKs this).
1340 template <typename Element>
1341 void RepeatedField<Element>::UnsafeArenaSwap(RepeatedField* other) {
1342 if (this == other) return;
1343 InternalSwap(other);
// Swaps two elements in place using ADL-found swap with std::swap fallback.
1346 template <typename Element>
1347 void RepeatedField<Element>::SwapElements(int index1, int index2) {
1348 using std::swap; // enable ADL with fallback
1349 swap(elements()[index1], elements()[index2]);
// Iterator accessors. All return NULL when no storage has been allocated
// (total_size_ == 0), so begin() == end() for an empty, never-reserved field.
1352 template <typename Element>
1353 inline typename RepeatedField<Element>::iterator
1354 RepeatedField<Element>::begin() {
1355 return total_size_ > 0 ? elements() : NULL;
1357 template <typename Element>
1358 inline typename RepeatedField<Element>::const_iterator
1359 RepeatedField<Element>::begin() const {
1360 return total_size_ > 0 ? elements() : NULL;
1362 template <typename Element>
1363 inline typename RepeatedField<Element>::const_iterator
1364 RepeatedField<Element>::cbegin() const {
1365 return total_size_ > 0 ? elements() : NULL;
1367 template <typename Element>
1368 inline typename RepeatedField<Element>::iterator RepeatedField<Element>::end() {
1369 return total_size_ > 0 ? elements() + current_size_ : NULL;
1371 template <typename Element>
1372 inline typename RepeatedField<Element>::const_iterator
1373 RepeatedField<Element>::end() const {
1374 return total_size_ > 0 ? elements() + current_size_ : NULL;
1376 template <typename Element>
1377 inline typename RepeatedField<Element>::const_iterator
1378 RepeatedField<Element>::cend() const {
1379 return total_size_ > 0 ? elements() + current_size_ : NULL;
// Bytes used by the backing buffer (capacity, not size) plus the Rep header;
// zero if nothing was ever allocated.
1382 template <typename Element>
1383 inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const {
1384 return total_size_ > 0 ? (total_size_ * sizeof(Element) + kRepHeaderSize) : 0;
1387 // Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
1388 // amount of code bloat.
// Grows capacity to at least new_size (doubling, with a minimum allocation
// size), allocates from the arena when present, placement-news the new
// slots, moves old contents over, then frees the old buffer (heap case).
// NOTE(review): this excerpt elides several original lines (1393, 1397,
// 1401, 1405/1407, 1424-1425, 1428-1429, 1432-1434): the new_rep/bytes
// declarations, the overflow DCHECK opening, the if/else braces, the
// placement-new body, and the old-rep NULL check around deallocation.
1389 template <typename Element>
1390 void RepeatedField<Element>::Reserve(int new_size) {
1391 if (total_size_ >= new_size) return;
1392 Rep* old_rep = total_size_ > 0 ? rep() : NULL;
1394 Arena* arena = GetArenaNoVirtual();
1395 new_size = std::max(internal::kMinRepeatedFieldAllocationSize,
1396 std::max(total_size_ * 2, new_size));
1398 static_cast<size_t>(new_size),
1399 (std::numeric_limits<size_t>::max() - kRepHeaderSize) / sizeof(Element))
1400 << "Requested size is too large to fit into size_t.";
1402 kRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
1403 if (arena == NULL) {
1404 new_rep = static_cast<Rep*>(::operator new(bytes));
1406 new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
1408 new_rep->arena = arena;
1409 int old_total_size = total_size_;
1410 total_size_ = new_size;
1411 ptr_.elements = new_rep->elements;
1412 // Invoke placement-new on newly allocated elements. We shouldn't have to do
1413 // this, since Element is supposed to be POD, but a previous version of this
1414 // code allocated storage with "new Element[size]" and some code uses
1415 // RepeatedField with non-POD types, relying on constructor invocation. If
1416 // Element has a trivial constructor (e.g., int32), gcc (tested with -O2)
1417 // completely removes this loop because the loop body is empty, so this has no
1418 // effect unless its side-effects are required for correctness.
1419 // Note that we do this before MoveArray() below because Element's copy
1420 // assignment implementation will want an initialized instance first.
1421 Element* e = &elements()[0];
1422 Element* limit = e + total_size_;
1423 for (; e < limit; e++) {
1426 if (current_size_ > 0) {
1427 MoveArray(&elements()[0], old_rep->elements, current_size_);
1430 // Likewise, we need to invoke destructors on the old array.
1431 InternalDeallocate(old_rep, old_total_size);
// Shrinks the in-use count to new_size without destroying elements or
// releasing capacity. May only shrink (DCHECK new_size <= current_size_).
1435 template <typename Element>
1436 inline void RepeatedField<Element>::Truncate(int new_size) {
1437 GOOGLE_DCHECK_LE(new_size, current_size_);
1438 if (current_size_ > 0) {
1439 current_size_ = new_size;
// "Move" is implemented as a copy here: Element is expected to be trivially
// copyable, so moving and copying are equivalent.
1443 template <typename Element>
1444 inline void RepeatedField<Element>::MoveArray(Element* to, Element* from,
1446 CopyArray(to, from, array_size);
// Dispatches to ElementCopier, which picks memcpy for trivially-copyable
// element types and std::copy otherwise.
1449 template <typename Element>
1450 inline void RepeatedField<Element>::CopyArray(Element* to, const Element* from,
1452 internal::ElementCopier<Element>()(to, from, array_size);
1455 namespace internal {
// Generic copier: element-wise std::copy for types needing real copy
// construction/assignment semantics.
1457 template <typename Element, bool HasTrivialCopy>
1458 void ElementCopier<Element, HasTrivialCopy>::operator()(Element* to,
1459 const Element* from,
1461 std::copy(from, from + array_size, to);
// Specialization for trivially-copyable elements: a single memcpy.
// Precondition (inherited from memcpy): [to, to+n) and [from, from+n) must
// not overlap.
1464 template <typename Element>
1465 struct ElementCopier<Element, true> {
1466 void operator()(Element* to, const Element* from, int array_size) {
1467 memcpy(to, from, static_cast<size_t>(array_size) * sizeof(Element));
1471 } // namespace internal
1474 // -------------------------------------------------------------------
1476 namespace internal {
// Default (heap) constructor: no arena, no elements, no Rep allocated.
1478 inline RepeatedPtrFieldBase::RepeatedPtrFieldBase()
1479 : arena_(NULL), current_size_(0), total_size_(0), rep_(NULL) {}
// Arena constructor: records the arena; Rep allocation is still deferred.
1481 inline RepeatedPtrFieldBase::RepeatedPtrFieldBase(Arena* arena)
1482 : arena_(arena), current_size_(0), total_size_(0), rep_(NULL) {}
// Deletes every allocated element and then the Rep block itself — but only
// for heap-backed fields (arena_ == NULL); arena-backed storage is owned by
// the arena. Uses sized operator delete when the toolchain supports it.
1484 template <typename TypeHandler>
1485 void RepeatedPtrFieldBase::Destroy() {
1486 if (rep_ != NULL && arena_ == NULL) {
1487 int n = rep_->allocated_size;
1488 void* const* elements = rep_->elements;
1489 for (int i = 0; i < n; i++) {
1490 TypeHandler::Delete(cast<TypeHandler>(elements[i]), NULL);
1492 #if defined(__GXX_DELETE_WITH_SIZE__) || defined(__cpp_sized_deallocation)
1493 const size_t size = total_size_ * sizeof(elements[0]) + kRepHeaderSize;
1494 ::operator delete(static_cast<void*>(rep_), size);
1496 ::operator delete(static_cast<void*>(rep_));
// Swap: O(1) member swap when arenas match, otherwise the copying fallback.
1502 template <typename TypeHandler>
1503 inline void RepeatedPtrFieldBase::Swap(RepeatedPtrFieldBase* other) {
1504 if (other->GetArenaNoVirtual() == GetArenaNoVirtual()) {
1505 InternalSwap(other);
1507 SwapFallback<TypeHandler>(other);
// Cross-arena swap via a temporary on other's arena: temp <- this,
// this <- other, other <- temp. Messages cross arenas only twice.
1511 template <typename TypeHandler>
1512 void RepeatedPtrFieldBase::SwapFallback(RepeatedPtrFieldBase* other) {
1513 GOOGLE_DCHECK(other->GetArenaNoVirtual() != GetArenaNoVirtual());
1515 // Copy semantics in this case. We try to improve efficiency by placing the
1516 // temporary on |other|'s arena so that messages are copied cross-arena only
1518 RepeatedPtrFieldBase temp(other->GetArenaNoVirtual());
1519 temp.MergeFrom<TypeHandler>(*this);
1520 this->Clear<TypeHandler>();
1521 this->MergeFrom<TypeHandler>(*other);
1522 other->Clear<TypeHandler>();
1523 other->InternalSwap(&temp);
1524 temp.Destroy<TypeHandler>(); // Frees rep_ if `other` had no arena.
1527 inline bool RepeatedPtrFieldBase::empty() const { return current_size_ == 0; }
1529 inline int RepeatedPtrFieldBase::size() const { return current_size_; }
// Debug-checked read access; casts the stored void* back to the handler's
// element type.
1531 template <typename TypeHandler>
1532 inline const typename TypeHandler::WeakType& RepeatedPtrFieldBase::Get(
1534 GOOGLE_DCHECK_GE(index, 0);
1535 GOOGLE_DCHECK_LT(index, current_size_);
1536 return *cast<TypeHandler>(rep_->elements[index]);
// at(): always bounds-checked (GOOGLE_CHECK) const access.
1539 template <typename TypeHandler>
1540 inline const typename TypeHandler::Type& RepeatedPtrFieldBase::at(
1542 GOOGLE_CHECK_GE(index, 0);
1543 GOOGLE_CHECK_LT(index, current_size_);
1544 return *cast<TypeHandler>(rep_->elements[index]);
// Mutable always-checked at().
1547 template <typename TypeHandler>
1548 inline typename TypeHandler::Type& RepeatedPtrFieldBase::at(int index) {
1549 GOOGLE_CHECK_GE(index, 0);
1550 GOOGLE_CHECK_LT(index, current_size_);
1551 return *cast<TypeHandler>(rep_->elements[index]);
// Debug-checked mutable pointer access.
1554 template <typename TypeHandler>
1555 inline typename TypeHandler::Type* RepeatedPtrFieldBase::Mutable(int index) {
1556 GOOGLE_DCHECK_GE(index, 0);
1557 GOOGLE_DCHECK_LT(index, current_size_);
1558 return cast<TypeHandler>(rep_->elements[index]);
// Deletes the element at index through the handler (arena-aware). The
// remainder of the routine (original lines 1566+) is elided in this excerpt.
1561 template <typename TypeHandler>
1562 inline void RepeatedPtrFieldBase::Delete(int index) {
1563 GOOGLE_DCHECK_GE(index, 0);
1564 GOOGLE_DCHECK_LT(index, current_size_);
1565 TypeHandler::Delete(cast<TypeHandler>(rep_->elements[index]), arena_);
// Add(prototype): first reuses a cleared object if one is available
// (current_size_ < allocated_size), else grows the pointer array if full and
// allocates a fresh element from the prototype on our arena.
1568 template <typename TypeHandler>
1569 inline typename TypeHandler::Type* RepeatedPtrFieldBase::Add(
1570 typename TypeHandler::Type* prototype) {
1571 if (rep_ != NULL && current_size_ < rep_->allocated_size) {
1572 return cast<TypeHandler>(rep_->elements[current_size_++]);
1574 if (!rep_ || rep_->allocated_size == total_size_) {
1575 Reserve(total_size_ + 1);
1577 ++rep_->allocated_size;
1578 typename TypeHandler::Type* result =
1579 TypeHandler::NewFromPrototype(prototype, arena_);
1580 rep_->elements[current_size_++] = result;
// Move-enabled Add: move-assigns into a reused cleared object when possible,
// otherwise allocates via TypeHandler::New with the moved-from value.
1584 template <typename TypeHandler,
1585 typename std::enable_if<TypeHandler::Movable::value>::type*>
1586 inline void RepeatedPtrFieldBase::Add(typename TypeHandler::Type&& value) {
1587 if (rep_ != NULL && current_size_ < rep_->allocated_size) {
1588 *cast<TypeHandler>(rep_->elements[current_size_++]) = std::move(value);
1591 if (!rep_ || rep_->allocated_size == total_size_) {
1592 Reserve(total_size_ + 1);
1594 ++rep_->allocated_size;
1595 typename TypeHandler::Type* result =
1596 TypeHandler::New(arena_, std::move(value));
1597 rep_->elements[current_size_++] = result;
// RemoveLast: clears (does not free) the last element and keeps it past
// current_size_ for later reuse.
1600 template <typename TypeHandler>
1601 inline void RepeatedPtrFieldBase::RemoveLast() {
1602 GOOGLE_DCHECK_GT(current_size_, 0);
1603 TypeHandler::Clear(cast<TypeHandler>(rep_->elements[--current_size_]));
// Clear: TypeHandler::Clear()s each in-use element so objects can be reused;
// does not deallocate. (The guard around the loop and the reset of
// current_size_, original lines 1610/1612-1618, are elided in this excerpt.)
1606 template <typename TypeHandler>
1607 void RepeatedPtrFieldBase::Clear() {
1608 const int n = current_size_;
1609 GOOGLE_DCHECK_GE(n, 0);
1611 void* const* elements = rep_->elements;
1614 TypeHandler::Clear(cast<TypeHandler>(elements[i++]));
1620 // To avoid unnecessary code duplication and reduce binary size, we use a
1621 // layered approach to implementing MergeFrom(). The toplevel method is
1622 // templated, so we get a small thunk per concrete message type in the binary.
1623 // This calls a shared implementation with most of the logic, passing a function
1624 // pointer to another type-specific piece of code that calls the object-allocate
1625 // and merge handlers.
1626 template <typename TypeHandler>
1627 inline void RepeatedPtrFieldBase::MergeFrom(const RepeatedPtrFieldBase& other) {
1628 GOOGLE_DCHECK_NE(&other, this);
1629 if (other.current_size_ == 0) return;
1630 MergeFromInternal(other,
1631 &RepeatedPtrFieldBase::MergeFromInnerLoop<TypeHandler>);
// Shared non-template half of MergeFrom: extends our pointer array, invokes
// the type-specific inner loop, and fixes up size/allocated bookkeeping.
1634 inline void RepeatedPtrFieldBase::MergeFromInternal(
1635 const RepeatedPtrFieldBase& other,
1636 void (RepeatedPtrFieldBase::*inner_loop)(void**, void**, int, int)) {
1637 // Note: wrapper has already guaranteed that other.rep_ != NULL here.
1638 int other_size = other.current_size_;
1639 void** other_elements = other.rep_->elements;
1640 void** new_elements = InternalExtend(other_size);
1641 int allocated_elems = rep_->allocated_size - current_size_;
1642 (this->*inner_loop)(new_elements, other_elements, other_size,
1644 current_size_ += other_size;
1645 if (rep_->allocated_size < current_size_) {
1646 rep_->allocated_size = current_size_;
1650 // Merges other_elems to our_elems.
1651 template <typename TypeHandler>
1652 void RepeatedPtrFieldBase::MergeFromInnerLoop(void** our_elems,
1653 void** other_elems, int length,
1654 int already_allocated) {
1655 // Split into two loops, over ranges [0, allocated) and [allocated, length),
1656 // to avoid a branch within the loop.
1657 for (int i = 0; i < already_allocated && i < length; i++) {
1658 // Already allocated: use existing element.
1659 typename TypeHandler::WeakType* other_elem =
1660 reinterpret_cast<typename TypeHandler::WeakType*>(other_elems[i]);
1661 typename TypeHandler::WeakType* new_elem =
1662 reinterpret_cast<typename TypeHandler::WeakType*>(our_elems[i]);
1663 TypeHandler::Merge(*other_elem, new_elem);
// Second loop: slots beyond the allocated pool need a fresh element created
// from the source element as prototype before merging.
1665 Arena* arena = GetArenaNoVirtual();
1666 for (int i = already_allocated; i < length; i++) {
1667 // Not allocated: alloc a new element first, then merge it.
1668 typename TypeHandler::WeakType* other_elem =
1669 reinterpret_cast<typename TypeHandler::WeakType*>(other_elems[i]);
1670 typename TypeHandler::WeakType* new_elem =
1671 TypeHandler::NewFromPrototype(other_elem, arena);
1672 TypeHandler::Merge(*other_elem, new_elem);
1673 our_elems[i] = new_elem;
// CopyFrom = Clear + MergeFrom; self-copy is a no-op. Qualified calls avoid
// virtual dispatch through any derived type.
1677 template <typename TypeHandler>
1678 inline void RepeatedPtrFieldBase::CopyFrom(const RepeatedPtrFieldBase& other) {
1679 if (&other == this) return;
1680 RepeatedPtrFieldBase::Clear<TypeHandler>();
1681 RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
1684 inline int RepeatedPtrFieldBase::Capacity() const { return total_size_; }
// Read-only view of the raw pointer array; NULL before any allocation.
1686 inline void* const* RepeatedPtrFieldBase::raw_data() const {
1687 return rep_ ? rep_->elements : NULL;
// Mutable view of the same array (const_cast is safe: rep_ owns the storage).
1690 inline void** RepeatedPtrFieldBase::raw_mutable_data() const {
1691 return rep_ ? const_cast<void**>(rep_->elements) : NULL;
// Typed mutable view of the pointer array.
1694 template <typename TypeHandler>
1695 inline typename TypeHandler::Type** RepeatedPtrFieldBase::mutable_data() {
1696 // TODO(kenton): Breaks C++ aliasing rules. We should probably remove this
1698 return reinterpret_cast<typename TypeHandler::Type**>(raw_mutable_data());
// Typed const view of the pointer array.
1701 template <typename TypeHandler>
1702 inline const typename TypeHandler::Type* const* RepeatedPtrFieldBase::data()
1704 // TODO(kenton): Breaks C++ aliasing rules. We should probably remove this
1706 return reinterpret_cast<const typename TypeHandler::Type* const*>(raw_data());
// Swaps two element pointers (cheap; elements themselves are untouched).
1709 inline void RepeatedPtrFieldBase::SwapElements(int index1, int index2) {
1710 using std::swap; // enable ADL with fallback
1711 swap(rep_->elements[index1], rep_->elements[index2]);
// Pointer-array bytes plus per-element space reported by the handler; the
// rep_ guard / accumulation lines (original 1717, 1719, 1721) are elided in
// this excerpt.
1714 template <typename TypeHandler>
1715 inline size_t RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong() const {
1716 size_t allocated_bytes = static_cast<size_t>(total_size_) * sizeof(void*);
1718 for (int i = 0; i < rep_->allocated_size; ++i) {
1720 TypeHandler::SpaceUsedLong(*cast<TypeHandler>(rep_->elements[i]));
1722 allocated_bytes += kRepHeaderSize;
1724 return allocated_bytes;
// Returns a previously-cleared object for reuse, or (in the elided tail,
// original lines 1731-1734) NULL when none is available.
1727 template <typename TypeHandler>
1728 inline typename TypeHandler::Type* RepeatedPtrFieldBase::AddFromCleared() {
1729 if (rep_ != NULL && current_size_ < rep_->allocated_size) {
1730 return cast<TypeHandler>(rep_->elements[current_size_++]);
1736 // AddAllocated version that implements arena-safe copying behavior.
1737 template <typename TypeHandler>
1738 void RepeatedPtrFieldBase::AddAllocatedInternal(
1739 typename TypeHandler::Type* value, std::true_type) {
1740 Arena* element_arena =
1741 reinterpret_cast<Arena*>(TypeHandler::GetMaybeArenaPointer(value));
1742 Arena* arena = GetArenaNoVirtual();
1743 if (arena == element_arena && rep_ && rep_->allocated_size < total_size_) {
1744 // Fast path: underlying arena representation (tagged pointer) is equal to
1745 // our arena pointer, and we can add to array without resizing it (at least
1746 // one slot that is not allocated).
1747 void** elems = rep_->elements;
1748 if (current_size_ < rep_->allocated_size) {
1749 // Make space at [current] by moving first allocated element to end of
1751 elems[rep_->allocated_size] = elems[current_size_];
1753 elems[current_size_] = value;
1754 current_size_ = current_size_ + 1;
1755 rep_->allocated_size = rep_->allocated_size + 1;
// Slow path (arenas differ or no free slot): delegate to the copying helper.
1757 AddAllocatedSlowWithCopy<TypeHandler>(value, TypeHandler::GetArena(value),
1762 // Slowpath handles all cases, copying if necessary.
1763 template <typename TypeHandler>
1764 void RepeatedPtrFieldBase::AddAllocatedSlowWithCopy(
1765 // Pass value_arena and my_arena to avoid duplicate virtual call (value) or
1767 typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena) {
1768 // Ensure that either the value is in the same arena, or if not, we do the
1769 // appropriate thing: Own() it (if it's on heap and we're in an arena) or copy
1770 // it to our arena/heap (otherwise).
1771 if (my_arena != NULL && value_arena == NULL) {
1772 my_arena->Own(value);
1773 } else if (my_arena != value_arena) {
1774 typename TypeHandler::Type* new_value =
1775 TypeHandler::NewFromPrototype(value, my_arena);
1776 TypeHandler::Merge(*value, new_value);
1777 TypeHandler::Delete(value, value_arena);
// Finally hand the (possibly replaced) pointer to the unchecked path.
// NOTE(review): the assignment `value = new_value` (original lines 1778-1780)
// is elided in this excerpt.
1781 UnsafeArenaAddAllocated<TypeHandler>(value);
1784 // AddAllocated version that does not implement arena-safe copying behavior.
1785 template <typename TypeHandler>
1786 void RepeatedPtrFieldBase::AddAllocatedInternal(
1787 typename TypeHandler::Type* value, std::false_type) {
1788 if (rep_ && rep_->allocated_size < total_size_) {
1789 // Fast path: underlying arena representation (tagged pointer) is equal to
1790 // our arena pointer, and we can add to array without resizing it (at least
1791 // one slot that is not allocated).
1792 void** elems = rep_->elements;
1793 if (current_size_ < rep_->allocated_size) {
1794 // Make space at [current] by moving first allocated element to end of
1796 elems[rep_->allocated_size] = elems[current_size_];
1798 elems[current_size_] = value;
1799 current_size_ = current_size_ + 1;
1800 ++rep_->allocated_size;
// No free slot: fall back to the growth path.
1802 UnsafeArenaAddAllocated<TypeHandler>(value);
// Appends an already-allocated element without any arena-ownership checks.
// Handles three states of the pointer array: full (grow), full-of-cleared
// (delete one cleared object instead of growing, to avoid leaking in
// AddAllocated/Clear loops), and has-cleared-slack (rotate a cleared object
// to the end).
1806 template <typename TypeHandler>
1807 void RepeatedPtrFieldBase::UnsafeArenaAddAllocated(
1808 typename TypeHandler::Type* value) {
1809 // Make room for the new pointer.
1810 if (!rep_ || current_size_ == total_size_) {
1811 // The array is completely full with no cleared objects, so grow it.
1812 Reserve(total_size_ + 1);
1813 ++rep_->allocated_size;
1814 } else if (rep_->allocated_size == total_size_) {
1815 // There is no more space in the pointer array because it contains some
1816 // cleared objects awaiting reuse. We don't want to grow the array in this
1817 // case because otherwise a loop calling AddAllocated() followed by Clear()
1818 // would leak memory.
1819 TypeHandler::Delete(cast<TypeHandler>(rep_->elements[current_size_]),
1821 } else if (current_size_ < rep_->allocated_size) {
1822 // We have some cleared objects. We don't care about their order, so we
1823 // can just move the first one to the end to make space.
1824 rep_->elements[rep_->allocated_size] = rep_->elements[current_size_];
1825 ++rep_->allocated_size;
1827 // There are no cleared objects.
1828 ++rep_->allocated_size;
// Store the new element in the slot prepared above.
1831 rep_->elements[current_size_++] = value;
1834 // ReleaseLast() for types that implement merge/copy behavior.
1835 template <typename TypeHandler>
1836 inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
1838 // First, release an element.
1839 typename TypeHandler::Type* result = UnsafeArenaReleaseLast<TypeHandler>();
1840 // Now perform a copy if we're on an arena.
1841 Arena* arena = GetArenaNoVirtual();
1842 if (arena == NULL) {
1845 typename TypeHandler::Type* new_result =
1846 TypeHandler::NewFromPrototype(result, NULL);
1847 TypeHandler::Merge(*result, new_result);
1852 // ReleaseLast() for types that *do not* implement merge/copy behavior -- this
1853 // is the same as UnsafeArenaReleaseLast(). Note that we GOOGLE_DCHECK-fail if we're on
1854 // an arena, since the user really should implement the copy operation in this
1856 template <typename TypeHandler>
1857 inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
1859 GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
1860 << "ReleaseLast() called on a RepeatedPtrField that is on an arena, "
1861 << "with a type that does not implement MergeFrom. This is unsafe; "
1862 << "please implement MergeFrom for your type.";
1863 return UnsafeArenaReleaseLast<TypeHandler>();
1866 template <typename TypeHandler>
1867 inline typename TypeHandler::Type*
1868 RepeatedPtrFieldBase::UnsafeArenaReleaseLast() {
1869 GOOGLE_DCHECK_GT(current_size_, 0);
1870 typename TypeHandler::Type* result =
1871 cast<TypeHandler>(rep_->elements[--current_size_]);
1872 --rep_->allocated_size;
1873 if (current_size_ < rep_->allocated_size) {
1874 // There are cleared elements on the end; replace the removed element
1875 // with the last allocated element.
1876 rep_->elements[current_size_] = rep_->elements[rep_->allocated_size];
1881 inline int RepeatedPtrFieldBase::ClearedCount() const {
1882 return rep_ ? (rep_->allocated_size - current_size_) : 0;
1885 template <typename TypeHandler>
1886 inline void RepeatedPtrFieldBase::AddCleared(
1887 typename TypeHandler::Type* value) {
1888 GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
1889 << "AddCleared() can only be used on a RepeatedPtrField not on an arena.";
1890 GOOGLE_DCHECK(TypeHandler::GetArena(value) == NULL)
1891 << "AddCleared() can only accept values not on an arena.";
1892 if (!rep_ || rep_->allocated_size == total_size_) {
1893 Reserve(total_size_ + 1);
1895 rep_->elements[rep_->allocated_size++] = value;
1898 template <typename TypeHandler>
1899 inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseCleared() {
1900 GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
1901 << "ReleaseCleared() can only be used on a RepeatedPtrField not on "
1903 GOOGLE_DCHECK(GetArenaNoVirtual() == NULL);
1904 GOOGLE_DCHECK(rep_ != NULL);
1905 GOOGLE_DCHECK_GT(rep_->allocated_size, current_size_);
1906 return cast<TypeHandler>(rep_->elements[--rep_->allocated_size]);
1909 } // namespace internal
1911 // -------------------------------------------------------------------
1913 template <typename Element>
1914 class RepeatedPtrField<Element>::TypeHandler
1915 : public internal::GenericTypeHandler<Element> {};
1918 class RepeatedPtrField<std::string>::TypeHandler
1919 : public internal::StringTypeHandler {};
1921 template <typename Element>
1922 inline RepeatedPtrField<Element>::RepeatedPtrField() : RepeatedPtrFieldBase() {}
1924 template <typename Element>
1925 inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena)
1926 : RepeatedPtrFieldBase(arena) {}
1928 template <typename Element>
1929 inline RepeatedPtrField<Element>::RepeatedPtrField(
1930 const RepeatedPtrField& other)
1931 : RepeatedPtrFieldBase() {
1935 template <typename Element>
1936 template <typename Iter>
1937 inline RepeatedPtrField<Element>::RepeatedPtrField(Iter begin,
1939 int reserve = internal::CalculateReserve(begin, end);
1940 if (reserve != -1) {
1943 for (; begin != end; ++begin) {
1948 template <typename Element>
1949 RepeatedPtrField<Element>::~RepeatedPtrField() {
1950 Destroy<TypeHandler>();
1953 template <typename Element>
1954 inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
1955 const RepeatedPtrField& other) {
1956 if (this != &other) CopyFrom(other);
1960 template <typename Element>
1961 inline RepeatedPtrField<Element>::RepeatedPtrField(
1962 RepeatedPtrField&& other) noexcept
1963 : RepeatedPtrField() {
1964 // We don't just call Swap(&other) here because it would perform 3 copies if
1965 // other is on an arena. This field can't be on an arena because arena
1966 // construction always uses the Arena* accepting constructor.
1967 if (other.GetArenaNoVirtual()) {
1970 InternalSwap(&other);
1974 template <typename Element>
1975 inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
1976 RepeatedPtrField&& other) noexcept {
1977 // We don't just call Swap(&other) here because it would perform 3 copies if
1978 // the two fields are on different arenas.
1979 if (this != &other) {
1980 if (this->GetArenaNoVirtual() != other.GetArenaNoVirtual()) {
1983 InternalSwap(&other);
1989 template <typename Element>
1990 inline bool RepeatedPtrField<Element>::empty() const {
1991 return RepeatedPtrFieldBase::empty();
1994 template <typename Element>
1995 inline int RepeatedPtrField<Element>::size() const {
1996 return RepeatedPtrFieldBase::size();
1999 template <typename Element>
2000 inline const Element& RepeatedPtrField<Element>::Get(int index) const {
2001 return RepeatedPtrFieldBase::Get<TypeHandler>(index);
2004 template <typename Element>
2005 inline const Element& RepeatedPtrField<Element>::at(int index) const {
2006 return RepeatedPtrFieldBase::at<TypeHandler>(index);
2009 template <typename Element>
2010 inline Element& RepeatedPtrField<Element>::at(int index) {
2011 return RepeatedPtrFieldBase::at<TypeHandler>(index);
2015 template <typename Element>
2016 inline Element* RepeatedPtrField<Element>::Mutable(int index) {
2017 return RepeatedPtrFieldBase::Mutable<TypeHandler>(index);
2020 template <typename Element>
2021 inline Element* RepeatedPtrField<Element>::Add() {
2022 return RepeatedPtrFieldBase::Add<TypeHandler>();
2025 template <typename Element>
2026 inline void RepeatedPtrField<Element>::Add(Element&& value) {
2027 RepeatedPtrFieldBase::Add<TypeHandler>(std::move(value));
2030 template <typename Element>
2031 inline void RepeatedPtrField<Element>::RemoveLast() {
2032 RepeatedPtrFieldBase::RemoveLast<TypeHandler>();
2035 template <typename Element>
2036 inline void RepeatedPtrField<Element>::DeleteSubrange(int start, int num) {
2037 GOOGLE_DCHECK_GE(start, 0);
2038 GOOGLE_DCHECK_GE(num, 0);
2039 GOOGLE_DCHECK_LE(start + num, size());
2040 for (int i = 0; i < num; ++i) {
2041 RepeatedPtrFieldBase::Delete<TypeHandler>(start + i);
2043 ExtractSubrange(start, num, NULL);
2046 template <typename Element>
2047 inline void RepeatedPtrField<Element>::ExtractSubrange(int start, int num,
2048 Element** elements) {
2049 typename internal::TypeImplementsMergeBehavior<
2050 typename TypeHandler::Type>::type t;
2051 ExtractSubrangeInternal(start, num, elements, t);
2054 // ExtractSubrange() implementation for types that implement merge/copy
2056 template <typename Element>
2057 inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
2058 int start, int num, Element** elements, std::true_type) {
2059 GOOGLE_DCHECK_GE(start, 0);
2060 GOOGLE_DCHECK_GE(num, 0);
2061 GOOGLE_DCHECK_LE(start + num, size());
2064 // Save the values of the removed elements if requested.
2065 if (elements != NULL) {
2066 if (GetArenaNoVirtual() != NULL) {
2067 // If we're on an arena, we perform a copy for each element so that the
2068 // returned elements are heap-allocated.
2069 for (int i = 0; i < num; ++i) {
2071 RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
2072 typename TypeHandler::Type* new_value =
2073 TypeHandler::NewFromPrototype(element, NULL);
2074 TypeHandler::Merge(*element, new_value);
2075 elements[i] = new_value;
2078 for (int i = 0; i < num; ++i) {
2079 elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
2083 CloseGap(start, num);
2087 // ExtractSubrange() implementation for types that do not implement merge/copy
2089 template <typename Element>
2090 inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
2091 int start, int num, Element** elements, std::false_type) {
2092 // This case is identical to UnsafeArenaExtractSubrange(). However, since
2093 // ExtractSubrange() must return heap-allocated objects by contract, and we
2094 // cannot fulfill this contract if we are an on arena, we must GOOGLE_DCHECK() that
2095 // we are not on an arena.
2096 GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
2097 << "ExtractSubrange() when arena is non-NULL is only supported when "
2098 << "the Element type supplies a MergeFrom() operation to make copies.";
2099 UnsafeArenaExtractSubrange(start, num, elements);
2102 template <typename Element>
2103 inline void RepeatedPtrField<Element>::UnsafeArenaExtractSubrange(
2104 int start, int num, Element** elements) {
2105 GOOGLE_DCHECK_GE(start, 0);
2106 GOOGLE_DCHECK_GE(num, 0);
2107 GOOGLE_DCHECK_LE(start + num, size());
2110 // Save the values of the removed elements if requested.
2111 if (elements != NULL) {
2112 for (int i = 0; i < num; ++i) {
2113 elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
2116 CloseGap(start, num);
2120 template <typename Element>
2121 inline void RepeatedPtrField<Element>::Clear() {
2122 RepeatedPtrFieldBase::Clear<TypeHandler>();
2125 template <typename Element>
2126 inline void RepeatedPtrField<Element>::MergeFrom(
2127 const RepeatedPtrField& other) {
2128 RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
2131 template <typename Element>
2132 inline void RepeatedPtrField<Element>::CopyFrom(const RepeatedPtrField& other) {
2133 RepeatedPtrFieldBase::CopyFrom<TypeHandler>(other);
2136 template <typename Element>
2137 inline typename RepeatedPtrField<Element>::iterator
2138 RepeatedPtrField<Element>::erase(const_iterator position) {
2139 return erase(position, position + 1);
2142 template <typename Element>
2143 inline typename RepeatedPtrField<Element>::iterator
2144 RepeatedPtrField<Element>::erase(const_iterator first, const_iterator last) {
2145 size_type pos_offset = std::distance(cbegin(), first);
2146 size_type last_offset = std::distance(cbegin(), last);
2147 DeleteSubrange(pos_offset, last_offset - pos_offset);
2148 return begin() + pos_offset;
2151 template <typename Element>
2152 inline Element** RepeatedPtrField<Element>::mutable_data() {
2153 return RepeatedPtrFieldBase::mutable_data<TypeHandler>();
2156 template <typename Element>
2157 inline const Element* const* RepeatedPtrField<Element>::data() const {
2158 return RepeatedPtrFieldBase::data<TypeHandler>();
2161 template <typename Element>
2162 inline void RepeatedPtrField<Element>::Swap(RepeatedPtrField* other) {
2163 if (this == other) return;
2164 RepeatedPtrFieldBase::Swap<TypeHandler>(other);
2167 template <typename Element>
2168 inline void RepeatedPtrField<Element>::UnsafeArenaSwap(
2169 RepeatedPtrField* other) {
2170 if (this == other) return;
2171 RepeatedPtrFieldBase::InternalSwap(other);
2174 template <typename Element>
2175 inline void RepeatedPtrField<Element>::SwapElements(int index1, int index2) {
2176 RepeatedPtrFieldBase::SwapElements(index1, index2);
2179 template <typename Element>
2180 inline Arena* RepeatedPtrField<Element>::GetArenaNoVirtual() const {
2181 return RepeatedPtrFieldBase::GetArenaNoVirtual();
2184 template <typename Element>
2185 inline size_t RepeatedPtrField<Element>::SpaceUsedExcludingSelfLong() const {
2186 return RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong<TypeHandler>();
2189 template <typename Element>
2190 inline void RepeatedPtrField<Element>::AddAllocated(Element* value) {
2191 RepeatedPtrFieldBase::AddAllocated<TypeHandler>(value);
2194 template <typename Element>
2195 inline void RepeatedPtrField<Element>::UnsafeArenaAddAllocated(Element* value) {
2196 RepeatedPtrFieldBase::UnsafeArenaAddAllocated<TypeHandler>(value);
2199 template <typename Element>
2200 inline Element* RepeatedPtrField<Element>::ReleaseLast() {
2201 return RepeatedPtrFieldBase::ReleaseLast<TypeHandler>();
2204 template <typename Element>
2205 inline Element* RepeatedPtrField<Element>::UnsafeArenaReleaseLast() {
2206 return RepeatedPtrFieldBase::UnsafeArenaReleaseLast<TypeHandler>();
2209 template <typename Element>
2210 inline int RepeatedPtrField<Element>::ClearedCount() const {
2211 return RepeatedPtrFieldBase::ClearedCount();
2214 template <typename Element>
2215 inline void RepeatedPtrField<Element>::AddCleared(Element* value) {
2216 return RepeatedPtrFieldBase::AddCleared<TypeHandler>(value);
2219 template <typename Element>
2220 inline Element* RepeatedPtrField<Element>::ReleaseCleared() {
2221 return RepeatedPtrFieldBase::ReleaseCleared<TypeHandler>();
2224 template <typename Element>
2225 inline void RepeatedPtrField<Element>::Reserve(int new_size) {
2226 return RepeatedPtrFieldBase::Reserve(new_size);
2229 template <typename Element>
2230 inline int RepeatedPtrField<Element>::Capacity() const {
2231 return RepeatedPtrFieldBase::Capacity();
2234 // -------------------------------------------------------------------
2236 namespace internal {
2238 // STL-like iterator implementation for RepeatedPtrField. You should not
2239 // refer to this class directly; use RepeatedPtrField<T>::iterator instead.
2241 // The iterator for RepeatedPtrField<T>, RepeatedPtrIterator<T>, is
2242 // very similar to iterator_ptr<T**> in util/gtl/iterator_adaptors.h,
2243 // but adds random-access operators and is modified to wrap a void** base
2244 // iterator (since RepeatedPtrField stores its array as a void* array and
2245 // casting void** to T** would violate C++ aliasing rules).
2247 // This code based on net/proto/proto-array-internal.h by Jeffrey Yasskin
2248 // (jyasskin@google.com).
2249 template <typename Element>
2250 class RepeatedPtrIterator {
2252 using iterator = RepeatedPtrIterator<Element>;
2253 using iterator_category = std::random_access_iterator_tag;
2254 using value_type = typename std::remove_const<Element>::type;
2255 using difference_type = std::ptrdiff_t;
2256 using pointer = Element*;
2257 using reference = Element&;
2259 RepeatedPtrIterator() : it_(NULL) {}
2260 explicit RepeatedPtrIterator(void* const* it) : it_(it) {}
2262 // Allow "upcasting" from RepeatedPtrIterator<T**> to
2263 // RepeatedPtrIterator<const T*const*>.
2264 template <typename OtherElement>
2265 RepeatedPtrIterator(const RepeatedPtrIterator<OtherElement>& other)
2267 // Force a compiler error if the other type is not convertible to ours.
2269 implicit_cast<Element*>(static_cast<OtherElement*>(nullptr));
2274 reference operator*() const { return *reinterpret_cast<Element*>(*it_); }
2275 pointer operator->() const { return &(operator*()); }
2277 // {inc,dec}rementable
2278 iterator& operator++() {
2282 iterator operator++(int) { return iterator(it_++); }
2283 iterator& operator--() {
2287 iterator operator--(int) { return iterator(it_--); }
2289 // equality_comparable
2290 bool operator==(const iterator& x) const { return it_ == x.it_; }
2291 bool operator!=(const iterator& x) const { return it_ != x.it_; }
2293 // less_than_comparable
2294 bool operator<(const iterator& x) const { return it_ < x.it_; }
2295 bool operator<=(const iterator& x) const { return it_ <= x.it_; }
2296 bool operator>(const iterator& x) const { return it_ > x.it_; }
2297 bool operator>=(const iterator& x) const { return it_ >= x.it_; }
2299 // addable, subtractable
2300 iterator& operator+=(difference_type d) {
2304 friend iterator operator+(iterator it, const difference_type d) {
2308 friend iterator operator+(const difference_type d, iterator it) {
2312 iterator& operator-=(difference_type d) {
2316 friend iterator operator-(iterator it, difference_type d) {
2322 reference operator[](difference_type d) const { return *(*this + d); }
2324 // random access iterator
2325 difference_type operator-(const iterator& x) const { return it_ - x.it_; }
2328 template <typename OtherElement>
2329 friend class RepeatedPtrIterator;
2331 // The internal iterator.
2335 // Provide an iterator that operates on pointers to the underlying objects
2336 // rather than the objects themselves as RepeatedPtrIterator does.
2337 // Consider using this when working with stl algorithms that change
2339 // The VoidPtr template parameter holds the type-agnostic pointer value
2340 // referenced by the iterator. It should either be "void *" for a mutable
2341 // iterator, or "const void* const" for a constant iterator.
// Random-access iterator over the pointer *slots* of a RepeatedPtrField,
// yielding Element (a pointer type) rather than the pointed-to object.
// VoidPtr is "void*" for the mutable iterator, "const void* const" for the
// const iterator.
template <typename Element, typename VoidPtr>
class RepeatedPtrOverPtrsIterator {
 public:
  using iterator = RepeatedPtrOverPtrsIterator<Element, VoidPtr>;
  using iterator_category = std::random_access_iterator_tag;
  using value_type = typename std::remove_const<Element>::type;
  using difference_type = std::ptrdiff_t;
  using pointer = Element*;
  using reference = Element&;

  RepeatedPtrOverPtrsIterator() : it_(NULL) {}
  explicit RepeatedPtrOverPtrsIterator(VoidPtr* it) : it_(it) {}

  // dereferenceable
  reference operator*() const { return *reinterpret_cast<Element*>(it_); }
  pointer operator->() const { return &(operator*()); }

  // {inc,dec}rementable
  iterator& operator++() {
    ++it_;
    return *this;
  }
  iterator operator++(int) { return iterator(it_++); }
  iterator& operator--() {
    --it_;
    return *this;
  }
  iterator operator--(int) { return iterator(it_--); }

  // equality_comparable
  bool operator==(const iterator& x) const { return it_ == x.it_; }
  bool operator!=(const iterator& x) const { return it_ != x.it_; }

  // less_than_comparable
  bool operator<(const iterator& x) const { return it_ < x.it_; }
  bool operator<=(const iterator& x) const { return it_ <= x.it_; }
  bool operator>(const iterator& x) const { return it_ > x.it_; }
  bool operator>=(const iterator& x) const { return it_ >= x.it_; }

  // addable, subtractable
  iterator& operator+=(difference_type d) {
    it_ += d;
    return *this;
  }
  friend iterator operator+(iterator it, difference_type d) {
    it.it_ += d;
    return it;
  }
  friend iterator operator+(difference_type d, iterator it) {
    it.it_ += d;
    return it;
  }
  iterator& operator-=(difference_type d) {
    it_ -= d;
    return *this;
  }
  friend iterator operator-(iterator it, difference_type d) {
    it.it_ -= d;
    return it;
  }

  // indexable
  reference operator[](difference_type d) const { return *(*this + d); }

  // random access iterator
  difference_type operator-(const iterator& x) const { return it_ - x.it_; }

 private:
  template <typename OtherElement>
  friend class RepeatedPtrIterator;

  // The internal iterator.
  VoidPtr* it_;
};
2417 void RepeatedPtrFieldBase::InternalSwap(RepeatedPtrFieldBase* other) {
2418 GOOGLE_DCHECK(this != other);
2419 GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
2421 std::swap(rep_, other->rep_);
2422 std::swap(current_size_, other->current_size_);
2423 std::swap(total_size_, other->total_size_);
2426 } // namespace internal
2428 template <typename Element>
2429 inline typename RepeatedPtrField<Element>::iterator
2430 RepeatedPtrField<Element>::begin() {
2431 return iterator(raw_data());
2433 template <typename Element>
2434 inline typename RepeatedPtrField<Element>::const_iterator
2435 RepeatedPtrField<Element>::begin() const {
2436 return iterator(raw_data());
2438 template <typename Element>
2439 inline typename RepeatedPtrField<Element>::const_iterator
2440 RepeatedPtrField<Element>::cbegin() const {
2443 template <typename Element>
2444 inline typename RepeatedPtrField<Element>::iterator
2445 RepeatedPtrField<Element>::end() {
2446 return iterator(raw_data() + size());
2448 template <typename Element>
2449 inline typename RepeatedPtrField<Element>::const_iterator
2450 RepeatedPtrField<Element>::end() const {
2451 return iterator(raw_data() + size());
2453 template <typename Element>
2454 inline typename RepeatedPtrField<Element>::const_iterator
2455 RepeatedPtrField<Element>::cend() const {
2459 template <typename Element>
2460 inline typename RepeatedPtrField<Element>::pointer_iterator
2461 RepeatedPtrField<Element>::pointer_begin() {
2462 return pointer_iterator(raw_mutable_data());
2464 template <typename Element>
2465 inline typename RepeatedPtrField<Element>::const_pointer_iterator
2466 RepeatedPtrField<Element>::pointer_begin() const {
2467 return const_pointer_iterator(const_cast<const void* const*>(raw_data()));
2469 template <typename Element>
2470 inline typename RepeatedPtrField<Element>::pointer_iterator
2471 RepeatedPtrField<Element>::pointer_end() {
2472 return pointer_iterator(raw_mutable_data() + size());
2474 template <typename Element>
2475 inline typename RepeatedPtrField<Element>::const_pointer_iterator
2476 RepeatedPtrField<Element>::pointer_end() const {
2477 return const_pointer_iterator(
2478 const_cast<const void* const*>(raw_data() + size()));
2481 // Iterators and helper functions that follow the spirit of the STL
2482 // std::back_insert_iterator and std::back_inserter but are tailor-made
2483 // for RepeatedField and RepeatedPtrField. Typical usage would be:
2485 // std::copy(some_sequence.begin(), some_sequence.end(),
2486 // RepeatedFieldBackInserter(proto.mutable_sequence()));
2488 // Ported by johannes from util/gtl/proto-array-iterators.h
2490 namespace internal {
2491 // A back inserter for RepeatedField objects.
2492 template <typename T>
2493 class RepeatedFieldBackInsertIterator
2494 : public std::iterator<std::output_iterator_tag, T> {
2496 explicit RepeatedFieldBackInsertIterator(
2497 RepeatedField<T>* const mutable_field)
2498 : field_(mutable_field) {}
2499 RepeatedFieldBackInsertIterator<T>& operator=(const T& value) {
2503 RepeatedFieldBackInsertIterator<T>& operator*() { return *this; }
2504 RepeatedFieldBackInsertIterator<T>& operator++() { return *this; }
2505 RepeatedFieldBackInsertIterator<T>& operator++(int /* unused */) {
2510 RepeatedField<T>* field_;
2513 // A back inserter for RepeatedPtrField objects.
2514 template <typename T>
2515 class RepeatedPtrFieldBackInsertIterator
2516 : public std::iterator<std::output_iterator_tag, T> {
2518 RepeatedPtrFieldBackInsertIterator(RepeatedPtrField<T>* const mutable_field)
2519 : field_(mutable_field) {}
2520 RepeatedPtrFieldBackInsertIterator<T>& operator=(const T& value) {
2521 *field_->Add() = value;
2524 RepeatedPtrFieldBackInsertIterator<T>& operator=(
2525 const T* const ptr_to_value) {
2526 *field_->Add() = *ptr_to_value;
2529 RepeatedPtrFieldBackInsertIterator<T>& operator=(T&& value) {
2530 *field_->Add() = std::move(value);
2533 RepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
2534 RepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
2535 RepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
2540 RepeatedPtrField<T>* field_;
2543 // A back inserter for RepeatedPtrFields that inserts by transferring ownership
2545 template <typename T>
2546 class AllocatedRepeatedPtrFieldBackInsertIterator
2547 : public std::iterator<std::output_iterator_tag, T> {
2549 explicit AllocatedRepeatedPtrFieldBackInsertIterator(
2550 RepeatedPtrField<T>* const mutable_field)
2551 : field_(mutable_field) {}
2552 AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
2553 T* const ptr_to_value) {
2554 field_->AddAllocated(ptr_to_value);
2557 AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
2558 AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
2559 AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
2564 RepeatedPtrField<T>* field_;
2567 // Almost identical to AllocatedRepeatedPtrFieldBackInsertIterator. This one
2568 // uses the UnsafeArenaAddAllocated instead.
2569 template <typename T>
2570 class UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator
2571 : public std::iterator<std::output_iterator_tag, T> {
2573 explicit UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator(
2574 RepeatedPtrField<T>* const mutable_field)
2575 : field_(mutable_field) {}
2576 UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
2577 T const* const ptr_to_value) {
2578 field_->UnsafeArenaAddAllocated(const_cast<T*>(ptr_to_value));
2581 UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() {
2584 UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() {
2587 UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(
2593 RepeatedPtrField<T>* field_;
2596 } // namespace internal
2598 // Provides a back insert iterator for RepeatedField instances,
2599 // similar to std::back_inserter().
2600 template <typename T>
2601 internal::RepeatedFieldBackInsertIterator<T> RepeatedFieldBackInserter(
2602 RepeatedField<T>* const mutable_field) {
2603 return internal::RepeatedFieldBackInsertIterator<T>(mutable_field);
2606 // Provides a back insert iterator for RepeatedPtrField instances,
2607 // similar to std::back_inserter().
2608 template <typename T>
2609 internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedPtrFieldBackInserter(
2610 RepeatedPtrField<T>* const mutable_field) {
2611 return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
2614 // Special back insert iterator for RepeatedPtrField instances, just in
2615 // case someone wants to write generic template code that can access both
2616 // RepeatedFields and RepeatedPtrFields using a common name.
2617 template <typename T>
2618 internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedFieldBackInserter(
2619 RepeatedPtrField<T>* const mutable_field) {
2620 return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
2623 // Provides a back insert iterator for RepeatedPtrField instances
2624 // similar to std::back_inserter() which transfers the ownership while
2625 // copying elements.
2626 template <typename T>
2627 internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>
2628 AllocatedRepeatedPtrFieldBackInserter(
2629 RepeatedPtrField<T>* const mutable_field) {
2630 return internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>(
2634 // Similar to AllocatedRepeatedPtrFieldBackInserter, using
2635 // UnsafeArenaAddAllocated instead of AddAllocated.
2636 // This is slightly faster if that matters. It is also useful in legacy code
2637 // that uses temporary ownership to avoid copies. Example:
2638 // RepeatedPtrField<T> temp_field;
2639 // temp_field.AddAllocated(new T);
2640 // ... // Do something with temp_field
2641 // temp_field.ExtractSubrange(0, temp_field.size(), nullptr);
2642 // If you put temp_field on the arena this fails, because the ownership
2643 // transfers to the arena at the "AddAllocated" call and is not released anymore
2644 // causing a double delete. Using UnsafeArenaAddAllocated prevents this.
2645 template <typename T>
2646 internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>
2647 UnsafeArenaAllocatedRepeatedPtrFieldBackInserter(
2648 RepeatedPtrField<T>* const mutable_field) {
2649 return internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>(
// Extern declarations of common instantiations to reduce library bloat.
2654 extern template class PROTOBUF_EXPORT RepeatedField<bool>;
2655 extern template class PROTOBUF_EXPORT RepeatedField<int32>;
2656 extern template class PROTOBUF_EXPORT RepeatedField<uint32>;
2657 extern template class PROTOBUF_EXPORT RepeatedField<int64>;
2658 extern template class PROTOBUF_EXPORT RepeatedField<uint64>;
2659 extern template class PROTOBUF_EXPORT RepeatedField<float>;
2660 extern template class PROTOBUF_EXPORT RepeatedField<double>;
2661 extern template class PROTOBUF_EXPORT RepeatedPtrField<std::string>;
2663 } // namespace protobuf
2664 } // namespace google
2666 #include <google/protobuf/port_undef.inc>
2668 #endif // GOOGLE_PROTOBUF_REPEATED_FIELD_H__