1// Protocol Buffers - Google's data interchange format
2// Copyright 2008 Google Inc. All rights reserved.
3// https://developers.google.com/protocol-buffers/
4//
5// Redistribution and use in source and binary forms, with or without
6// modification, are permitted provided that the following conditions are
7// met:
8//
9// * Redistributions of source code must retain the above copyright
10// notice, this list of conditions and the following disclaimer.
11// * Redistributions in binary form must reproduce the above
12// copyright notice, this list of conditions and the following disclaimer
13// in the documentation and/or other materials provided with the
14// distribution.
15// * Neither the name of Google Inc. nor the names of its
16// contributors may be used to endorse or promote products derived from
17// this software without specific prior written permission.
18//
19// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
31// Author: [email protected] (Kenton Varda)
32// Based on original Protocol Buffers design by
33// Sanjay Ghemawat, Jeff Dean, and others.
34//
35// RepeatedField and RepeatedPtrField are used by generated protocol message
36// classes to manipulate repeated fields. These classes are very similar to
37// STL's vector, but include a number of optimizations found to be useful
38// specifically in the case of Protocol Buffers. RepeatedPtrField is
39// particularly different from STL vector as it manages ownership of the
40// pointers that it contains.
41//
42// Typically, clients should not need to access RepeatedField objects directly,
43// but should instead use the accessor functions generated automatically by the
44// protocol compiler.
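//
// For example, given a message defined (hypothetically) as
//   message Foo { repeated int32 samples = 1; }
// the generated class exposes accessors along these lines:
//
//   Foo foo;
//   foo.add_samples(7);                              // append
//   foo.set_samples(0, 42);                          // set by index
//   int32 first = foo.samples(0);                    // get by index
//   int count = foo.samples_size();                  // number of elements
//   const RepeatedField<int32>& all = foo.samples(); // whole field, read-only
//   RepeatedField<int32>* mutable_all = foo.mutable_samples();
//
// The message and field names above are illustrative only.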
45
#ifndef GOOGLE_PROTOBUF_REPEATED_FIELD_H__
#define GOOGLE_PROTOBUF_REPEATED_FIELD_H__

#include <utility>
#ifdef _MSC_VER
// This is required for min/max on VS2013 only.
#include <algorithm>
#endif

#include <iterator>
#include <limits>
#include <string>
#include <type_traits>
#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/implicit_weak_message.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/port.h>
#include <google/protobuf/stubs/casts.h>

#include <google/protobuf/port_def.inc>

#ifdef SWIG
#error "You cannot SWIG proto headers"
#endif
74
75// Forward-declare these so that we can make them friends.
76namespace upb {
77namespace google_opensource {
78class GMR_Handlers;
79} // namespace google_opensource
80} // namespace upb
81
82namespace google {
83namespace protobuf {
84
85class Message;
86class Reflection;
87
88namespace internal {
89
90class MergePartialFromCodedStreamHelper;
91
92static const int kMinRepeatedFieldAllocationSize = 4;
93
94// A utility function for logging that doesn't need any template types.
95void LogIndexOutOfBounds(int index, int size);
96
97template <typename Iter>
98inline int CalculateReserve(Iter begin, Iter end, std::forward_iterator_tag) {
99 return static_cast<int>(std::distance(begin, end));
100}
101
102template <typename Iter>
103inline int CalculateReserve(Iter /*begin*/, Iter /*end*/,
104 std::input_iterator_tag /*unused*/) {
105 return -1;
106}
107
108template <typename Iter>
109inline int CalculateReserve(Iter begin, Iter end) {
110 typedef typename std::iterator_traits<Iter>::iterator_category Category;
111 return CalculateReserve(begin, end, Category());
112}
113} // namespace internal
114
115// RepeatedField is used to represent repeated fields of a primitive type (in
116// other words, everything except strings and nested Messages). Most users will
117// not ever use a RepeatedField directly; they will use the get-by-index,
118// set-by-index, and add accessors that are generated for all repeated fields.
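//
// For the rare cases where a RepeatedField is manipulated directly, it behaves
// much like a std::vector of a primitive type. A minimal sketch (values
// illustrative):
//
//   RepeatedField<int32> field;
//   field.Add(1);                  // {1}
//   field.Add(2);                  // {1, 2}
//   field.Set(0, 10);              // {10, 2}
//   for (int32 v : field) { /* begin()/end() give STL-style iteration */ }
//   field.Clear();                 // {}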
119template <typename Element>
120class RepeatedField final {
  static_assert(
      alignof(Arena) >= alignof(Element),
      "We only support types whose alignment is no greater than Arena's");
124
125 public:
126 RepeatedField();
127 explicit RepeatedField(Arena* arena);
128 RepeatedField(const RepeatedField& other);
129 template <typename Iter>
130 RepeatedField(Iter begin, const Iter& end);
131 ~RepeatedField();
132
133 RepeatedField& operator=(const RepeatedField& other);
134
135 RepeatedField(RepeatedField&& other) noexcept;
136 RepeatedField& operator=(RepeatedField&& other) noexcept;
137
138 bool empty() const;
139 int size() const;
140
141 const Element& Get(int index) const;
142 Element* Mutable(int index);
143
144 const Element& operator[](int index) const { return Get(index); }
145 Element& operator[](int index) { return *Mutable(index); }
146
147 const Element& at(int index) const;
148 Element& at(int index);
149
150 void Set(int index, const Element& value);
151 void Add(const Element& value);
  // Appends a new element and returns a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
154 Element* Add();
155 // Append elements in the range [begin, end) after reserving
156 // the appropriate number of elements.
157 template <typename Iter>
158 void Add(Iter begin, Iter end);
159
160 // Remove the last element in the array.
161 void RemoveLast();
162
163 // Extract elements with indices in "[start .. start+num-1]".
164 // Copy them into "elements[0 .. num-1]" if "elements" is not NULL.
165 // Caution: implementation also moves elements with indices [start+num ..].
166 // Calling this routine inside a loop can cause quadratic behavior.
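  // Illustrative example, assuming a field |f| currently holding {a, b, c, d, e}:
  //   Element out[2];
  //   f.ExtractSubrange(1, 2, out);  // out = {b, c}; f now holds {a, d, e}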
167 void ExtractSubrange(int start, int num, Element* elements);
168
169 void Clear();
170 void MergeFrom(const RepeatedField& other);
171 void CopyFrom(const RepeatedField& other);
172
173 // Reserve space to expand the field to at least the given size. If the
174 // array is grown, it will always be at least doubled in size.
175 void Reserve(int new_size);
176
177 // Resize the RepeatedField to a new, smaller size. This is O(1).
178 void Truncate(int new_size);
179
180 void AddAlreadyReserved(const Element& value);
  // Appends a new element and returns a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  // Should be called only if Capacity() > size().
184 Element* AddAlreadyReserved();
185 Element* AddNAlreadyReserved(int elements);
186 int Capacity() const;
187
188 // Like STL resize. Uses value to fill appended elements.
189 // Like Truncate() if new_size <= size(), otherwise this is
190 // O(new_size - size()).
191 void Resize(int new_size, const Element& value);
192
193 // Gets the underlying array. This pointer is possibly invalidated by
194 // any add or remove operation.
195 Element* mutable_data();
196 const Element* data() const;
197
  // Swap entire contents with "other". If they are on separate arenas, then
  // the data is copied between them.
200 void Swap(RepeatedField* other);
201
202 // Swap entire contents with "other". Should be called only if the caller can
203 // guarantee that both repeated fields are on the same arena or are on the
204 // heap. Swapping between different arenas is disallowed and caught by a
205 // GOOGLE_DCHECK (see API docs for details).
206 void UnsafeArenaSwap(RepeatedField* other);
207
208 // Swap two elements.
209 void SwapElements(int index1, int index2);
210
211 // STL-like iterator support
212 typedef Element* iterator;
213 typedef const Element* const_iterator;
214 typedef Element value_type;
215 typedef value_type& reference;
216 typedef const value_type& const_reference;
217 typedef value_type* pointer;
218 typedef const value_type* const_pointer;
219 typedef int size_type;
220 typedef ptrdiff_t difference_type;
221
222 iterator begin();
223 const_iterator begin() const;
224 const_iterator cbegin() const;
225 iterator end();
226 const_iterator end() const;
227 const_iterator cend() const;
228
229 // Reverse iterator support
230 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
231 typedef std::reverse_iterator<iterator> reverse_iterator;
232 reverse_iterator rbegin() { return reverse_iterator(end()); }
233 const_reverse_iterator rbegin() const {
234 return const_reverse_iterator(end());
235 }
236 reverse_iterator rend() { return reverse_iterator(begin()); }
237 const_reverse_iterator rend() const {
238 return const_reverse_iterator(begin());
239 }
240
241 // Returns the number of bytes used by the repeated field, excluding
242 // sizeof(*this)
243 size_t SpaceUsedExcludingSelfLong() const;
244
245 int SpaceUsedExcludingSelf() const {
246 return internal::ToIntSize(SpaceUsedExcludingSelfLong());
247 }
248
249 // Removes the element referenced by position.
250 //
251 // Returns an iterator to the element immediately following the removed
252 // element.
253 //
254 // Invalidates all iterators at or after the removed element, including end().
255 iterator erase(const_iterator position);
256
257 // Removes the elements in the range [first, last).
258 //
259 // Returns an iterator to the element immediately following the removed range.
260 //
261 // Invalidates all iterators at or after the removed range, including end().
262 iterator erase(const_iterator first, const_iterator last);
263
264 // Get the Arena on which this RepeatedField stores its elements.
265 Arena* GetArena() const { return GetArenaNoVirtual(); }
266
267 // For internal use only.
268 //
269 // This is public due to it being called by generated code.
270 inline void InternalSwap(RepeatedField* other);
271
272 private:
273 static const int kInitialSize = 0;
274 // A note on the representation here (see also comment below for
275 // RepeatedPtrFieldBase's struct Rep):
276 //
277 // We maintain the same sizeof(RepeatedField) as before we added arena support
278 // so that we do not degrade performance by bloating memory usage. Directly
279 // adding an arena_ element to RepeatedField is quite costly. By using
280 // indirection in this way, we keep the same size when the RepeatedField is
281 // empty (common case), and add only an 8-byte header to the elements array
282 // when non-empty. We make sure to place the size fields directly in the
283 // RepeatedField class to avoid costly cache misses due to the indirection.
284 int current_size_;
285 int total_size_;
286 struct Rep {
287 Arena* arena;
288 Element elements[1];
289 };
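  // Illustrative layout of a heap-allocated Rep (padding depends on Element):
  //
  //   rep() --> [ Arena* arena | (padding?) | elements[0] ... elements[total_size_ - 1] ]
  //                                           ^
  //                     arena_or_elements_ ---+
  //
  // kRepHeaderSize below is the offset of elements[0] within Rep.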
  // We cannot use sizeof(Rep) - sizeof(Element) due to the trailing padding on
  // the struct. We cannot use sizeof(Arena*) either, because there might be
  // a "gap" after the field arena and before the field elements (e.g., when
  // Element is double and pointers are 32-bit).
294 static const size_t kRepHeaderSize;
295
  // If total_size_ == 0 this points to an Arena; otherwise it points to the
  // elements member of a Rep struct. Using this invariant allows the storage
  // of the arena pointer without an extra allocation in the constructor.
299 void* arena_or_elements_;
300
301 // Return pointer to elements array.
302 // pre-condition: the array must have been allocated.
303 Element* elements() const {
304 GOOGLE_DCHECK_GT(total_size_, 0);
305 // Because of above pre-condition this cast is safe.
306 return unsafe_elements();
307 }
308
  // Return pointer to elements array if it exists; otherwise either null or
  // an invalid pointer is returned. This only happens for empty repeated
  // fields, where you can't dereference the pointer anyway (the field is
  // empty).
312 Element* unsafe_elements() const {
313 return static_cast<Element*>(arena_or_elements_);
314 }
315
316 // Return pointer to the Rep struct.
  // pre-condition: the Rep must have been allocated, i.e., elements() is safe.
318 Rep* rep() const {
319 char* addr = reinterpret_cast<char*>(elements()) - offsetof(Rep, elements);
320 return reinterpret_cast<Rep*>(addr);
321 }
322
323 friend class Arena;
324 typedef void InternalArenaConstructable_;
325
326
327 // Move the contents of |from| into |to|, possibly clobbering |from| in the
328 // process. For primitive types this is just a memcpy(), but it could be
329 // specialized for non-primitive types to, say, swap each element instead.
330 void MoveArray(Element* to, Element* from, int size);
331
332 // Copy the elements of |from| into |to|.
333 void CopyArray(Element* to, const Element* from, int size);
334
335 // Internal helper expected by Arena methods.
336 inline Arena* GetArenaNoVirtual() const {
337 return (total_size_ == 0) ? static_cast<Arena*>(arena_or_elements_)
338 : rep()->arena;
339 }
340
341 // Internal helper to delete all elements and deallocate the storage.
342 // If Element has a trivial destructor (for example, if it's a fundamental
343 // type, like int32), the loop will be removed by the optimizer.
344 void InternalDeallocate(Rep* rep, int size) {
345 if (rep != NULL) {
346 Element* e = &rep->elements[0];
347 Element* limit = &rep->elements[size];
348 for (; e < limit; e++) {
349 e->~Element();
350 }
351 if (rep->arena == NULL) {
352#if defined(__GXX_DELETE_WITH_SIZE__) || defined(__cpp_sized_deallocation)
353 const size_t bytes = size * sizeof(*e) + kRepHeaderSize;
354 ::operator delete(static_cast<void*>(rep), bytes);
355#else
356 ::operator delete(static_cast<void*>(rep));
357#endif
358 }
359 }
360 }
361};
362
363template <typename Element>
364const size_t RepeatedField<Element>::kRepHeaderSize =
365 reinterpret_cast<size_t>(&reinterpret_cast<Rep*>(16)->elements[0]) - 16;
366
367namespace internal {
368template <typename It>
369class RepeatedPtrIterator;
370template <typename It, typename VoidPtr>
371class RepeatedPtrOverPtrsIterator;
372} // namespace internal
373
374namespace internal {
375
376// This is a helper template to copy an array of elements efficiently when they
377// have a trivial copy constructor, and correctly otherwise. This really
378// shouldn't be necessary, but our compiler doesn't optimize std::copy very
379// effectively.
380template <typename Element,
381 bool HasTrivialCopy =
382 std::is_pod<Element>::value>
383struct ElementCopier {
384 void operator()(Element* to, const Element* from, int array_size);
385};
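// A partial specialization near the end of this header handles the
// HasTrivialCopy == true case with a single memcpy, while the generic version
// falls back to std::copy. Illustrative effect:
//
//   internal::ElementCopier<int32>()(to, from, n);       // lowers to memcpy
//   internal::ElementCopier<SomeNonPod>()(to, from, n);  // element-wise copy
//
// (SomeNonPod is a placeholder name for any non-POD element type.)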
386
387} // namespace internal
388
389namespace internal {
390
391// type-traits helper for RepeatedPtrFieldBase: we only want to invoke
392// arena-related "copy if on different arena" behavior if the necessary methods
393// exist on the contained type. In particular, we rely on MergeFrom() existing
394// as a general proxy for the fact that a copy will work, and we also provide a
395// specific override for std::string*.
396template <typename T>
397struct TypeImplementsMergeBehaviorProbeForMergeFrom {
398 typedef char HasMerge;
399 typedef long HasNoMerge;
400
401 // We accept either of:
402 // - void MergeFrom(const T& other)
403 // - bool MergeFrom(const T& other)
404 //
405 // We mangle these names a bit to avoid compatibility issues in 'unclean'
406 // include environments that may have, e.g., "#define test ..." (yes, this
407 // exists).
408 template <typename U, typename RetType, RetType (U::*)(const U& arg)>
409 struct CheckType;
410 template <typename U>
411 static HasMerge Check(CheckType<U, void, &U::MergeFrom>*);
412 template <typename U>
413 static HasMerge Check(CheckType<U, bool, &U::MergeFrom>*);
414 template <typename U>
415 static HasNoMerge Check(...);
416
417 // Resolves to either std::true_type or std::false_type.
418 typedef std::integral_constant<bool,
419 (sizeof(Check<T>(0)) == sizeof(HasMerge))>
420 type;
421};
422
423template <typename T, typename = void>
424struct TypeImplementsMergeBehavior
425 : TypeImplementsMergeBehaviorProbeForMergeFrom<T> {};
426
427
428template <>
429struct TypeImplementsMergeBehavior<std::string> {
430 typedef std::true_type type;
431};
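// For example, TypeImplementsMergeBehavior<SomeGeneratedMessage>::type
// resolves to std::true_type (generated messages define MergeFrom()), and the
// std::string specialization above is explicitly std::true_type, while a type
// with no MergeFrom() member resolves to std::false_type.
// (SomeGeneratedMessage is a placeholder name.)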
432
433template <typename T>
434struct IsMovable
435 : std::integral_constant<bool, std::is_move_constructible<T>::value &&
436 std::is_move_assignable<T>::value> {};
437
438// This is the common base class for RepeatedPtrFields. It deals only in void*
439// pointers. Users should not use this interface directly.
440//
441// The methods of this interface correspond to the methods of RepeatedPtrField,
442// but may have a template argument called TypeHandler. Its signature is:
443// class TypeHandler {
444// public:
445// typedef MyType Type;
446// // WeakType is almost always the same as MyType, but we use it in
447// // ImplicitWeakTypeHandler.
448// typedef MyType WeakType;
449// static Type* New();
450// static WeakType* NewFromPrototype(const WeakType* prototype,
451// Arena* arena);
452// static void Delete(Type*);
453// static void Clear(Type*);
454// static void Merge(const Type& from, Type* to);
455//
456// // Only needs to be implemented if SpaceUsedExcludingSelf() is called.
457// static int SpaceUsedLong(const Type&);
458// };
459class PROTOBUF_EXPORT RepeatedPtrFieldBase {
460 protected:
461 RepeatedPtrFieldBase();
462 explicit RepeatedPtrFieldBase(Arena* arena);
463 ~RepeatedPtrFieldBase() {}
464
465 // Must be called from destructor.
466 template <typename TypeHandler>
467 void Destroy();
468
469 bool empty() const;
470 int size() const;
471
472 template <typename TypeHandler>
473 const typename TypeHandler::Type& at(int index) const;
474 template <typename TypeHandler>
475 typename TypeHandler::Type& at(int index);
476
477 template <typename TypeHandler>
478 typename TypeHandler::Type* Mutable(int index);
479 template <typename TypeHandler>
480 void Delete(int index);
481 template <typename TypeHandler>
482 typename TypeHandler::Type* Add(typename TypeHandler::Type* prototype = NULL);
483
484 public:
485 // The next few methods are public so that they can be called from generated
486 // code when implicit weak fields are used, but they should never be called by
487 // application code.
488
489 template <typename TypeHandler>
490 const typename TypeHandler::WeakType& Get(int index) const;
491
492 // Creates and adds an element using the given prototype, without introducing
493 // a link-time dependency on the concrete message type. This method is used to
494 // implement implicit weak fields. The prototype may be NULL, in which case an
495 // ImplicitWeakMessage will be used as a placeholder.
496 MessageLite* AddWeak(const MessageLite* prototype);
497
498 template <typename TypeHandler>
499 void Clear();
500
501 template <typename TypeHandler>
502 void MergeFrom(const RepeatedPtrFieldBase& other);
503
504 inline void InternalSwap(RepeatedPtrFieldBase* other);
505
506 protected:
507 template <
508 typename TypeHandler,
509 typename std::enable_if<TypeHandler::Movable::value>::type* = nullptr>
510 void Add(typename TypeHandler::Type&& value);
511
512 template <typename TypeHandler>
513 void RemoveLast();
514 template <typename TypeHandler>
515 void CopyFrom(const RepeatedPtrFieldBase& other);
516
517 void CloseGap(int start, int num);
518
519 void Reserve(int new_size);
520
521 int Capacity() const;
522
523 // Used for constructing iterators.
524 void* const* raw_data() const;
525 void** raw_mutable_data() const;
526
527 template <typename TypeHandler>
528 typename TypeHandler::Type** mutable_data();
529 template <typename TypeHandler>
530 const typename TypeHandler::Type* const* data() const;
531
532 template <typename TypeHandler>
533 PROTOBUF_ALWAYS_INLINE void Swap(RepeatedPtrFieldBase* other);
534
535 void SwapElements(int index1, int index2);
536
537 template <typename TypeHandler>
538 size_t SpaceUsedExcludingSelfLong() const;
539
540 // Advanced memory management --------------------------------------
541
542 // Like Add(), but if there are no cleared objects to use, returns NULL.
543 template <typename TypeHandler>
544 typename TypeHandler::Type* AddFromCleared();
545
546 template <typename TypeHandler>
547 void AddAllocated(typename TypeHandler::Type* value) {
548 typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
549 AddAllocatedInternal<TypeHandler>(value, t);
550 }
551
552 template <typename TypeHandler>
553 void UnsafeArenaAddAllocated(typename TypeHandler::Type* value);
554
555 template <typename TypeHandler>
556 typename TypeHandler::Type* ReleaseLast() {
557 typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
558 return ReleaseLastInternal<TypeHandler>(t);
559 }
560
  // Releases the last element and returns it, but does not do an out-of-arena
  // copy. Instead it returns the raw pointer to the element contained in the
  // arena.
563 template <typename TypeHandler>
564 typename TypeHandler::Type* UnsafeArenaReleaseLast();
565
566 int ClearedCount() const;
567 template <typename TypeHandler>
568 void AddCleared(typename TypeHandler::Type* value);
569 template <typename TypeHandler>
570 typename TypeHandler::Type* ReleaseCleared();
571
572 template <typename TypeHandler>
573 void AddAllocatedInternal(typename TypeHandler::Type* value, std::true_type);
574 template <typename TypeHandler>
575 void AddAllocatedInternal(typename TypeHandler::Type* value, std::false_type);
576
577 template <typename TypeHandler>
578 PROTOBUF_NOINLINE void AddAllocatedSlowWithCopy(
579 typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena);
580 template <typename TypeHandler>
581 PROTOBUF_NOINLINE void AddAllocatedSlowWithoutCopy(
582 typename TypeHandler::Type* value);
583
584 template <typename TypeHandler>
585 typename TypeHandler::Type* ReleaseLastInternal(std::true_type);
586 template <typename TypeHandler>
587 typename TypeHandler::Type* ReleaseLastInternal(std::false_type);
588
589 template <typename TypeHandler>
590 PROTOBUF_NOINLINE void SwapFallback(RepeatedPtrFieldBase* other);
591
592 inline Arena* GetArenaNoVirtual() const { return arena_; }
593
594 private:
595 static const int kInitialSize = 0;
596 // A few notes on internal representation:
597 //
598 // We use an indirected approach, with struct Rep, to keep
599 // sizeof(RepeatedPtrFieldBase) equivalent to what it was before arena support
600 // was added, namely, 3 8-byte machine words on x86-64. An instance of Rep is
601 // allocated only when the repeated field is non-empty, and it is a
602 // dynamically-sized struct (the header is directly followed by elements[]).
603 // We place arena_ and current_size_ directly in the object to avoid cache
604 // misses due to the indirection, because these fields are checked frequently.
605 // Placing all fields directly in the RepeatedPtrFieldBase instance costs
606 // significant performance for memory-sensitive workloads.
607 Arena* arena_;
608 int current_size_;
609 int total_size_;
610 struct Rep {
611 int allocated_size;
612 void* elements[1];
613 };
614 static const size_t kRepHeaderSize = sizeof(Rep) - sizeof(void*);
  // Points to a dynamically allocated header (allocated_size) immediately
  // followed by the elements array; NULL until storage is first allocated.
617 Rep* rep_;
618
619 template <typename TypeHandler>
620 static inline typename TypeHandler::Type* cast(void* element) {
621 return reinterpret_cast<typename TypeHandler::Type*>(element);
622 }
623 template <typename TypeHandler>
624 static inline const typename TypeHandler::Type* cast(const void* element) {
625 return reinterpret_cast<const typename TypeHandler::Type*>(element);
626 }
627
628 // Non-templated inner function to avoid code duplication. Takes a function
629 // pointer to the type-specific (templated) inner allocate/merge loop.
630 void MergeFromInternal(const RepeatedPtrFieldBase& other,
631 void (RepeatedPtrFieldBase::*inner_loop)(void**,
632 void**, int,
633 int));
634
635 template <typename TypeHandler>
636 void MergeFromInnerLoop(void** our_elems, void** other_elems, int length,
637 int already_allocated);
638
639 // Internal helper: extend array space if necessary to contain |extend_amount|
640 // more elements, and return a pointer to the element immediately following
641 // the old list of elements. This interface factors out common behavior from
642 // Reserve() and MergeFrom() to reduce code size. |extend_amount| must be > 0.
643 void** InternalExtend(int extend_amount);
644
645 // The reflection implementation needs to call protected methods directly,
646 // reinterpreting pointers as being to Message instead of a specific Message
647 // subclass.
648 friend class ::PROTOBUF_NAMESPACE_ID::Reflection;
649
650 // ExtensionSet stores repeated message extensions as
651 // RepeatedPtrField<MessageLite>, but non-lite ExtensionSets need to implement
652 // SpaceUsedLong(), and thus need to call SpaceUsedExcludingSelfLong()
653 // reinterpreting MessageLite as Message. ExtensionSet also needs to make use
654 // of AddFromCleared(), which is not part of the public interface.
655 friend class ExtensionSet;
656
657 // The MapFieldBase implementation needs to call protected methods directly,
658 // reinterpreting pointers as being to Message instead of a specific Message
659 // subclass.
660 friend class MapFieldBase;
661
662 // The table-driven MergePartialFromCodedStream implementation needs to
663 // operate on RepeatedPtrField<MessageLite>.
664 friend class MergePartialFromCodedStreamHelper;
665
666 // To parse directly into a proto2 generated class, the upb class GMR_Handlers
667 // needs to be able to modify a RepeatedPtrFieldBase directly.
668 friend class upb::google_opensource::GMR_Handlers;
669
670 friend class AccessorHelper;
671
672 GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedPtrFieldBase);
673};
674
675template <typename GenericType>
676class GenericTypeHandler {
677 public:
678 typedef GenericType Type;
679 typedef GenericType WeakType;
680 using Movable = IsMovable<GenericType>;
681
682 static inline GenericType* New(Arena* arena) {
683 return Arena::CreateMaybeMessage<Type>(arena);
684 }
685 static inline GenericType* New(Arena* arena, GenericType&& value) {
686 return Arena::Create<GenericType>(arena, std::move(value));
687 }
688 static inline GenericType* NewFromPrototype(const GenericType* prototype,
689 Arena* arena = NULL);
690 static inline void Delete(GenericType* value, Arena* arena) {
691 if (arena == NULL) {
692 delete value;
693 }
694 }
695 static inline Arena* GetArena(GenericType* value) {
696 return Arena::GetArena<Type>(value);
697 }
698 static inline void* GetMaybeArenaPointer(GenericType* value) {
699 return Arena::GetArena<Type>(value);
700 }
701
702 static inline void Clear(GenericType* value) { value->Clear(); }
703 PROTOBUF_NOINLINE
704 static void Merge(const GenericType& from, GenericType* to);
705 static inline size_t SpaceUsedLong(const GenericType& value) {
706 return value.SpaceUsedLong();
707 }
708};
709
710template <typename GenericType>
711GenericType* GenericTypeHandler<GenericType>::NewFromPrototype(
712 const GenericType* /* prototype */, Arena* arena) {
713 return New(arena);
714}
715template <typename GenericType>
716void GenericTypeHandler<GenericType>::Merge(const GenericType& from,
717 GenericType* to) {
718 to->MergeFrom(from);
719}
720
// NewFromPrototype() and Merge() are not defined inline here, as we will need
// to do a virtual function dispatch anyway to go from Message* to call
// New/Merge.
724template <>
725MessageLite* GenericTypeHandler<MessageLite>::NewFromPrototype(
726 const MessageLite* prototype, Arena* arena);
727template <>
728inline Arena* GenericTypeHandler<MessageLite>::GetArena(MessageLite* value) {
729 return value->GetArena();
730}
731template <>
732inline void* GenericTypeHandler<MessageLite>::GetMaybeArenaPointer(
733 MessageLite* value) {
734 return value->GetMaybeArenaPointer();
735}
736template <>
737void GenericTypeHandler<MessageLite>::Merge(const MessageLite& from,
738 MessageLite* to);
739template <>
740inline void GenericTypeHandler<std::string>::Clear(std::string* value) {
741 value->clear();
742}
743template <>
744void GenericTypeHandler<std::string>::Merge(const std::string& from,
745 std::string* to);
746
// Declarations of the specializations. We cannot define them here, because the
// header that defines ProtocolMessage depends on types defined in this header.
749#define DECLARE_SPECIALIZATIONS_FOR_BASE_PROTO_TYPES(TypeName) \
750 template <> \
751 PROTOBUF_EXPORT TypeName* GenericTypeHandler<TypeName>::NewFromPrototype( \
752 const TypeName* prototype, Arena* arena); \
753 template <> \
754 PROTOBUF_EXPORT Arena* GenericTypeHandler<TypeName>::GetArena( \
755 TypeName* value); \
756 template <> \
757 PROTOBUF_EXPORT void* GenericTypeHandler<TypeName>::GetMaybeArenaPointer( \
758 TypeName* value);
759
760// Message specialization bodies defined in message.cc. This split is necessary
761// to allow proto2-lite (which includes this header) to be independent of
762// Message.
763DECLARE_SPECIALIZATIONS_FOR_BASE_PROTO_TYPES(Message)
764
765
766#undef DECLARE_SPECIALIZATIONS_FOR_BASE_PROTO_TYPES
767
768class StringTypeHandler {
769 public:
770 typedef std::string Type;
771 typedef std::string WeakType;
772 using Movable = IsMovable<Type>;
773
774 static inline std::string* New(Arena* arena) {
775 return Arena::Create<std::string>(arena);
776 }
777 static inline std::string* New(Arena* arena, std::string&& value) {
778 return Arena::Create<std::string>(arena, std::move(value));
779 }
780 static inline std::string* NewFromPrototype(const std::string*,
781 Arena* arena) {
782 return New(arena);
783 }
784 static inline Arena* GetArena(std::string*) { return NULL; }
785 static inline void* GetMaybeArenaPointer(std::string* /* value */) {
786 return NULL;
787 }
788 static inline void Delete(std::string* value, Arena* arena) {
789 if (arena == NULL) {
790 delete value;
791 }
792 }
793 static inline void Clear(std::string* value) { value->clear(); }
794 static inline void Merge(const std::string& from, std::string* to) {
795 *to = from;
796 }
797 static size_t SpaceUsedLong(const std::string& value) {
798 return sizeof(value) + StringSpaceUsedExcludingSelfLong(value);
799 }
800};
801
802} // namespace internal
803
804// RepeatedPtrField is like RepeatedField, but used for repeated strings or
805// Messages.
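//
// A minimal usage sketch (contents illustrative):
//
//   RepeatedPtrField<std::string> names;
//   names.Add()->assign("alice");  // Add() returns a pointer to a new element
//   *names.Add() = "bob";
//   for (const std::string& name : names) {
//     // ...
//   }
//   names.RemoveLast();            // the element is retained for later reuse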
806template <typename Element>
807class RepeatedPtrField final : private internal::RepeatedPtrFieldBase {
808 public:
809 RepeatedPtrField();
810 explicit RepeatedPtrField(Arena* arena);
811
812 RepeatedPtrField(const RepeatedPtrField& other);
813 template <typename Iter>
814 RepeatedPtrField(Iter begin, const Iter& end);
815 ~RepeatedPtrField();
816
817 RepeatedPtrField& operator=(const RepeatedPtrField& other);
818
819 RepeatedPtrField(RepeatedPtrField&& other) noexcept;
820 RepeatedPtrField& operator=(RepeatedPtrField&& other) noexcept;
821
822 bool empty() const;
823 int size() const;
824
825 const Element& Get(int index) const;
826 Element* Mutable(int index);
827 Element* Add();
828 void Add(Element&& value);
829
830 const Element& operator[](int index) const { return Get(index); }
831 Element& operator[](int index) { return *Mutable(index); }
832
833 const Element& at(int index) const;
834 Element& at(int index);
835
836 // Remove the last element in the array.
837 // Ownership of the element is retained by the array.
838 void RemoveLast();
839
840 // Delete elements with indices in the range [start .. start+num-1].
841 // Caution: implementation moves all elements with indices [start+num .. ].
842 // Calling this routine inside a loop can cause quadratic behavior.
843 void DeleteSubrange(int start, int num);
844
845 void Clear();
846 void MergeFrom(const RepeatedPtrField& other);
847 void CopyFrom(const RepeatedPtrField& other);
848
849 // Reserve space to expand the field to at least the given size. This only
850 // resizes the pointer array; it doesn't allocate any objects. If the
851 // array is grown, it will always be at least doubled in size.
852 void Reserve(int new_size);
853
854 int Capacity() const;
855
856 // Gets the underlying array. This pointer is possibly invalidated by
857 // any add or remove operation.
858 Element** mutable_data();
859 const Element* const* data() const;
860
  // Swap entire contents with "other". If they are on separate arenas, then
  // the data is copied between them.
863 void Swap(RepeatedPtrField* other);
864
865 // Swap entire contents with "other". Caller should guarantee that either both
866 // fields are on the same arena or both are on the heap. Swapping between
867 // different arenas with this function is disallowed and is caught via
868 // GOOGLE_DCHECK.
869 void UnsafeArenaSwap(RepeatedPtrField* other);
870
871 // Swap two elements.
872 void SwapElements(int index1, int index2);
873
874 // STL-like iterator support
875 typedef internal::RepeatedPtrIterator<Element> iterator;
876 typedef internal::RepeatedPtrIterator<const Element> const_iterator;
877 typedef Element value_type;
878 typedef value_type& reference;
879 typedef const value_type& const_reference;
880 typedef value_type* pointer;
881 typedef const value_type* const_pointer;
882 typedef int size_type;
883 typedef ptrdiff_t difference_type;
884
885 iterator begin();
886 const_iterator begin() const;
887 const_iterator cbegin() const;
888 iterator end();
889 const_iterator end() const;
890 const_iterator cend() const;
891
892 // Reverse iterator support
893 typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
894 typedef std::reverse_iterator<iterator> reverse_iterator;
895 reverse_iterator rbegin() { return reverse_iterator(end()); }
896 const_reverse_iterator rbegin() const {
897 return const_reverse_iterator(end());
898 }
899 reverse_iterator rend() { return reverse_iterator(begin()); }
900 const_reverse_iterator rend() const {
901 return const_reverse_iterator(begin());
902 }
903
904 // Custom STL-like iterator that iterates over and returns the underlying
905 // pointers to Element rather than Element itself.
906 typedef internal::RepeatedPtrOverPtrsIterator<Element*, void*>
907 pointer_iterator;
908 typedef internal::RepeatedPtrOverPtrsIterator<const Element* const,
909 const void* const>
910 const_pointer_iterator;
911 pointer_iterator pointer_begin();
912 const_pointer_iterator pointer_begin() const;
913 pointer_iterator pointer_end();
914 const_pointer_iterator pointer_end() const;
915
916 // Returns (an estimate of) the number of bytes used by the repeated field,
917 // excluding sizeof(*this).
918 size_t SpaceUsedExcludingSelfLong() const;
919
920 int SpaceUsedExcludingSelf() const {
921 return internal::ToIntSize(SpaceUsedExcludingSelfLong());
922 }
923
924 // Advanced memory management --------------------------------------
925 // When hardcore memory management becomes necessary -- as it sometimes
926 // does here at Google -- the following methods may be useful.
927
928 // Add an already-allocated object, passing ownership to the
929 // RepeatedPtrField.
930 //
931 // Note that some special behavior occurs with respect to arenas:
932 //
933 // (i) if this field holds submessages, the new submessage will be copied if
934 // the original is in an arena and this RepeatedPtrField is either in a
935 // different arena, or on the heap.
  // (ii) if this field holds strings, the passed-in string *must* be
  // heap-allocated, not arena-allocated. There is no way to check this at
  // runtime, so user beware.
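  //
  // A heap-only sketch (no arenas involved; MyMessage is a placeholder name):
  //
  //   RepeatedPtrField<MyMessage> field;   // on the heap
  //   MyMessage* msg = new MyMessage;
  //   field.AddAllocated(msg);             // field now owns msg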
939 void AddAllocated(Element* value);
940
941 // Remove the last element and return it, passing ownership to the caller.
942 // Requires: size() > 0
943 //
944 // If this RepeatedPtrField is on an arena, an object copy is required to pass
945 // ownership back to the user (for compatible semantics). Use
946 // UnsafeArenaReleaseLast() if this behavior is undesired.
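  //
  // For example (illustrative; MyMessage is a placeholder type), the returned
  // pointer is always heap-owned and safe to wrap:
  //
  //   std::unique_ptr<MyMessage> last(field.ReleaseLast());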
947 Element* ReleaseLast();
948
949 // Add an already-allocated object, skipping arena-ownership checks. The user
950 // must guarantee that the given object is in the same arena as this
951 // RepeatedPtrField.
952 // It is also useful in legacy code that uses temporary ownership to avoid
953 // copies. Example:
954 // RepeatedPtrField<T> temp_field;
955 // temp_field.AddAllocated(new T);
956 // ... // Do something with temp_field
957 // temp_field.ExtractSubrange(0, temp_field.size(), nullptr);
  // If you put temp_field on an arena this fails, because ownership transfers
  // to the arena at the "AddAllocated" call and is never released, causing a
  // double delete. UnsafeArenaAddAllocated prevents this.
961 void UnsafeArenaAddAllocated(Element* value);
962
963 // Remove the last element and return it. Works only when operating on an
964 // arena. The returned pointer is to the original object in the arena, hence
965 // has the arena's lifetime.
966 // Requires: current_size_ > 0
967 Element* UnsafeArenaReleaseLast();
968
969 // Extract elements with indices in the range "[start .. start+num-1]".
970 // The caller assumes ownership of the extracted elements and is responsible
971 // for deleting them when they are no longer needed.
972 // If "elements" is non-NULL, then pointers to the extracted elements
973 // are stored in "elements[0 .. num-1]" for the convenience of the caller.
974 // If "elements" is NULL, then the caller must use some other mechanism
975 // to perform any further operations (like deletion) on these elements.
976 // Caution: implementation also moves elements with indices [start+num ..].
977 // Calling this routine inside a loop can cause quadratic behavior.
978 //
979 // Memory copying behavior is identical to ReleaseLast(), described above: if
980 // this RepeatedPtrField is on an arena, an object copy is performed for each
981 // returned element, so that all returned element pointers are to
982 // heap-allocated copies. If this copy is not desired, the user should call
983 // UnsafeArenaExtractSubrange().
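  //
  // A sketch of typical use (start/num/MyMessage illustrative):
  //
  //   std::vector<MyMessage*> extracted(num);
  //   field.ExtractSubrange(start, num, extracted.data());
  //   // The caller now owns, and must eventually delete, extracted[0..num-1].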
984 void ExtractSubrange(int start, int num, Element** elements);
985
986 // Identical to ExtractSubrange() described above, except that when this
987 // repeated field is on an arena, no object copies are performed. Instead, the
988 // raw object pointers are returned. Thus, if on an arena, the returned
989 // objects must not be freed, because they will not be heap-allocated objects.
990 void UnsafeArenaExtractSubrange(int start, int num, Element** elements);
991
992 // When elements are removed by calls to RemoveLast() or Clear(), they
993 // are not actually freed. Instead, they are cleared and kept so that
994 // they can be reused later. This can save lots of CPU time when
995 // repeatedly reusing a protocol message for similar purposes.
996 //
997 // Hardcore programs may choose to manipulate these cleared objects
998 // to better optimize memory management using the following routines.
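  //
  // A sketch of the reuse pattern (MyMessage is a placeholder type):
  //
  //   field.Clear();                     // elements are kept, not freed
  //   MyMessage* reused = field.Add();   // Add() hands back a cleared element
  //   int banked = field.ClearedCount(); // cleared elements still held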
999
1000 // Get the number of cleared objects that are currently being kept
1001 // around for reuse.
1002 int ClearedCount() const;
1003 // Add an element to the pool of cleared objects, passing ownership to
1004 // the RepeatedPtrField. The element must be cleared prior to calling
1005 // this method.
1006 //
1007 // This method cannot be called when the repeated field is on an arena or when
1008 // |value| is; both cases will trigger a GOOGLE_DCHECK-failure.
1009 void AddCleared(Element* value);
1010 // Remove a single element from the cleared pool and return it, passing
1011 // ownership to the caller. The element is guaranteed to be cleared.
1012 // Requires: ClearedCount() > 0
  //
1015 // This method cannot be called when the repeated field is on an arena; doing
1016 // so will trigger a GOOGLE_DCHECK-failure.
1017 Element* ReleaseCleared();
1018
1019 // Removes the element referenced by position.
1020 //
1021 // Returns an iterator to the element immediately following the removed
1022 // element.
1023 //
1024 // Invalidates all iterators at or after the removed element, including end().
1025 iterator erase(const_iterator position);
1026
1027 // Removes the elements in the range [first, last).
1028 //
1029 // Returns an iterator to the element immediately following the removed range.
1030 //
1031 // Invalidates all iterators at or after the removed range, including end().
1032 iterator erase(const_iterator first, const_iterator last);
1033
1034 // Gets the arena on which this RepeatedPtrField stores its elements.
1035 Arena* GetArena() const { return GetArenaNoVirtual(); }
1036
1037 // For internal use only.
1038 //
1039 // This is public due to it being called by generated code.
1040 using RepeatedPtrFieldBase::InternalSwap;
1041
1042 private:
1043 // Note: RepeatedPtrField SHOULD NOT be subclassed by users.
1044 class TypeHandler;
1045
1046 // Internal arena accessor expected by helpers in Arena.
1047 inline Arena* GetArenaNoVirtual() const;
1048
1049 // Implementations for ExtractSubrange(). The copying behavior must be
1050 // included only if the type supports the necessary operations (e.g.,
1051 // MergeFrom()), so we must resolve this at compile time. ExtractSubrange()
1052 // uses SFINAE to choose one of the below implementations.
1053 void ExtractSubrangeInternal(int start, int num, Element** elements,
1054 std::true_type);
1055 void ExtractSubrangeInternal(int start, int num, Element** elements,
1056 std::false_type);
1057
1058 friend class Arena;
1059 friend class MessageLite;
1060
1061 typedef void InternalArenaConstructable_;
1062
1063};
1064
1065// implementation ====================================================
1066
1067template <typename Element>
1068inline RepeatedField<Element>::RepeatedField()
1069 : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {}
1070
1071template <typename Element>
1072inline RepeatedField<Element>::RepeatedField(Arena* arena)
1073 : current_size_(0), total_size_(0), arena_or_elements_(arena) {}
1074
1075template <typename Element>
1076inline RepeatedField<Element>::RepeatedField(const RepeatedField& other)
1077 : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
1078 if (other.current_size_ != 0) {
1079 Reserve(other.size());
1080 AddNAlreadyReserved(other.size());
1081 CopyArray(Mutable(0), &other.Get(0), other.size());
1082 }
1083}
1084
1085template <typename Element>
1086template <typename Iter>
1087RepeatedField<Element>::RepeatedField(Iter begin, const Iter& end)
1088 : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
1089 Add(begin, end);
1090}
1091
1092template <typename Element>
1093RepeatedField<Element>::~RepeatedField() {
1094 if (total_size_ > 0) {
1095 InternalDeallocate(rep(), total_size_);
1096 }
1097}
1098
1099template <typename Element>
1100inline RepeatedField<Element>& RepeatedField<Element>::operator=(
1101 const RepeatedField& other) {
1102 if (this != &other) CopyFrom(other);
1103 return *this;
1104}
1105
1106template <typename Element>
1107inline RepeatedField<Element>::RepeatedField(RepeatedField&& other) noexcept
1108 : RepeatedField() {
1109 // We don't just call Swap(&other) here because it would perform 3 copies if
1110 // other is on an arena. This field can't be on an arena because arena
1111 // construction always uses the Arena* accepting constructor.
1112 if (other.GetArenaNoVirtual()) {
1113 CopyFrom(other);
1114 } else {
1115 InternalSwap(&other);
1116 }
1117}
1118
1119template <typename Element>
1120inline RepeatedField<Element>& RepeatedField<Element>::operator=(
1121 RepeatedField&& other) noexcept {
1122 // We don't just call Swap(&other) here because it would perform 3 copies if
1123 // the two fields are on different arenas.
1124 if (this != &other) {
1125 if (this->GetArenaNoVirtual() != other.GetArenaNoVirtual()) {
1126 CopyFrom(other);
1127 } else {
1128 InternalSwap(&other);
1129 }
1130 }
1131 return *this;
1132}
1133
1134template <typename Element>
1135inline bool RepeatedField<Element>::empty() const {
1136 return current_size_ == 0;
1137}
1138
1139template <typename Element>
1140inline int RepeatedField<Element>::size() const {
1141 return current_size_;
1142}
1143
1144template <typename Element>
1145inline int RepeatedField<Element>::Capacity() const {
1146 return total_size_;
1147}
1148
1149template <typename Element>
1150inline void RepeatedField<Element>::AddAlreadyReserved(const Element& value) {
1151 GOOGLE_DCHECK_LT(current_size_, total_size_);
1152 elements()[current_size_++] = value;
1153}
1154
1155template <typename Element>
1156inline Element* RepeatedField<Element>::AddAlreadyReserved() {
1157 GOOGLE_DCHECK_LT(current_size_, total_size_);
1158 return &elements()[current_size_++];
1159}
1160
1161template <typename Element>
1162inline Element* RepeatedField<Element>::AddNAlreadyReserved(int n) {
1163 GOOGLE_DCHECK_GE(total_size_ - current_size_, n)
1164 << total_size_ << ", " << current_size_;
  // Warning: sometimes people call this when n == 0 and total_size_ == 0. In
  // this case the returned pointer points to a zero-size array (n == 0). Hence
  // we can just use unsafe_elements(), because the user cannot dereference the
  // pointer anyway.
1169 Element* ret = unsafe_elements() + current_size_;
1170 current_size_ += n;
1171 return ret;
1172}
1173
1174template <typename Element>
1175inline void RepeatedField<Element>::Resize(int new_size, const Element& value) {
1176 GOOGLE_DCHECK_GE(new_size, 0);
1177 if (new_size > current_size_) {
1178 Reserve(new_size);
1179 std::fill(&elements()[current_size_], &elements()[new_size], value);
1180 }
1181 current_size_ = new_size;
1182}
1183
1184template <typename Element>
1185inline const Element& RepeatedField<Element>::Get(int index) const {
1186 GOOGLE_DCHECK_GE(index, 0);
1187 GOOGLE_DCHECK_LT(index, current_size_);
1188 return elements()[index];
1189}
1190
1191template <typename Element>
1192inline const Element& RepeatedField<Element>::at(int index) const {
1193 GOOGLE_CHECK_GE(index, 0);
1194 GOOGLE_CHECK_LT(index, current_size_);
1195 return elements()[index];
1196}
1197
1198template <typename Element>
1199inline Element& RepeatedField<Element>::at(int index) {
1200 GOOGLE_CHECK_GE(index, 0);
1201 GOOGLE_CHECK_LT(index, current_size_);
1202 return elements()[index];
1203}
1204
1205template <typename Element>
1206inline Element* RepeatedField<Element>::Mutable(int index) {
1207 GOOGLE_DCHECK_GE(index, 0);
1208 GOOGLE_DCHECK_LT(index, current_size_);
1209 return &elements()[index];
1210}
1211
1212template <typename Element>
1213inline void RepeatedField<Element>::Set(int index, const Element& value) {
1214 GOOGLE_DCHECK_GE(index, 0);
1215 GOOGLE_DCHECK_LT(index, current_size_);
1216 elements()[index] = value;
1217}
1218
1219template <typename Element>
1220inline void RepeatedField<Element>::Add(const Element& value) {
1221 if (current_size_ == total_size_) Reserve(total_size_ + 1);
1222 elements()[current_size_++] = value;
1223}
1224
1225template <typename Element>
1226inline Element* RepeatedField<Element>::Add() {
1227 if (current_size_ == total_size_) Reserve(total_size_ + 1);
1228 return &elements()[current_size_++];
1229}
1230
1231template <typename Element>
1232template <typename Iter>
1233inline void RepeatedField<Element>::Add(Iter begin, Iter end) {
1234 int reserve = internal::CalculateReserve(begin, end);
1235 if (reserve != -1) {
1236 if (reserve == 0) {
1237 return;
1238 }
1239
1240 Reserve(reserve + size());
1241 // TODO(ckennelly): The compiler loses track of the buffer freshly
1242 // allocated by Reserve() by the time we call elements, so it cannot
1243 // guarantee that elements does not alias [begin(), end()).
1244 //
1245 // If restrict is available, annotating the pointer obtained from elements()
1246 // causes this to lower to memcpy instead of memmove.
1247 std::copy(begin, end, elements() + size());
1248 current_size_ = reserve + size();
1249 } else {
1250 for (; begin != end; ++begin) {
1251 Add(*begin);
1252 }
1253 }
1254}
1255
1256template <typename Element>
1257inline void RepeatedField<Element>::RemoveLast() {
1258 GOOGLE_DCHECK_GT(current_size_, 0);
1259 current_size_--;
1260}
1261
1262template <typename Element>
1263void RepeatedField<Element>::ExtractSubrange(int start, int num,
1264 Element* elements) {
1265 GOOGLE_DCHECK_GE(start, 0);
1266 GOOGLE_DCHECK_GE(num, 0);
1267 GOOGLE_DCHECK_LE(start + num, this->current_size_);
1268
1269 // Save the values of the removed elements if requested.
1270 if (elements != NULL) {
1271 for (int i = 0; i < num; ++i) elements[i] = this->Get(i + start);
1272 }
1273
1274 // Slide remaining elements down to fill the gap.
1275 if (num > 0) {
1276 for (int i = start + num; i < this->current_size_; ++i)
1277 this->Set(i - num, this->Get(i));
1278 this->Truncate(this->current_size_ - num);
1279 }
1280}
1281
1282template <typename Element>
1283inline void RepeatedField<Element>::Clear() {
1284 current_size_ = 0;
1285}
1286
1287template <typename Element>
1288inline void RepeatedField<Element>::MergeFrom(const RepeatedField& other) {
1289 GOOGLE_DCHECK_NE(&other, this);
1290 if (other.current_size_ != 0) {
1291 int existing_size = size();
1292 Reserve(existing_size + other.size());
1293 AddNAlreadyReserved(other.size());
1294 CopyArray(Mutable(existing_size), &other.Get(0), other.size());
1295 }
1296}
1297
1298template <typename Element>
1299inline void RepeatedField<Element>::CopyFrom(const RepeatedField& other) {
1300 if (&other == this) return;
1301 Clear();
1302 MergeFrom(other);
1303}
1304
1305template <typename Element>
1306inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
1307 const_iterator position) {
1308 return erase(position, position + 1);
1309}
1310
1311template <typename Element>
1312inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
1313 const_iterator first, const_iterator last) {
1314 size_type first_offset = first - cbegin();
1315 if (first != last) {
1316 Truncate(std::copy(last, cend(), begin() + first_offset) - cbegin());
1317 }
1318 return begin() + first_offset;
1319}
1320
1321template <typename Element>
1322inline Element* RepeatedField<Element>::mutable_data() {
1323 return unsafe_elements();
1324}
1325
1326template <typename Element>
1327inline const Element* RepeatedField<Element>::data() const {
1328 return unsafe_elements();
1329}
1330
1331template <typename Element>
1332inline void RepeatedField<Element>::InternalSwap(RepeatedField* other) {
1333 GOOGLE_DCHECK(this != other);
1334 GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
1335
1336 std::swap(arena_or_elements_, other->arena_or_elements_);
1337 std::swap(current_size_, other->current_size_);
1338 std::swap(total_size_, other->total_size_);
1339}
1340
1341template <typename Element>
1342void RepeatedField<Element>::Swap(RepeatedField* other) {
1343 if (this == other) return;
1344 if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
1345 InternalSwap(other);
1346 } else {
1347 RepeatedField<Element> temp(other->GetArenaNoVirtual());
1348 temp.MergeFrom(*this);
1349 CopyFrom(*other);
1350 other->UnsafeArenaSwap(&temp);
1351 }
1352}
1353
1354template <typename Element>
1355void RepeatedField<Element>::UnsafeArenaSwap(RepeatedField* other) {
1356 if (this == other) return;
1357 InternalSwap(other);
1358}
1359
1360template <typename Element>
1361void RepeatedField<Element>::SwapElements(int index1, int index2) {
1362 using std::swap; // enable ADL with fallback
1363 swap(elements()[index1], elements()[index2]);
1364}
1365
1366template <typename Element>
1367inline typename RepeatedField<Element>::iterator
1368RepeatedField<Element>::begin() {
1369 return unsafe_elements();
1370}
1371template <typename Element>
1372inline typename RepeatedField<Element>::const_iterator
1373RepeatedField<Element>::begin() const {
1374 return unsafe_elements();
1375}
1376template <typename Element>
1377inline typename RepeatedField<Element>::const_iterator
1378RepeatedField<Element>::cbegin() const {
1379 return unsafe_elements();
1380}
1381template <typename Element>
1382inline typename RepeatedField<Element>::iterator RepeatedField<Element>::end() {
1383 return unsafe_elements() + current_size_;
1384}
1385template <typename Element>
1386inline typename RepeatedField<Element>::const_iterator
1387RepeatedField<Element>::end() const {
1388 return unsafe_elements() + current_size_;
1389}
1390template <typename Element>
1391inline typename RepeatedField<Element>::const_iterator
1392RepeatedField<Element>::cend() const {
1393 return unsafe_elements() + current_size_;
1394}
1395
1396template <typename Element>
1397inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const {
1398 return total_size_ > 0 ? (total_size_ * sizeof(Element) + kRepHeaderSize) : 0;
1399}
1400
1401// Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
1402// amount of code bloat.
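// Growth note: the capacity below at least doubles and never starts smaller
// than kMinRepeatedFieldAllocationSize, so repeated Add() calls on an empty
// field produce a capacity sequence of 4, 8, 16, 32, ... (illustrative).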
1403template <typename Element>
1404void RepeatedField<Element>::Reserve(int new_size) {
1405 if (total_size_ >= new_size) return;
1406 Rep* old_rep = total_size_ > 0 ? rep() : NULL;
1407 Rep* new_rep;
1408 Arena* arena = GetArenaNoVirtual();
1409 new_size = std::max(internal::kMinRepeatedFieldAllocationSize,
1410 std::max(total_size_ * 2, new_size));
1411 GOOGLE_DCHECK_LE(
1412 static_cast<size_t>(new_size),
1413 (std::numeric_limits<size_t>::max() - kRepHeaderSize) / sizeof(Element))
1414 << "Requested size is too large to fit into size_t.";
1415 size_t bytes =
1416 kRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
1417 if (arena == NULL) {
1418 new_rep = static_cast<Rep*>(::operator new(bytes));
1419 } else {
1420 new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
1421 }
1422 new_rep->arena = arena;
1423 int old_total_size = total_size_;
1424 total_size_ = new_size;
1425 arena_or_elements_ = new_rep->elements;
1426 // Invoke placement-new on newly allocated elements. We shouldn't have to do
1427 // this, since Element is supposed to be POD, but a previous version of this
1428 // code allocated storage with "new Element[size]" and some code uses
1429 // RepeatedField with non-POD types, relying on constructor invocation. If
1430 // Element has a trivial constructor (e.g., int32), gcc (tested with -O2)
1431 // completely removes this loop because the loop body is empty, so this has no
1432 // effect unless its side-effects are required for correctness.
1433 // Note that we do this before MoveArray() below because Element's copy
1434 // assignment implementation will want an initialized instance first.
1435 Element* e = &elements()[0];
1436 Element* limit = e + total_size_;
1437 for (; e < limit; e++) {
1438 new (e) Element;
1439 }
1440 if (current_size_ > 0) {
1441 MoveArray(&elements()[0], old_rep->elements, current_size_);
1442 }
1443
  // Likewise, we need to invoke destructors on the old array.
  InternalDeallocate(old_rep, old_total_size);
}
1448
1449template <typename Element>
1450inline void RepeatedField<Element>::Truncate(int new_size) {
1451 GOOGLE_DCHECK_LE(new_size, current_size_);
1452 if (current_size_ > 0) {
1453 current_size_ = new_size;
1454 }
1455}
1456
1457template <typename Element>
1458inline void RepeatedField<Element>::MoveArray(Element* to, Element* from,
1459 int array_size) {
1460 CopyArray(to, from, array_size);
1461}
1462
1463template <typename Element>
1464inline void RepeatedField<Element>::CopyArray(Element* to, const Element* from,
1465 int array_size) {
1466 internal::ElementCopier<Element>()(to, from, array_size);
1467}
1468
1469namespace internal {
1470
1471template <typename Element, bool HasTrivialCopy>
1472void ElementCopier<Element, HasTrivialCopy>::operator()(Element* to,
1473 const Element* from,
1474 int array_size) {
1475 std::copy(from, from + array_size, to);
1476}
1477
1478template <typename Element>
1479struct ElementCopier<Element, true> {
1480 void operator()(Element* to, const Element* from, int array_size) {
1481 memcpy(to, from, static_cast<size_t>(array_size) * sizeof(Element));
1482 }
1483};
1484
1485} // namespace internal
1486
1487
1488// -------------------------------------------------------------------
1489
1490namespace internal {
1491
1492inline RepeatedPtrFieldBase::RepeatedPtrFieldBase()
1493 : arena_(NULL), current_size_(0), total_size_(0), rep_(NULL) {}
1494
1495inline RepeatedPtrFieldBase::RepeatedPtrFieldBase(Arena* arena)
1496 : arena_(arena), current_size_(0), total_size_(0), rep_(NULL) {}
1497
1498template <typename TypeHandler>
1499void RepeatedPtrFieldBase::Destroy() {
1500 if (rep_ != NULL && arena_ == NULL) {
1501 int n = rep_->allocated_size;
1502 void* const* elements = rep_->elements;
1503 for (int i = 0; i < n; i++) {
1504 TypeHandler::Delete(cast<TypeHandler>(elements[i]), NULL);
1505 }
1506#if defined(__GXX_DELETE_WITH_SIZE__) || defined(__cpp_sized_deallocation)
1507 const size_t size = total_size_ * sizeof(elements[0]) + kRepHeaderSize;
1508 ::operator delete(static_cast<void*>(rep_), size);
1509#else
1510 ::operator delete(static_cast<void*>(rep_));
1511#endif
1512 }
1513 rep_ = NULL;
1514}
1515
1516template <typename TypeHandler>
1517inline void RepeatedPtrFieldBase::Swap(RepeatedPtrFieldBase* other) {
1518 if (other->GetArenaNoVirtual() == GetArenaNoVirtual()) {
1519 InternalSwap(other);
1520 } else {
1521 SwapFallback<TypeHandler>(other);
1522 }
1523}
1524
1525template <typename TypeHandler>
1526void RepeatedPtrFieldBase::SwapFallback(RepeatedPtrFieldBase* other) {
1527 GOOGLE_DCHECK(other->GetArenaNoVirtual() != GetArenaNoVirtual());
1528
1529 // Copy semantics in this case. We try to improve efficiency by placing the
1530 // temporary on |other|'s arena so that messages are copied cross-arena only
1531 // once, not twice.
1532 RepeatedPtrFieldBase temp(other->GetArenaNoVirtual());
1533 temp.MergeFrom<TypeHandler>(*this);
1534 this->Clear<TypeHandler>();
1535 this->MergeFrom<TypeHandler>(*other);
1536 other->Clear<TypeHandler>();
1537 other->InternalSwap(&temp);
1538 temp.Destroy<TypeHandler>(); // Frees rep_ if `other` had no arena.
1539}
1540
1541inline bool RepeatedPtrFieldBase::empty() const { return current_size_ == 0; }
1542
1543inline int RepeatedPtrFieldBase::size() const { return current_size_; }
1544
1545template <typename TypeHandler>
1546inline const typename TypeHandler::WeakType& RepeatedPtrFieldBase::Get(
1547 int index) const {
1548 GOOGLE_DCHECK_GE(index, 0);
1549 GOOGLE_DCHECK_LT(index, current_size_);
1550 return *cast<TypeHandler>(rep_->elements[index]);
1551}
1552
1553template <typename TypeHandler>
1554inline const typename TypeHandler::Type& RepeatedPtrFieldBase::at(
1555 int index) const {
1556 GOOGLE_CHECK_GE(index, 0);
1557 GOOGLE_CHECK_LT(index, current_size_);
1558 return *cast<TypeHandler>(rep_->elements[index]);
1559}
1560
1561template <typename TypeHandler>
1562inline typename TypeHandler::Type& RepeatedPtrFieldBase::at(int index) {
1563 GOOGLE_CHECK_GE(index, 0);
1564 GOOGLE_CHECK_LT(index, current_size_);
1565 return *cast<TypeHandler>(rep_->elements[index]);
1566}
1567
1568template <typename TypeHandler>
1569inline typename TypeHandler::Type* RepeatedPtrFieldBase::Mutable(int index) {
1570 GOOGLE_DCHECK_GE(index, 0);
1571 GOOGLE_DCHECK_LT(index, current_size_);
1572 return cast<TypeHandler>(rep_->elements[index]);
1573}
1574
1575template <typename TypeHandler>
1576inline void RepeatedPtrFieldBase::Delete(int index) {
1577 GOOGLE_DCHECK_GE(index, 0);
1578 GOOGLE_DCHECK_LT(index, current_size_);
1579 TypeHandler::Delete(cast<TypeHandler>(rep_->elements[index]), arena_);
1580}
1581
1582template <typename TypeHandler>
1583inline typename TypeHandler::Type* RepeatedPtrFieldBase::Add(
1584 typename TypeHandler::Type* prototype) {
1585 if (rep_ != NULL && current_size_ < rep_->allocated_size) {
1586 return cast<TypeHandler>(rep_->elements[current_size_++]);
1587 }
1588 if (!rep_ || rep_->allocated_size == total_size_) {
1589 Reserve(total_size_ + 1);
1590 }
1591 ++rep_->allocated_size;
1592 typename TypeHandler::Type* result =
1593 TypeHandler::NewFromPrototype(prototype, arena_);
1594 rep_->elements[current_size_++] = result;
1595 return result;
1596}
1597
1598template <typename TypeHandler,
1599 typename std::enable_if<TypeHandler::Movable::value>::type*>
1600inline void RepeatedPtrFieldBase::Add(typename TypeHandler::Type&& value) {
1601 if (rep_ != NULL && current_size_ < rep_->allocated_size) {
1602 *cast<TypeHandler>(rep_->elements[current_size_++]) = std::move(value);
1603 return;
1604 }
1605 if (!rep_ || rep_->allocated_size == total_size_) {
1606 Reserve(total_size_ + 1);
1607 }
1608 ++rep_->allocated_size;
1609 typename TypeHandler::Type* result =
1610 TypeHandler::New(arena_, std::move(value));
1611 rep_->elements[current_size_++] = result;
1612}
1613
1614template <typename TypeHandler>
1615inline void RepeatedPtrFieldBase::RemoveLast() {
1616 GOOGLE_DCHECK_GT(current_size_, 0);
1617 TypeHandler::Clear(cast<TypeHandler>(rep_->elements[--current_size_]));
1618}
1619
1620template <typename TypeHandler>
1621void RepeatedPtrFieldBase::Clear() {
1622 const int n = current_size_;
1623 GOOGLE_DCHECK_GE(n, 0);
1624 if (n > 0) {
1625 void* const* elements = rep_->elements;
1626 int i = 0;
1627 do {
1628 TypeHandler::Clear(cast<TypeHandler>(elements[i++]));
1629 } while (i < n);
1630 current_size_ = 0;
1631 }
1632}
1633
1634// To avoid unnecessary code duplication and reduce binary size, we use a
1635// layered approach to implementing MergeFrom(). The toplevel method is
1636// templated, so we get a small thunk per concrete message type in the binary.
1637// This calls a shared implementation with most of the logic, passing a function
1638// pointer to another type-specific piece of code that calls the object-allocate
1639// and merge handlers.
1640template <typename TypeHandler>
1641inline void RepeatedPtrFieldBase::MergeFrom(const RepeatedPtrFieldBase& other) {
1642 GOOGLE_DCHECK_NE(&other, this);
1643 if (other.current_size_ == 0) return;
1644 MergeFromInternal(other,
1645 &RepeatedPtrFieldBase::MergeFromInnerLoop<TypeHandler>);
1646}
1647
1648inline void RepeatedPtrFieldBase::MergeFromInternal(
1649 const RepeatedPtrFieldBase& other,
1650 void (RepeatedPtrFieldBase::*inner_loop)(void**, void**, int, int)) {
1651 // Note: wrapper has already guaranteed that other.rep_ != NULL here.
1652 int other_size = other.current_size_;
1653 void** other_elements = other.rep_->elements;
1654 void** new_elements = InternalExtend(other_size);
1655 int allocated_elems = rep_->allocated_size - current_size_;
1656 (this->*inner_loop)(new_elements, other_elements, other_size,
1657 allocated_elems);
1658 current_size_ += other_size;
1659 if (rep_->allocated_size < current_size_) {
1660 rep_->allocated_size = current_size_;
1661 }
1662}
1663
// Merges other_elems into our_elems.
1665template <typename TypeHandler>
1666void RepeatedPtrFieldBase::MergeFromInnerLoop(void** our_elems,
1667 void** other_elems, int length,
1668 int already_allocated) {
1669 // Split into two loops, over ranges [0, allocated) and [allocated, length),
1670 // to avoid a branch within the loop.
1671 for (int i = 0; i < already_allocated && i < length; i++) {
1672 // Already allocated: use existing element.
1673 typename TypeHandler::WeakType* other_elem =
1674 reinterpret_cast<typename TypeHandler::WeakType*>(other_elems[i]);
1675 typename TypeHandler::WeakType* new_elem =
1676 reinterpret_cast<typename TypeHandler::WeakType*>(our_elems[i]);
1677 TypeHandler::Merge(*other_elem, new_elem);
1678 }
1679 Arena* arena = GetArenaNoVirtual();
1680 for (int i = already_allocated; i < length; i++) {
1681 // Not allocated: alloc a new element first, then merge it.
1682 typename TypeHandler::WeakType* other_elem =
1683 reinterpret_cast<typename TypeHandler::WeakType*>(other_elems[i]);
1684 typename TypeHandler::WeakType* new_elem =
1685 TypeHandler::NewFromPrototype(other_elem, arena);
1686 TypeHandler::Merge(*other_elem, new_elem);
1687 our_elems[i] = new_elem;
1688 }
1689}
1690
1691template <typename TypeHandler>
1692inline void RepeatedPtrFieldBase::CopyFrom(const RepeatedPtrFieldBase& other) {
1693 if (&other == this) return;
1694 RepeatedPtrFieldBase::Clear<TypeHandler>();
1695 RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
1696}
1697
1698inline int RepeatedPtrFieldBase::Capacity() const { return total_size_; }
1699
1700inline void* const* RepeatedPtrFieldBase::raw_data() const {
1701 return rep_ ? rep_->elements : NULL;
1702}
1703
1704inline void** RepeatedPtrFieldBase::raw_mutable_data() const {
1705 return rep_ ? const_cast<void**>(rep_->elements) : NULL;
1706}
1707
1708template <typename TypeHandler>
1709inline typename TypeHandler::Type** RepeatedPtrFieldBase::mutable_data() {
1710 // TODO(kenton): Breaks C++ aliasing rules. We should probably remove this
1711 // method entirely.
1712 return reinterpret_cast<typename TypeHandler::Type**>(raw_mutable_data());
1713}
1714
1715template <typename TypeHandler>
1716inline const typename TypeHandler::Type* const* RepeatedPtrFieldBase::data()
1717 const {
1718 // TODO(kenton): Breaks C++ aliasing rules. We should probably remove this
1719 // method entirely.
1720 return reinterpret_cast<const typename TypeHandler::Type* const*>(raw_data());
1721}
1722
1723inline void RepeatedPtrFieldBase::SwapElements(int index1, int index2) {
1724 using std::swap; // enable ADL with fallback
1725 swap(rep_->elements[index1], rep_->elements[index2]);
1726}
1727
1728template <typename TypeHandler>
1729inline size_t RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong() const {
1730 size_t allocated_bytes = static_cast<size_t>(total_size_) * sizeof(void*);
1731 if (rep_ != NULL) {
1732 for (int i = 0; i < rep_->allocated_size; ++i) {
1733 allocated_bytes +=
1734 TypeHandler::SpaceUsedLong(*cast<TypeHandler>(rep_->elements[i]));
1735 }
1736 allocated_bytes += kRepHeaderSize;
1737 }
1738 return allocated_bytes;
1739}
1740
1741template <typename TypeHandler>
1742inline typename TypeHandler::Type* RepeatedPtrFieldBase::AddFromCleared() {
1743 if (rep_ != NULL && current_size_ < rep_->allocated_size) {
1744 return cast<TypeHandler>(rep_->elements[current_size_++]);
1745 } else {
1746 return NULL;
1747 }
1748}
1749
1750// AddAllocated version that implements arena-safe copying behavior.
1751template <typename TypeHandler>
1752void RepeatedPtrFieldBase::AddAllocatedInternal(
1753 typename TypeHandler::Type* value, std::true_type) {
1754 Arena* element_arena =
1755 reinterpret_cast<Arena*>(TypeHandler::GetMaybeArenaPointer(value));
1756 Arena* arena = GetArenaNoVirtual();
1757 if (arena == element_arena && rep_ && rep_->allocated_size < total_size_) {
    // Fast path: the underlying arena representation (tagged pointer) is equal
    // to our arena pointer, and we can add to the array without resizing it
    // (there is at least one slot that is not yet allocated).
1761 void** elems = rep_->elements;
1762 if (current_size_ < rep_->allocated_size) {
1763 // Make space at [current] by moving first allocated element to end of
1764 // allocated list.
1765 elems[rep_->allocated_size] = elems[current_size_];
1766 }
1767 elems[current_size_] = value;
1768 current_size_ = current_size_ + 1;
1769 rep_->allocated_size = rep_->allocated_size + 1;
1770 } else {
1771 AddAllocatedSlowWithCopy<TypeHandler>(value, TypeHandler::GetArena(value),
1772 arena);
1773 }
1774}
1775
1776// Slowpath handles all cases, copying if necessary.
1777template <typename TypeHandler>
1778void RepeatedPtrFieldBase::AddAllocatedSlowWithCopy(
1779 // Pass value_arena and my_arena to avoid duplicate virtual call (value) or
1780 // load (mine).
1781 typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena) {
1782 // Ensure that either the value is in the same arena, or if not, we do the
1783 // appropriate thing: Own() it (if it's on heap and we're in an arena) or copy
1784 // it to our arena/heap (otherwise).
1785 if (my_arena != NULL && value_arena == NULL) {
1786 my_arena->Own(value);
1787 } else if (my_arena != value_arena) {
1788 typename TypeHandler::Type* new_value =
1789 TypeHandler::NewFromPrototype(value, my_arena);
1790 TypeHandler::Merge(*value, new_value);
1791 TypeHandler::Delete(value, value_arena);
1792 value = new_value;
1793 }
1794
1795 UnsafeArenaAddAllocated<TypeHandler>(value);
1796}
1797
1798// AddAllocated version that does not implement arena-safe copying behavior.
1799template <typename TypeHandler>
1800void RepeatedPtrFieldBase::AddAllocatedInternal(
1801 typename TypeHandler::Type* value, std::false_type) {
1802 if (rep_ && rep_->allocated_size < total_size_) {
    // Fast path: we can add to the array without resizing it (there is at
    // least one slot that is not yet allocated). This overload does not
    // compare arenas.
1806 void** elems = rep_->elements;
1807 if (current_size_ < rep_->allocated_size) {
1808 // Make space at [current] by moving first allocated element to end of
1809 // allocated list.
1810 elems[rep_->allocated_size] = elems[current_size_];
1811 }
1812 elems[current_size_] = value;
1813 current_size_ = current_size_ + 1;
1814 ++rep_->allocated_size;
1815 } else {
1816 UnsafeArenaAddAllocated<TypeHandler>(value);
1817 }
1818}
1819
1820template <typename TypeHandler>
1821void RepeatedPtrFieldBase::UnsafeArenaAddAllocated(
1822 typename TypeHandler::Type* value) {
1823 // Make room for the new pointer.
1824 if (!rep_ || current_size_ == total_size_) {
1825 // The array is completely full with no cleared objects, so grow it.
1826 Reserve(total_size_ + 1);
1827 ++rep_->allocated_size;
1828 } else if (rep_->allocated_size == total_size_) {
1829 // There is no more space in the pointer array because it contains some
1830 // cleared objects awaiting reuse. We don't want to grow the array in this
1831 // case because otherwise a loop calling AddAllocated() followed by Clear()
1832 // would leak memory.
1833 TypeHandler::Delete(cast<TypeHandler>(rep_->elements[current_size_]),
1834 arena_);
1835 } else if (current_size_ < rep_->allocated_size) {
1836 // We have some cleared objects. We don't care about their order, so we
1837 // can just move the first one to the end to make space.
1838 rep_->elements[rep_->allocated_size] = rep_->elements[current_size_];
1839 ++rep_->allocated_size;
1840 } else {
1841 // There are no cleared objects.
1842 ++rep_->allocated_size;
1843 }
1844
1845 rep_->elements[current_size_++] = value;
1846}
1847
1848// ReleaseLast() for types that implement merge/copy behavior.
1849template <typename TypeHandler>
1850inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
1851 std::true_type) {
1852 // First, release an element.
1853 typename TypeHandler::Type* result = UnsafeArenaReleaseLast<TypeHandler>();
1854 // Now perform a copy if we're on an arena.
1855 Arena* arena = GetArenaNoVirtual();
1856 if (arena == NULL) {
1857 return result;
1858 } else {
1859 typename TypeHandler::Type* new_result =
1860 TypeHandler::NewFromPrototype(result, NULL);
1861 TypeHandler::Merge(*result, new_result);
1862 return new_result;
1863 }
1864}
1865
1866// ReleaseLast() for types that *do not* implement merge/copy behavior -- this
1867// is the same as UnsafeArenaReleaseLast(). Note that we GOOGLE_DCHECK-fail if we're on
1868// an arena, since the user really should implement the copy operation in this
1869// case.
1870template <typename TypeHandler>
1871inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
1872 std::false_type) {
1873 GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
1874 << "ReleaseLast() called on a RepeatedPtrField that is on an arena, "
1875 << "with a type that does not implement MergeFrom. This is unsafe; "
1876 << "please implement MergeFrom for your type.";
1877 return UnsafeArenaReleaseLast<TypeHandler>();
1878}
1879
1880template <typename TypeHandler>
1881inline typename TypeHandler::Type*
1882RepeatedPtrFieldBase::UnsafeArenaReleaseLast() {
1883 GOOGLE_DCHECK_GT(current_size_, 0);
1884 typename TypeHandler::Type* result =
1885 cast<TypeHandler>(rep_->elements[--current_size_]);
1886 --rep_->allocated_size;
1887 if (current_size_ < rep_->allocated_size) {
1888 // There are cleared elements on the end; replace the removed element
1889 // with the last allocated element.
1890 rep_->elements[current_size_] = rep_->elements[rep_->allocated_size];
1891 }
1892 return result;
1893}
1894
1895inline int RepeatedPtrFieldBase::ClearedCount() const {
1896 return rep_ ? (rep_->allocated_size - current_size_) : 0;
1897}
1898
1899template <typename TypeHandler>
1900inline void RepeatedPtrFieldBase::AddCleared(
1901 typename TypeHandler::Type* value) {
1902 GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
1903 << "AddCleared() can only be used on a RepeatedPtrField not on an arena.";
1904 GOOGLE_DCHECK(TypeHandler::GetArena(value) == NULL)
1905 << "AddCleared() can only accept values not on an arena.";
1906 if (!rep_ || rep_->allocated_size == total_size_) {
1907 Reserve(total_size_ + 1);
1908 }
1909 rep_->elements[rep_->allocated_size++] = value;
1910}
1911
1912template <typename TypeHandler>
1913inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseCleared() {
1914 GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
1915 << "ReleaseCleared() can only be used on a RepeatedPtrField not on "
1916 << "an arena.";
1918 GOOGLE_DCHECK(rep_ != NULL);
1919 GOOGLE_DCHECK_GT(rep_->allocated_size, current_size_);
1920 return cast<TypeHandler>(rep_->elements[--rep_->allocated_size]);
1921}
1922
1923} // namespace internal
1924
1925// -------------------------------------------------------------------
1926
1927template <typename Element>
1928class RepeatedPtrField<Element>::TypeHandler
1929 : public internal::GenericTypeHandler<Element> {};
1930
1931template <>
1932class RepeatedPtrField<std::string>::TypeHandler
1933 : public internal::StringTypeHandler {};
1934
1935template <typename Element>
1936inline RepeatedPtrField<Element>::RepeatedPtrField() : RepeatedPtrFieldBase() {}
1937
1938template <typename Element>
1939inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena)
1940 : RepeatedPtrFieldBase(arena) {}
1941
1942template <typename Element>
1943inline RepeatedPtrField<Element>::RepeatedPtrField(
1944 const RepeatedPtrField& other)
1945 : RepeatedPtrFieldBase() {
1946 MergeFrom(other);
1947}
1948
1949template <typename Element>
1950template <typename Iter>
1951inline RepeatedPtrField<Element>::RepeatedPtrField(Iter begin,
1952 const Iter& end) {
1953 int reserve = internal::CalculateReserve(begin, end);
1954 if (reserve != -1) {
1955 Reserve(reserve);
1956 }
1957 for (; begin != end; ++begin) {
1958 *Add() = *begin;
1959 }
1960}
1961
1962template <typename Element>
1963RepeatedPtrField<Element>::~RepeatedPtrField() {
1964 Destroy<TypeHandler>();
1965}
1966
1967template <typename Element>
1968inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
1969 const RepeatedPtrField& other) {
1970 if (this != &other) CopyFrom(other);
1971 return *this;
1972}
1973
1974template <typename Element>
1975inline RepeatedPtrField<Element>::RepeatedPtrField(
1976 RepeatedPtrField&& other) noexcept
1977 : RepeatedPtrField() {
1978 // We don't just call Swap(&other) here because it would perform 3 copies if
1979 // other is on an arena. This field can't be on an arena because arena
1980 // construction always uses the Arena* accepting constructor.
1981 if (other.GetArenaNoVirtual()) {
1982 CopyFrom(other);
1983 } else {
1984 InternalSwap(&other);
1985 }
1986}
1987
1988template <typename Element>
1989inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
1990 RepeatedPtrField&& other) noexcept {
1991 // We don't just call Swap(&other) here because it would perform 3 copies if
1992 // the two fields are on different arenas.
1993 if (this != &other) {
1994 if (this->GetArenaNoVirtual() != other.GetArenaNoVirtual()) {
1995 CopyFrom(other);
1996 } else {
1997 InternalSwap(&other);
1998 }
1999 }
2000 return *this;
2001}
2002
2003template <typename Element>
2004inline bool RepeatedPtrField<Element>::empty() const {
2005 return RepeatedPtrFieldBase::empty();
2006}
2007
2008template <typename Element>
2009inline int RepeatedPtrField<Element>::size() const {
2010 return RepeatedPtrFieldBase::size();
2011}
2012
2013template <typename Element>
2014inline const Element& RepeatedPtrField<Element>::Get(int index) const {
2015 return RepeatedPtrFieldBase::Get<TypeHandler>(index);
2016}
2017
2018template <typename Element>
2019inline const Element& RepeatedPtrField<Element>::at(int index) const {
2020 return RepeatedPtrFieldBase::at<TypeHandler>(index);
2021}
2022
2023template <typename Element>
2024inline Element& RepeatedPtrField<Element>::at(int index) {
2025 return RepeatedPtrFieldBase::at<TypeHandler>(index);
2026}
2027
2028
2029template <typename Element>
2030inline Element* RepeatedPtrField<Element>::Mutable(int index) {
2031 return RepeatedPtrFieldBase::Mutable<TypeHandler>(index);
2032}
2033
2034template <typename Element>
2035inline Element* RepeatedPtrField<Element>::Add() {
2036 return RepeatedPtrFieldBase::Add<TypeHandler>();
2037}
2038
2039template <typename Element>
2040inline void RepeatedPtrField<Element>::Add(Element&& value) {
2041 RepeatedPtrFieldBase::Add<TypeHandler>(std::move(value));
2042}
2043
2044template <typename Element>
2045inline void RepeatedPtrField<Element>::RemoveLast() {
2046 RepeatedPtrFieldBase::RemoveLast<TypeHandler>();
2047}
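
// Example (illustrative sketch; `msgs` and MyMessage are hypothetical names
// for a RepeatedPtrField<MyMessage> and a generated message type):
//   MyMessage* m = msgs.Add();  // appends a default-constructed element
//   m->set_id(1);               // assumes MyMessage has an int32 `id` field
//   msgs.RemoveLast();          // clears the element and keeps it for reuse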
2048
2049template <typename Element>
2050inline void RepeatedPtrField<Element>::DeleteSubrange(int start, int num) {
2051 GOOGLE_DCHECK_GE(start, 0);
2052 GOOGLE_DCHECK_GE(num, 0);
2053 GOOGLE_DCHECK_LE(start + num, size());
2054 for (int i = 0; i < num; ++i) {
2055 RepeatedPtrFieldBase::Delete<TypeHandler>(start + i);
2056 }
2057 ExtractSubrange(start, num, NULL);
2058}
2059
2060template <typename Element>
2061inline void RepeatedPtrField<Element>::ExtractSubrange(int start, int num,
2062 Element** elements) {
2063 typename internal::TypeImplementsMergeBehavior<
2064 typename TypeHandler::Type>::type t;
2065 ExtractSubrangeInternal(start, num, elements, t);
2066}
2067
2068// ExtractSubrange() implementation for types that implement merge/copy
2069// behavior.
2070template <typename Element>
2071inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
2072 int start, int num, Element** elements, std::true_type) {
2073 GOOGLE_DCHECK_GE(start, 0);
2074 GOOGLE_DCHECK_GE(num, 0);
2075 GOOGLE_DCHECK_LE(start + num, size());
2076
2077 if (num > 0) {
2078 // Save the values of the removed elements if requested.
2079 if (elements != NULL) {
2080 if (GetArenaNoVirtual() != NULL) {
2081 // If we're on an arena, we perform a copy for each element so that the
2082 // returned elements are heap-allocated.
2083 for (int i = 0; i < num; ++i) {
2084 Element* element =
2085 RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
2086 typename TypeHandler::Type* new_value =
2087 TypeHandler::NewFromPrototype(element, NULL);
2088 TypeHandler::Merge(*element, new_value);
2089 elements[i] = new_value;
2090 }
2091 } else {
2092 for (int i = 0; i < num; ++i) {
2093 elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
2094 }
2095 }
2096 }
2097 CloseGap(start, num);
2098 }
2099}
2100
2101// ExtractSubrange() implementation for types that do not implement merge/copy
2102// behavior.
2103template <typename Element>
2104inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
2105 int start, int num, Element** elements, std::false_type) {
  // This case is identical to UnsafeArenaExtractSubrange(). However, since
  // ExtractSubrange() must return heap-allocated objects by contract, and we
  // cannot fulfill this contract if we are on an arena, we must GOOGLE_DCHECK()
  // that we are not on an arena.
2110 GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
2111 << "ExtractSubrange() when arena is non-NULL is only supported when "
2112 << "the Element type supplies a MergeFrom() operation to make copies.";
2113 UnsafeArenaExtractSubrange(start, num, elements);
2114}
2115
2116template <typename Element>
2117inline void RepeatedPtrField<Element>::UnsafeArenaExtractSubrange(
2118 int start, int num, Element** elements) {
2119 GOOGLE_DCHECK_GE(start, 0);
2120 GOOGLE_DCHECK_GE(num, 0);
2121 GOOGLE_DCHECK_LE(start + num, size());
2122
2123 if (num > 0) {
2124 // Save the values of the removed elements if requested.
2125 if (elements != NULL) {
2126 for (int i = 0; i < num; ++i) {
2127 elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
2128 }
2129 }
2130 CloseGap(start, num);
2131 }
2132}
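
// Example (illustrative sketch; `msgs` is a hypothetical
// RepeatedPtrField<MyMessage> with at least two elements and no arena):
//   std::vector<MyMessage*> taken(2);
//   msgs.ExtractSubrange(0, 2, taken.data());
//   // The caller now owns the extracted objects. If the field had been on an
//   // arena (and MyMessage supports MergeFrom), heap-allocated copies would
//   // have been returned instead.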
2133
2134template <typename Element>
2135inline void RepeatedPtrField<Element>::Clear() {
2136 RepeatedPtrFieldBase::Clear<TypeHandler>();
2137}
2138
2139template <typename Element>
2140inline void RepeatedPtrField<Element>::MergeFrom(
2141 const RepeatedPtrField& other) {
2142 RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
2143}
2144
2145template <typename Element>
2146inline void RepeatedPtrField<Element>::CopyFrom(const RepeatedPtrField& other) {
2147 RepeatedPtrFieldBase::CopyFrom<TypeHandler>(other);
2148}
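
// Example (illustrative sketch; `src` and `dst` are hypothetical
// RepeatedPtrField<MyMessage> instances):
//   dst.CopyFrom(src);   // dst becomes an element-wise copy of src
//   dst.MergeFrom(src);  // appends copies of src's elements to dst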
2149
2150template <typename Element>
2151inline typename RepeatedPtrField<Element>::iterator
2152RepeatedPtrField<Element>::erase(const_iterator position) {
2153 return erase(position, position + 1);
2154}
2155
2156template <typename Element>
2157inline typename RepeatedPtrField<Element>::iterator
2158RepeatedPtrField<Element>::erase(const_iterator first, const_iterator last) {
2159 size_type pos_offset = std::distance(cbegin(), first);
2160 size_type last_offset = std::distance(cbegin(), last);
2161 DeleteSubrange(pos_offset, last_offset - pos_offset);
2162 return begin() + pos_offset;
2163}
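
// Example (illustrative sketch; `msgs` is a hypothetical
// RepeatedPtrField<MyMessage> with at least three elements):
//   msgs.erase(msgs.begin());                    // delete the first element
//   msgs.erase(msgs.begin(), msgs.begin() + 2);  // delete the next two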
2164
2165template <typename Element>
2166inline Element** RepeatedPtrField<Element>::mutable_data() {
2167 return RepeatedPtrFieldBase::mutable_data<TypeHandler>();
2168}
2169
2170template <typename Element>
2171inline const Element* const* RepeatedPtrField<Element>::data() const {
2172 return RepeatedPtrFieldBase::data<TypeHandler>();
2173}
2174
2175template <typename Element>
2176inline void RepeatedPtrField<Element>::Swap(RepeatedPtrField* other) {
2177 if (this == other) return;
2178 RepeatedPtrFieldBase::Swap<TypeHandler>(other);
2179}
2180
2181template <typename Element>
2182inline void RepeatedPtrField<Element>::UnsafeArenaSwap(
2183 RepeatedPtrField* other) {
2184 if (this == other) return;
2185 RepeatedPtrFieldBase::InternalSwap(other);
2186}
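
// Example (illustrative sketch; `a` and `b` are hypothetical
// RepeatedPtrField<MyMessage> instances):
//   a.Swap(&b);             // safe even if a and b live on different arenas
//                           // (falls back to copying in that case)
//   a.UnsafeArenaSwap(&b);  // raw pointer swap; both fields must share the
//                           // same arena (or both be heap-allocated)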
2187
2188template <typename Element>
2189inline void RepeatedPtrField<Element>::SwapElements(int index1, int index2) {
2190 RepeatedPtrFieldBase::SwapElements(index1, index2);
2191}
2192
2193template <typename Element>
2194inline Arena* RepeatedPtrField<Element>::GetArenaNoVirtual() const {
2195 return RepeatedPtrFieldBase::GetArenaNoVirtual();
2196}
2197
2198template <typename Element>
2199inline size_t RepeatedPtrField<Element>::SpaceUsedExcludingSelfLong() const {
2200 return RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong<TypeHandler>();
2201}
2202
2203template <typename Element>
2204inline void RepeatedPtrField<Element>::AddAllocated(Element* value) {
2205 RepeatedPtrFieldBase::AddAllocated<TypeHandler>(value);
2206}
2207
2208template <typename Element>
2209inline void RepeatedPtrField<Element>::UnsafeArenaAddAllocated(Element* value) {
2210 RepeatedPtrFieldBase::UnsafeArenaAddAllocated<TypeHandler>(value);
2211}
2212
2213template <typename Element>
2214inline Element* RepeatedPtrField<Element>::ReleaseLast() {
2215 return RepeatedPtrFieldBase::ReleaseLast<TypeHandler>();
2216}
2217
2218template <typename Element>
2219inline Element* RepeatedPtrField<Element>::UnsafeArenaReleaseLast() {
2220 return RepeatedPtrFieldBase::UnsafeArenaReleaseLast<TypeHandler>();
2221}
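
// Example (illustrative sketch; `msgs` is a hypothetical, heap-allocated
// (non-arena) RepeatedPtrField<MyMessage>):
//   msgs.AddAllocated(new MyMessage);  // the field takes ownership
//   std::unique_ptr<MyMessage> back(msgs.ReleaseLast());  // ownership returns
//   // On an arena, ReleaseLast() would instead return a heap-allocated copy.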
2222
2223template <typename Element>
2224inline int RepeatedPtrField<Element>::ClearedCount() const {
2225 return RepeatedPtrFieldBase::ClearedCount();
2226}
2227
2228template <typename Element>
2229inline void RepeatedPtrField<Element>::AddCleared(Element* value) {
2230 return RepeatedPtrFieldBase::AddCleared<TypeHandler>(value);
2231}
2232
2233template <typename Element>
2234inline Element* RepeatedPtrField<Element>::ReleaseCleared() {
2235 return RepeatedPtrFieldBase::ReleaseCleared<TypeHandler>();
2236}
2237
2238template <typename Element>
2239inline void RepeatedPtrField<Element>::Reserve(int new_size) {
2240 return RepeatedPtrFieldBase::Reserve(new_size);
2241}
2242
2243template <typename Element>
2244inline int RepeatedPtrField<Element>::Capacity() const {
2245 return RepeatedPtrFieldBase::Capacity();
2246}
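
// Example (illustrative sketch; `msgs` is a hypothetical
// RepeatedPtrField<MyMessage> and `n` a known number of elements to append):
//   msgs.Reserve(msgs.size() + n);  // grow the pointer array once up front
//   // Afterwards, msgs.Capacity() >= msgs.size() + n.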
2247
2248// -------------------------------------------------------------------
2249
2250namespace internal {
2251
2252// STL-like iterator implementation for RepeatedPtrField. You should not
2253// refer to this class directly; use RepeatedPtrField<T>::iterator instead.
2254//
2255// The iterator for RepeatedPtrField<T>, RepeatedPtrIterator<T>, is
2256// very similar to iterator_ptr<T**> in util/gtl/iterator_adaptors.h,
2257// but adds random-access operators and is modified to wrap a void** base
2258// iterator (since RepeatedPtrField stores its array as a void* array and
2259// casting void** to T** would violate C++ aliasing rules).
2260//
// This code is based on net/proto/proto-array-internal.h by Jeffrey Yasskin
// ([email protected]).
2263template <typename Element>
2264class RepeatedPtrIterator {
2265 public:
2266 using iterator = RepeatedPtrIterator<Element>;
2267 using iterator_category = std::random_access_iterator_tag;
2268 using value_type = typename std::remove_const<Element>::type;
2269 using difference_type = std::ptrdiff_t;
2270 using pointer = Element*;
2271 using reference = Element&;
2272
2273 RepeatedPtrIterator() : it_(NULL) {}
2274 explicit RepeatedPtrIterator(void* const* it) : it_(it) {}
2275
2276 // Allow "upcasting" from RepeatedPtrIterator<T**> to
2277 // RepeatedPtrIterator<const T*const*>.
2278 template <typename OtherElement>
2279 RepeatedPtrIterator(const RepeatedPtrIterator<OtherElement>& other)
2280 : it_(other.it_) {
2281 // Force a compiler error if the other type is not convertible to ours.
2282 if (false) {
2283 implicit_cast<Element*>(static_cast<OtherElement*>(nullptr));
2284 }
2285 }
2286
2287 // dereferenceable
2288 reference operator*() const { return *reinterpret_cast<Element*>(*it_); }
2289 pointer operator->() const { return &(operator*()); }
2290
2291 // {inc,dec}rementable
2292 iterator& operator++() {
2293 ++it_;
2294 return *this;
2295 }
2296 iterator operator++(int) { return iterator(it_++); }
2297 iterator& operator--() {
2298 --it_;
2299 return *this;
2300 }
2301 iterator operator--(int) { return iterator(it_--); }
2302
2303 // equality_comparable
2304 bool operator==(const iterator& x) const { return it_ == x.it_; }
2305 bool operator!=(const iterator& x) const { return it_ != x.it_; }
2306
2307 // less_than_comparable
2308 bool operator<(const iterator& x) const { return it_ < x.it_; }
2309 bool operator<=(const iterator& x) const { return it_ <= x.it_; }
2310 bool operator>(const iterator& x) const { return it_ > x.it_; }
2311 bool operator>=(const iterator& x) const { return it_ >= x.it_; }
2312
2313 // addable, subtractable
2314 iterator& operator+=(difference_type d) {
2315 it_ += d;
2316 return *this;
2317 }
2318 friend iterator operator+(iterator it, const difference_type d) {
2319 it += d;
2320 return it;
2321 }
2322 friend iterator operator+(const difference_type d, iterator it) {
2323 it += d;
2324 return it;
2325 }
2326 iterator& operator-=(difference_type d) {
2327 it_ -= d;
2328 return *this;
2329 }
2330 friend iterator operator-(iterator it, difference_type d) {
2331 it -= d;
2332 return it;
2333 }
2334
2335 // indexable
2336 reference operator[](difference_type d) const { return *(*this + d); }
2337
2338 // random access iterator
2339 difference_type operator-(const iterator& x) const { return it_ - x.it_; }
2340
2341 private:
2342 template <typename OtherElement>
2343 friend class RepeatedPtrIterator;
2344
2345 // The internal iterator.
2346 void* const* it_;
2347};
2348
// Provides an iterator that operates on pointers to the underlying objects,
// rather than on the objects themselves as RepeatedPtrIterator does.
// Consider using this when working with STL algorithms that change
// the array.
2353// The VoidPtr template parameter holds the type-agnostic pointer value
2354// referenced by the iterator. It should either be "void *" for a mutable
2355// iterator, or "const void* const" for a constant iterator.
2356template <typename Element, typename VoidPtr>
2357class RepeatedPtrOverPtrsIterator {
2358 public:
2359 using iterator = RepeatedPtrOverPtrsIterator<Element, VoidPtr>;
2360 using iterator_category = std::random_access_iterator_tag;
2361 using value_type = typename std::remove_const<Element>::type;
2362 using difference_type = std::ptrdiff_t;
2363 using pointer = Element*;
2364 using reference = Element&;
2365
2366 RepeatedPtrOverPtrsIterator() : it_(NULL) {}
2367 explicit RepeatedPtrOverPtrsIterator(VoidPtr* it) : it_(it) {}
2368
2369 // dereferenceable
2370 reference operator*() const { return *reinterpret_cast<Element*>(it_); }
2371 pointer operator->() const { return &(operator*()); }
2372
2373 // {inc,dec}rementable
2374 iterator& operator++() {
2375 ++it_;
2376 return *this;
2377 }
2378 iterator operator++(int) { return iterator(it_++); }
2379 iterator& operator--() {
2380 --it_;
2381 return *this;
2382 }
2383 iterator operator--(int) { return iterator(it_--); }
2384
2385 // equality_comparable
2386 bool operator==(const iterator& x) const { return it_ == x.it_; }
2387 bool operator!=(const iterator& x) const { return it_ != x.it_; }
2388
2389 // less_than_comparable
2390 bool operator<(const iterator& x) const { return it_ < x.it_; }
2391 bool operator<=(const iterator& x) const { return it_ <= x.it_; }
2392 bool operator>(const iterator& x) const { return it_ > x.it_; }
2393 bool operator>=(const iterator& x) const { return it_ >= x.it_; }
2394
2395 // addable, subtractable
2396 iterator& operator+=(difference_type d) {
2397 it_ += d;
2398 return *this;
2399 }
2400 friend iterator operator+(iterator it, difference_type d) {
2401 it += d;
2402 return it;
2403 }
2404 friend iterator operator+(difference_type d, iterator it) {
2405 it += d;
2406 return it;
2407 }
2408 iterator& operator-=(difference_type d) {
2409 it_ -= d;
2410 return *this;
2411 }
2412 friend iterator operator-(iterator it, difference_type d) {
2413 it -= d;
2414 return it;
2415 }
2416
2417 // indexable
2418 reference operator[](difference_type d) const { return *(*this + d); }
2419
2420 // random access iterator
2421 difference_type operator-(const iterator& x) const { return it_ - x.it_; }
2422
2423 private:
2424 template <typename OtherElement>
2425 friend class RepeatedPtrIterator;
2426
2427 // The internal iterator.
2428 VoidPtr* it_;
2429};
2430
2431void RepeatedPtrFieldBase::InternalSwap(RepeatedPtrFieldBase* other) {
2432 GOOGLE_DCHECK(this != other);
2433 GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
2434
2435 std::swap(rep_, other->rep_);
2436 std::swap(current_size_, other->current_size_);
2437 std::swap(total_size_, other->total_size_);
2438}
2439
2440} // namespace internal
2441
2442template <typename Element>
2443inline typename RepeatedPtrField<Element>::iterator
2444RepeatedPtrField<Element>::begin() {
2445 return iterator(raw_data());
2446}
2447template <typename Element>
2448inline typename RepeatedPtrField<Element>::const_iterator
2449RepeatedPtrField<Element>::begin() const {
2450 return iterator(raw_data());
2451}
2452template <typename Element>
2453inline typename RepeatedPtrField<Element>::const_iterator
2454RepeatedPtrField<Element>::cbegin() const {
2455 return begin();
2456}
2457template <typename Element>
2458inline typename RepeatedPtrField<Element>::iterator
2459RepeatedPtrField<Element>::end() {
2460 return iterator(raw_data() + size());
2461}
2462template <typename Element>
2463inline typename RepeatedPtrField<Element>::const_iterator
2464RepeatedPtrField<Element>::end() const {
2465 return iterator(raw_data() + size());
2466}
2467template <typename Element>
2468inline typename RepeatedPtrField<Element>::const_iterator
2469RepeatedPtrField<Element>::cend() const {
2470 return end();
2471}
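
// Example (illustrative sketch; `msgs` is a hypothetical non-const
// RepeatedPtrField<MyMessage>):
//   for (const MyMessage& m : msgs) {
//     // read-only access to each element
//   }
//   for (MyMessage& m : msgs) {
//     m.Clear();  // mutable access via the non-const iterators
//   }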
2472
2473template <typename Element>
2474inline typename RepeatedPtrField<Element>::pointer_iterator
2475RepeatedPtrField<Element>::pointer_begin() {
2476 return pointer_iterator(raw_mutable_data());
2477}
2478template <typename Element>
2479inline typename RepeatedPtrField<Element>::const_pointer_iterator
2480RepeatedPtrField<Element>::pointer_begin() const {
2481 return const_pointer_iterator(const_cast<const void* const*>(raw_data()));
2482}
2483template <typename Element>
2484inline typename RepeatedPtrField<Element>::pointer_iterator
2485RepeatedPtrField<Element>::pointer_end() {
2486 return pointer_iterator(raw_mutable_data() + size());
2487}
2488template <typename Element>
2489inline typename RepeatedPtrField<Element>::const_pointer_iterator
2490RepeatedPtrField<Element>::pointer_end() const {
2491 return const_pointer_iterator(
2492 const_cast<const void* const*>(raw_data() + size()));
2493}
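
// Example (illustrative sketch; `msgs` is a hypothetical
// RepeatedPtrField<MyMessage> and `id()` an assumed accessor). Sorting through
// the pointer iterators permutes the stored pointers without copying messages:
//   std::sort(msgs.pointer_begin(), msgs.pointer_end(),
//             [](const MyMessage* a, const MyMessage* b) {
//               return a->id() < b->id();
//             });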
2494
2495// Iterators and helper functions that follow the spirit of the STL
2496// std::back_insert_iterator and std::back_inserter but are tailor-made
2497// for RepeatedField and RepeatedPtrField. Typical usage would be:
2498//
2499// std::copy(some_sequence.begin(), some_sequence.end(),
2500// RepeatedFieldBackInserter(proto.mutable_sequence()));
2501//
2502// Ported by johannes from util/gtl/proto-array-iterators.h
2503
2504namespace internal {
2505// A back inserter for RepeatedField objects.
2506template <typename T>
2507class RepeatedFieldBackInsertIterator
2508 : public std::iterator<std::output_iterator_tag, T> {
2509 public:
2510 explicit RepeatedFieldBackInsertIterator(
2511 RepeatedField<T>* const mutable_field)
2512 : field_(mutable_field) {}
2513 RepeatedFieldBackInsertIterator<T>& operator=(const T& value) {
2514 field_->Add(value);
2515 return *this;
2516 }
2517 RepeatedFieldBackInsertIterator<T>& operator*() { return *this; }
2518 RepeatedFieldBackInsertIterator<T>& operator++() { return *this; }
2519 RepeatedFieldBackInsertIterator<T>& operator++(int /* unused */) {
2520 return *this;
2521 }
2522
2523 private:
2524 RepeatedField<T>* field_;
2525};
2526
2527// A back inserter for RepeatedPtrField objects.
2528template <typename T>
2529class RepeatedPtrFieldBackInsertIterator
2530 : public std::iterator<std::output_iterator_tag, T> {
2531 public:
2532 RepeatedPtrFieldBackInsertIterator(RepeatedPtrField<T>* const mutable_field)
2533 : field_(mutable_field) {}
2534 RepeatedPtrFieldBackInsertIterator<T>& operator=(const T& value) {
2535 *field_->Add() = value;
2536 return *this;
2537 }
2538 RepeatedPtrFieldBackInsertIterator<T>& operator=(
2539 const T* const ptr_to_value) {
2540 *field_->Add() = *ptr_to_value;
2541 return *this;
2542 }
2543 RepeatedPtrFieldBackInsertIterator<T>& operator=(T&& value) {
2544 *field_->Add() = std::move(value);
2545 return *this;
2546 }
2547 RepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
2548 RepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
2549 RepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
2550 return *this;
2551 }
2552
2553 private:
2554 RepeatedPtrField<T>* field_;
2555};
2556
2557// A back inserter for RepeatedPtrFields that inserts by transferring ownership
2558// of a pointer.
2559template <typename T>
2560class AllocatedRepeatedPtrFieldBackInsertIterator
2561 : public std::iterator<std::output_iterator_tag, T> {
2562 public:
2563 explicit AllocatedRepeatedPtrFieldBackInsertIterator(
2564 RepeatedPtrField<T>* const mutable_field)
2565 : field_(mutable_field) {}
2566 AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
2567 T* const ptr_to_value) {
2568 field_->AddAllocated(ptr_to_value);
2569 return *this;
2570 }
2571 AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
2572 AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
2573 AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
2574 return *this;
2575 }
2576
2577 private:
2578 RepeatedPtrField<T>* field_;
2579};
2580
// Almost identical to AllocatedRepeatedPtrFieldBackInsertIterator. This one
// uses UnsafeArenaAddAllocated instead of AddAllocated.
2583template <typename T>
2584class UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator
2585 : public std::iterator<std::output_iterator_tag, T> {
2586 public:
2587 explicit UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator(
2588 RepeatedPtrField<T>* const mutable_field)
2589 : field_(mutable_field) {}
2590 UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
2591 T const* const ptr_to_value) {
2592 field_->UnsafeArenaAddAllocated(const_cast<T*>(ptr_to_value));
2593 return *this;
2594 }
2595 UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() {
2596 return *this;
2597 }
2598 UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() {
2599 return *this;
2600 }
2601 UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(
2602 int /* unused */) {
2603 return *this;
2604 }
2605
2606 private:
2607 RepeatedPtrField<T>* field_;
2608};
2609
2610} // namespace internal
2611
2612// Provides a back insert iterator for RepeatedField instances,
2613// similar to std::back_inserter().
2614template <typename T>
2615internal::RepeatedFieldBackInsertIterator<T> RepeatedFieldBackInserter(
2616 RepeatedField<T>* const mutable_field) {
2617 return internal::RepeatedFieldBackInsertIterator<T>(mutable_field);
2618}
2619
2620// Provides a back insert iterator for RepeatedPtrField instances,
2621// similar to std::back_inserter().
2622template <typename T>
2623internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedPtrFieldBackInserter(
2624 RepeatedPtrField<T>* const mutable_field) {
2625 return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
2626}
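
// Example (illustrative sketch; `names` is a hypothetical
// std::vector<std::string> and `field` a RepeatedPtrField<std::string>):
//   std::copy(names.begin(), names.end(),
//             RepeatedPtrFieldBackInserter(&field));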
2627
2628// Special back insert iterator for RepeatedPtrField instances, just in
2629// case someone wants to write generic template code that can access both
2630// RepeatedFields and RepeatedPtrFields using a common name.
2631template <typename T>
2632internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedFieldBackInserter(
2633 RepeatedPtrField<T>* const mutable_field) {
2634 return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
2635}
2636
// Provides a back insert iterator for RepeatedPtrField instances, similar to
// std::back_inserter(), that transfers ownership of the inserted pointers to
// the field instead of copying the pointed-to elements.
2640template <typename T>
2641internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>
2642AllocatedRepeatedPtrFieldBackInserter(
2643 RepeatedPtrField<T>* const mutable_field) {
2644 return internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>(
2645 mutable_field);
2646}
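
// Example (illustrative sketch; `owned` is a hypothetical
// std::vector<MyMessage*> of heap-allocated messages and `field` a
// RepeatedPtrField<MyMessage>):
//   std::copy(owned.begin(), owned.end(),
//             AllocatedRepeatedPtrFieldBackInserter(&field));
//   owned.clear();  // the field now owns the pointers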
2647
2648// Similar to AllocatedRepeatedPtrFieldBackInserter, using
2649// UnsafeArenaAddAllocated instead of AddAllocated.
2650// This is slightly faster if that matters. It is also useful in legacy code
2651// that uses temporary ownership to avoid copies. Example:
2652// RepeatedPtrField<T> temp_field;
2653// temp_field.AddAllocated(new T);
2654// ... // Do something with temp_field
2655// temp_field.ExtractSubrange(0, temp_field.size(), nullptr);
// If you put temp_field on an arena, this fails: ownership transfers to the
// arena at the "AddAllocated" call and is never released again, causing a
// double delete. Using UnsafeArenaAddAllocated prevents this.
2659template <typename T>
2660internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>
2661UnsafeArenaAllocatedRepeatedPtrFieldBackInserter(
2662 RepeatedPtrField<T>* const mutable_field) {
2663 return internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>(
2664 mutable_field);
2665}
2666
// Extern declarations of common instantiations to reduce library bloat.
2668extern template class PROTOBUF_EXPORT RepeatedField<bool>;
2669extern template class PROTOBUF_EXPORT RepeatedField<int32>;
2670extern template class PROTOBUF_EXPORT RepeatedField<uint32>;
2671extern template class PROTOBUF_EXPORT RepeatedField<int64>;
2672extern template class PROTOBUF_EXPORT RepeatedField<uint64>;
2673extern template class PROTOBUF_EXPORT RepeatedField<float>;
2674extern template class PROTOBUF_EXPORT RepeatedField<double>;
2675extern template class PROTOBUF_EXPORT RepeatedPtrField<std::string>;
2676
2677} // namespace protobuf
2678} // namespace google
2679
2680#include <google/protobuf/port_undef.inc>
2681
2682#endif // GOOGLE_PROTOBUF_REPEATED_FIELD_H__
2683