// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
//
// Author: kenton@google.com (Kenton Varda)
//  Based on original Protocol Buffers design by
//  Sanjay Ghemawat, Jeff Dean, and others.
//
// RepeatedField and RepeatedPtrField are used by generated protocol message
// classes to manipulate repeated fields.  These classes are very similar to
// STL's vector, but include a number of optimizations found to be useful
// specifically in the case of Protocol Buffers.  RepeatedPtrField is
// particularly different from STL vector as it manages ownership of the
// pointers that it contains.
//
// This header covers RepeatedPtrField.

#ifndef GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__
#define GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__

// NOTE(review): the angle-bracket header names were lost in this copy of the
// file; the list below is reconstructed from the standard-library names this
// header actually uses (std::swap_ranges, offsetof, std::numeric_limits,
// std::string, std::integral_constant, std::exchange, std::reverse_iterator,
// uintptr_t) — confirm against upstream.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <limits>
#include <string>
#include <type_traits>
#include <utility>

#include "absl/base/attributes.h"
#include "absl/log/absl_check.h"
#include "google/protobuf/arena.h"
#include "google/protobuf/internal_visibility.h"
#include "google/protobuf/message_lite.h"
#include "google/protobuf/port.h"

// Must be included last.
#include "google/protobuf/port_def.inc"

#ifdef SWIG
#error "You cannot SWIG proto headers"
#endif

namespace google {
namespace protobuf {

class Message;
class Reflection;

template <typename T>
struct WeakRepeatedPtrField;

namespace internal {

class MergePartialFromCodedStreamHelper;
class SwapFieldHelper;

}  // namespace internal

namespace internal {

// Iterator types forward-declared here; defined later in this header.
template <typename It>
class RepeatedPtrIterator;
template <typename It, typename VoidPtr>
class RepeatedPtrOverPtrsIterator;

}  // namespace internal

namespace internal {

// Type-erased element factory: creates a default-constructed T on `a`.
// Used as the `ElementFactory` argument of AddOutOfLineHelper below.
template <typename T>
inline void* NewT(Arena* a) {
  return GenericTypeHandler<T>::New(a);
}

// Swaps two non-overlapping blocks of memory of size `N`
template <size_t N>
inline void memswap(char* PROTOBUF_RESTRICT a, char* PROTOBUF_RESTRICT b) {
  // `PROTOBUF_RESTRICT` tells compiler that blocks do not overlapping which
  // allows it to generate optimized code for swap_ranges.
  std::swap_ranges(a, a + N, b);
}

// type-traits helper for RepeatedPtrFieldBase: we only want to invoke
// arena-related "copy if on different arena" behavior if the necessary methods
// exist on the contained type. In particular, we rely on MergeFrom() existing
// as a general proxy for the fact that a copy will work, and we also provide a
// specific override for std::string*.
template <typename T>
struct TypeImplementsMergeBehaviorProbeForMergeFrom {
  typedef char HasMerge;
  typedef long HasNoMerge;

  // We accept either of:
  //   - void MergeFrom(const T& other)
  //   - bool MergeFrom(const T& other)
  //
  // We mangle these names a bit to avoid compatibility issues in 'unclean'
  // include environments that may have, e.g., "#define test ..." (yes, this
  // exists).
  template <typename U, typename RetType, RetType (U::*)(const U& arg)>
  struct CheckType;
  template <typename U>
  static HasMerge Check(CheckType<U, void, &U::MergeFrom>*);
  template <typename U>
  static HasMerge Check(CheckType<U, bool, &U::MergeFrom>*);
  template <typename U>
  static HasNoMerge Check(...);

  // Resolves to either std::true_type or std::false_type.
  typedef std::integral_constant<bool, (sizeof(Check<T>(0)) ==
                                        sizeof(HasMerge))>
      type;
};

template <typename T>
struct TypeImplementsMergeBehavior
    : TypeImplementsMergeBehaviorProbeForMergeFrom<T> {};

template <>
struct TypeImplementsMergeBehavior<std::string> {
  typedef std::true_type type;
};

template <typename T>
struct IsMovable
    : std::integral_constant<bool, std::is_move_constructible<T>::value &&
                                       std::is_move_assignable<T>::value> {};

// A trait that tells offset of `T::arena_`.
//
// Do not use this struct - it exists for internal use only.
template <typename T>
struct ArenaOffsetHelper {
  constexpr static size_t value = offsetof(T, arena_);
};

// This is the common base class for RepeatedPtrFields.  It deals only in void*
// pointers.  Users should not use this interface directly.
//
// The methods of this interface correspond to the methods of RepeatedPtrField,
// but may have a template argument called TypeHandler.  Its signature is:
//   class TypeHandler {
//    public:
//     typedef MyType Type;
//     static Type* New();
//     static Type* NewFromPrototype(const Type* prototype,
//                                   Arena* arena);
//     static void Delete(Type*);
//     static void Clear(Type*);
//     static void Merge(const Type& from, Type* to);
//
//     // Only needs to be implemented if SpaceUsedExcludingSelf() is called.
//     static int SpaceUsedLong(const Type&);
//   };
class PROTOBUF_EXPORT RepeatedPtrFieldBase {
  template <typename Handler>
  using Value = typename Handler::Type;

  static constexpr int kSSOCapacity = 1;

  using ElementFactory = void* (*)(Arena*);

 protected:
  // We use the same Handler for all Message types to deduplicate generated
  // code.
  template <typename Handler>
  using CommonHandler = typename std::conditional<
      std::is_base_of<MessageLite, Value<Handler>>::value,
      internal::GenericTypeHandler<MessageLite>, Handler>::type;

  constexpr RepeatedPtrFieldBase()
      : tagged_rep_or_elem_(nullptr),
        current_size_(0),
        total_size_(kSSOCapacity),
        arena_(nullptr) {}
  explicit RepeatedPtrFieldBase(Arena* arena)
      : tagged_rep_or_elem_(nullptr),
        current_size_(0),
        total_size_(kSSOCapacity),
        arena_(arena) {}

  RepeatedPtrFieldBase(const RepeatedPtrFieldBase&) = delete;
  RepeatedPtrFieldBase& operator=(const RepeatedPtrFieldBase&) = delete;

  ~RepeatedPtrFieldBase() {
#ifndef NDEBUG
    // Try to trigger segfault / asan failure in non-opt builds. If arena_
    // lifetime has ended before the destructor.
    if (arena_) (void)arena_->SpaceAllocated();
#endif
  }

  bool empty() const { return current_size_ == 0; }
  int size() const { return current_size_; }
  int Capacity() const { return total_size_; }

  template <typename TypeHandler>
  const Value<TypeHandler>& at(int index) const {
    ABSL_CHECK_GE(index, 0);
    ABSL_CHECK_LT(index, current_size_);
    return *cast<TypeHandler>(element_at(index));
  }

  template <typename TypeHandler>
  Value<TypeHandler>& at(int index) {
    ABSL_CHECK_GE(index, 0);
    ABSL_CHECK_LT(index, current_size_);
    return *cast<TypeHandler>(element_at(index));
  }

  template <typename TypeHandler>
  Value<TypeHandler>* Mutable(int index) {
    ABSL_DCHECK_GE(index, 0);
    ABSL_DCHECK_LT(index, current_size_);
    return cast<TypeHandler>(element_at(index));
  }

  template <typename TypeHandler>
  Value<TypeHandler>* Add() {
    return cast<TypeHandler>(AddOutOfLineHelper(NewT<Value<TypeHandler>>));
  }

  template <typename TypeHandler>
  Value<TypeHandler>* Add(const Value<TypeHandler>* prototype) {
    if (current_size_ < allocated_size()) {
      // Reuse a previously cleared element sitting past current_size_.
      return cast<TypeHandler>(
          element_at(ExchangeCurrentSize(current_size_ + 1)));
    }
    auto* result = TypeHandler::NewFromPrototype(prototype, arena_);
    return cast<TypeHandler>(AddOutOfLineHelper(result));
  }

  template <
      typename TypeHandler,
      typename std::enable_if<TypeHandler::Movable::value>::type* = nullptr>
  inline void Add(Value<TypeHandler>&& value) {
    if (current_size_ < allocated_size()) {
      // Move-assign into a recycled (cleared) element.
      *cast<TypeHandler>(element_at(ExchangeCurrentSize(current_size_ + 1))) =
          std::move(value);
      return;
    }
    MaybeExtend();
    if (!using_sso()) ++rep()->allocated_size;
    auto* result = TypeHandler::New(arena_, std::move(value));
    element_at(ExchangeCurrentSize(current_size_ + 1)) = result;
  }

  template <typename TypeHandler>
  void Delete(int index) {
    ABSL_DCHECK_GE(index, 0);
    ABSL_DCHECK_LT(index, current_size_);
    using H = CommonHandler<TypeHandler>;
    Delete<H>(element_at(index), arena_);
  }

  // Must be called from destructor.
  template <typename TypeHandler>
  void Destroy() {
    using H = CommonHandler<TypeHandler>;
    if (arena_ != nullptr) return;

    int n = allocated_size();
    void** elems = elements();
    for (int i = 0; i < n; i++) {
      Delete<H>(elems[i], nullptr);
    }
    if (!using_sso()) {
      internal::SizedDelete(rep(),
                            total_size_ * sizeof(elems[0]) + kRepHeaderSize);
    }
  }

  bool NeedsDestroy() const {
    return tagged_rep_or_elem_ != nullptr && arena_ == nullptr;
  }
  void DestroyProtos();  // implemented in the cc file

 public:
  // The next few methods are public so that they can be called from generated
  // code when implicit weak fields are used, but they should never be called
  // by application code.

  template <typename TypeHandler>
  const Value<TypeHandler>& Get(int index) const {
    ABSL_DCHECK_GE(index, 0);
    ABSL_DCHECK_LT(index, current_size_);
    return *cast<TypeHandler>(element_at(index));
  }

  // Creates and adds an element using the given prototype, without introducing
  // a link-time dependency on the concrete message type. This method is used to
  // implement implicit weak fields. The prototype may be nullptr, in which case
  // an ImplicitWeakMessage will be used as a placeholder.
  MessageLite* AddWeak(const MessageLite* prototype);

  template <typename TypeHandler>
  void Clear() {
    const int n = current_size_;
    ABSL_DCHECK_GE(n, 0);
    if (n > 0) {
      using H = CommonHandler<TypeHandler>;
      ClearNonEmpty<H>();
    }
  }

  // Message creating functor: used in MergeFrom<T>()
  template <typename T>
  static MessageLite* CopyMessage(Arena* arena, const MessageLite& src) {
    T* msg = Arena::CreateMaybeMessage<T>(arena);
    msg->MergeFrom(static_cast<const T&>(src));
    return msg;
  }

  // Appends all message values from `from` to this instance.
  template <typename T>
  void MergeFrom(const RepeatedPtrFieldBase& from) {
    static_assert(std::is_base_of<MessageLite, T>::value, "");
    MergeFromConcreteMessage(from, CopyMessage<T>);
  }

  inline void InternalSwap(RepeatedPtrFieldBase* PROTOBUF_RESTRICT rhs) {
    ABSL_DCHECK(this != rhs);

    // Swap all fields except arena pointer at once.
    internal::memswap<ArenaOffsetHelper<RepeatedPtrFieldBase>::value>(
        reinterpret_cast<char*>(this), reinterpret_cast<char*>(rhs));
  }

  // Prepares the container for adding elements via `AddAllocatedForParse`.
  // It ensures we have no preallocated elements in the array.
  //  Returns true if the invariants hold and `AddAllocatedForParse` can be
  //  used.
  bool PrepareForParse() { return allocated_size() == current_size_; }

  // Similar to `AddAllocated` but faster.
  // Can only be invoked after a call to `PrepareForParse` that returned `true`,
  // or other calls to `AddAllocatedForParse`.
  template <typename TypeHandler>
  void AddAllocatedForParse(Value<TypeHandler>* value) {
    ABSL_DCHECK_EQ(current_size_, allocated_size());
    MaybeExtend();
    element_at(current_size_++) = value;
    if (!using_sso()) ++rep()->allocated_size;
  }

 protected:
  template <typename TypeHandler>
  void RemoveLast() {
    ABSL_DCHECK_GT(current_size_, 0);
    ExchangeCurrentSize(current_size_ - 1);
    using H = CommonHandler<TypeHandler>;
    H::Clear(cast<H>(element_at(current_size_)));
  }

  template <typename TypeHandler>
  void CopyFrom(const RepeatedPtrFieldBase& other) {
    if (&other == this) return;
    RepeatedPtrFieldBase::Clear<TypeHandler>();
    if (other.empty()) return;
    RepeatedPtrFieldBase::MergeFrom<typename TypeHandler::Type>(other);
  }

  void CloseGap(int start, int num);

  void Reserve(int capacity);

  template <typename TypeHandler>
  static inline Value<TypeHandler>* copy(const Value<TypeHandler>* value) {
    using H = CommonHandler<TypeHandler>;
    auto* new_value = H::NewFromPrototype(value, nullptr);
    H::Merge(*value, new_value);
    return cast<TypeHandler>(new_value);
  }

  // Used for constructing iterators.
  void* const* raw_data() const { return elements(); }
  void** raw_mutable_data() { return elements(); }

  template <typename TypeHandler>
  Value<TypeHandler>** mutable_data() {
    // TODO:  Breaks C++ aliasing rules.  We should probably remove this
    //   method entirely.
    return reinterpret_cast<Value<TypeHandler>**>(raw_mutable_data());
  }

  template <typename TypeHandler>
  const Value<TypeHandler>* const* data() const {
    // TODO:  Breaks C++ aliasing rules.  We should probably remove this
    //   method entirely.
    return reinterpret_cast<const Value<TypeHandler>* const*>(raw_data());
  }

  template <typename TypeHandler>
  PROTOBUF_NDEBUG_INLINE void Swap(RepeatedPtrFieldBase* other) {
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetArena() != nullptr && GetArena() == other->GetArena())
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetArena() == other->GetArena())
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
    {
      InternalSwap(other);
    } else {
      SwapFallback<TypeHandler>(other);
    }
  }

  void SwapElements(int index1, int index2) {
    using std::swap;  // enable ADL with fallback
    swap(element_at(index1), element_at(index2));
  }

  template <typename TypeHandler>
  PROTOBUF_NOINLINE size_t SpaceUsedExcludingSelfLong() const {
    size_t allocated_bytes =
        using_sso()
            ? 0
            : static_cast<size_t>(total_size_) * sizeof(void*) + kRepHeaderSize;
    const int n = allocated_size();
    void* const* elems = elements();
    for (int i = 0; i < n; ++i) {
      allocated_bytes +=
          TypeHandler::SpaceUsedLong(*cast<TypeHandler>(elems[i]));
    }
    return allocated_bytes;
  }

  // Advanced memory management --------------------------------------

  // Like Add(), but if there are no cleared objects to use, returns nullptr.
  template <typename TypeHandler>
  Value<TypeHandler>* AddFromCleared() {
    if (current_size_ < allocated_size()) {
      return cast<TypeHandler>(
          element_at(ExchangeCurrentSize(current_size_ + 1)));
    } else {
      return nullptr;
    }
  }

  template <typename TypeHandler>
  void AddAllocated(Value<TypeHandler>* value) {
    typename TypeImplementsMergeBehavior<Value<TypeHandler>>::type t;
    AddAllocatedInternal<TypeHandler>(value, t);
  }

  template <typename TypeHandler>
  void UnsafeArenaAddAllocated(Value<TypeHandler>* value) {
    // Make room for the new pointer.
    if (current_size_ == total_size_) {
      // The array is completely full with no cleared objects, so grow it.
      Reserve(total_size_ + 1);
      ++rep()->allocated_size;
    } else if (allocated_size() == total_size_) {
      // There is no more space in the pointer array because it contains some
      // cleared objects awaiting reuse.  We don't want to grow the array in
      // this case because otherwise a loop calling AddAllocated() followed by
      // Clear() would leak memory.
      using H = CommonHandler<TypeHandler>;
      Delete<H>(element_at(current_size_), arena_);
    } else if (current_size_ < allocated_size()) {
      // We have some cleared objects.  We don't care about their order, so we
      // can just move the first one to the end to make space.
      element_at(allocated_size()) = element_at(current_size_);
      ++rep()->allocated_size;
    } else {
      // There are no cleared objects.
      if (!using_sso()) ++rep()->allocated_size;
    }

    element_at(ExchangeCurrentSize(current_size_ + 1)) = value;
  }

  template <typename TypeHandler>
  PROTOBUF_NODISCARD Value<TypeHandler>* ReleaseLast() {
    typename TypeImplementsMergeBehavior<Value<TypeHandler>>::type t;
    return ReleaseLastInternal<TypeHandler>(t);
  }

  // Releases and returns the last element, but does not do out-of-arena copy.
  // Instead, just returns the raw pointer to the contained element in the
  // arena.
  template <typename TypeHandler>
  Value<TypeHandler>* UnsafeArenaReleaseLast() {
    ABSL_DCHECK_GT(current_size_, 0);
    ExchangeCurrentSize(current_size_ - 1);
    auto* result = cast<TypeHandler>(element_at(current_size_));
    if (using_sso()) {
      tagged_rep_or_elem_ = nullptr;
    } else {
      --rep()->allocated_size;
      if (current_size_ < allocated_size()) {
        // There are cleared elements on the end; replace the removed element
        // with the last allocated element.
        element_at(current_size_) = element_at(allocated_size());
      }
    }
    return result;
  }

  int ClearedCount() const { return allocated_size() - current_size_; }

  template <typename TypeHandler>
  void AddCleared(Value<TypeHandler>* value) {
    ABSL_DCHECK(GetArena() == nullptr) << "AddCleared() can only be used on a "
                                          "RepeatedPtrField not on an arena.";
    ABSL_DCHECK(TypeHandler::GetArena(value) == nullptr)
        << "AddCleared() can only accept values not on an arena.";
    MaybeExtend();
    if (using_sso()) {
      tagged_rep_or_elem_ = value;
    } else {
      element_at(rep()->allocated_size++) = value;
    }
  }

  template <typename TypeHandler>
  PROTOBUF_NODISCARD Value<TypeHandler>* ReleaseCleared() {
    ABSL_DCHECK(GetArena() == nullptr)
        << "ReleaseCleared() can only be used on a RepeatedPtrField not on "
        << "an arena.";
    ABSL_DCHECK(tagged_rep_or_elem_ != nullptr);
    ABSL_DCHECK_GT(allocated_size(), current_size_);

    if (using_sso()) {
      auto* result = cast<TypeHandler>(tagged_rep_or_elem_);
      tagged_rep_or_elem_ = nullptr;
      return result;
    } else {
      return cast<TypeHandler>(element_at(--rep()->allocated_size));
    }
  }

  // AddAllocated version that implements arena-safe copying behavior.
  template <typename TypeHandler>
  void AddAllocatedInternal(Value<TypeHandler>* value, std::true_type) {
    Arena* element_arena = TypeHandler::GetArena(value);
    Arena* arena = GetArena();
    if (arena == element_arena && allocated_size() < total_size_) {
      // Fast path: underlying arena representation (tagged pointer) is equal
      // to our arena pointer, and we can add to array without resizing it (at
      // least one slot that is not allocated).
      void** elems = elements();
      if (current_size_ < allocated_size()) {
        // Make space at [current] by moving first allocated element to end of
        // allocated list.
        elems[allocated_size()] = elems[current_size_];
      }
      elems[ExchangeCurrentSize(current_size_ + 1)] = value;
      if (!using_sso()) ++rep()->allocated_size;
    } else {
      AddAllocatedSlowWithCopy<TypeHandler>(value, element_arena, arena);
    }
  }

  // AddAllocated version that does not implement arena-safe copying behavior.
  template <typename TypeHandler>
  void AddAllocatedInternal(Value<TypeHandler>* value, std::false_type) {
    if (allocated_size() < total_size_) {
      // Fast path: underlying arena representation (tagged pointer) is equal
      // to our arena pointer, and we can add to array without resizing it (at
      // least one slot that is not allocated).
      void** elems = elements();
      if (current_size_ < allocated_size()) {
        // Make space at [current] by moving first allocated element to end of
        // allocated list.
        elems[allocated_size()] = elems[current_size_];
      }
      elems[ExchangeCurrentSize(current_size_ + 1)] = value;
      if (!using_sso()) ++rep()->allocated_size;
    } else {
      UnsafeArenaAddAllocated<TypeHandler>(value);
    }
  }

  // Slowpath handles all cases, copying if necessary.
  template <typename TypeHandler>
  PROTOBUF_NOINLINE void AddAllocatedSlowWithCopy(
      // Pass value_arena and my_arena to avoid duplicate virtual call (value)
      // or load (mine).
      Value<TypeHandler>* value, Arena* value_arena, Arena* my_arena) {
    // Ensure that either the value is in the same arena, or if not, we do the
    // appropriate thing: Own() it (if it's on heap and we're in an arena) or
    // copy it to our arena/heap (otherwise).
    if (my_arena != nullptr && value_arena == nullptr) {
      my_arena->Own(value);
    } else if (my_arena != value_arena) {
      auto* new_value = TypeHandler::NewFromPrototype(value, my_arena);
      using H = CommonHandler<TypeHandler>;
      H::Merge(*value, new_value);
      H::Delete(value, value_arena);
      value = new_value;
    }

    UnsafeArenaAddAllocated<TypeHandler>(value);
  }

  template <typename TypeHandler>
  Value<TypeHandler>* ReleaseLastInternal(std::true_type) {
    // ReleaseLast() for types that implement merge/copy behavior.
    // First, release an element.
    Value<TypeHandler>* result = UnsafeArenaReleaseLast<TypeHandler>();
    // Now perform a copy if we're on an arena.
    Arena* arena = GetArena();

#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
    auto* new_result = copy<TypeHandler>(result);
    if (arena == nullptr) delete result;
#else   // PROTOBUF_FORCE_COPY_IN_RELEASE
    auto* new_result = (arena == nullptr) ? result : copy<TypeHandler>(result);
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE

    return new_result;
  }

  template <typename TypeHandler>
  Value<TypeHandler>* ReleaseLastInternal(std::false_type) {
    // ReleaseLast() for types that *do not* implement merge/copy behavior --
    // this is the same as UnsafeArenaReleaseLast(). Note that we
    // ABSL_DCHECK-fail if we're on an arena, since the user really should
    // implement the copy operation in this case.
    ABSL_DCHECK(GetArena() == nullptr)
        << "ReleaseLast() called on a RepeatedPtrField that is on an arena, "
        << "with a type that does not implement MergeFrom. This is unsafe; "
        << "please implement MergeFrom for your type.";
    return UnsafeArenaReleaseLast<TypeHandler>();
  }

  template <typename TypeHandler>
  PROTOBUF_NOINLINE void SwapFallback(RepeatedPtrFieldBase* other) {
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    ABSL_DCHECK(GetArena() == nullptr || other->GetArena() != GetArena());
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
    ABSL_DCHECK(other->GetArena() != GetArena());
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP

    // Copy semantics in this case. We try to improve efficiency by placing the
    // temporary on |other|'s arena so that messages are copied twice rather
    // than three times.
    RepeatedPtrFieldBase temp(other->GetArena());
    if (!this->empty()) {
      temp.MergeFrom<typename TypeHandler::Type>(*this);
    }
    this->CopyFrom<TypeHandler>(*other);
    other->InternalSwap(&temp);
    temp.Destroy<TypeHandler>();  // Frees rep_ if `other` had no arena.
  }

  // Gets the Arena on which this RepeatedPtrField stores its elements.
  inline Arena* GetArena() const { return arena_; }

  inline Arena* GetOwningArena() const { return arena_; }

 private:
  using InternalArenaConstructable_ = void;
  using DestructorSkippable_ = void;

  template <typename T>
  friend class Arena::InternalHelper;

  // ExtensionSet stores repeated message extensions as
  // RepeatedPtrField<MessageLite>, but non-lite ExtensionSets need to
  // implement SpaceUsedLong(), and thus need to call
  // SpaceUsedExcludingSelfLong() reinterpreting MessageLite as Message.
  // ExtensionSet also needs to make use of AddFromCleared(), which is not
  // part of the public interface.
  friend class ExtensionSet;

  // The MapFieldBase implementation needs to call protected methods directly,
  // reinterpreting pointers as being to Message instead of a specific Message
  // subclass.
  friend class MapFieldBase;
  friend struct MapFieldTestPeer;

  // The table-driven MergePartialFromCodedStream implementation needs to
  // operate on RepeatedPtrField<MessageLite>.
  friend class MergePartialFromCodedStreamHelper;

  friend class AccessorHelper;

  template <typename T>
  friend struct google::protobuf::WeakRepeatedPtrField;

  friend class internal::TcParser;  // TODO: Remove this friend.

  // Expose offset of `arena_` without exposing the member itself.
  // Used to optimize code size of `InternalSwap` method.
  template <typename T>
  friend struct ArenaOffsetHelper;

  // The reflection implementation needs to call protected methods directly,
  // reinterpreting pointers as being to Message instead of a specific Message
  // subclass.
  friend class google::protobuf::Reflection;
  friend class internal::SwapFieldHelper;

  // Concrete Arena enabled copy function used to copy messages instances.
  // This follows the `Arena::CreateMaybeMessage` signature so that the
  // compiler can have the inlined call into the out of line copy function(s)
  // simply pass the address of `Arena::CreateMaybeMessage` 'as is'.
  using CopyFn = MessageLite* (*)(Arena*, const MessageLite&);

  struct Rep {
    int allocated_size;
    // Here we declare a huge array as a way of approximating C's "flexible
    // array member" feature without relying on undefined behavior.
    void* elements[(std::numeric_limits<int>::max() - 2 * sizeof(int)) /
                   sizeof(void*)];
  };

  static constexpr size_t kRepHeaderSize = offsetof(Rep, elements);

  // Replaces current_size_ with new_size and returns the previous value of
  // current_size_. This function is intended to be the only place where
  // current_size_ is modified.
  inline int ExchangeCurrentSize(int new_size) {
    return std::exchange(current_size_, new_size);
  }

  void* const* elements() const {
    return using_sso() ? &tagged_rep_or_elem_ : +rep()->elements;
  }
  void** elements() {
    return using_sso() ? &tagged_rep_or_elem_ : +rep()->elements;
  }

  void*& element_at(int index) {
    if (using_sso()) {
      ABSL_DCHECK_EQ(index, 0);
      return tagged_rep_or_elem_;
    }
    return rep()->elements[index];
  }
  const void* element_at(int index) const {
    return const_cast<RepeatedPtrFieldBase*>(this)->element_at(index);
  }

  int allocated_size() const {
    return using_sso() ? (tagged_rep_or_elem_ != nullptr ? 1 : 0)
                       : rep()->allocated_size;
  }
  Rep* rep() {
    ABSL_DCHECK(!using_sso());
    // The Rep lives one byte below the tagged pointer (tag bit is 1).
    return reinterpret_cast<Rep*>(
        reinterpret_cast<uintptr_t>(tagged_rep_or_elem_) - 1);
  }
  const Rep* rep() const {
    return const_cast<RepeatedPtrFieldBase*>(this)->rep();
  }

  bool using_sso() const {
    return (reinterpret_cast<uintptr_t>(tagged_rep_or_elem_) & 1) == 0;
  }

  template <typename TypeHandler>
  static inline Value<TypeHandler>* cast(void* element) {
    return reinterpret_cast<Value<TypeHandler>*>(element);
  }
  template <typename TypeHandler>
  static inline const Value<TypeHandler>* cast(const void* element) {
    return reinterpret_cast<const Value<TypeHandler>*>(element);
  }
  template <typename TypeHandler>
  static inline void Delete(void* obj, Arena* arena) {
    TypeHandler::Delete(cast<TypeHandler>(obj), arena);
  }

  // Out-of-line helper routine for Clear() once the inlined check has
  // determined the container is non-empty
  template <typename TypeHandler>
  PROTOBUF_NOINLINE void ClearNonEmpty() {
    const int n = current_size_;
    void* const* elems = elements();
    int i = 0;
    ABSL_DCHECK_GT(n, 0);
    // do/while loop to avoid initial test because we know n > 0
    do {
      TypeHandler::Clear(cast<TypeHandler>(elems[i++]));
    } while (i < n);
    ExchangeCurrentSize(0);
  }

  // Merges messages from `from` into available, cleared messages sitting in
  // the range `[size(), allocated_size())`. Returns the number of message
  // merged which is `ClearedCount(), from.size())`.
  // Note that this function does explicitly NOT update `current_size_`.
  // This function is out of line as it should be the slow path: this scenario
  // only happens when a caller constructs and fills a repeated field, then
  // shrinks it, and then merges additional messages into it.
  int MergeIntoClearedMessages(const RepeatedPtrFieldBase& from);

  // Appends all messages from `from` to this instance, using the
  // provided `copy_fn` copy function to copy existing messages.
  void MergeFromConcreteMessage(const RepeatedPtrFieldBase& from,
                                CopyFn copy_fn);

  // Extends capacity by at least |extend_amount|.
  //
  // Pre-condition: |extend_amount| must be > 0.
  void** InternalExtend(int extend_amount);

  // Ensures that capacity is big enough to store one more allocated element.
  inline void MaybeExtend() {
    if (using_sso() ? (tagged_rep_or_elem_ != nullptr)
                    : (rep()->allocated_size == total_size_)) {
      ABSL_DCHECK_EQ(allocated_size(), Capacity());
      InternalExtend(1);
    } else {
      ABSL_DCHECK_NE(allocated_size(), Capacity());
    }
  }

  // Ensures that capacity is at least `n` elements.
  // Returns a pointer to the element directly beyond the last element.
  inline void** InternalReserve(int n) {
    if (n <= total_size_) {
      void** elements = using_sso() ? &tagged_rep_or_elem_ : rep()->elements;
      return elements + current_size_;
    }
    return InternalExtend(n - total_size_);
  }

  // Internal helper for Add: adds "obj" as the next element in the
  // array, including potentially resizing the array with Reserve if
  // needed
  void* AddOutOfLineHelper(void* obj);
  void* AddOutOfLineHelper(ElementFactory factory);

  // A few notes on internal representation:
  //
  // We use an indirected approach, with struct Rep, to keep
  // sizeof(RepeatedPtrFieldBase) equivalent to what it was before arena
  // support was added; namely, 3 8-byte machine words on x86-64. An instance
  // of Rep is allocated only when the repeated field is non-empty, and it is
  // a dynamically-sized struct (the header is directly followed by
  // elements[]). We place arena_ and current_size_ directly in the object to
  // avoid cache misses due to the indirection, because these fields are
  // checked frequently. Placing all fields directly in the
  // RepeatedPtrFieldBase instance would cost significant performance for
  // memory-sensitive workloads.
  void* tagged_rep_or_elem_;
  int current_size_;
  int total_size_;
  Arena* arena_;
};

// Appends all message values from `from` to this instance using the abstract
// message interface. This overload is used in places like reflection and
// other locations where the underlying type is unavailable
template <>
void RepeatedPtrFieldBase::MergeFrom<MessageLite>(
    const RepeatedPtrFieldBase& from);

template <>
inline void RepeatedPtrFieldBase::MergeFrom<Message>(
    const RepeatedPtrFieldBase& from) {
  return MergeFrom<MessageLite>(from);
}

// Appends all `std::string` values from `from` to this instance.
template <>
void RepeatedPtrFieldBase::MergeFrom<std::string>(
    const RepeatedPtrFieldBase& from);

PROTOBUF_EXPORT void InternalOutOfLineDeleteMessageLite(MessageLite* message);

template <typename GenericType>
class GenericTypeHandler {
 public:
  typedef GenericType Type;
  using Movable = IsMovable<GenericType>;

  static inline GenericType* New(Arena* arena) {
    return Arena::CreateMaybeMessage<Type>(arena);
  }
  static inline GenericType* New(Arena* arena, GenericType&& value) {
    return Arena::Create<GenericType>(arena, std::move(value));
  }
  static inline GenericType* NewFromPrototype(const GenericType* /*prototype*/,
                                              Arena* arena = nullptr) {
    return New(arena);
  }
  static inline void Delete(GenericType* value, Arena* arena) {
    if (arena != nullptr) return;
#ifdef __cpp_if_constexpr
    if constexpr (std::is_base_of<MessageLite, GenericType>::value) {
      // Using virtual destructor to reduce generated code size that would have
      // happened otherwise due to inlined `~GenericType`.
      InternalOutOfLineDeleteMessageLite(value);
    } else {
      delete value;
    }
#else
    delete value;
#endif
  }
  static inline Arena* GetArena(GenericType* value) {
    return Arena::InternalGetArena(value);
  }

  static inline void Clear(GenericType* value) { value->Clear(); }
  static void Merge(const GenericType& from, GenericType* to);
  static inline size_t SpaceUsedLong(const GenericType& value) {
    return value.SpaceUsedLong();
  }
};

// NewFromPrototypeHelper() is not defined inline here, as we will need to do a
// virtual function dispatch anyways to go from Message* to call New/Merge.
// (The additional helper is needed as a workaround for MSVC.)
PROTOBUF_EXPORT MessageLite* NewFromPrototypeHelper(
    const MessageLite* prototype, Arena* arena);

template <>
inline MessageLite* GenericTypeHandler<MessageLite>::NewFromPrototype(
    const MessageLite* prototype, Arena* arena) {
  return NewFromPrototypeHelper(prototype, arena);
}
template <>
inline Arena* GenericTypeHandler<MessageLite>::GetArena(MessageLite* value) {
  return value->GetArena();
}

template <typename GenericType>
PROTOBUF_NOINLINE inline void GenericTypeHandler<GenericType>::Merge(
    const GenericType& from, GenericType* to) {
  to->MergeFrom(from);
}
template <>
PROTOBUF_EXPORT void GenericTypeHandler<MessageLite>::Merge(
    const MessageLite& from, MessageLite* to);

template <>
inline void GenericTypeHandler<std::string>::Clear(std::string* value) {
  value->clear();
}
template <>
void GenericTypeHandler<std::string>::Merge(const std::string& from,
                                            std::string* to);

// Message specialization bodies defined in message.cc. This split is necessary
// to allow proto2-lite (which includes this header) to be independent of
// Message.
template <> PROTOBUF_EXPORT Message* GenericTypeHandler::NewFromPrototype( const Message* prototype, Arena* arena); template <> PROTOBUF_EXPORT Arena* GenericTypeHandler::GetArena(Message* value); class StringTypeHandler { public: typedef std::string Type; using Movable = IsMovable; static PROTOBUF_NOINLINE std::string* New(Arena* arena) { return Arena::Create(arena); } static PROTOBUF_NOINLINE std::string* New(Arena* arena, std::string&& value) { return Arena::Create(arena, std::move(value)); } static inline std::string* NewFromPrototype(const std::string*, Arena* arena) { return New(arena); } static inline Arena* GetArena(std::string*) { return nullptr; } static inline void Delete(std::string* value, Arena* arena) { if (arena == nullptr) { delete value; } } static inline void Clear(std::string* value) { value->clear(); } static inline void Merge(const std::string& from, std::string* to) { *to = from; } static size_t SpaceUsedLong(const std::string& value) { return sizeof(value) + StringSpaceUsedExcludingSelfLong(value); } }; } // namespace internal // RepeatedPtrField is like RepeatedField, but used for repeated strings or // Messages. 
template class RepeatedPtrField final : private internal::RepeatedPtrFieldBase { static_assert(!std::is_const::value, "We do not support const value types."); static_assert(!std::is_volatile::value, "We do not support volatile value types."); static_assert(!std::is_pointer::value, "We do not support pointer value types."); static_assert(!std::is_reference::value, "We do not support reference value types."); static constexpr PROTOBUF_ALWAYS_INLINE void StaticValidityCheck() { static_assert( absl::disjunction< internal::is_supported_string_type, internal::is_supported_message_type>::value, "We only support string and Message types in RepeatedPtrField."); } public: using value_type = Element; using size_type = int; using difference_type = ptrdiff_t; using reference = Element&; using const_reference = const Element&; using pointer = Element*; using const_pointer = const Element*; using iterator = internal::RepeatedPtrIterator; using const_iterator = internal::RepeatedPtrIterator; using reverse_iterator = std::reverse_iterator; using const_reverse_iterator = std::reverse_iterator; // Custom STL-like iterator that iterates over and returns the underlying // pointers to Element rather than Element itself. using pointer_iterator = internal::RepeatedPtrOverPtrsIterator; using const_pointer_iterator = internal::RepeatedPtrOverPtrsIterator; constexpr RepeatedPtrField(); // Arena enabled constructors: for internal use only. 
RepeatedPtrField(internal::InternalVisibility, Arena* arena) : RepeatedPtrField(arena) {} RepeatedPtrField(internal::InternalVisibility, Arena* arena, const RepeatedPtrField& rhs) : RepeatedPtrField(arena, rhs) {} // TODO: make constructor private explicit RepeatedPtrField(Arena* arena); template ())>::value>::type> RepeatedPtrField(Iter begin, Iter end); RepeatedPtrField(const RepeatedPtrField& other) : RepeatedPtrField(nullptr, other) {} RepeatedPtrField& operator=(const RepeatedPtrField& other) ABSL_ATTRIBUTE_LIFETIME_BOUND; RepeatedPtrField(RepeatedPtrField&& other) noexcept; RepeatedPtrField& operator=(RepeatedPtrField&& other) noexcept ABSL_ATTRIBUTE_LIFETIME_BOUND; ~RepeatedPtrField(); bool empty() const; int size() const; const_reference Get(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND; pointer Mutable(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND; // Unlike std::vector, adding an element to a RepeatedPtrField doesn't always // make a new element; it might re-use an element left over from when the // field was Clear()'d or resize()'d smaller. For this reason, Add() is the // fastest API for adding a new element. pointer Add() ABSL_ATTRIBUTE_LIFETIME_BOUND; // `Add(std::move(value));` is equivalent to `*Add() = std::move(value);` // It will either move-construct to the end of this field, or swap value // with the new-or-recycled element at the end of this field. Note that // this operation is very slow if this RepeatedPtrField is not on the // same Arena, if any, as `value`. void Add(Element&& value); // Copying to the end of this RepeatedPtrField is slowest of all; it can't // reliably copy-construct to the last element of this RepeatedPtrField, for // example (unlike std::vector). // We currently block this API. The right way to add to the end is to call // Add() and modify the element it points to. 
// If you must add an existing value, call `*Add() = value;` void Add(const Element& value) = delete; // Append elements in the range [begin, end) after reserving // the appropriate number of elements. template void Add(Iter begin, Iter end); const_reference operator[](int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND { return Get(index); } reference operator[](int index) ABSL_ATTRIBUTE_LIFETIME_BOUND { return *Mutable(index); } const_reference at(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND; reference at(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND; // Removes the last element in the array. // Ownership of the element is retained by the array. void RemoveLast(); // Deletes elements with indices in the range [start .. start+num-1]. // Caution: moves all elements with indices [start+num .. ]. // Calling this routine inside a loop can cause quadratic behavior. void DeleteSubrange(int start, int num); PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear(); void MergeFrom(const RepeatedPtrField& other); PROTOBUF_ATTRIBUTE_REINITIALIZES void CopyFrom(const RepeatedPtrField& other); // Replaces the contents with RepeatedPtrField(begin, end). template PROTOBUF_ATTRIBUTE_REINITIALIZES void Assign(Iter begin, Iter end); // Reserves space to expand the field to at least the given size. This only // resizes the pointer array; it doesn't allocate any objects. If the // array is grown, it will always be at least doubled in size. void Reserve(int new_size); int Capacity() const; // Gets the underlying array. This pointer is possibly invalidated by // any add or remove operation. Element** mutable_data() ABSL_ATTRIBUTE_LIFETIME_BOUND; const Element* const* data() const ABSL_ATTRIBUTE_LIFETIME_BOUND; // Swaps entire contents with "other". If they are on separate arenas, then // copies data. void Swap(RepeatedPtrField* other); // Swaps entire contents with "other". Caller should guarantee that either // both fields are on the same arena or both are on the heap. 
Swapping between // different arenas with this function is disallowed and is caught via // ABSL_DCHECK. void UnsafeArenaSwap(RepeatedPtrField* other); // Swaps two elements. void SwapElements(int index1, int index2); iterator begin() ABSL_ATTRIBUTE_LIFETIME_BOUND; const_iterator begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND; const_iterator cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND; iterator end() ABSL_ATTRIBUTE_LIFETIME_BOUND; const_iterator end() const ABSL_ATTRIBUTE_LIFETIME_BOUND; const_iterator cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND; reverse_iterator rbegin() ABSL_ATTRIBUTE_LIFETIME_BOUND { return reverse_iterator(end()); } const_reverse_iterator rbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return const_reverse_iterator(end()); } reverse_iterator rend() ABSL_ATTRIBUTE_LIFETIME_BOUND { return reverse_iterator(begin()); } const_reverse_iterator rend() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return const_reverse_iterator(begin()); } pointer_iterator pointer_begin() ABSL_ATTRIBUTE_LIFETIME_BOUND; const_pointer_iterator pointer_begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND; pointer_iterator pointer_end() ABSL_ATTRIBUTE_LIFETIME_BOUND; const_pointer_iterator pointer_end() const ABSL_ATTRIBUTE_LIFETIME_BOUND; // Returns (an estimate of) the number of bytes used by the repeated field, // excluding sizeof(*this). size_t SpaceUsedExcludingSelfLong() const; int SpaceUsedExcludingSelf() const { return internal::ToIntSize(SpaceUsedExcludingSelfLong()); } // Advanced memory management -------------------------------------- // When hardcore memory management becomes necessary -- as it sometimes // does here at Google -- the following methods may be useful. // Adds an already-allocated object, passing ownership to the // RepeatedPtrField. 
// // Note that some special behavior occurs with respect to arenas: // // (i) if this field holds submessages, the new submessage will be copied if // the original is in an arena and this RepeatedPtrField is either in a // different arena, or on the heap. // (ii) if this field holds strings, the passed-in string *must* be // heap-allocated, not arena-allocated. There is no way to dynamically check // this at runtime, so User Beware. void AddAllocated(Element* value); // Removes and returns the last element, passing ownership to the caller. // Requires: size() > 0 // // If this RepeatedPtrField is on an arena, an object copy is required to pass // ownership back to the user (for compatible semantics). Use // UnsafeArenaReleaseLast() if this behavior is undesired. PROTOBUF_NODISCARD Element* ReleaseLast(); // Adds an already-allocated object, skipping arena-ownership checks. The user // must guarantee that the given object is in the same arena as this // RepeatedPtrField. // It is also useful in legacy code that uses temporary ownership to avoid // copies. Example: // RepeatedPtrField temp_field; // temp_field.UnsafeArenaAddAllocated(new T); // ... // Do something with temp_field // temp_field.UnsafeArenaExtractSubrange(0, temp_field.size(), nullptr); // If you put temp_field on the arena this fails, because the ownership // transfers to the arena at the "AddAllocated" call and is not released // anymore, causing a double delete. UnsafeArenaAddAllocated prevents this. void UnsafeArenaAddAllocated(Element* value); // Removes and returns the last element. Unlike ReleaseLast, the returned // pointer is always to the original object. This may be in an arena, in // which case it would have the arena's lifetime. // Requires: current_size_ > 0 pointer UnsafeArenaReleaseLast(); // Extracts elements with indices in the range "[start .. start+num-1]". 
// The caller assumes ownership of the extracted elements and is responsible // for deleting them when they are no longer needed. // If "elements" is non-nullptr, then pointers to the extracted elements // are stored in "elements[0 .. num-1]" for the convenience of the caller. // If "elements" is nullptr, then the caller must use some other mechanism // to perform any further operations (like deletion) on these elements. // Caution: implementation also moves elements with indices [start+num ..]. // Calling this routine inside a loop can cause quadratic behavior. // // Memory copying behavior is identical to ReleaseLast(), described above: if // this RepeatedPtrField is on an arena, an object copy is performed for each // returned element, so that all returned element pointers are to // heap-allocated copies. If this copy is not desired, the user should call // UnsafeArenaExtractSubrange(). void ExtractSubrange(int start, int num, Element** elements); // Identical to ExtractSubrange() described above, except that no object // copies are ever performed. Instead, the raw object pointers are returned. // Thus, if on an arena, the returned objects must not be freed, because they // will not be heap-allocated objects. void UnsafeArenaExtractSubrange(int start, int num, Element** elements); // When elements are removed by calls to RemoveLast() or Clear(), they // are not actually freed. Instead, they are cleared and kept so that // they can be reused later. This can save lots of CPU time when // repeatedly reusing a protocol message for similar purposes. // // Hardcore programs may choose to manipulate these cleared objects // to better optimize memory management using the following routines. // Gets the number of cleared objects that are currently being kept // around for reuse. 
ABSL_DEPRECATED("This will be removed in a future release") int ClearedCount() const; #ifndef PROTOBUF_FUTURE_REMOVE_CLEARED_API // Adds an element to the pool of cleared objects, passing ownership to // the RepeatedPtrField. The element must be cleared prior to calling // this method. // // This method cannot be called when either the repeated field or |value| is // on an arena; both cases will trigger a ABSL_DCHECK-failure. ABSL_DEPRECATED("This will be removed in a future release") void AddCleared(Element* value); // Removes and returns a single element from the cleared pool, passing // ownership to the caller. The element is guaranteed to be cleared. // Requires: ClearedCount() > 0 // // This method cannot be called when the repeated field is on an arena; doing // so will trigger a ABSL_DCHECK-failure. PROTOBUF_NODISCARD ABSL_DEPRECATED("This will be removed in a future release") pointer ReleaseCleared(); #endif // !PROTOBUF_FUTURE_REMOVE_CLEARED_API // Removes the element referenced by position. // // Returns an iterator to the element immediately following the removed // element. // // Invalidates all iterators at or after the removed element, including end(). iterator erase(const_iterator position) ABSL_ATTRIBUTE_LIFETIME_BOUND; // Removes the elements in the range [first, last). // // Returns an iterator to the element immediately following the removed range. // // Invalidates all iterators at or after the removed range, including end(). iterator erase(const_iterator first, const_iterator last) ABSL_ATTRIBUTE_LIFETIME_BOUND; // Gets the arena on which this RepeatedPtrField stores its elements. inline Arena* GetArena(); #ifndef PROTOBUF_FUTURE_REMOVE_CONST_REPEATEDFIELD_GETARENA_API ABSL_DEPRECATED("This will be removed in a future release") inline Arena* GetArena() const; #endif // !PROTOBUF_FUTURE_REMOVE_CONST_REPEATEDFIELD_GETARENA_API // For internal use only. // // This is public due to it being called by generated code. 
void InternalSwap(RepeatedPtrField* PROTOBUF_RESTRICT other) { internal::RepeatedPtrFieldBase::InternalSwap(other); } private: using InternalArenaConstructable_ = void; using DestructorSkippable_ = void; friend class Arena; friend class internal::TcParser; template friend struct WeakRepeatedPtrField; // Note: RepeatedPtrField SHOULD NOT be subclassed by users. class TypeHandler; RepeatedPtrField(Arena* arena, const RepeatedPtrField& rhs); // Internal version of GetArena(). inline Arena* GetOwningArena() const; // Implementations for ExtractSubrange(). The copying behavior must be // included only if the type supports the necessary operations (e.g., // MergeFrom()), so we must resolve this at compile time. ExtractSubrange() // uses SFINAE to choose one of the below implementations. void ExtractSubrangeInternal(int start, int num, Element** elements, std::true_type); void ExtractSubrangeInternal(int start, int num, Element** elements, std::false_type); void AddAllocatedForParse(Element* p) { return RepeatedPtrFieldBase::AddAllocatedForParse(p); } }; // ------------------------------------------------------------------- template class RepeatedPtrField::TypeHandler : public internal::GenericTypeHandler {}; template <> class RepeatedPtrField::TypeHandler : public internal::StringTypeHandler {}; template constexpr RepeatedPtrField::RepeatedPtrField() : RepeatedPtrFieldBase() { StaticValidityCheck(); } template inline RepeatedPtrField::RepeatedPtrField(Arena* arena) : RepeatedPtrFieldBase(arena) { // We can't have StaticValidityCheck here because that requires Element to be // a complete type, and in split repeated fields cases, we call // CreateMaybeMessage> for incomplete Ts. 
} template inline RepeatedPtrField::RepeatedPtrField(Arena* arena, const RepeatedPtrField& rhs) : RepeatedPtrFieldBase(arena) { StaticValidityCheck(); MergeFrom(rhs); } template template inline RepeatedPtrField::RepeatedPtrField(Iter begin, Iter end) { StaticValidityCheck(); Add(begin, end); } template RepeatedPtrField::~RepeatedPtrField() { StaticValidityCheck(); #ifdef __cpp_if_constexpr if constexpr (std::is_base_of::value) { #else if (std::is_base_of::value) { #endif if (NeedsDestroy()) DestroyProtos(); } else { Destroy(); } } template inline RepeatedPtrField& RepeatedPtrField::operator=( const RepeatedPtrField& other) ABSL_ATTRIBUTE_LIFETIME_BOUND { if (this != &other) CopyFrom(other); return *this; } template inline RepeatedPtrField::RepeatedPtrField( RepeatedPtrField&& other) noexcept : RepeatedPtrField() { #ifdef PROTOBUF_FORCE_COPY_IN_MOVE CopyFrom(other); #else // PROTOBUF_FORCE_COPY_IN_MOVE // We don't just call Swap(&other) here because it would perform 3 copies if // other is on an arena. This field can't be on an arena because arena // construction always uses the Arena* accepting constructor. if (other.GetArena()) { CopyFrom(other); } else { InternalSwap(&other); } #endif // !PROTOBUF_FORCE_COPY_IN_MOVE } template inline RepeatedPtrField& RepeatedPtrField::operator=( RepeatedPtrField&& other) noexcept ABSL_ATTRIBUTE_LIFETIME_BOUND { // We don't just call Swap(&other) here because it would perform 3 copies if // the two fields are on different arenas. 
if (this != &other) { if (GetArena() != other.GetArena() #ifdef PROTOBUF_FORCE_COPY_IN_MOVE || GetArena() == nullptr #endif // !PROTOBUF_FORCE_COPY_IN_MOVE ) { CopyFrom(other); } else { InternalSwap(&other); } } return *this; } template inline bool RepeatedPtrField::empty() const { return RepeatedPtrFieldBase::empty(); } template inline int RepeatedPtrField::size() const { return RepeatedPtrFieldBase::size(); } template inline const Element& RepeatedPtrField::Get(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND { return RepeatedPtrFieldBase::Get(index); } template inline const Element& RepeatedPtrField::at(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND { return RepeatedPtrFieldBase::at(index); } template inline Element& RepeatedPtrField::at(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND { return RepeatedPtrFieldBase::at(index); } template inline Element* RepeatedPtrField::Mutable(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND { return RepeatedPtrFieldBase::Mutable(index); } template PROTOBUF_NOINLINE Element* RepeatedPtrField::Add() ABSL_ATTRIBUTE_LIFETIME_BOUND { return RepeatedPtrFieldBase::Add(); } template inline void RepeatedPtrField::Add(Element&& value) { RepeatedPtrFieldBase::Add(std::move(value)); } template template inline void RepeatedPtrField::Add(Iter begin, Iter end) { if (std::is_base_of< std::forward_iterator_tag, typename std::iterator_traits::iterator_category>::value) { int reserve = static_cast(std::distance(begin, end)); Reserve(size() + reserve); } for (; begin != end; ++begin) { *Add() = *begin; } } template inline void RepeatedPtrField::RemoveLast() { RepeatedPtrFieldBase::RemoveLast(); } template inline void RepeatedPtrField::DeleteSubrange(int start, int num) { ABSL_DCHECK_GE(start, 0); ABSL_DCHECK_GE(num, 0); ABSL_DCHECK_LE(start + num, size()); void** subrange = raw_mutable_data() + start; Arena* arena = GetArena(); for (int i = 0; i < num; ++i) { using H = CommonHandler; H::Delete(static_cast(subrange[i]), arena); } UnsafeArenaExtractSubrange(start, 
num, nullptr); } template inline void RepeatedPtrField::ExtractSubrange(int start, int num, Element** elements) { typename internal::TypeImplementsMergeBehavior< typename TypeHandler::Type>::type t; ExtractSubrangeInternal(start, num, elements, t); } // ExtractSubrange() implementation for types that implement merge/copy // behavior. template inline void RepeatedPtrField::ExtractSubrangeInternal( int start, int num, Element** elements, std::true_type) { ABSL_DCHECK_GE(start, 0); ABSL_DCHECK_GE(num, 0); ABSL_DCHECK_LE(start + num, size()); if (num == 0) return; ABSL_DCHECK_NE(elements, nullptr) << "Releasing elements without transferring ownership is an unsafe " "operation. Use UnsafeArenaExtractSubrange."; if (elements != nullptr) { Arena* arena = GetArena(); auto* extracted = data() + start; #ifdef PROTOBUF_FORCE_COPY_IN_RELEASE // Always copy. for (int i = 0; i < num; ++i) { elements[i] = copy(extracted[i]); } if (arena == nullptr) { for (int i = 0; i < num; ++i) { delete extracted[i]; } } #else // PROTOBUF_FORCE_COPY_IN_RELEASE // If we're on an arena, we perform a copy for each element so that the // returned elements are heap-allocated. Otherwise, just forward it. if (arena != nullptr) { for (int i = 0; i < num; ++i) { elements[i] = copy(extracted[i]); } } else { memcpy(elements, extracted, num * sizeof(Element*)); } #endif // !PROTOBUF_FORCE_COPY_IN_RELEASE } CloseGap(start, num); } // ExtractSubrange() implementation for types that do not implement merge/copy // behavior. template inline void RepeatedPtrField::ExtractSubrangeInternal( int start, int num, Element** elements, std::false_type) { // This case is identical to UnsafeArenaExtractSubrange(). However, since // ExtractSubrange() must return heap-allocated objects by contract, and we // cannot fulfill this contract if we are an on arena, we must ABSL_DCHECK() // that we are not on an arena. 
ABSL_DCHECK(GetArena() == nullptr) << "ExtractSubrange() when arena is non-nullptr is only supported when " << "the Element type supplies a MergeFrom() operation to make copies."; UnsafeArenaExtractSubrange(start, num, elements); } template inline void RepeatedPtrField::UnsafeArenaExtractSubrange( int start, int num, Element** elements) { ABSL_DCHECK_GE(start, 0); ABSL_DCHECK_GE(num, 0); ABSL_DCHECK_LE(start + num, size()); if (num > 0) { // Save the values of the removed elements if requested. if (elements != nullptr) { memcpy(elements, data() + start, num * sizeof(Element*)); } CloseGap(start, num); } } template inline void RepeatedPtrField::Clear() { RepeatedPtrFieldBase::Clear(); } template inline void RepeatedPtrField::MergeFrom( const RepeatedPtrField& other) { if (other.empty()) return; RepeatedPtrFieldBase::MergeFrom(other); } template inline void RepeatedPtrField::CopyFrom(const RepeatedPtrField& other) { RepeatedPtrFieldBase::CopyFrom(other); } template template inline void RepeatedPtrField::Assign(Iter begin, Iter end) { Clear(); Add(begin, end); } template inline typename RepeatedPtrField::iterator RepeatedPtrField::erase(const_iterator position) ABSL_ATTRIBUTE_LIFETIME_BOUND { return erase(position, position + 1); } template inline typename RepeatedPtrField::iterator RepeatedPtrField::erase(const_iterator first, const_iterator last) ABSL_ATTRIBUTE_LIFETIME_BOUND { size_type pos_offset = static_cast(std::distance(cbegin(), first)); size_type last_offset = static_cast(std::distance(cbegin(), last)); DeleteSubrange(pos_offset, last_offset - pos_offset); return begin() + pos_offset; } template inline Element** RepeatedPtrField::mutable_data() ABSL_ATTRIBUTE_LIFETIME_BOUND { return RepeatedPtrFieldBase::mutable_data(); } template inline const Element* const* RepeatedPtrField::data() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return RepeatedPtrFieldBase::data(); } template inline void RepeatedPtrField::Swap(RepeatedPtrField* other) { if (this == other) return; 
RepeatedPtrFieldBase::Swap(other); } template inline void RepeatedPtrField::UnsafeArenaSwap( RepeatedPtrField* other) { if (this == other) return; ABSL_DCHECK_EQ(GetArena(), other->GetArena()); RepeatedPtrFieldBase::InternalSwap(other); } template inline void RepeatedPtrField::SwapElements(int index1, int index2) { RepeatedPtrFieldBase::SwapElements(index1, index2); } template inline Arena* RepeatedPtrField::GetArena() { return RepeatedPtrFieldBase::GetArena(); } #ifndef PROTOBUF_FUTURE_REMOVE_CONST_REPEATEDFIELD_GETARENA_API template inline Arena* RepeatedPtrField::GetArena() const { return RepeatedPtrFieldBase::GetArena(); } #endif // !PROTOBUF_FUTURE_REMOVE_CONST_REPEATEDFIELD_GETARENA_API template inline Arena* RepeatedPtrField::GetOwningArena() const { return RepeatedPtrFieldBase::GetArena(); } template inline size_t RepeatedPtrField::SpaceUsedExcludingSelfLong() const { // `google::protobuf::Message` has a virtual method `SpaceUsedLong`, hence we can // instantiate just one function for all protobuf messages. // Note: std::is_base_of requires that `Element` is a concrete class. 
using H = typename std::conditional::value, internal::GenericTypeHandler, TypeHandler>::type; return RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong(); } template inline void RepeatedPtrField::AddAllocated(Element* value) { RepeatedPtrFieldBase::AddAllocated(value); } template inline void RepeatedPtrField::UnsafeArenaAddAllocated(Element* value) { RepeatedPtrFieldBase::UnsafeArenaAddAllocated(value); } template inline Element* RepeatedPtrField::ReleaseLast() { return RepeatedPtrFieldBase::ReleaseLast(); } template inline Element* RepeatedPtrField::UnsafeArenaReleaseLast() { return RepeatedPtrFieldBase::UnsafeArenaReleaseLast(); } template inline int RepeatedPtrField::ClearedCount() const { return RepeatedPtrFieldBase::ClearedCount(); } #ifndef PROTOBUF_FUTURE_REMOVE_CLEARED_API template inline void RepeatedPtrField::AddCleared(Element* value) { return RepeatedPtrFieldBase::AddCleared(value); } template inline Element* RepeatedPtrField::ReleaseCleared() { return RepeatedPtrFieldBase::ReleaseCleared(); } #endif // !PROTOBUF_FUTURE_REMOVE_CLEARED_API template inline void RepeatedPtrField::Reserve(int new_size) { return RepeatedPtrFieldBase::Reserve(new_size); } template inline int RepeatedPtrField::Capacity() const { return RepeatedPtrFieldBase::Capacity(); } // ------------------------------------------------------------------- namespace internal { // STL-like iterator implementation for RepeatedPtrField. You should not // refer to this class directly; use RepeatedPtrField::iterator instead. // // The iterator for RepeatedPtrField, RepeatedPtrIterator, is // very similar to iterator_ptr in util/gtl/iterator_adaptors.h, // but adds random-access operators and is modified to wrap a void** base // iterator (since RepeatedPtrField stores its array as a void* array and // casting void** to T** would violate C++ aliasing rules). // // This code based on net/proto/proto-array-internal.h by Jeffrey Yasskin // (jyasskin@google.com). 
template class RepeatedPtrIterator { public: using iterator = RepeatedPtrIterator; using iterator_category = std::random_access_iterator_tag; using value_type = typename std::remove_const::type; using difference_type = std::ptrdiff_t; using pointer = Element*; using reference = Element&; RepeatedPtrIterator() : it_(nullptr) {} explicit RepeatedPtrIterator(void* const* it) : it_(it) {} // Allows "upcasting" from RepeatedPtrIterator to // RepeatedPtrIterator. template ::value>::type* = nullptr> RepeatedPtrIterator(const RepeatedPtrIterator& other) : it_(other.it_) {} // dereferenceable reference operator*() const { return *reinterpret_cast(*it_); } pointer operator->() const { return &(operator*()); } // {inc,dec}rementable iterator& operator++() { ++it_; return *this; } iterator operator++(int) { return iterator(it_++); } iterator& operator--() { --it_; return *this; } iterator operator--(int) { return iterator(it_--); } // equality_comparable friend bool operator==(const iterator& x, const iterator& y) { return x.it_ == y.it_; } friend bool operator!=(const iterator& x, const iterator& y) { return x.it_ != y.it_; } // less_than_comparable friend bool operator<(const iterator& x, const iterator& y) { return x.it_ < y.it_; } friend bool operator<=(const iterator& x, const iterator& y) { return x.it_ <= y.it_; } friend bool operator>(const iterator& x, const iterator& y) { return x.it_ > y.it_; } friend bool operator>=(const iterator& x, const iterator& y) { return x.it_ >= y.it_; } // addable, subtractable iterator& operator+=(difference_type d) { it_ += d; return *this; } friend iterator operator+(iterator it, const difference_type d) { it += d; return it; } friend iterator operator+(const difference_type d, iterator it) { it += d; return it; } iterator& operator-=(difference_type d) { it_ -= d; return *this; } friend iterator operator-(iterator it, difference_type d) { it -= d; return it; } // indexable reference operator[](difference_type d) const { return *(*this 
+ d); } // random access iterator friend difference_type operator-(iterator it1, iterator it2) { return it1.it_ - it2.it_; } private: template friend class RepeatedPtrIterator; // The internal iterator. void* const* it_; }; // Provides an iterator that operates on pointers to the underlying objects // rather than the objects themselves as RepeatedPtrIterator does. // Consider using this when working with stl algorithms that change // the array. // The VoidPtr template parameter holds the type-agnostic pointer value // referenced by the iterator. It should either be "void *" for a mutable // iterator, or "const void* const" for a constant iterator. template class RepeatedPtrOverPtrsIterator { public: using iterator = RepeatedPtrOverPtrsIterator; using iterator_category = std::random_access_iterator_tag; using value_type = typename std::remove_const::type; using difference_type = std::ptrdiff_t; using pointer = Element*; using reference = Element&; RepeatedPtrOverPtrsIterator() : it_(nullptr) {} explicit RepeatedPtrOverPtrsIterator(VoidPtr* it) : it_(it) {} // Allows "upcasting" from RepeatedPtrOverPtrsIterator to // RepeatedPtrOverPtrsIterator. 
template < typename OtherElement, typename OtherVoidPtr, typename std::enable_if< std::is_convertible::value && std::is_convertible::value>::type* = nullptr> RepeatedPtrOverPtrsIterator( const RepeatedPtrOverPtrsIterator& other) : it_(other.it_) {} // dereferenceable reference operator*() const { return *reinterpret_cast(it_); } pointer operator->() const { return &(operator*()); } // {inc,dec}rementable iterator& operator++() { ++it_; return *this; } iterator operator++(int) { return iterator(it_++); } iterator& operator--() { --it_; return *this; } iterator operator--(int) { return iterator(it_--); } // equality_comparable friend bool operator==(const iterator& x, const iterator& y) { return x.it_ == y.it_; } friend bool operator!=(const iterator& x, const iterator& y) { return x.it_ != y.it_; } // less_than_comparable friend bool operator<(const iterator& x, const iterator& y) { return x.it_ < y.it_; } friend bool operator<=(const iterator& x, const iterator& y) { return x.it_ <= y.it_; } friend bool operator>(const iterator& x, const iterator& y) { return x.it_ > y.it_; } friend bool operator>=(const iterator& x, const iterator& y) { return x.it_ >= y.it_; } // addable, subtractable iterator& operator+=(difference_type d) { it_ += d; return *this; } friend iterator operator+(iterator it, difference_type d) { it += d; return it; } friend iterator operator+(difference_type d, iterator it) { it += d; return it; } iterator& operator-=(difference_type d) { it_ -= d; return *this; } friend iterator operator-(iterator it, difference_type d) { it -= d; return it; } // indexable reference operator[](difference_type d) const { return *(*this + d); } // random access iterator friend difference_type operator-(iterator it1, iterator it2) { return it1.it_ - it2.it_; } private: template friend class RepeatedPtrOverPtrsIterator; // The internal iterator. 
VoidPtr* it_; }; } // namespace internal template inline typename RepeatedPtrField::iterator RepeatedPtrField::begin() ABSL_ATTRIBUTE_LIFETIME_BOUND { return iterator(raw_data()); } template inline typename RepeatedPtrField::const_iterator RepeatedPtrField::begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return iterator(raw_data()); } template inline typename RepeatedPtrField::const_iterator RepeatedPtrField::cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return begin(); } template inline typename RepeatedPtrField::iterator RepeatedPtrField::end() ABSL_ATTRIBUTE_LIFETIME_BOUND { return iterator(raw_data() + size()); } template inline typename RepeatedPtrField::const_iterator RepeatedPtrField::end() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return iterator(raw_data() + size()); } template inline typename RepeatedPtrField::const_iterator RepeatedPtrField::cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return end(); } template inline typename RepeatedPtrField::pointer_iterator RepeatedPtrField::pointer_begin() ABSL_ATTRIBUTE_LIFETIME_BOUND { return pointer_iterator(raw_mutable_data()); } template inline typename RepeatedPtrField::const_pointer_iterator RepeatedPtrField::pointer_begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return const_pointer_iterator(const_cast(raw_data())); } template inline typename RepeatedPtrField::pointer_iterator RepeatedPtrField::pointer_end() ABSL_ATTRIBUTE_LIFETIME_BOUND { return pointer_iterator(raw_mutable_data() + size()); } template inline typename RepeatedPtrField::const_pointer_iterator RepeatedPtrField::pointer_end() const ABSL_ATTRIBUTE_LIFETIME_BOUND { return const_pointer_iterator( const_cast(raw_data() + size())); } // Iterators and helper functions that follow the spirit of the STL // std::back_insert_iterator and std::back_inserter but are tailor-made // for RepeatedField and RepeatedPtrField. 
Typical usage would be: // // std::copy(some_sequence.begin(), some_sequence.end(), // RepeatedFieldBackInserter(proto.mutable_sequence())); // // Ported by johannes from util/gtl/proto-array-iterators.h namespace internal { // A back inserter for RepeatedPtrField objects. template class RepeatedPtrFieldBackInsertIterator { public: using iterator_category = std::output_iterator_tag; using value_type = T; using pointer = void; using reference = void; using difference_type = std::ptrdiff_t; RepeatedPtrFieldBackInsertIterator(RepeatedPtrField* const mutable_field) : field_(mutable_field) {} RepeatedPtrFieldBackInsertIterator& operator=(const T& value) { *field_->Add() = value; return *this; } RepeatedPtrFieldBackInsertIterator& operator=( const T* const ptr_to_value) { *field_->Add() = *ptr_to_value; return *this; } RepeatedPtrFieldBackInsertIterator& operator=(T&& value) { *field_->Add() = std::move(value); return *this; } RepeatedPtrFieldBackInsertIterator& operator*() { return *this; } RepeatedPtrFieldBackInsertIterator& operator++() { return *this; } RepeatedPtrFieldBackInsertIterator& operator++(int /* unused */) { return *this; } private: RepeatedPtrField* field_; }; // A back inserter for RepeatedPtrFields that inserts by transferring ownership // of a pointer. 
template class AllocatedRepeatedPtrFieldBackInsertIterator { public: using iterator_category = std::output_iterator_tag; using value_type = T; using pointer = void; using reference = void; using difference_type = std::ptrdiff_t; explicit AllocatedRepeatedPtrFieldBackInsertIterator( RepeatedPtrField* const mutable_field) : field_(mutable_field) {} AllocatedRepeatedPtrFieldBackInsertIterator& operator=( T* const ptr_to_value) { field_->AddAllocated(ptr_to_value); return *this; } AllocatedRepeatedPtrFieldBackInsertIterator& operator*() { return *this; } AllocatedRepeatedPtrFieldBackInsertIterator& operator++() { return *this; } AllocatedRepeatedPtrFieldBackInsertIterator& operator++(int /* unused */) { return *this; } private: RepeatedPtrField* field_; }; // Almost identical to AllocatedRepeatedPtrFieldBackInsertIterator. This one // uses the UnsafeArenaAddAllocated instead. template class UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator { public: using iterator_category = std::output_iterator_tag; using value_type = T; using pointer = void; using reference = void; using difference_type = std::ptrdiff_t; explicit UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator( RepeatedPtrField* const mutable_field) : field_(mutable_field) {} UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator& operator=( T const* const ptr_to_value) { field_->UnsafeArenaAddAllocated(const_cast(ptr_to_value)); return *this; } UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator& operator*() { return *this; } UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator& operator++() { return *this; } UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator& operator++( int /* unused */) { return *this; } private: RepeatedPtrField* field_; }; } // namespace internal // Provides a back insert iterator for RepeatedPtrField instances, // similar to std::back_inserter(). 
template internal::RepeatedPtrFieldBackInsertIterator RepeatedPtrFieldBackInserter( RepeatedPtrField* const mutable_field) { return internal::RepeatedPtrFieldBackInsertIterator(mutable_field); } // Special back insert iterator for RepeatedPtrField instances, just in // case someone wants to write generic template code that can access both // RepeatedFields and RepeatedPtrFields using a common name. template internal::RepeatedPtrFieldBackInsertIterator RepeatedFieldBackInserter( RepeatedPtrField* const mutable_field) { return internal::RepeatedPtrFieldBackInsertIterator(mutable_field); } // Provides a back insert iterator for RepeatedPtrField instances // similar to std::back_inserter() which transfers the ownership while // copying elements. template internal::AllocatedRepeatedPtrFieldBackInsertIterator AllocatedRepeatedPtrFieldBackInserter( RepeatedPtrField* const mutable_field) { return internal::AllocatedRepeatedPtrFieldBackInsertIterator( mutable_field); } // Similar to AllocatedRepeatedPtrFieldBackInserter, using // UnsafeArenaAddAllocated instead of AddAllocated. // This is slightly faster if that matters. It is also useful in legacy code // that uses temporary ownership to avoid copies. Example: // RepeatedPtrField temp_field; // temp_field.UnsafeArenaAddAllocated(new T); // ... // Do something with temp_field // temp_field.UnsafeArenaExtractSubrange(0, temp_field.size(), nullptr); // Putting temp_field on the arena fails because the ownership transfers to the // arena at the "AddAllocated" call and is not released anymore causing a // double delete. This function uses UnsafeArenaAddAllocated to prevent this. 
template internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator UnsafeArenaAllocatedRepeatedPtrFieldBackInserter( RepeatedPtrField* const mutable_field) { return internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator( mutable_field); } namespace internal { // Size optimization for `memswap` - supplied below N is used by every // `RepeatedPtrField`. extern template PROTOBUF_EXPORT_TEMPLATE_DECLARE void memswap::value>( char* PROTOBUF_RESTRICT, char* PROTOBUF_RESTRICT); } // namespace internal } // namespace protobuf } // namespace google #include "google/protobuf/port_undef.inc" #endif // GOOGLE_PROTOBUF_REPEATED_PTR_FIELD_H__