// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Author: kenton@google.com (Kenton Varda)
//  Based on original Protocol Buffers design by
//  Sanjay Ghemawat, Jeff Dean, and others.
//
// RepeatedField and RepeatedPtrField are used by generated protocol message
// classes to manipulate repeated fields.  These classes are very similar to
// STL's vector, but include a number of optimizations found to be useful
// specifically in the case of Protocol Buffers.  RepeatedPtrField is
// particularly different from STL vector as it manages ownership of the
// pointers that it contains.
//
// This header covers RepeatedField.

#ifndef GOOGLE_PROTOBUF_REPEATED_FIELD_H__
#define GOOGLE_PROTOBUF_REPEATED_FIELD_H__

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <iterator>
#include <limits>
#include <memory>
#include <type_traits>
#include <utility>

#include "google/protobuf/arena.h"
#include "google/protobuf/port.h"
#include "absl/base/attributes.h"
#include "absl/base/dynamic_annotations.h"
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
#include "absl/meta/type_traits.h"
#include "absl/strings/cord.h"
#include "google/protobuf/generated_enum_util.h"
#include "google/protobuf/internal_visibility.h"
#include "google/protobuf/message_lite.h"
#include "google/protobuf/port.h"
#include "google/protobuf/repeated_ptr_field.h"

// Must be included last.
#include "google/protobuf/port_def.inc"

#ifdef SWIG
#error "You cannot SWIG proto headers"
#endif

namespace google {
namespace protobuf {

class Message;

namespace internal {

template <typename T, size_t kRepHeaderSize>
constexpr int RepeatedFieldLowerClampLimit() {
  // The header is padded to be at least `sizeof(T)` when it would be smaller
  // otherwise.
  static_assert(sizeof(T) <= kRepHeaderSize, "");
  // We want to pad the minimum size to be a power of two bytes, including the
  // header.
  // The first allocation is kRepHeaderSize bytes worth of elements for a total
  // of 2*kRepHeaderSize bytes.
  // For an 8-byte header, we allocate 8 bool, 2 ints, or 1 int64.
  return kRepHeaderSize / sizeof(T);
}
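
// As a worked example of the clamp above: with the 8-byte Rep header used
// later in this file, RepeatedFieldLowerClampLimit<int32_t, 8>() is
// 8 / 4 == 2, so the smallest non-empty capacity for int32 is two elements;
// header plus elements then occupy 8 + 2 * 4 == 16 bytes, a power of two.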

// kRepeatedFieldUpperClampLimit is the lowest signed integer value that
// overflows when multiplied by 2 (which is undefined behavior). Sizes above
// this will clamp to the maximum int value instead of following exponential
// growth when growing a repeated field.
constexpr int kRepeatedFieldUpperClampLimit =
    (std::numeric_limits<int>::max() / 2) + 1;

// Swaps two blocks of memory of size kSize:
template <size_t kSize>
void memswap(char* a, char* b) {
#if __SIZEOF_INT128__
  using Buffer = __uint128_t;
#else
  using Buffer = uint64_t;
#endif

  constexpr size_t kBlockSize = sizeof(Buffer);
  Buffer buf;
  for (size_t i = 0; i < kSize / kBlockSize; ++i) {
    memcpy(&buf, a, kBlockSize);
    memcpy(a, b, kBlockSize);
    memcpy(b, &buf, kBlockSize);
    a += kBlockSize;
    b += kBlockSize;
  }

#if defined(__GNUC__) && !defined(__clang__)
  // Workaround GCC bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=99578
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpragmas"
#pragma GCC diagnostic ignored "-Wstringop-overflow"
#endif  // __GNUC__

  // Swap the leftover bytes, could be zero.
  memcpy(&buf, a, kSize % kBlockSize);
  memcpy(a, b, kSize % kBlockSize);
  memcpy(b, &buf, kSize % kBlockSize);

#if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic pop
#endif  // GCC
}

template <typename Element>
class RepeatedIterator;

// We can't skip the destructor for, e.g., arena allocated
// RepeatedField<Cord>.
template <typename Element,
          bool Trivial = Arena::is_destructor_skippable<Element>::value>
struct RepeatedFieldDestructorSkippableBase {};

template <typename Element>
struct RepeatedFieldDestructorSkippableBase<Element, true> {
  using DestructorSkippable_ = void;
};

}  // namespace internal

// RepeatedField is used to represent repeated fields of a primitive type (in
// other words, everything except strings and nested Messages).  Most users
// will not ever use a RepeatedField directly; they will use the get-by-index,
// set-by-index, and add accessors that are generated for all repeated fields.
// Actually, in addition to primitive types, we use RepeatedField for repeated
// Cords, because the Cord class is in fact just a reference-counted pointer.
// We have to specialize several methods in the Cord case to get the memory
// management right; e.g. swapping when appropriate, etc.
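//
// As an illustration (the field name here is hypothetical), the accessors
// generated for `repeated int32 samples = 1;` are thin wrappers around a
// RepeatedField<int32_t>, which may also be used directly:
//
//   RepeatedField<int32_t>* samples = msg.mutable_samples();
//   samples->Add(3);                  // Append an element.
//   samples->Set(0, 7);               // Overwrite by index.
//   int32_t first = samples->Get(0);  // Read by index.
//   for (int32_t v : *samples) { ... }   // STL-style iteration.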
template <typename Element>
class RepeatedField final
    : private internal::RepeatedFieldDestructorSkippableBase<Element> {
  static_assert(
      alignof(Arena) >= alignof(Element),
      "We only support types that have an alignment smaller than Arena");
  static_assert(!std::is_const<Element>::value,
                "We do not support const value types.");
  static_assert(!std::is_volatile<Element>::value,
                "We do not support volatile value types.");
  static_assert(!std::is_pointer<Element>::value,
                "We do not support pointer value types.");
  static_assert(!std::is_reference<Element>::value,
                "We do not support reference value types.");
  static constexpr PROTOBUF_ALWAYS_INLINE void StaticValidityCheck() {
    static_assert(
        absl::disjunction<internal::is_supported_integral_type<Element>,
                          internal::is_supported_floating_point_type<Element>,
                          std::is_same<absl::Cord, Element>,
                          is_proto_enum<Element>>::value,
        "We only support non-string scalars in RepeatedField.");
  }

 public:
  constexpr RepeatedField();
  RepeatedField(const RepeatedField& rhs) : RepeatedField(nullptr, rhs) {}

  // TODO(b/290091828): make this constructor private
  explicit RepeatedField(Arena* arena);

  template <typename Iter,
            typename = typename std::enable_if<std::is_constructible<
                Element, decltype(*std::declval<Iter>())>::value>::type>
  RepeatedField(Iter begin, Iter end);

  ~RepeatedField();

  // Arena enabled constructors: for internal use only.
  RepeatedField(internal::InternalVisibility, Arena* arena)
      : RepeatedField(arena) {}
  RepeatedField(internal::InternalVisibility, Arena* arena,
                const RepeatedField& rhs)
      : RepeatedField(arena, rhs) {}

  RepeatedField& operator=(const RepeatedField& other)
      ABSL_ATTRIBUTE_LIFETIME_BOUND;

  RepeatedField(RepeatedField&& other) noexcept;
  RepeatedField& operator=(RepeatedField&& other) noexcept
      ABSL_ATTRIBUTE_LIFETIME_BOUND;

  bool empty() const;
  int size() const;

  const Element& Get(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND;
  Element* Mutable(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND;

  const Element& operator[](int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return Get(index);
  }
  Element& operator[](int index) ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return *Mutable(index);
  }

  const Element& at(int index) const ABSL_ATTRIBUTE_LIFETIME_BOUND;
  Element& at(int index) ABSL_ATTRIBUTE_LIFETIME_BOUND;

  void Set(int index, const Element& value);
  void Add(Element value);

  // Appends a new element and returns a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  Element* Add() ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // Appends elements in the range [begin, end) after reserving
  // the appropriate number of elements.
  template <typename Iter>
  void Add(Iter begin, Iter end);

  // Removes the last element in the array.
  void RemoveLast();

  // Extracts elements with indices in "[start .. start+num-1]".
  // Copies them into "elements[0 .. num-1]" if "elements" is not nullptr.
  // Caution: also moves elements with indices [start+num ..].
  // Calling this routine inside a loop can cause quadratic behavior.
  void ExtractSubrange(int start, int num, Element* elements);
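  //
  // For example, given a field holding {10, 20, 30, 40}, the following
  // sketch extracts the two middle elements and closes the gap:
  //
  //   int32_t out[2];
  //   field.ExtractSubrange(1, 2, out);  // out: {20, 30}; field: {10, 40}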

  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear();
  void MergeFrom(const RepeatedField& other);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void CopyFrom(const RepeatedField& other);

  // Replaces the contents with RepeatedField(begin, end).
  template <typename Iter>
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Assign(Iter begin, Iter end);

  // Reserves space to expand the field to at least the given size.  If the
  // array is grown, it will always be at least doubled in size.
  void Reserve(int new_size);

  // Resizes the RepeatedField to a new, smaller size.  This is O(1), except
  // for RepeatedField<Cord>, for which it is O(size - new_size).
  void Truncate(int new_size);

  void AddAlreadyReserved(Element value);
  int Capacity() const;

  // Adds `n` elements to this instance asserting there is enough capacity.
  // The added elements are uninitialized if `Element` is trivial.
  Element* AddAlreadyReserved() ABSL_ATTRIBUTE_LIFETIME_BOUND;
  Element* AddNAlreadyReserved(int n) ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // Like STL resize.  Uses value to fill appended elements.
  // Like Truncate() if new_size <= size(), otherwise this is
  // O(new_size - size()).
  void Resize(int new_size, const Element& value);

  // Gets the underlying array.  This pointer is possibly invalidated by
  // any add or remove operation.
  Element* mutable_data() ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const Element* data() const ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // Swaps entire contents with "other". If they are separate arenas, then
  // copies data between each other.
  void Swap(RepeatedField* other);

  // Swaps two elements.
  void SwapElements(int index1, int index2);

  // STL-like iterator support
  typedef internal::RepeatedIterator<Element> iterator;
  typedef internal::RepeatedIterator<const Element> const_iterator;
  typedef Element value_type;
  typedef value_type& reference;
  typedef const value_type& const_reference;
  typedef value_type* pointer;
  typedef const value_type* const_pointer;
  typedef int size_type;
  typedef ptrdiff_t difference_type;

  iterator begin() ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const_iterator begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const_iterator cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
  iterator end() ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const_iterator end() const ABSL_ATTRIBUTE_LIFETIME_BOUND;
  const_iterator cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // Reverse iterator support
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
  typedef std::reverse_iterator<iterator> reverse_iterator;
  reverse_iterator rbegin() ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return reverse_iterator(end());
  }
  const_reverse_iterator rbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return reverse_iterator(begin());
  }
  const_reverse_iterator rend() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
    return const_reverse_iterator(begin());
  }

  // Returns the number of bytes used by the repeated field, excluding
  // sizeof(*this)
  size_t SpaceUsedExcludingSelfLong() const;

  int SpaceUsedExcludingSelf() const {
    return internal::ToIntSize(SpaceUsedExcludingSelfLong());
  }

  // Removes the element referenced by position.
  //
  // Returns an iterator to the element immediately following the removed
  // element.
  //
  // Invalidates all iterators at or after the removed element, including
  // end().
  iterator erase(const_iterator position) ABSL_ATTRIBUTE_LIFETIME_BOUND;

  // Removes the elements in the range [first, last).
  //
  // Returns an iterator to the element immediately following the removed
  // range.
  //
  // Invalidates all iterators at or after the removed range, including end().
  iterator erase(const_iterator first,
                 const_iterator last) ABSL_ATTRIBUTE_LIFETIME_BOUND;
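  //
  // For example, erase() composes with the STL erase-remove idiom; a sketch
  // for deleting every zero element from a RepeatedField<int32_t> `field`:
  //
  //   field.erase(std::remove(field.begin(), field.end(), 0), field.end());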

  // Gets the Arena on which this RepeatedField stores its elements.
  // Note: this can be inaccurate for split default fields so we make this
  // function non-const.
  inline Arena* GetArena() { return GetOwningArena(); }

  // For internal use only.
  //
  // This is public due to it being called by generated code.
  inline void InternalSwap(RepeatedField* other);

 private:
  RepeatedField(Arena* arena, const RepeatedField& rhs);

  template <typename T>
  friend class Arena::InternalHelper;

  // Gets the Arena on which this RepeatedField stores its elements.
  inline Arena* GetOwningArena() const {
    return (total_size_ == 0) ? static_cast<Arena*>(arena_or_elements_)
                              : rep()->arena;
  }

  // Swaps entire contents with "other". Should be called only if the caller
  // can guarantee that both repeated fields are on the same arena or are on
  // the heap. Swapping between different arenas is disallowed and caught by
  // an ABSL_DCHECK (see API docs for details).
  void UnsafeArenaSwap(RepeatedField* other);

  // Copy constructs `n` instances in place into the array `dst`.
  // This function is identical to `std::uninitialized_copy_n(src, n, dst)`
  // except that we explicitly declare the memory to not be aliased, which
  // results in `memcpy` code generation instead of `memmove` for trivial
  // types.
  static inline void UninitializedCopyN(const Element* PROTOBUF_RESTRICT src,
                                        int n,
                                        Element* PROTOBUF_RESTRICT dst) {
    std::uninitialized_copy_n(src, n, dst);
  }

  // Copy constructs `[begin, end)` instances in place into the array `dst`.
  // See above `UninitializedCopyN()` function comments for more information.
  template <typename Iter>
  static inline void UninitializedCopy(Iter begin, Iter end,
                                       Element* PROTOBUF_RESTRICT dst) {
    std::uninitialized_copy(begin, end, dst);
  }

  template <typename Iter>
  void AddForwardIterator(Iter begin, Iter end);

  template <typename Iter>
  void AddInputIterator(Iter begin, Iter end);

  // Reserves space to expand the field to at least the given size.
  // If the array is grown, it will always be at least doubled in size.
  // If `annotate_size` is true (the default), then this function will annotate
  // the old container from `current_size` to `total_size_` (unpoison memory)
  // directly before it is being released, and annotate the new container from
  // `total_size_` to `current_size` (poison unused memory).
  void Grow(int current_size, int new_size);
  void GrowNoAnnotate(int current_size, int new_size);

  static constexpr int kInitialSize = 0;

  // A note on the representation here (see also comment below for
  // RepeatedPtrFieldBase's struct Rep):
  //
  // We maintain the same sizeof(RepeatedField) as before we added arena
  // support so that we do not degrade performance by bloating memory usage.
  // Directly adding an arena_ element to RepeatedField is quite costly. By
  // using indirection in this way, we keep the same size when the
  // RepeatedField is empty (common case), and add only an 8-byte header to
  // the elements array when non-empty. We make sure to place the size fields
  // directly in the RepeatedField class to avoid costly cache misses due to
  // the indirection.
  int current_size_;
  int total_size_;

  // Annotates a change in size of this instance. This function should be
  // called with (total_size, current_size) after new memory has been
  // allocated and filled from previous memory, and called with
  // (current_size, total_size) right before (previously annotated) memory
  // is released.
  void AnnotateSize(int old_size, int new_size) const {
    if (old_size != new_size) {
      ABSL_ANNOTATE_CONTIGUOUS_CONTAINER(
          unsafe_elements(), unsafe_elements() + total_size_,
          unsafe_elements() + old_size, unsafe_elements() + new_size);
      if (new_size < old_size) {
        ABSL_ANNOTATE_MEMORY_IS_UNINITIALIZED(
            unsafe_elements() + new_size,
            (old_size - new_size) * sizeof(Element));
      }
    }
  }

  // Replaces current_size_ with new_size and returns the previous value of
  // current_size_. This function is intended to be the only place where
  // current_size_ is modified, with the exception of `AddInputIterator()`
  // where the size of added items is not known in advance.
  inline int ExchangeCurrentSize(int new_size) {
    const int prev_size = current_size_;
    AnnotateSize(prev_size, new_size);
    current_size_ = new_size;
    return prev_size;
  }

  // Pad the Rep to max(sizeof(Arena*), sizeof(Element)) with a minimum
  // alignment of 8, as sanitizers are picky about containers starting at
  // 8-byte offsets even when compiling for 32-bit platforms.
  struct Rep {
    union {
      alignas(8) Arena* arena;
      Element unused;
    };
    Element* elements() { return reinterpret_cast<Element*>(this + 1); }

    // Avoid 'implicitly deleted dtor' warnings on certain compilers.
    ~Rep() = delete;
  };

  static PROTOBUF_CONSTEXPR const size_t kRepHeaderSize = sizeof(Rep);
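
  // The resulting layout of a non-empty field is therefore, as a sketch:
  //
  //   [ Rep: 8-byte header holding the Arena* ][ elem 0 ][ elem 1 ] ...
  //                                            ^-- arena_or_elements_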

  // If total_size_ == 0 this points to an Arena otherwise it points to the
  // elements member of a Rep struct. Using this invariant allows the storage
  // of the arena pointer without an extra allocation in the constructor.
  void* arena_or_elements_;

  // Returns a pointer to elements array.
  // pre-condition: the array must have been allocated.
  Element* elements() const {
    ABSL_DCHECK_GT(total_size_, 0);
    // Because of above pre-condition this cast is safe.
    return unsafe_elements();
  }

  // Returns a pointer to elements array if it exists; otherwise either null
  // or an invalid pointer is returned. This only happens for empty repeated
  // fields, where you can't dereference this pointer anyway (it's empty).
  Element* unsafe_elements() const {
    return static_cast<Element*>(arena_or_elements_);
  }

  // Returns a pointer to the Rep struct.
  // pre-condition: the Rep must have been allocated, ie elements() is safe.
  Rep* rep() const {
    return reinterpret_cast<Rep*>(reinterpret_cast<char*>(elements()) -
                                  kRepHeaderSize);
  }

  friend class Arena;
  typedef void InternalArenaConstructable_;

  // Destroys all elements in [begin, end).
  // This function does nothing if `Element` is trivial.
  static void Destroy(const Element* begin, const Element* end) {
    if (!std::is_trivial<Element>::value) {
      std::for_each(begin, end, [&](const Element& e) { e.~Element(); });
    }
  }

  // Internal helper to delete all elements and deallocate the storage.
  template <bool in_destructor = false>
  void InternalDeallocate() {
    const size_t bytes = total_size_ * sizeof(Element) + kRepHeaderSize;
    if (rep()->arena == nullptr) {
      internal::SizedDelete(rep(), bytes);
    } else if (!in_destructor) {
      // If we are in the destructor, we might be being destroyed as part of
      // the arena teardown. We can't try and return blocks to the arena then.
      rep()->arena->ReturnArrayMemory(rep(), bytes);
    }
  }
};

// implementation ====================================================

template <typename Element>
constexpr RepeatedField<Element>::RepeatedField()
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  StaticValidityCheck();
}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(Arena* arena)
    : current_size_(0), total_size_(0), arena_or_elements_(arena) {
  StaticValidityCheck();
}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(Arena* arena,
                                             const RepeatedField& rhs)
    : current_size_(0), total_size_(0), arena_or_elements_(arena) {
  StaticValidityCheck();
  if (auto size = rhs.current_size_) {
    Grow(0, size);
    ExchangeCurrentSize(size);
    UninitializedCopyN(rhs.elements(), size, unsafe_elements());
  }
}

template <typename Element>
template <typename Iter, typename>
RepeatedField<Element>::RepeatedField(Iter begin, Iter end)
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  StaticValidityCheck();
  Add(begin, end);
}

template <typename Element>
RepeatedField<Element>::~RepeatedField() {
  StaticValidityCheck();
#ifndef NDEBUG
  // Try to trigger segfault / asan failure in non-opt builds if arena_
  // lifetime has ended before the destructor.
  auto arena = GetOwningArena();
  if (arena) (void)arena->SpaceAllocated();
#endif
  if (total_size_ > 0) {
    Destroy(unsafe_elements(), unsafe_elements() + current_size_);
    InternalDeallocate<true>();
  }
}

template <typename Element>
inline RepeatedField<Element>& RepeatedField<Element>::operator=(
    const RepeatedField& other) ABSL_ATTRIBUTE_LIFETIME_BOUND {
  if (this != &other) CopyFrom(other);
  return *this;
}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(RepeatedField&& other) noexcept
    : RepeatedField() {
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
  CopyFrom(other);
#else   // PROTOBUF_FORCE_COPY_IN_MOVE
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // other is on an arena. This field can't be on an arena because arena
  // construction always uses the Arena* accepting constructor.
  if (other.GetOwningArena()) {
    CopyFrom(other);
  } else {
    InternalSwap(&other);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
}

template <typename Element>
inline RepeatedField<Element>& RepeatedField<Element>::operator=(
    RepeatedField&& other) noexcept ABSL_ATTRIBUTE_LIFETIME_BOUND {
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // the two fields are on different arenas.
  if (this != &other) {
    if (GetOwningArena() != other.GetOwningArena()
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        || GetOwningArena() == nullptr
#endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      CopyFrom(other);
    } else {
      InternalSwap(&other);
    }
  }
  return *this;
}
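
// A sketch of the consequence of the arena checks above (assuming a field
// created on an arena via the generic Arena::Create helper, and that
// PROTOBUF_FORCE_COPY_IN_MOVE is not set): moving between different owning
// "arenas", including arena vs. heap, degrades to a copy.
//
//   Arena arena;
//   auto* src = Arena::Create<RepeatedField<int32_t>>(&arena);
//   src->Add(1);
//   RepeatedField<int32_t> dst;
//   dst = std::move(*src);  // Different owning arenas: contents are copied.
//   RepeatedField<int32_t> dst2 = std::move(dst);  // Heap to heap: stolen.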

template <typename Element>
inline bool RepeatedField<Element>::empty() const {
  return current_size_ == 0;
}

template <typename Element>
inline int RepeatedField<Element>::size() const {
  return current_size_;
}

template <typename Element>
inline int RepeatedField<Element>::Capacity() const {
  return total_size_;
}

template <typename Element>
inline void RepeatedField<Element>::AddAlreadyReserved(Element value) {
  ABSL_DCHECK_LT(current_size_, total_size_);
  void* p = elements() + ExchangeCurrentSize(current_size_ + 1);
  ::new (p) Element(std::move(value));
}

template <typename Element>
inline Element* RepeatedField<Element>::AddAlreadyReserved()
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  ABSL_DCHECK_LT(current_size_, total_size_);
  // new (p) Element compiles into nothing: this is intentional as this
  // function is documented to return uninitialized data for trivial types.
  void* p = elements() + ExchangeCurrentSize(current_size_ + 1);
  return ::new (p) Element;
}

template <typename Element>
inline Element* RepeatedField<Element>::AddNAlreadyReserved(int n)
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  ABSL_DCHECK_GE(total_size_ - current_size_, n)
      << total_size_ << ", " << current_size_;
  Element* p = unsafe_elements() + ExchangeCurrentSize(current_size_ + n);
  for (Element *begin = p, *end = p + n; begin != end; ++begin) {
    new (static_cast<void*>(begin)) Element;
  }
  return p;
}

template <typename Element>
inline void RepeatedField<Element>::Resize(int new_size,
                                           const Element& value) {
  ABSL_DCHECK_GE(new_size, 0);
  if (new_size > current_size_) {
    if (new_size > total_size_) Grow(current_size_, new_size);
    Element* first = elements() + ExchangeCurrentSize(new_size);
    std::uninitialized_fill(first, elements() + current_size_, value);
  } else if (new_size < current_size_) {
    Destroy(unsafe_elements() + new_size, unsafe_elements() + current_size_);
    ExchangeCurrentSize(new_size);
  }
}

template <typename Element>
inline const Element& RepeatedField<Element>::Get(int index) const
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  ABSL_DCHECK_GE(index, 0);
  ABSL_DCHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline const Element& RepeatedField<Element>::at(int index) const
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  ABSL_CHECK_GE(index, 0);
  ABSL_CHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline Element& RepeatedField<Element>::at(int index)
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  ABSL_CHECK_GE(index, 0);
  ABSL_CHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline Element* RepeatedField<Element>::Mutable(int index)
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  ABSL_DCHECK_GE(index, 0);
  ABSL_DCHECK_LT(index, current_size_);
  return &elements()[index];
}

template <typename Element>
inline void RepeatedField<Element>::Set(int index, const Element& value) {
  ABSL_DCHECK_GE(index, 0);
  ABSL_DCHECK_LT(index, current_size_);
  elements()[index] = value;
}

template <typename Element>
inline void RepeatedField<Element>::Add(Element value) {
  int total_size = total_size_;
  Element* elem = unsafe_elements();
  if (ABSL_PREDICT_FALSE(current_size_ == total_size)) {
    Grow(current_size_, current_size_ + 1);
    total_size = total_size_;
    elem = unsafe_elements();
  }
  int new_size = current_size_ + 1;
  void* p = elem + ExchangeCurrentSize(new_size);
  ::new (p) Element(std::move(value));

  // The below helps the compiler optimize dense loops.
  ABSL_ASSUME(new_size == current_size_);
  ABSL_ASSUME(elem == arena_or_elements_);
  ABSL_ASSUME(total_size == total_size_);
}

template <typename Element>
inline Element* RepeatedField<Element>::Add() ABSL_ATTRIBUTE_LIFETIME_BOUND {
  if (ABSL_PREDICT_FALSE(current_size_ == total_size_)) {
    Grow(current_size_, current_size_ + 1);
  }
  void* p = unsafe_elements() + ExchangeCurrentSize(current_size_ + 1);
  return ::new (p) Element;
}

template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::AddForwardIterator(Iter begin, Iter end) {
  int total_size = total_size_;
  Element* elem = unsafe_elements();
  int new_size = current_size_ + static_cast<int>(std::distance(begin, end));
  if (ABSL_PREDICT_FALSE(new_size > total_size)) {
    Grow(current_size_, new_size);
    elem = unsafe_elements();
    total_size = total_size_;
  }
  UninitializedCopy(begin, end, elem + ExchangeCurrentSize(new_size));

  // The below helps the compiler optimize dense loops.
  ABSL_ASSUME(new_size == current_size_);
  ABSL_ASSUME(elem == arena_or_elements_);
  ABSL_ASSUME(total_size == total_size_);
}

template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::AddInputIterator(Iter begin, Iter end) {
  Element* first = unsafe_elements() + current_size_;
  Element* last = unsafe_elements() + total_size_;
  AnnotateSize(current_size_, total_size_);

  while (begin != end) {
    if (ABSL_PREDICT_FALSE(first == last)) {
      int current_size = first - unsafe_elements();
      GrowNoAnnotate(current_size, current_size + 1);
      first = unsafe_elements() + current_size;
      last = unsafe_elements() + total_size_;
    }
    ::new (static_cast<void*>(first)) Element(*begin);
    ++begin;
    ++first;
  }

  current_size_ = first - unsafe_elements();
  AnnotateSize(total_size_, current_size_);
}

template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::Add(Iter begin, Iter end) {
  if (std::is_base_of<
          std::forward_iterator_tag,
          typename std::iterator_traits<Iter>::iterator_category>::value) {
    AddForwardIterator(begin, end);
  } else {
    AddInputIterator(begin, end);
  }
}
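
// For example (a sketch): a std::vector provides forward iterators, so the
// whole range is added with at most one Grow(); a single-pass source such as
// std::istream_iterator must take the incremental AddInputIterator() path:
//
//   std::vector<int32_t> v = {1, 2, 3};
//   field.Add(v.begin(), v.end());  // AddForwardIterator(), one reserve.
//
//   std::istringstream in("4 5 6");
//   field.Add(std::istream_iterator<int32_t>(in),
//             std::istream_iterator<int32_t>());  // AddInputIterator().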

template <typename Element>
inline void RepeatedField<Element>::RemoveLast() {
  ABSL_DCHECK_GT(current_size_, 0);
  elements()[current_size_ - 1].~Element();
  ExchangeCurrentSize(current_size_ - 1);
}

template <typename Element>
void RepeatedField<Element>::ExtractSubrange(int start, int num,
                                             Element* elements) {
  ABSL_DCHECK_GE(start, 0);
  ABSL_DCHECK_GE(num, 0);
  ABSL_DCHECK_LE(start + num, this->current_size_);

  // Save the values of the removed elements if requested.
  if (elements != nullptr) {
    for (int i = 0; i < num; ++i) elements[i] = this->Get(i + start);
  }

  // Slide remaining elements down to fill the gap.
  if (num > 0) {
    for (int i = start + num; i < this->current_size_; ++i)
      this->Set(i - num, this->Get(i));
    this->Truncate(this->current_size_ - num);
  }
}

template <typename Element>
inline void RepeatedField<Element>::Clear() {
  Destroy(unsafe_elements(), unsafe_elements() + current_size_);
  ExchangeCurrentSize(0);
}

template <typename Element>
inline void RepeatedField<Element>::MergeFrom(const RepeatedField& rhs) {
  ABSL_DCHECK_NE(&rhs, this);
  if (auto size = rhs.current_size_) {
    Reserve(current_size_ + size);
    Element* dst = elements() + ExchangeCurrentSize(current_size_ + size);
    UninitializedCopyN(rhs.elements(), size, dst);
  }
}

template <typename Element>
inline void RepeatedField<Element>::CopyFrom(const RepeatedField& other) {
  if (&other == this) return;
  Clear();
  MergeFrom(other);
}

template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::Assign(Iter begin, Iter end) {
  Clear();
  Add(begin, end);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
    const_iterator position) ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return erase(position, position + 1);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
    const_iterator first, const_iterator last) ABSL_ATTRIBUTE_LIFETIME_BOUND {
  size_type first_offset = first - cbegin();
  if (first != last) {
    Truncate(std::copy(last, cend(), begin() + first_offset) - cbegin());
  }
  return begin() + first_offset;
}

template <typename Element>
inline Element* RepeatedField<Element>::mutable_data()
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return unsafe_elements();
}

template <typename Element>
inline const Element* RepeatedField<Element>::data() const
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return unsafe_elements();
}

template <typename Element>
inline void RepeatedField<Element>::InternalSwap(RepeatedField* other) {
  ABSL_DCHECK(this != other);

  // Swap all fields at once.
  static_assert(std::is_standard_layout<RepeatedField<Element>>::value,
                "offsetof() requires standard layout before c++17");
  internal::memswap<offsetof(RepeatedField, arena_or_elements_) +
                    sizeof(this->arena_or_elements_) -
                    offsetof(RepeatedField, current_size_)>(
      reinterpret_cast<char*>(this) + offsetof(RepeatedField, current_size_),
      reinterpret_cast<char*>(other) + offsetof(RepeatedField, current_size_));
}

template <typename Element>
void RepeatedField<Element>::Swap(RepeatedField* other) {
  if (this == other) return;
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetOwningArena() != nullptr &&
      GetOwningArena() == other->GetOwningArena()) {
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetOwningArena() == other->GetOwningArena()) {
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
    InternalSwap(other);
  } else {
    RepeatedField temp(other->GetOwningArena());
    temp.MergeFrom(*this);
    CopyFrom(*other);
    other->UnsafeArenaSwap(&temp);
  }
}

template <typename Element>
void RepeatedField<Element>::UnsafeArenaSwap(RepeatedField* other) {
  if (this == other) return;
  ABSL_DCHECK_EQ(GetOwningArena(), other->GetOwningArena());
  InternalSwap(other);
}

template <typename Element>
void RepeatedField<Element>::SwapElements(int index1, int index2) {
  using std::swap;  // enable ADL with fallback
  swap(elements()[index1], elements()[index2]);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator
RepeatedField<Element>::begin() ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return iterator(unsafe_elements());
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::begin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return const_iterator(unsafe_elements());
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cbegin() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return const_iterator(unsafe_elements());
}
template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::end()
    ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return iterator(unsafe_elements() + current_size_);
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::end() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return const_iterator(unsafe_elements() + current_size_);
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
  return const_iterator(unsafe_elements() + current_size_);
}

template <typename Element>
inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const {
  return total_size_ > 0 ? (total_size_ * sizeof(Element) + kRepHeaderSize)
                         : 0;
}

namespace internal {

// Returns the new size for a reserved field based on its 'total_size' and the
// requested 'new_size'. The result is clamped to the closed interval:
//   [internal::kMinRepeatedFieldAllocationSize,
//    std::numeric_limits<int>::max()]
// Requires:
//     new_size > total_size &&
//     (total_size == 0 ||
//      total_size >= kRepeatedFieldLowerClampLimit)
template <typename T, size_t kRepHeaderSize>
inline int CalculateReserveSize(int total_size, int new_size) {
  constexpr int lower_limit =
      RepeatedFieldLowerClampLimit<T, kRepHeaderSize>();
  if (new_size < lower_limit) {
    // Clamp to smallest allowed size.
    return lower_limit;
  }
  constexpr int kMaxSizeBeforeClamp =
      (std::numeric_limits<int>::max() - kRepHeaderSize) / 2;
  if (PROTOBUF_PREDICT_FALSE(total_size > kMaxSizeBeforeClamp)) {
    return std::numeric_limits<int>::max();
  }
  // We want to double the number of bytes, not the number of elements, to try
  // to stay within power-of-two allocations.
  // The allocation has kRepHeaderSize + sizeof(T) * capacity.
  int doubled_size = 2 * total_size + kRepHeaderSize / sizeof(T);
  return std::max(doubled_size, new_size);
}

}  // namespace internal
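
// A worked example of the doubling math above, ignoring the extra capacity
// that AllocateAtLeast() may hand back: for T = int32_t with an 8-byte
// header, kRepHeaderSize / sizeof(T) == 2, so successive Grow() calls take
// the capacity 2 -> 6 -> 14 -> 30 -> ..., i.e. allocations of 16, 32, 64,
// 128, ... bytes, each double the previous and a power of two as intended.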

template <typename Element>
void RepeatedField<Element>::Reserve(int new_size) {
  if (ABSL_PREDICT_FALSE(new_size > total_size_)) {
    Grow(current_size_, new_size);
  }
}

// Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
// amount of code bloat.
template <typename Element>
PROTOBUF_NOINLINE void RepeatedField<Element>::GrowNoAnnotate(int current_size,
                                                              int new_size) {
  ABSL_DCHECK_GT(new_size, total_size_);
  Rep* new_rep;
  Arena* arena = GetOwningArena();

  new_size = internal::CalculateReserveSize<Element, kRepHeaderSize>(
      total_size_, new_size);

  ABSL_DCHECK_LE(
      static_cast<size_t>(new_size),
      (std::numeric_limits<size_t>::max() - kRepHeaderSize) / sizeof(Element))
      << "Requested size is too large to fit into size_t.";
  size_t bytes =
      kRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
  if (arena == nullptr) {
    ABSL_DCHECK_LE((bytes - kRepHeaderSize) / sizeof(Element),
                   static_cast<size_t>(std::numeric_limits<int>::max()))
        << "Requested size is too large to fit element count into int.";
    internal::SizedPtr res = internal::AllocateAtLeast(bytes);
    size_t num_available =
        std::min((res.n - kRepHeaderSize) / sizeof(Element),
                 static_cast<size_t>(std::numeric_limits<int>::max()));
    new_size = static_cast<int>(num_available);
    new_rep = static_cast<Rep*>(res.p);
  } else {
    new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
  }
  new_rep->arena = arena;

  if (total_size_ > 0) {
    if (current_size > 0) {
      Element* pnew = new_rep->elements();
      Element* pold = elements();
      // TODO(b/263791665): add absl::is_trivially_relocatable<Element>
      if (std::is_trivial<Element>::value) {
        memcpy(static_cast<void*>(pnew), pold,
               current_size * sizeof(Element));
      } else {
        for (Element* end = pnew + current_size; pnew != end;
             ++pnew, ++pold) {
          ::new (static_cast<void*>(pnew)) Element(std::move(*pold));
          pold->~Element();
        }
      }
    }
    InternalDeallocate();
  }

  total_size_ = new_size;
  arena_or_elements_ = new_rep->elements();
}

// Ideally we would be able to use:
//   template <bool annotate_size = true>
//   void Grow();
// However, as explained in b/266411038#comment9, this causes issues
// in shared libraries for Youtube (and possibly elsewhere).
template <typename Element>
PROTOBUF_NOINLINE void RepeatedField<Element>::Grow(int current_size,
                                                    int new_size) {
  AnnotateSize(current_size, total_size_);
  GrowNoAnnotate(current_size, new_size);
  AnnotateSize(total_size_, current_size);
}

template <typename Element>
inline void RepeatedField<Element>::Truncate(int new_size) {
  ABSL_DCHECK_LE(new_size, current_size_);
  if (new_size < current_size_) {
    Destroy(unsafe_elements() + new_size, unsafe_elements() + current_size_);
    ExchangeCurrentSize(new_size);
  }
}

template <>
PROTOBUF_EXPORT size_t
RepeatedField<absl::Cord>::SpaceUsedExcludingSelfLong() const;

// -------------------------------------------------------------------

// Iterators and helper functions that follow the spirit of the STL
// std::back_insert_iterator and std::back_inserter but are tailor-made
// for RepeatedField and RepeatedPtrField. Typical usage would be:
//
//   std::copy(some_sequence.begin(), some_sequence.end(),
//             RepeatedFieldBackInserter(proto.mutable_sequence()));
//
// Ported by johannes from util/gtl/proto-array-iterators.h

namespace internal {

// STL-like iterator implementation for RepeatedField.  You should not
// refer to this class directly; use RepeatedField<T>::iterator instead.
//
// Note: All of the iterator operators *must* be inlined to avoid performance
// regressions.  This is caused by the extern template declarations below
// (which are required because of the RepeatedField extern template
// declarations).  If any of these functions aren't explicitly inlined (e.g.
// defined in the class), the compiler isn't allowed to inline them.
template <typename Element>
class RepeatedIterator {
 public:
  using iterator_category = std::random_access_iterator_tag;
  // Note: remove_const is necessary for std::partial_sum, which uses
  // value_type to determine the summation variable type.
  using value_type = typename std::remove_const<Element>::type;
  using difference_type = std::ptrdiff_t;
  using pointer = Element*;
  using reference = Element&;

  constexpr RepeatedIterator() noexcept : it_(nullptr) {}

  // Allows "upcasting" from RepeatedIterator<T> to
  // RepeatedIterator<const T>.
  template <typename OtherElement,
            typename std::enable_if<std::is_convertible<
                OtherElement*, pointer>::value>::type* = nullptr>
  constexpr RepeatedIterator(
      const RepeatedIterator<OtherElement>& other) noexcept
      : it_(other.it_) {}

  // dereferenceable
  constexpr reference operator*() const noexcept { return *it_; }
  constexpr pointer operator->() const noexcept { return it_; }

 private:
  // Helper alias to hide the internal type.
  using iterator = RepeatedIterator<Element>;

 public:
  // {inc,dec}rementable
  iterator& operator++() noexcept {
    ++it_;
    return *this;
  }
  iterator operator++(int) noexcept { return iterator(it_++); }
  iterator& operator--() noexcept {
    --it_;
    return *this;
  }
  iterator operator--(int) noexcept { return iterator(it_--); }

  // equality_comparable
  friend constexpr bool operator==(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ == y.it_;
  }
  friend constexpr bool operator!=(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ != y.it_;
  }

  // less_than_comparable
  friend constexpr bool operator<(const iterator& x,
                                  const iterator& y) noexcept {
    return x.it_ < y.it_;
  }
  friend constexpr bool operator<=(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ <= y.it_;
  }
  friend constexpr bool operator>(const iterator& x,
                                  const iterator& y) noexcept {
    return x.it_ > y.it_;
  }
  friend constexpr bool operator>=(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ >= y.it_;
  }

  // addable, subtractable
  iterator& operator+=(difference_type d) noexcept {
    it_ += d;
    return *this;
  }
  constexpr iterator operator+(difference_type d) const noexcept {
    return iterator(it_ + d);
  }
  friend constexpr iterator operator+(const difference_type d,
                                      iterator it) noexcept {
    return it + d;
  }

  iterator& operator-=(difference_type d) noexcept {
    it_ -= d;
    return *this;
  }
  iterator constexpr operator-(difference_type d) const noexcept {
    return iterator(it_ - d);
  }

  // indexable
  constexpr reference operator[](difference_type d) const noexcept {
    return it_[d];
  }

  // random access iterator
  friend constexpr difference_type operator-(iterator it1,
                                             iterator it2) noexcept {
    return it1.it_ - it2.it_;
  }

 private:
  template <typename OtherElement>
  friend class RepeatedIterator;

  // Allow construction from RepeatedField.
  friend class RepeatedField<value_type>;
  explicit RepeatedIterator(Element* it) noexcept : it_(it) {}

  // The internal iterator.
  Element* it_;
};

// A back inserter for RepeatedField objects.
template <typename T>
class RepeatedFieldBackInsertIterator {
 public:
  using iterator_category = std::output_iterator_tag;
  using value_type = T;
  using pointer = void;
  using reference = void;
  using difference_type = std::ptrdiff_t;

  explicit RepeatedFieldBackInsertIterator(
      RepeatedField<T>* const mutable_field)
      : field_(mutable_field) {}
  RepeatedFieldBackInsertIterator& operator=(const T& value) {
    field_->Add(value);
    return *this;
  }
  RepeatedFieldBackInsertIterator& operator*() { return *this; }
  RepeatedFieldBackInsertIterator& operator++() { return *this; }
  RepeatedFieldBackInsertIterator& operator++(int /* unused */) {
    return *this;
  }

 private:
  RepeatedField<T>* field_;
};

}  // namespace internal

// Provides a back insert iterator for RepeatedField instances,
// similar to std::back_inserter().
template <typename T>
internal::RepeatedFieldBackInsertIterator<T> RepeatedFieldBackInserter(
    RepeatedField<T>* const mutable_field) {
  return internal::RepeatedFieldBackInsertIterator<T>(mutable_field);
}

}  // namespace protobuf
}  // namespace google

#include "google/protobuf/port_undef.inc"

#endif  // GOOGLE_PROTOBUF_REPEATED_FIELD_H__