From 57828b0c60f438ee852404878726486f56a20ea0 Mon Sep 17 00:00:00 2001
From: Protobuf Team Bot
Date: Fri, 12 Jul 2024 14:58:13 -0700
Subject: [PATCH] More refactoring in preparation for RepeatedField SOO.

We previously renamed current_size_ to size_, total_size_ to capacity_,
and Rep to HeapRep.

PiperOrigin-RevId: 651895012
---
 src/google/protobuf/repeated_field.h | 94 ++++++++++++++--------------
 1 file changed, 48 insertions(+), 46 deletions(-)

diff --git a/src/google/protobuf/repeated_field.h b/src/google/protobuf/repeated_field.h
index 31271de952..f00ee8abd6 100644
--- a/src/google/protobuf/repeated_field.h
+++ b/src/google/protobuf/repeated_field.h
@@ -59,17 +59,17 @@ class Message;
 
 namespace internal {
 
-template <typename T, size_t kRepHeaderSize>
+template <typename T, size_t kHeapRepHeaderSize>
 constexpr int RepeatedFieldLowerClampLimit() {
   // The header is padded to be at least `sizeof(T)` when it would be smaller
   // otherwise.
-  static_assert(sizeof(T) <= kRepHeaderSize, "");
+  static_assert(sizeof(T) <= kHeapRepHeaderSize, "");
   // We want to pad the minimum size to be a power of two bytes, including the
   // header.
-  // The first allocation is kRepHeaderSize bytes worth of elements for a total
-  // of 2*kRepHeaderSize bytes.
-  // For an 8-byte header, we allocate 8 bool, 2 ints, or 1 int64.
-  return kRepHeaderSize / sizeof(T);
+  // The first allocation is kHeapRepHeaderSize bytes worth of elements for a
+  // total of 2*kHeapRepHeaderSize bytes. For an 8-byte header, we allocate 8
+  // bool, 2 ints, or 1 int64.
+  return kHeapRepHeaderSize / sizeof(T);
 }
 
 // kRepeatedFieldUpperClampLimit is the lowest signed integer value that
@@ -333,7 +333,7 @@ class RepeatedField final
   friend class Arena;
 
   static constexpr int kInitialSize = 0;
-  static PROTOBUF_CONSTEXPR const size_t kRepHeaderSize = sizeof(HeapRep);
+  static PROTOBUF_CONSTEXPR const size_t kHeapRepHeaderSize = sizeof(HeapRep);
 
   RepeatedField(Arena* arena, const RepeatedField& rhs);
   RepeatedField(Arena* arena, RepeatedField&& rhs);
@@ -382,15 +382,15 @@ class RepeatedField final
   // Reserves space to expand the field to at least the given size.
   // If the array is grown, it will always be at least doubled in size.
   // If `annotate_size` is true (the default), then this function will annotate
-  // the old container from `current_size` to `capacity_` (unpoison memory)
+  // the old container from `old_size` to `capacity_` (unpoison memory)
   // directly before it is being released, and annotate the new container from
-  // `capacity_` to `current_size` (poison unused memory).
-  void Grow(int current_size, int new_size);
-  void GrowNoAnnotate(int current_size, int new_size);
+  // `capacity_` to `old_size` (poison unused memory).
+  void Grow(int old_size, int new_size);
+  void GrowNoAnnotate(int old_size, int new_size);
 
   // Annotates a change in size of this instance. This function should be called
-  // with (total_size, current_size) after new memory has been allocated and
-  // filled from previous memory), and called with (current_size, total_size)
+  // with (capacity, old_size) after new memory has been allocated and
+  // filled from previous memory), and called with (old_size, capacity)
   // right before (previously annotated) memory is released.
   void AnnotateSize(int old_size, int new_size) const {
     if (old_size != new_size) {
@@ -435,13 +435,13 @@ class RepeatedField final
   // pre-condition: the HeapRep must have been allocated, ie elements() is safe.
   HeapRep* heap_rep() const {
     return reinterpret_cast<HeapRep*>(reinterpret_cast<char*>(elements()) -
-                                      kRepHeaderSize);
+                                      kHeapRepHeaderSize);
   }
 
   // Internal helper to delete all elements and deallocate the storage.
   template <bool in_destructor = false>
   void InternalDeallocate() {
-    const size_t bytes = Capacity() * sizeof(Element) + kRepHeaderSize;
+    const size_t bytes = Capacity() * sizeof(Element) + kHeapRepHeaderSize;
     if (heap_rep()->arena == nullptr) {
       internal::SizedDelete(heap_rep(), bytes);
     } else if (!in_destructor) {
@@ -712,9 +712,9 @@ inline void RepeatedField<Element>::AddInputIterator(Iter begin, Iter end) {
 
   while (begin != end) {
     if (ABSL_PREDICT_FALSE(first == last)) {
-      int current_size = first - unsafe_elements();
-      GrowNoAnnotate(current_size, current_size + 1);
-      first = unsafe_elements() + current_size;
+      int size = first - unsafe_elements();
+      GrowNoAnnotate(size, size + 1);
+      first = unsafe_elements() + size;
       last = unsafe_elements() + Capacity();
     }
     ::new (static_cast<void*>(first)) Element(*begin);
@@ -900,34 +900,36 @@ RepeatedField<Element>::cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
 
 template <typename Element>
 inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const {
-  return Capacity() > 0 ? (Capacity() * sizeof(Element) + kRepHeaderSize) : 0;
+  return Capacity() > 0 ? (Capacity() * sizeof(Element) + kHeapRepHeaderSize)
+                        : 0;
 }
 
 namespace internal {
-// Returns the new size for a reserved field based on its 'total_size' and the
+// Returns the new size for a reserved field based on its 'capacity' and the
 // requested 'new_size'. The result is clamped to the closed interval:
 //   [internal::kMinRepeatedFieldAllocationSize,
 //    std::numeric_limits<int>::max()]
 // Requires:
-//     new_size > total_size &&
-//     (total_size == 0 ||
-//      total_size >= kRepeatedFieldLowerClampLimit)
-template <typename T, size_t kRepHeaderSize>
-inline int CalculateReserveSize(int total_size, int new_size) {
-  constexpr int lower_limit = RepeatedFieldLowerClampLimit<T, kRepHeaderSize>();
+//     new_size > capacity &&
+//     (capacity == 0 ||
+//      capacity >= kRepeatedFieldLowerClampLimit)
+template <typename T, size_t kHeapRepHeaderSize>
+inline int CalculateReserveSize(int capacity, int new_size) {
+  constexpr int lower_limit =
+      RepeatedFieldLowerClampLimit<T, kHeapRepHeaderSize>();
   if (new_size < lower_limit) {
     // Clamp to smallest allowed size.
     return lower_limit;
   }
   constexpr int kMaxSizeBeforeClamp =
-      (std::numeric_limits<int>::max() - kRepHeaderSize) / 2;
-  if (PROTOBUF_PREDICT_FALSE(total_size > kMaxSizeBeforeClamp)) {
+      (std::numeric_limits<int>::max() - kHeapRepHeaderSize) / 2;
+  if (PROTOBUF_PREDICT_FALSE(capacity > kMaxSizeBeforeClamp)) {
     return std::numeric_limits<int>::max();
   }
   // We want to double the number of bytes, not the number of elements, to try
   // to stay within power-of-two allocations.
-  // The allocation has kRepHeaderSize + sizeof(T) * capacity.
-  int doubled_size = 2 * total_size + kRepHeaderSize / sizeof(T);
+  // The allocation has kHeapRepHeaderSize + sizeof(T) * capacity.
+  int doubled_size = 2 * capacity + kHeapRepHeaderSize / sizeof(T);
   return std::max(doubled_size, new_size);
 }
 }  // namespace internal
@@ -942,28 +944,28 @@ void RepeatedField<Element>::Reserve(int new_size) {
 // Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
 // amount of code bloat.
 template <typename Element>
-PROTOBUF_NOINLINE void RepeatedField<Element>::GrowNoAnnotate(int current_size,
+PROTOBUF_NOINLINE void RepeatedField<Element>::GrowNoAnnotate(int old_size,
                                                               int new_size) {
   ABSL_DCHECK_GT(new_size, Capacity());
   HeapRep* new_rep;
   Arena* arena = GetArena();
 
-  new_size = internal::CalculateReserveSize<Element, kRepHeaderSize>(Capacity(),
-                                                                     new_size);
+  new_size = internal::CalculateReserveSize<Element, kHeapRepHeaderSize>(
+      Capacity(), new_size);
 
-  ABSL_DCHECK_LE(
-      static_cast<size_t>(new_size),
-      (std::numeric_limits<size_t>::max() - kRepHeaderSize) / sizeof(Element))
+  ABSL_DCHECK_LE(static_cast<size_t>(new_size),
+                 (std::numeric_limits<size_t>::max() - kHeapRepHeaderSize) /
+                     sizeof(Element))
       << "Requested size is too large to fit into size_t.";
   size_t bytes =
-      kRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
+      kHeapRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
   if (arena == nullptr) {
-    ABSL_DCHECK_LE((bytes - kRepHeaderSize) / sizeof(Element),
+    ABSL_DCHECK_LE((bytes - kHeapRepHeaderSize) / sizeof(Element),
                    static_cast<size_t>(std::numeric_limits<int>::max()))
         << "Requested size is too large to fit element count into int.";
     internal::SizedPtr res = internal::AllocateAtLeast(bytes);
     size_t num_available =
-        std::min((res.n - kRepHeaderSize) / sizeof(Element),
+        std::min((res.n - kHeapRepHeaderSize) / sizeof(Element),
                  static_cast<size_t>(std::numeric_limits<int>::max()));
     new_size = static_cast<int>(num_available);
     new_rep = static_cast<HeapRep*>(res.p);
@@ -974,14 +976,14 @@ PROTOBUF_NOINLINE void RepeatedField<Element>::GrowNoAnnotate(int current_size,
   new_rep->arena = arena;
 
   if (Capacity() > 0) {
-    if (current_size > 0) {
+    if (old_size > 0) {
       Element* pnew = static_cast<Element*>(new_rep->elements());
       Element* pold = elements();
       // TODO: add absl::is_trivially_relocatable<Element>
       if (std::is_trivial<Element>::value) {
-        memcpy(static_cast<void*>(pnew), pold, current_size * sizeof(Element));
+        memcpy(static_cast<void*>(pnew), pold, old_size * sizeof(Element));
       } else {
-        for (Element* end = pnew + current_size; pnew != end; ++pnew, ++pold) {
+        for (Element* end = pnew + old_size; pnew != end; ++pnew, ++pold) {
           ::new (static_cast<void*>(pnew)) Element(std::move(*pold));
           pold->~Element();
         }
@@ -1000,11 +1002,11 @@ PROTOBUF_NOINLINE void RepeatedField<Element>::GrowNoAnnotate(int current_size,
 // However, as explained in b/266411038#comment9, this causes issues
 // in shared libraries for Youtube (and possibly elsewhere).
 template <typename Element>
-PROTOBUF_NOINLINE void RepeatedField<Element>::Grow(int current_size,
+PROTOBUF_NOINLINE void RepeatedField<Element>::Grow(int old_size,
                                                     int new_size) {
-  AnnotateSize(current_size, Capacity());
-  GrowNoAnnotate(current_size, new_size);
-  AnnotateSize(Capacity(), current_size);
+  AnnotateSize(old_size, Capacity());
+  GrowNoAnnotate(old_size, new_size);
+  AnnotateSize(Capacity(), old_size);
 }
 
 template <typename Element>
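
Reviewer note (illustrative, not part of the patch): the "8 bool, 2 ints, or 1 int64" claim in RepeatedFieldLowerClampLimit() can be checked in isolation. A minimal sketch, assuming the 8-byte header described in the comment; LowerClampLimit and kHeaderSize below are hypothetical names, not protobuf's:

    #include <cstddef>
    #include <cstdint>

    // Smallest allowed element capacity: kHeaderSize bytes worth of elements,
    // so the first block (header + elements) totals 2 * kHeaderSize bytes.
    template <typename T, size_t kHeaderSize = 8>
    constexpr int LowerClampLimit() {
      static_assert(sizeof(T) <= kHeaderSize, "header is padded to sizeof(T)");
      return kHeaderSize / sizeof(T);
    }

    static_assert(LowerClampLimit<bool>() == 8, "8 bools beside an 8-byte header");
    static_assert(LowerClampLimit<int32_t>() == 2, "2 ints");
    static_assert(LowerClampLimit<int64_t>() == 1, "1 int64");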
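Similarly, the clamped byte-doubling policy in CalculateReserveSize() can be simulated standalone: the growth doubles the allocation in bytes (header included), not the element count, which is why the doubled capacity is 2 * capacity plus one header's worth of elements. A sketch under the same assumptions (8-byte header, int capacities); GrowCapacity is a hypothetical name and the preconditions of the real function are ignored here:

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>
    #include <limits>

    template <typename T, size_t kHeaderSize = 8>
    int GrowCapacity(int capacity, int new_size) {
      // Clamp small requests up to the minimum capacity (see sketch above).
      constexpr int kLowerLimit = kHeaderSize / sizeof(T);
      if (new_size < kLowerLimit) return kLowerLimit;
      // Past this point doubling would overflow int; clamp to INT_MAX.
      constexpr int kMaxBeforeClamp =
          (std::numeric_limits<int>::max() - kHeaderSize) / 2;
      if (capacity > kMaxBeforeClamp) return std::numeric_limits<int>::max();
      // Double the number of *bytes* to stay near power-of-two allocations:
      //   2 * (kHeaderSize + capacity * sizeof(T))
      //   == kHeaderSize + (2 * capacity + kHeaderSize / sizeof(T)) * sizeof(T)
      int doubled = 2 * capacity + static_cast<int>(kHeaderSize / sizeof(T));
      return std::max(doubled, new_size);
    }

    int main() {
      // For an int field with an 8-byte header, capacities go 2, 6, 14, 30,
      // 62, i.e. total allocations of 16, 32, 64, 128, 256 bytes.
      int cap = 0;
      for (int i = 0; i < 5; ++i) {
        cap = GrowCapacity<int>(cap, cap + 1);
        std::printf("capacity=%d bytes=%zu\n", cap, 8 + cap * sizeof(int));
      }
    }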