More refactoring in preparation for RepeatedField SOO.

We previously renamed current_size_ to size_, total_size_ to capacity_, and Rep to HeapRep.

PiperOrigin-RevId: 651895012
pull/17447/head
Protobuf Team Bot 9 months ago committed by Copybara-Service
parent 1aefa5715d
commit 57828b0c60
  1. 94
      src/google/protobuf/repeated_field.h

@@ -59,17 +59,17 @@ class Message;
namespace internal { namespace internal {
template <typename T, int kRepHeaderSize> template <typename T, int kHeapRepHeaderSize>
constexpr int RepeatedFieldLowerClampLimit() { constexpr int RepeatedFieldLowerClampLimit() {
// The header is padded to be at least `sizeof(T)` when it would be smaller // The header is padded to be at least `sizeof(T)` when it would be smaller
// otherwise. // otherwise.
static_assert(sizeof(T) <= kRepHeaderSize, ""); static_assert(sizeof(T) <= kHeapRepHeaderSize, "");
// We want to pad the minimum size to be a power of two bytes, including the // We want to pad the minimum size to be a power of two bytes, including the
// header. // header.
// The first allocation is kRepHeaderSize bytes worth of elements for a total // The first allocation is kHeapRepHeaderSize bytes worth of elements for a
// of 2*kRepHeaderSize bytes. // total of 2*kHeapRepHeaderSize bytes. For an 8-byte header, we allocate 8
// For an 8-byte header, we allocate 8 bool, 2 ints, or 1 int64. // bool, 2 ints, or 1 int64.
return kRepHeaderSize / sizeof(T); return kHeapRepHeaderSize / sizeof(T);
} }
// kRepeatedFieldUpperClampLimit is the lowest signed integer value that // kRepeatedFieldUpperClampLimit is the lowest signed integer value that
@@ -333,7 +333,7 @@ class RepeatedField final
friend class Arena; friend class Arena;
static constexpr int kInitialSize = 0; static constexpr int kInitialSize = 0;
static PROTOBUF_CONSTEXPR const size_t kRepHeaderSize = sizeof(HeapRep); static PROTOBUF_CONSTEXPR const size_t kHeapRepHeaderSize = sizeof(HeapRep);
RepeatedField(Arena* arena, const RepeatedField& rhs); RepeatedField(Arena* arena, const RepeatedField& rhs);
RepeatedField(Arena* arena, RepeatedField&& rhs); RepeatedField(Arena* arena, RepeatedField&& rhs);
@@ -382,15 +382,15 @@ class RepeatedField final
// Reserves space to expand the field to at least the given size. // Reserves space to expand the field to at least the given size.
// If the array is grown, it will always be at least doubled in size. // If the array is grown, it will always be at least doubled in size.
// If `annotate_size` is true (the default), then this function will annotate // If `annotate_size` is true (the default), then this function will annotate
// the old container from `current_size` to `capacity_` (unpoison memory) // the old container from `old_size` to `capacity_` (unpoison memory)
// directly before it is being released, and annotate the new container from // directly before it is being released, and annotate the new container from
// `capacity_` to `current_size` (poison unused memory). // `capacity_` to `old_size` (poison unused memory).
void Grow(int current_size, int new_size); void Grow(int old_size, int new_size);
void GrowNoAnnotate(int current_size, int new_size); void GrowNoAnnotate(int old_size, int new_size);
// Annotates a change in size of this instance. This function should be called // Annotates a change in size of this instance. This function should be called
// with (total_size, current_size) after new memory has been allocated and // with (capacity, old_size) after new memory has been allocated and
// filled from previous memory), and called with (current_size, total_size) // filled from previous memory), and called with (old_size, capacity)
// right before (previously annotated) memory is released. // right before (previously annotated) memory is released.
void AnnotateSize(int old_size, int new_size) const { void AnnotateSize(int old_size, int new_size) const {
if (old_size != new_size) { if (old_size != new_size) {
@@ -435,13 +435,13 @@ class RepeatedField final
// pre-condition: the HeapRep must have been allocated, ie elements() is safe. // pre-condition: the HeapRep must have been allocated, ie elements() is safe.
HeapRep* heap_rep() const { HeapRep* heap_rep() const {
return reinterpret_cast<HeapRep*>(reinterpret_cast<char*>(elements()) - return reinterpret_cast<HeapRep*>(reinterpret_cast<char*>(elements()) -
kRepHeaderSize); kHeapRepHeaderSize);
} }
// Internal helper to delete all elements and deallocate the storage. // Internal helper to delete all elements and deallocate the storage.
template <bool in_destructor = false> template <bool in_destructor = false>
void InternalDeallocate() { void InternalDeallocate() {
const size_t bytes = Capacity() * sizeof(Element) + kRepHeaderSize; const size_t bytes = Capacity() * sizeof(Element) + kHeapRepHeaderSize;
if (heap_rep()->arena == nullptr) { if (heap_rep()->arena == nullptr) {
internal::SizedDelete(heap_rep(), bytes); internal::SizedDelete(heap_rep(), bytes);
} else if (!in_destructor) { } else if (!in_destructor) {
@@ -712,9 +712,9 @@ inline void RepeatedField<Element>::AddInputIterator(Iter begin, Iter end) {
while (begin != end) { while (begin != end) {
if (ABSL_PREDICT_FALSE(first == last)) { if (ABSL_PREDICT_FALSE(first == last)) {
int current_size = first - unsafe_elements(); int size = first - unsafe_elements();
GrowNoAnnotate(current_size, current_size + 1); GrowNoAnnotate(size, size + 1);
first = unsafe_elements() + current_size; first = unsafe_elements() + size;
last = unsafe_elements() + Capacity(); last = unsafe_elements() + Capacity();
} }
::new (static_cast<void*>(first)) Element(*begin); ::new (static_cast<void*>(first)) Element(*begin);
@@ -900,34 +900,36 @@ RepeatedField<Element>::cend() const ABSL_ATTRIBUTE_LIFETIME_BOUND {
template <typename Element> template <typename Element>
inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const { inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const {
return Capacity() > 0 ? (Capacity() * sizeof(Element) + kRepHeaderSize) : 0; return Capacity() > 0 ? (Capacity() * sizeof(Element) + kHeapRepHeaderSize)
: 0;
} }
namespace internal { namespace internal {
// Returns the new size for a reserved field based on its 'total_size' and the // Returns the new size for a reserved field based on its 'capacity' and the
// requested 'new_size'. The result is clamped to the closed interval: // requested 'new_size'. The result is clamped to the closed interval:
// [internal::kMinRepeatedFieldAllocationSize, // [internal::kMinRepeatedFieldAllocationSize,
// std::numeric_limits<int>::max()] // std::numeric_limits<int>::max()]
// Requires: // Requires:
// new_size > total_size && // new_size > capacity &&
// (total_size == 0 || // (capacity == 0 ||
// total_size >= kRepeatedFieldLowerClampLimit) // capacity >= kRepeatedFieldLowerClampLimit)
template <typename T, int kRepHeaderSize> template <typename T, int kHeapRepHeaderSize>
inline int CalculateReserveSize(int total_size, int new_size) { inline int CalculateReserveSize(int capacity, int new_size) {
constexpr int lower_limit = RepeatedFieldLowerClampLimit<T, kRepHeaderSize>(); constexpr int lower_limit =
RepeatedFieldLowerClampLimit<T, kHeapRepHeaderSize>();
if (new_size < lower_limit) { if (new_size < lower_limit) {
// Clamp to smallest allowed size. // Clamp to smallest allowed size.
return lower_limit; return lower_limit;
} }
constexpr int kMaxSizeBeforeClamp = constexpr int kMaxSizeBeforeClamp =
(std::numeric_limits<int>::max() - kRepHeaderSize) / 2; (std::numeric_limits<int>::max() - kHeapRepHeaderSize) / 2;
if (PROTOBUF_PREDICT_FALSE(total_size > kMaxSizeBeforeClamp)) { if (PROTOBUF_PREDICT_FALSE(capacity > kMaxSizeBeforeClamp)) {
return std::numeric_limits<int>::max(); return std::numeric_limits<int>::max();
} }
// We want to double the number of bytes, not the number of elements, to try // We want to double the number of bytes, not the number of elements, to try
// to stay within power-of-two allocations. // to stay within power-of-two allocations.
// The allocation has kRepHeaderSize + sizeof(T) * capacity. // The allocation has kHeapRepHeaderSize + sizeof(T) * capacity.
int doubled_size = 2 * total_size + kRepHeaderSize / sizeof(T); int doubled_size = 2 * capacity + kHeapRepHeaderSize / sizeof(T);
return std::max(doubled_size, new_size); return std::max(doubled_size, new_size);
} }
} // namespace internal } // namespace internal
@@ -942,28 +944,28 @@ void RepeatedField<Element>::Reserve(int new_size) {
// Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant // Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
// amount of code bloat. // amount of code bloat.
template <typename Element> template <typename Element>
PROTOBUF_NOINLINE void RepeatedField<Element>::GrowNoAnnotate(int current_size, PROTOBUF_NOINLINE void RepeatedField<Element>::GrowNoAnnotate(int old_size,
int new_size) { int new_size) {
ABSL_DCHECK_GT(new_size, Capacity()); ABSL_DCHECK_GT(new_size, Capacity());
HeapRep* new_rep; HeapRep* new_rep;
Arena* arena = GetArena(); Arena* arena = GetArena();
new_size = internal::CalculateReserveSize<Element, kRepHeaderSize>(Capacity(), new_size = internal::CalculateReserveSize<Element, kHeapRepHeaderSize>(
new_size); Capacity(), new_size);
ABSL_DCHECK_LE( ABSL_DCHECK_LE(static_cast<size_t>(new_size),
static_cast<size_t>(new_size), (std::numeric_limits<size_t>::max() - kHeapRepHeaderSize) /
(std::numeric_limits<size_t>::max() - kRepHeaderSize) / sizeof(Element)) sizeof(Element))
<< "Requested size is too large to fit into size_t."; << "Requested size is too large to fit into size_t.";
size_t bytes = size_t bytes =
kRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size); kHeapRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
if (arena == nullptr) { if (arena == nullptr) {
ABSL_DCHECK_LE((bytes - kRepHeaderSize) / sizeof(Element), ABSL_DCHECK_LE((bytes - kHeapRepHeaderSize) / sizeof(Element),
static_cast<size_t>(std::numeric_limits<int>::max())) static_cast<size_t>(std::numeric_limits<int>::max()))
<< "Requested size is too large to fit element count into int."; << "Requested size is too large to fit element count into int.";
internal::SizedPtr res = internal::AllocateAtLeast(bytes); internal::SizedPtr res = internal::AllocateAtLeast(bytes);
size_t num_available = size_t num_available =
std::min((res.n - kRepHeaderSize) / sizeof(Element), std::min((res.n - kHeapRepHeaderSize) / sizeof(Element),
static_cast<size_t>(std::numeric_limits<int>::max())); static_cast<size_t>(std::numeric_limits<int>::max()));
new_size = static_cast<int>(num_available); new_size = static_cast<int>(num_available);
new_rep = static_cast<HeapRep*>(res.p); new_rep = static_cast<HeapRep*>(res.p);
@@ -974,14 +976,14 @@ PROTOBUF_NOINLINE void RepeatedField<Element>::GrowNoAnnotate(int current_size,
new_rep->arena = arena; new_rep->arena = arena;
if (Capacity() > 0) { if (Capacity() > 0) {
if (current_size > 0) { if (old_size > 0) {
Element* pnew = static_cast<Element*>(new_rep->elements()); Element* pnew = static_cast<Element*>(new_rep->elements());
Element* pold = elements(); Element* pold = elements();
// TODO: add absl::is_trivially_relocatable<Element> // TODO: add absl::is_trivially_relocatable<Element>
if (std::is_trivial<Element>::value) { if (std::is_trivial<Element>::value) {
memcpy(static_cast<void*>(pnew), pold, current_size * sizeof(Element)); memcpy(static_cast<void*>(pnew), pold, old_size * sizeof(Element));
} else { } else {
for (Element* end = pnew + current_size; pnew != end; ++pnew, ++pold) { for (Element* end = pnew + old_size; pnew != end; ++pnew, ++pold) {
::new (static_cast<void*>(pnew)) Element(std::move(*pold)); ::new (static_cast<void*>(pnew)) Element(std::move(*pold));
pold->~Element(); pold->~Element();
} }
@@ -1000,11 +1002,11 @@ PROTOBUF_NOINLINE void RepeatedField<Element>::GrowNoAnnotate(int current_size,
// However, as explained in b/266411038#comment9, this causes issues // However, as explained in b/266411038#comment9, this causes issues
// in shared libraries for Youtube (and possibly elsewhere). // in shared libraries for Youtube (and possibly elsewhere).
template <typename Element> template <typename Element>
PROTOBUF_NOINLINE void RepeatedField<Element>::Grow(int current_size, PROTOBUF_NOINLINE void RepeatedField<Element>::Grow(int old_size,
int new_size) { int new_size) {
AnnotateSize(current_size, Capacity()); AnnotateSize(old_size, Capacity());
GrowNoAnnotate(current_size, new_size); GrowNoAnnotate(old_size, new_size);
AnnotateSize(Capacity(), current_size); AnnotateSize(Capacity(), old_size);
} }
template <typename Element> template <typename Element>

Loading…
Cancel
Save