Reserve/InternalExtend use capacity instead of size as input.

This allows removing a branch from `InternalExtend`, which is beneficial for small fields.

PiperOrigin-RevId: 566628155
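
In effect, the commit moves the capacity check out of the out-of-line grow
routine and into an inlined check at each call site, so the common "room
already available" case never pays for a function call. Below is a minimal,
self-contained sketch of that pattern; ToyRepeatedPtr and its growth policy
are invented for illustration and are not the real RepeatedPtrFieldBase.

    // Hypothetical ToyRepeatedPtr: simplified names and growth policy, not
    // taken from the protobuf sources.
    #include <cassert>
    #include <cstdlib>
    #include <cstring>

    class ToyRepeatedPtr {
     public:
      void Add(void* obj) {
        MaybeExtend();  // cheap, inlined "is there room?" check
        elements_[size_++] = obj;
      }
      // Takes a capacity (total room), not a size, mirroring the new Reserve().
      void Reserve(int capacity) {
        if (capacity > capacity_) InternalExtend(capacity - capacity_);
      }
      int size() const { return size_; }
      ~ToyRepeatedPtr() { std::free(elements_); }

     private:
      void MaybeExtend() {
        if (size_ == capacity_) InternalExtend(1);
      }
      // Pre-condition: extend_amount > 0. The caller has already decided the
      // array must grow, so no "already big enough" branch is needed here.
      void InternalExtend(int extend_amount) {
        assert(extend_amount > 0);
        int new_capacity = capacity_ + extend_amount;
        if (new_capacity < 2 * capacity_) new_capacity = 2 * capacity_;
        void** grown =
            static_cast<void**>(std::malloc(sizeof(void*) * new_capacity));
        if (size_ > 0) std::memcpy(grown, elements_, sizeof(void*) * size_);
        std::free(elements_);
        elements_ = grown;
        capacity_ = new_capacity;
      }

      void** elements_ = nullptr;
      int size_ = 0;
      int capacity_ = 0;
    };

The real InternalExtend additionally handles arenas, the inlined
single-element (SSO) representation, and sizing via CalculateReserveSize; the
sketch only shows the split between the inlined check and the unconditional
grow.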
Branch: pull/14126/head
Protobuf Team Bot, committed by Copybara-Service (1 year ago)
Parent: 5ba3f03d1f
Commit: c98ef06291

Files changed:
  1. src/google/protobuf/repeated_ptr_field.cc (49 changed lines)
  2. src/google/protobuf/repeated_ptr_field.h (43 changed lines)

src/google/protobuf/repeated_ptr_field.cc

@@ -9,7 +9,6 @@
 // Based on original Protocol Buffers design by
 // Sanjay Ghemawat, Jeff Dean, and others.
 
-#include <algorithm>
 #include <cstddef>
 #include <cstdint>
 #include <cstring>
@@ -29,28 +28,24 @@ namespace protobuf {
 namespace internal {
 
-void** RepeatedPtrFieldBase::InternalExtend(int extend_amount) {
-  int new_size = current_size_ + extend_amount;
-  if (total_size_ >= new_size) {
-    // N.B.: rep_ is non-nullptr because extend_amount is always > 0, hence
-    // total_size must be non-zero since it is lower-bounded by new_size.
-    return elements() + current_size_;
-  }
+void RepeatedPtrFieldBase::InternalExtend(int extend_amount) {
+  ABSL_DCHECK(extend_amount > 0);
   constexpr size_t ptr_size = sizeof(rep()->elements[0]);
+  int new_capacity = total_size_ + extend_amount;
   Arena* arena = GetOwningArena();
-  new_size = internal::CalculateReserveSize<void*, kRepHeaderSize>(total_size_,
-                                                                   new_size);
+  new_capacity = internal::CalculateReserveSize<void*, kRepHeaderSize>(
+      total_size_, new_capacity);
   ABSL_CHECK_LE(
-      static_cast<int64_t>(new_size),
+      static_cast<int64_t>(new_capacity),
       static_cast<int64_t>(
          (std::numeric_limits<size_t>::max() - kRepHeaderSize) / ptr_size))
      << "Requested size is too large to fit into size_t.";
-  size_t bytes = kRepHeaderSize + ptr_size * new_size;
+  size_t bytes = kRepHeaderSize + ptr_size * new_capacity;
   Rep* new_rep;
   void* old_tagged_ptr = tagged_rep_or_elem_;
   if (arena == nullptr) {
     internal::SizedPtr res = internal::AllocateAtLeast(bytes);
-    new_size = static_cast<int>((res.n - kRepHeaderSize) / ptr_size);
+    new_capacity = static_cast<int>((res.n - kRepHeaderSize) / ptr_size);
     new_rep = reinterpret_cast<Rep*>(res.p);
   } else {
     new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
@@ -78,13 +73,12 @@ void** RepeatedPtrFieldBase::InternalExtend(int extend_amount) {
   tagged_rep_or_elem_ =
       reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(new_rep) + 1);
-  total_size_ = new_size;
-  return &new_rep->elements[current_size_];
+  total_size_ = new_capacity;
 }
 
-void RepeatedPtrFieldBase::Reserve(int new_size) {
-  if (new_size > current_size_) {
-    InternalExtend(new_size - current_size_);
+void RepeatedPtrFieldBase::Reserve(int capacity) {
+  if (capacity > total_size_) {
+    InternalExtend(capacity - total_size_);
   }
 }
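
As a hedged, runnable illustration of the Reserve()/InternalExtend() contract
change above (the numbers are invented): with size 2 and capacity 8, the old
code entered InternalExtend for Reserve(5) only to take its early-return
branch, while the new code compares the request against the capacity at the
call site and does nothing.

    #include <cstdio>

    int main() {
      int current_size = 2;  // stand-in for current_size_
      int total_size = 8;    // stand-in for total_size_ (the capacity)
      int requested = 5;

      // Old contract: compare against size, then rely on InternalExtend's
      // early-return branch when the capacity already suffices.
      int old_calls_extend = requested > current_size;  // 1
      // New contract: compare against capacity; InternalExtend is not called.
      int new_calls_extend = requested > total_size;    // 0

      std::printf("old: calls InternalExtend = %d (returns early inside)\n",
                  old_calls_extend);
      std::printf("new: calls InternalExtend = %d\n", new_calls_extend);
      return 0;
    }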
@@ -113,16 +107,14 @@ void* RepeatedPtrFieldBase::AddOutOfLineHelper(void* obj) {
     ABSL_DCHECK(using_sso());
     ABSL_DCHECK_EQ(allocated_size(), 0);
     ExchangeCurrentSize(1);
-    tagged_rep_or_elem_ = obj;
-    return obj;
+    return tagged_rep_or_elem_ = obj;
   }
   if (using_sso() || rep()->allocated_size == total_size_) {
     InternalExtend(1);  // Equivalent to "Reserve(total_size_ + 1)"
   }
   Rep* r = rep();
   ++r->allocated_size;
-  r->elements[ExchangeCurrentSize(current_size_ + 1)] = obj;
-  return obj;
+  return r->elements[ExchangeCurrentSize(current_size_ + 1)] = obj;
 }
 
 void RepeatedPtrFieldBase::CloseGap(int start, int num) {
@@ -145,21 +137,10 @@ MessageLite* RepeatedPtrFieldBase::AddWeak(const MessageLite* prototype) {
     return reinterpret_cast<MessageLite*>(
         element_at(ExchangeCurrentSize(current_size_ + 1)));
   }
-  if (allocated_size() == total_size_) {
-    Reserve(total_size_ + 1);
-  }
   MessageLite* result = prototype
                             ? prototype->New(arena_)
                             : Arena::CreateMessage<ImplicitWeakMessage>(arena_);
-  if (using_sso()) {
-    ExchangeCurrentSize(current_size_ + 1);
-    tagged_rep_or_elem_ = result;
-  } else {
-    Rep* r = rep();
-    ++r->allocated_size;
-    r->elements[ExchangeCurrentSize(current_size_ + 1)] = result;
-  }
-  return result;
+  return static_cast<MessageLite*>(AddOutOfLineHelper(result));
 }
 
 void InternalOutOfLineDeleteMessageLite(MessageLite* message) {

src/google/protobuf/repeated_ptr_field.h

@@ -225,9 +225,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
           std::move(value);
       return;
     }
-    if (allocated_size() == total_size_) {
-      Reserve(total_size_ + 1);
-    }
+    MaybeExtend();
     if (!using_sso()) ++rep()->allocated_size;
     auto* result = TypeHandler::New(arena_, std::move(value));
     element_at(ExchangeCurrentSize(current_size_ + 1)) = result;
@@ -324,10 +322,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
   template <typename TypeHandler>
   void AddAllocatedForParse(Value<TypeHandler>* value) {
     ABSL_DCHECK_EQ(current_size_, allocated_size());
-    if (current_size_ == total_size_) {
-      // The array is completely full with no cleared objects, so grow it.
-      InternalExtend(1);
-    }
+    MaybeExtend();
     element_at(current_size_++) = value;
     if (!using_sso()) ++rep()->allocated_size;
   }
@@ -348,9 +343,9 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
     RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
   }
 
-  void CloseGap(int start, int num);  // implemented in the cc file
-  void Reserve(int new_size);  // implemented in the cc file
+  void CloseGap(int start, int num);
+  void Reserve(int capacity);
 
   template <typename TypeHandler>
   static inline Value<TypeHandler>* copy(const Value<TypeHandler>* value) {
@@ -495,9 +490,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
         "RepeatedPtrField not on an arena.";
     ABSL_DCHECK(TypeHandler::GetOwningArena(value) == nullptr)
         << "AddCleared() can only accept values not on an arena.";
-    if (allocated_size() == total_size_) {
-      Reserve(total_size_ + 1);
-    }
+    MaybeExtend();
     if (using_sso()) {
       tagged_rep_or_elem_ = value;
     } else {
@@ -764,10 +757,11 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
       const RepeatedPtrFieldBase& other,
       void (RepeatedPtrFieldBase::*inner_loop)(void**, void* const*, int,
                                                int)) {
-    // Note: wrapper has already guaranteed that other.rep_ != nullptr here.
+    // Note: wrapper has already guaranteed that `other_size` > 0.
     int other_size = other.current_size_;
+    Reserve(current_size_ + other_size);
     void* const* other_elements = other.elements();
-    void** new_elements = InternalExtend(other_size);
+    void** new_elements = elements() + current_size_;
     int allocated_elems = allocated_size() - current_size_;
     (this->*inner_loop)(new_elements, other_elements, other_size,
                         allocated_elems);
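
Because InternalExtend() no longer returns a write position, the merge helper
above reserves first and only then derives new_elements from
elements() + current_size_. The ordering matters because growing may
reallocate the element array; here is a small std::vector analogy (not the
protobuf class) of the same "grow first, then take the destination pointer"
rule.

    #include <cstdio>
    #include <vector>

    int main() {
      std::vector<int> dst = {1, 2};
      std::vector<int> src = {3, 4, 5};

      const auto old_size = dst.size();
      dst.reserve(old_size + src.size());      // grow first; may reallocate
      dst.resize(old_size + src.size());       // no reallocation after reserve
      int* write_pos = dst.data() + old_size;  // destination taken afterwards
      for (int v : src) *write_pos++ = v;

      for (int v : dst) std::printf("%d ", v);  // prints: 1 2 3 4 5
      std::printf("\n");
      return 0;
    }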
@@ -799,12 +793,21 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
     }
   }
 
-  // Internal helper: extends array space if necessary to contain
-  // |extend_amount| more elements, and returns a pointer to the element
-  // immediately following the old list of elements. This interface factors out
-  // common behavior from Reserve() and MergeFrom() to reduce code size.
-  // |extend_amount| must be > 0.
-  void** InternalExtend(int extend_amount);
+  // Extends capacity by at least |extend_amount|.
+  //
+  // Pre-condition: |extend_amount| must be > 0.
+  void InternalExtend(int extend_amount);
+
+  // Ensures that capacity is big enough to store one more allocated element.
+  inline void MaybeExtend() {
+    if (using_sso() ? (tagged_rep_or_elem_ != nullptr)
+                    : (rep()->allocated_size == total_size_)) {
+      ABSL_DCHECK_EQ(allocated_size(), Capacity());
+      InternalExtend(1);
+    } else {
+      ABSL_DCHECK_NE(allocated_size(), Capacity());
+    }
+  }
 
   // Internal helper for Add: adds "obj" as the next element in the
   // array, including potentially resizing the array with Reserve if
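
The new MaybeExtend() above folds the "is the field full?" decision into a
single predicate that is aware of the inlined single-element (SSO) state. The
following standalone sketch shows just that predicate with the state passed in
explicitly rather than read from tagged_rep_or_elem_ and rep(); the function
and parameter names are invented for illustration.

    #include <cstdio>

    // In SSO mode the field stores at most one element inline, so it is full
    // exactly when that slot is occupied. Otherwise it is full when the number
    // of allocated elements has reached the capacity.
    bool NeedsExtend(bool using_sso, bool sso_slot_occupied, int allocated_size,
                     int capacity) {
      return using_sso ? sso_slot_occupied : (allocated_size == capacity);
    }

    int main() {
      std::printf("empty SSO field : %d\n", NeedsExtend(true, false, 0, 1) ? 1 : 0);
      std::printf("full SSO field  : %d\n", NeedsExtend(true, true, 1, 1) ? 1 : 0);
      std::printf("heap, spare room: %d\n", NeedsExtend(false, false, 3, 8) ? 1 : 0);
      std::printf("heap, full      : %d\n", NeedsExtend(false, false, 8, 8) ? 1 : 0);
      return 0;
    }

When the predicate is true, the real code grows by calling InternalExtend(1);
otherwise the existing storage is reused.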
