Reserve/InternalExtend use capacity instead of size as input.

This allows removing a branch from `InternalExtend`, which is beneficial for small fields.

PiperOrigin-RevId: 566628155
pull/14126/head
Protobuf Team Bot 1 year ago committed by Copybara-Service
parent 5ba3f03d1f
commit c98ef06291
  1. 49
      src/google/protobuf/repeated_ptr_field.cc
  2. 43
      src/google/protobuf/repeated_ptr_field.h

@@ -9,7 +9,6 @@
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
@@ -29,28 +28,24 @@ namespace protobuf {
namespace internal {
void** RepeatedPtrFieldBase::InternalExtend(int extend_amount) {
int new_size = current_size_ + extend_amount;
if (total_size_ >= new_size) {
// N.B.: rep_ is non-nullptr because extend_amount is always > 0, hence
// total_size must be non-zero since it is lower-bounded by new_size.
return elements() + current_size_;
}
void RepeatedPtrFieldBase::InternalExtend(int extend_amount) {
ABSL_DCHECK(extend_amount > 0);
constexpr size_t ptr_size = sizeof(rep()->elements[0]);
int new_capacity = total_size_ + extend_amount;
Arena* arena = GetOwningArena();
new_size = internal::CalculateReserveSize<void*, kRepHeaderSize>(total_size_,
new_size);
new_capacity = internal::CalculateReserveSize<void*, kRepHeaderSize>(
total_size_, new_capacity);
ABSL_CHECK_LE(
static_cast<int64_t>(new_size),
static_cast<int64_t>(new_capacity),
static_cast<int64_t>(
(std::numeric_limits<size_t>::max() - kRepHeaderSize) / ptr_size))
<< "Requested size is too large to fit into size_t.";
size_t bytes = kRepHeaderSize + ptr_size * new_size;
size_t bytes = kRepHeaderSize + ptr_size * new_capacity;
Rep* new_rep;
void* old_tagged_ptr = tagged_rep_or_elem_;
if (arena == nullptr) {
internal::SizedPtr res = internal::AllocateAtLeast(bytes);
new_size = static_cast<int>((res.n - kRepHeaderSize) / ptr_size);
new_capacity = static_cast<int>((res.n - kRepHeaderSize) / ptr_size);
new_rep = reinterpret_cast<Rep*>(res.p);
} else {
new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
@@ -78,13 +73,12 @@ void** RepeatedPtrFieldBase::InternalExtend(int extend_amount) {
tagged_rep_or_elem_ =
reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(new_rep) + 1);
total_size_ = new_size;
return &new_rep->elements[current_size_];
total_size_ = new_capacity;
}
void RepeatedPtrFieldBase::Reserve(int new_size) {
if (new_size > current_size_) {
InternalExtend(new_size - current_size_);
void RepeatedPtrFieldBase::Reserve(int capacity) {
if (capacity > total_size_) {
InternalExtend(capacity - total_size_);
}
}
@@ -113,16 +107,14 @@ void* RepeatedPtrFieldBase::AddOutOfLineHelper(void* obj) {
ABSL_DCHECK(using_sso());
ABSL_DCHECK_EQ(allocated_size(), 0);
ExchangeCurrentSize(1);
tagged_rep_or_elem_ = obj;
return obj;
return tagged_rep_or_elem_ = obj;
}
if (using_sso() || rep()->allocated_size == total_size_) {
InternalExtend(1); // Equivalent to "Reserve(total_size_ + 1)"
}
Rep* r = rep();
++r->allocated_size;
r->elements[ExchangeCurrentSize(current_size_ + 1)] = obj;
return obj;
return r->elements[ExchangeCurrentSize(current_size_ + 1)] = obj;
}
void RepeatedPtrFieldBase::CloseGap(int start, int num) {
@@ -145,21 +137,10 @@ MessageLite* RepeatedPtrFieldBase::AddWeak(const MessageLite* prototype) {
return reinterpret_cast<MessageLite*>(
element_at(ExchangeCurrentSize(current_size_ + 1)));
}
if (allocated_size() == total_size_) {
Reserve(total_size_ + 1);
}
MessageLite* result = prototype
? prototype->New(arena_)
: Arena::CreateMessage<ImplicitWeakMessage>(arena_);
if (using_sso()) {
ExchangeCurrentSize(current_size_ + 1);
tagged_rep_or_elem_ = result;
} else {
Rep* r = rep();
++r->allocated_size;
r->elements[ExchangeCurrentSize(current_size_ + 1)] = result;
}
return result;
return static_cast<MessageLite*>(AddOutOfLineHelper(result));
}
void InternalOutOfLineDeleteMessageLite(MessageLite* message) {

@@ -225,9 +225,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
std::move(value);
return;
}
if (allocated_size() == total_size_) {
Reserve(total_size_ + 1);
}
MaybeExtend();
if (!using_sso()) ++rep()->allocated_size;
auto* result = TypeHandler::New(arena_, std::move(value));
element_at(ExchangeCurrentSize(current_size_ + 1)) = result;
@@ -324,10 +322,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
template <typename TypeHandler>
void AddAllocatedForParse(Value<TypeHandler>* value) {
ABSL_DCHECK_EQ(current_size_, allocated_size());
if (current_size_ == total_size_) {
// The array is completely full with no cleared objects, so grow it.
InternalExtend(1);
}
MaybeExtend();
element_at(current_size_++) = value;
if (!using_sso()) ++rep()->allocated_size;
}
@@ -348,9 +343,9 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
}
void CloseGap(int start, int num); // implemented in the cc file
void CloseGap(int start, int num);
void Reserve(int new_size); // implemented in the cc file
void Reserve(int capacity);
template <typename TypeHandler>
static inline Value<TypeHandler>* copy(const Value<TypeHandler>* value) {
@@ -495,9 +490,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
"RepeatedPtrField not on an arena.";
ABSL_DCHECK(TypeHandler::GetOwningArena(value) == nullptr)
<< "AddCleared() can only accept values not on an arena.";
if (allocated_size() == total_size_) {
Reserve(total_size_ + 1);
}
MaybeExtend();
if (using_sso()) {
tagged_rep_or_elem_ = value;
} else {
@@ -764,10 +757,11 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
const RepeatedPtrFieldBase& other,
void (RepeatedPtrFieldBase::*inner_loop)(void**, void* const*, int,
int)) {
// Note: wrapper has already guaranteed that other.rep_ != nullptr here.
// Note: wrapper has already guaranteed that `other_size` > 0.
int other_size = other.current_size_;
Reserve(current_size_ + other_size);
void* const* other_elements = other.elements();
void** new_elements = InternalExtend(other_size);
void** new_elements = elements() + current_size_;
int allocated_elems = allocated_size() - current_size_;
(this->*inner_loop)(new_elements, other_elements, other_size,
allocated_elems);
@ -799,12 +793,21 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
}
}
// Internal helper: extends array space if necessary to contain
// |extend_amount| more elements, and returns a pointer to the element
// immediately following the old list of elements. This interface factors out
// common behavior from Reserve() and MergeFrom() to reduce code size.
// |extend_amount| must be > 0.
void** InternalExtend(int extend_amount);
// Extends capacity by at least |extend_amount|.
//
// Pre-condition: |extend_amount| must be > 0.
void InternalExtend(int extend_amount);
// Ensures that capacity is big enough to store one more allocated element.
inline void MaybeExtend() {
if (using_sso() ? (tagged_rep_or_elem_ != nullptr)
: (rep()->allocated_size == total_size_)) {
ABSL_DCHECK_EQ(allocated_size(), Capacity());
InternalExtend(1);
} else {
ABSL_DCHECK_NE(allocated_size(), Capacity());
}
}
// Internal helper for Add: adds "obj" as the next element in the
// array, including potentially resizing the array with Reserve if

Loading…
Cancel
Save