Overload the void* in RepeatedPtrFieldBase to point directly to the first element if Capacity()==1, or to point to an allocated Rep if Capacity()>1.

This avoids allocating memory for one-element containers and avoids indirection costs such as extra cache misses.
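The trick relies on pointer alignment: a heap- or arena-allocated Rep is at least pointer-aligned, so the low bit of the pointer is free to serve as a tag. Below is a minimal standalone sketch of the scheme, not the actual protobuf internals; the SsoPtrArray type and set_rep helper are illustrative, while Rep, elements, using_sso(), and rep() mirror the names introduced in repeated_ptr_field.h in the diff below.

#include <cassert>
#include <cstdint>

// Heap representation used once capacity grows past one element.
struct Rep {
  int allocated_size;
  void* elements[1];  // flexible-array-style tail in the real code
};

struct SsoPtrArray {
  // Bit 0 clear: points directly at the single element (or is null).
  // Bit 0 set: (tagged - 1) is a heap/arena-allocated Rep*.
  void* tagged = nullptr;

  bool using_sso() const {
    return (reinterpret_cast<std::uintptr_t>(tagged) & 1) == 0;
  }
  Rep* rep() const {
    assert(!using_sso());
    return reinterpret_cast<Rep*>(reinterpret_cast<std::uintptr_t>(tagged) - 1);
  }
  void set_rep(Rep* r) {
    // Rep is at least pointer-aligned, so bit 0 is guaranteed to be free.
    tagged = reinterpret_cast<void*>(reinterpret_cast<std::uintptr_t>(r) + 1);
  }
};

int main() {
  SsoPtrArray a;
  int x = 42;
  a.tagged = &x;  // capacity 1: no backing array allocation, no indirection
  assert(a.using_sso());
  Rep r{1, {&x}};  // growing past one element switches to a tagged Rep*
  a.set_rep(&r);
  assert(!a.using_sso() && a.rep()->allocated_size == 1);
}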

PiperOrigin-RevId: 553157394
Branch: pull/13445/head
Author: Protobuf Team Bot (committed by Copybara-Service)
parent 421ce84e5a
commit 7c2ed10beb
src/google/protobuf/implicit_weak_message.h (4 changed lines)
src/google/protobuf/repeated_field_unittest.cc (69 changed lines)
src/google/protobuf/repeated_ptr_field.cc (132 changed lines)
src/google/protobuf/repeated_ptr_field.h (251 changed lines)

@@ -198,13 +198,13 @@ struct WeakRepeatedPtrField {
return pointer_iterator(base().raw_mutable_data());
}
const_pointer_iterator pointer_begin() const {
- return const_pointer_iterator(base().raw_mutable_data());
+ return const_pointer_iterator(base().raw_data());
}
pointer_iterator pointer_end() {
return pointer_iterator(base().raw_mutable_data() + base().size());
}
const_pointer_iterator pointer_end() const {
- return const_pointer_iterator(base().raw_mutable_data() + base().size());
+ return const_pointer_iterator(base().raw_data() + base().size());
}
MessageLite* AddWeak(const MessageLite* prototype) {

@@ -42,6 +42,7 @@
#include <cstdint>
#include <cstdlib>
#include <cstring>
+ #include <functional>
#include <iterator>
#include <limits>
#include <list>
@@ -886,13 +887,9 @@ TEST(RepeatedField, MoveAssign) {
Arena::CreateMessage<RepeatedField<int>>(&arena);
destination->Add(3);
const int* source_data = source->data();
- const int* destination_data = destination->data();
*destination = std::move(*source);
EXPECT_EQ(source_data, destination->data());
EXPECT_THAT(*destination, ElementsAre(1, 2));
- // This property isn't guaranteed but it's useful to have a test that would
- // catch changes in this area.
- EXPECT_EQ(destination_data, source->data());
EXPECT_THAT(*source, ElementsAre(3));
}
{
@@ -1656,9 +1653,18 @@ TEST(RepeatedPtrField, AddAllocated) {
field.Add()->assign("filler");
}
+ const auto ensure_at_capacity = [&] {
+ while (field.size() < field.Capacity()) {
+ field.Add()->assign("filler");
+ }
+ };
+ const auto ensure_not_at_capacity = [&] { field.Reserve(field.size() + 1); };
+ ensure_at_capacity();
int index = field.size();
// First branch: Field is at capacity with no cleared objects.
ASSERT_EQ(field.size(), field.Capacity());
std::string* foo = new std::string("foo");
field.AddAllocated(foo);
EXPECT_EQ(index + 1, field.size());
@@ -1666,6 +1672,7 @@ TEST(RepeatedPtrField, AddAllocated) {
EXPECT_EQ(foo, &field.Get(index));
// Last branch: Field is not at capacity and there are no cleared objects.
+ ensure_not_at_capacity();
std::string* bar = new std::string("bar");
field.AddAllocated(bar);
++index;
@@ -1674,6 +1681,7 @@ TEST(RepeatedPtrField, AddAllocated) {
EXPECT_EQ(bar, &field.Get(index));
// Third branch: Field is not at capacity and there are some cleared objects.
+ ensure_not_at_capacity();
field.RemoveLast();
std::string* baz = new std::string("baz");
field.AddAllocated(baz);
@@ -1682,9 +1690,7 @@ TEST(RepeatedPtrField, AddAllocated) {
EXPECT_EQ(baz, &field.Get(index));
// Second branch: Field is at capacity but has some cleared objects.
- while (field.size() < field.Capacity()) {
- field.Add()->assign("filler2");
- }
+ ensure_at_capacity();
field.RemoveLast();
index = field.size();
std::string* moo = new std::string("moo");
@@ -1842,6 +1848,47 @@ TEST(RepeatedPtrField, IteratorConstruct_Proto) {
EXPECT_EQ(values[1].bb(), other.Get(1).bb());
}
+ TEST(RepeatedPtrField, SmallOptimization) {
+ // Properties checked here are not part of the contract of RepeatedPtrField,
+ // but we test them to verify that SSO is working as expected by the
+ // implementation.
+ // We use an arena to easily measure memory usage, but it is not required.
+ Arena arena;
+ auto* array = Arena::CreateMessage<RepeatedPtrField<std::string>>(&arena);
+ EXPECT_EQ(array->Capacity(), 1);
+ EXPECT_EQ(array->SpaceUsedExcludingSelf(), 0);
+ std::string str;
+ auto usage_before = arena.SpaceUsed();
+ // We use UnsafeArenaAddAllocated just to grow the array without creating
+ // objects or causing extra cleanup costs in the arena to make the
+ // measurements simpler.
+ array->UnsafeArenaAddAllocated(&str);
+ // No backing array, just the string.
+ EXPECT_EQ(array->SpaceUsedExcludingSelf(), sizeof(str));
+ // We have not used any arena space.
+ EXPECT_EQ(usage_before, arena.SpaceUsed());
+ // Verify the string is where we think it is.
+ EXPECT_EQ(&*array->begin(), &str);
+ EXPECT_EQ(array->pointer_begin()[0], &str);
+ // The T** in pointer_begin points into the SSO slot inside the object.
+ EXPECT_TRUE(std::less_equal<void*>{}(array, &*array->pointer_begin()));
+ EXPECT_TRUE(std::less_equal<void*>{}(&*array->pointer_begin(), array + 1));
+ // Adding a second object stops using SSO.
+ std::string str2;
+ array->UnsafeArenaAddAllocated(&str2);
+ EXPECT_EQ(array->Capacity(), 3);
+ // Backing array and the strings.
+ EXPECT_EQ(array->SpaceUsedExcludingSelf(),
+ (1 + array->Capacity()) * sizeof(void*) + 2 * sizeof(str));
+ // We used some arena space now.
+ EXPECT_LT(usage_before, arena.SpaceUsed());
+ // And pointer_begin no longer points into the SSO slot.
+ EXPECT_FALSE(std::less_equal<void*>{}(array, &*array->pointer_begin()) &&
+ std::less_equal<void*>{}(&*array->pointer_begin(), array + 1));
+ }
TEST(RepeatedPtrField, CopyAssign) {
RepeatedPtrField<std::string> source, destination;
source.Add()->assign("4");
@@ -1907,13 +1954,9 @@ TEST(RepeatedPtrField, MoveAssign) {
RepeatedPtrField<std::string> destination;
*destination.Add() = "3";
const std::string* const* source_data = source.data();
- const std::string* const* destination_data = destination.data();
destination = std::move(source);
EXPECT_EQ(source_data, destination.data());
EXPECT_THAT(destination, ElementsAre("1", "2"));
- // This property isn't guaranteed but it's useful to have a test that would
- // catch changes in this area.
- EXPECT_EQ(destination_data, source.data());
EXPECT_THAT(source, ElementsAre("3"));
}
{
@@ -1926,13 +1969,9 @@ TEST(RepeatedPtrField, MoveAssign) {
Arena::CreateMessage<RepeatedPtrField<std::string>>(&arena);
*destination->Add() = "3";
const std::string* const* source_data = source->data();
- const std::string* const* destination_data = destination->data();
*destination = std::move(*source);
EXPECT_EQ(source_data, destination->data());
EXPECT_THAT(*destination, ElementsAre("1", "2"));
- // This property isn't guaranteed but it's useful to have a test that would
- // catch changes in this area.
- EXPECT_EQ(destination_data, source->data());
EXPECT_THAT(*source, ElementsAre("3"));
}
{

@@ -33,8 +33,12 @@
// Sanjay Ghemawat, Jeff Dean, and others.
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include "absl/log/absl_check.h"
#include "google/protobuf/arena.h"
#include "google/protobuf/implicit_weak_message.h"
#include "google/protobuf/port.h"
#include "google/protobuf/repeated_field.h"
@@ -52,45 +56,58 @@ void** RepeatedPtrFieldBase::InternalExtend(int extend_amount) {
if (total_size_ >= new_size) {
// N.B.: rep_ is non-nullptr because extend_amount is always > 0, hence
// total_size must be non-zero since it is lower-bounded by new_size.
- return &rep_->elements[current_size_];
+ return elements() + current_size_;
}
- Rep* old_rep = rep_;
Arena* arena = GetOwningArena();
new_size = internal::CalculateReserveSize<void*, kRepHeaderSize>(total_size_,
new_size);
ABSL_CHECK_LE(static_cast<int64_t>(new_size),
static_cast<int64_t>(
(std::numeric_limits<size_t>::max() - kRepHeaderSize) /
- sizeof(old_rep->elements[0])))
+ sizeof(rep()->elements[0])))
<< "Requested size is too large to fit into size_t.";
- size_t bytes = kRepHeaderSize + sizeof(old_rep->elements[0]) * new_size;
+ size_t bytes = kRepHeaderSize + sizeof(rep()->elements[0]) * new_size;
+ Rep* new_rep;
+ void* old_tagged_ptr = tagged_rep_or_elem_;
if (arena == nullptr) {
internal::SizedPtr res = internal::AllocateAtLeast(bytes);
- new_size = (res.n - kRepHeaderSize) / sizeof(old_rep->elements[0]);
- rep_ = reinterpret_cast<Rep*>(res.p);
+ new_size =
+ static_cast<int>((res.n - kRepHeaderSize) / sizeof(rep()->elements[0]));
+ new_rep = reinterpret_cast<Rep*>(res.p);
} else {
- rep_ = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
+ new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
}
- const int old_total_size = total_size_;
- total_size_ = new_size;
- if (old_rep) {
- if (old_rep->allocated_size > 0) {
- memcpy(rep_->elements, old_rep->elements,
- old_rep->allocated_size * sizeof(rep_->elements[0]));
- }
- rep_->allocated_size = old_rep->allocated_size;
- const size_t old_size =
- old_total_size * sizeof(rep_->elements[0]) + kRepHeaderSize;
- if (arena == nullptr) {
- internal::SizedDelete(old_rep, old_size);
+ if (using_sso()) {
+ new_rep->elements[0] = old_tagged_ptr;
+ new_rep->allocated_size = old_tagged_ptr != nullptr ? 1 : 0;
+ } else {
+ if (old_tagged_ptr) {
+ Rep* old_rep = reinterpret_cast<Rep*>(
+ reinterpret_cast<uintptr_t>(old_tagged_ptr) - 1);
+ if (old_rep->allocated_size > 0) {
+ memcpy(new_rep->elements, old_rep->elements,
+ old_rep->allocated_size * sizeof(rep()->elements[0]));
+ }
+ new_rep->allocated_size = old_rep->allocated_size;
+ const size_t old_size =
+ total_size_ * sizeof(rep()->elements[0]) + kRepHeaderSize;
+ if (arena == nullptr) {
+ internal::SizedDelete(old_rep, old_size);
+ } else {
+ arena_->ReturnArrayMemory(old_rep, old_size);
+ }
- } else {
- arena_->ReturnArrayMemory(old_rep, old_size);
- }
+ } else {
+ new_rep->allocated_size = 0;
+ }
+ }
- } else {
- rep_->allocated_size = 0;
- }
- return &rep_->elements[current_size_];
+ tagged_rep_or_elem_ =
+ reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(new_rep) + 1);
+ total_size_ = new_size;
+ return &new_rep->elements[current_size_];
}
void RepeatedPtrFieldBase::Reserve(int new_size) {
@@ -100,49 +117,76 @@ void RepeatedPtrFieldBase::Reserve(int new_size) {
}
void RepeatedPtrFieldBase::DestroyProtos() {
- ABSL_DCHECK(rep_);
+ ABSL_DCHECK(tagged_rep_or_elem_);
ABSL_DCHECK(arena_ == nullptr);
- int n = rep_->allocated_size;
- void* const* elements = rep_->elements;
- for (int i = 0; i < n; i++) {
- delete static_cast<MessageLite*>(elements[i]);
+ if (using_sso()) {
+ delete static_cast<MessageLite*>(tagged_rep_or_elem_);
+ } else {
+ Rep* r = rep();
+ int n = r->allocated_size;
+ void* const* elements = r->elements;
+ for (int i = 0; i < n; i++) {
+ delete static_cast<MessageLite*>(elements[i]);
+ }
+ const size_t size = total_size_ * sizeof(elements[0]) + kRepHeaderSize;
+ internal::SizedDelete(r, size);
+ tagged_rep_or_elem_ = nullptr;
+ }
- const size_t size = total_size_ * sizeof(elements[0]) + kRepHeaderSize;
- internal::SizedDelete(rep_, size);
- rep_ = nullptr;
}
void* RepeatedPtrFieldBase::AddOutOfLineHelper(void* obj) {
- if (!rep_ || rep_->allocated_size == total_size_) {
+ if (tagged_rep_or_elem_ == nullptr) {
+ ABSL_DCHECK_EQ(current_size_, 0);
+ ABSL_DCHECK(using_sso());
+ ABSL_DCHECK_EQ(allocated_size(), 0);
+ ExchangeCurrentSize(1);
+ tagged_rep_or_elem_ = obj;
+ return obj;
+ }
+ if (using_sso() || rep()->allocated_size == total_size_) {
InternalExtend(1); // Equivalent to "Reserve(total_size_ + 1)"
}
- ++rep_->allocated_size;
- rep_->elements[ExchangeCurrentSize(current_size_ + 1)] = obj;
+ Rep* r = rep();
+ ++r->allocated_size;
+ r->elements[ExchangeCurrentSize(current_size_ + 1)] = obj;
return obj;
}
void RepeatedPtrFieldBase::CloseGap(int start, int num) {
- if (rep_ == nullptr) return;
- // Close up a gap of "num" elements starting at offset "start".
- for (int i = start + num; i < rep_->allocated_size; ++i)
- rep_->elements[i - num] = rep_->elements[i];
+ if (using_sso()) {
+ if (start == 0 && num == 1) {
+ tagged_rep_or_elem_ = nullptr;
+ }
+ } else {
+ // Close up a gap of "num" elements starting at offset "start".
+ Rep* r = rep();
+ for (int i = start + num; i < r->allocated_size; ++i)
+ r->elements[i - num] = r->elements[i];
+ r->allocated_size -= num;
+ }
ExchangeCurrentSize(current_size_ - num);
- rep_->allocated_size -= num;
}
MessageLite* RepeatedPtrFieldBase::AddWeak(const MessageLite* prototype) {
- if (rep_ != nullptr && current_size_ < rep_->allocated_size) {
+ if (current_size_ < allocated_size()) {
return reinterpret_cast<MessageLite*>(
- rep_->elements[ExchangeCurrentSize(current_size_ + 1)]);
+ element_at(ExchangeCurrentSize(current_size_ + 1)));
}
- if (!rep_ || rep_->allocated_size == total_size_) {
+ if (allocated_size() == total_size_) {
Reserve(total_size_ + 1);
}
- ++rep_->allocated_size;
MessageLite* result = prototype
? prototype->New(arena_)
: Arena::CreateMessage<ImplicitWeakMessage>(arena_);
- rep_->elements[ExchangeCurrentSize(current_size_ + 1)] = result;
+ if (using_sso()) {
+ ExchangeCurrentSize(current_size_ + 1);
+ tagged_rep_or_elem_ = result;
+ } else {
+ Rep* r = rep();
+ ++r->allocated_size;
+ r->elements[ExchangeCurrentSize(current_size_ + 1)] = result;
+ }
return result;
}

@@ -46,9 +46,11 @@
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <limits>
#include <string>
#include <tuple>
#include <type_traits>
#include <utility>
@@ -168,11 +170,19 @@ struct IsMovable
// static int SpaceUsedLong(const Type&);
// };
class PROTOBUF_EXPORT RepeatedPtrFieldBase {
+ static constexpr int kSSOCapacity = 1;
protected:
constexpr RepeatedPtrFieldBase()
- : arena_(nullptr), current_size_(0), total_size_(0), rep_(nullptr) {}
+ : arena_(nullptr),
+ current_size_(0),
+ total_size_(kSSOCapacity),
+ tagged_rep_or_elem_(nullptr) {}
explicit RepeatedPtrFieldBase(Arena* arena)
- : arena_(arena), current_size_(0), total_size_(0), rep_(nullptr) {}
+ : arena_(arena),
+ current_size_(0),
+ total_size_(kSSOCapacity),
+ tagged_rep_or_elem_(nullptr) {}
RepeatedPtrFieldBase(const RepeatedPtrFieldBase&) = delete;
RepeatedPtrFieldBase& operator=(const RepeatedPtrFieldBase&) = delete;
@@ -193,29 +203,29 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
const typename TypeHandler::Type& at(int index) const {
ABSL_CHECK_GE(index, 0);
ABSL_CHECK_LT(index, current_size_);
- return *cast<TypeHandler>(rep_->elements[index]);
+ return *cast<TypeHandler>(element_at(index));
}
template <typename TypeHandler>
typename TypeHandler::Type& at(int index) {
ABSL_CHECK_GE(index, 0);
ABSL_CHECK_LT(index, current_size_);
- return *cast<TypeHandler>(rep_->elements[index]);
+ return *cast<TypeHandler>(element_at(index));
}
template <typename TypeHandler>
typename TypeHandler::Type* Mutable(int index) {
ABSL_DCHECK_GE(index, 0);
ABSL_DCHECK_LT(index, current_size_);
- return cast<TypeHandler>(rep_->elements[index]);
+ return cast<TypeHandler>(element_at(index));
}
template <typename TypeHandler>
typename TypeHandler::Type* Add(
const typename TypeHandler::Type* prototype = nullptr) {
- if (rep_ != nullptr && current_size_ < rep_->allocated_size) {
+ if (current_size_ < allocated_size()) {
return cast<TypeHandler>(
- rep_->elements[ExchangeCurrentSize(current_size_ + 1)]);
+ element_at(ExchangeCurrentSize(current_size_ + 1)));
}
typename TypeHandler::Type* result =
TypeHandler::NewFromPrototype(prototype, arena_);
@@ -227,44 +237,50 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
typename TypeHandler,
typename std::enable_if<TypeHandler::Movable::value>::type* = nullptr>
inline void Add(typename TypeHandler::Type&& value) {
- if (rep_ != nullptr && current_size_ < rep_->allocated_size) {
- *cast<TypeHandler>(
- rep_->elements[ExchangeCurrentSize(current_size_ + 1)]) =
+ if (current_size_ < allocated_size()) {
+ *cast<TypeHandler>(element_at(ExchangeCurrentSize(current_size_ + 1))) =
std::move(value);
return;
}
- if (!rep_ || rep_->allocated_size == total_size_) {
+ if (allocated_size() == total_size_) {
Reserve(total_size_ + 1);
}
- ++rep_->allocated_size;
+ if (!using_sso()) ++rep()->allocated_size;
typename TypeHandler::Type* result =
TypeHandler::New(arena_, std::move(value));
- rep_->elements[ExchangeCurrentSize(current_size_ + 1)] = result;
+ element_at(ExchangeCurrentSize(current_size_ + 1)) = result;
}
template <typename TypeHandler>
void Delete(int index) {
ABSL_DCHECK_GE(index, 0);
ABSL_DCHECK_LT(index, current_size_);
- TypeHandler::Delete(cast<TypeHandler>(rep_->elements[index]), arena_);
+ TypeHandler::Delete(cast<TypeHandler>(element_at(index)), arena_);
}
// Must be called from destructor.
template <typename TypeHandler>
void Destroy() {
- if (rep_ != nullptr && arena_ == nullptr) {
- int n = rep_->allocated_size;
- void* const* elements = rep_->elements;
- for (int i = 0; i < n; i++) {
- TypeHandler::Delete(cast<TypeHandler>(elements[i]), nullptr);
- }
- const size_t size = total_size_ * sizeof(elements[0]) + kRepHeaderSize;
- internal::SizedDelete(rep_, size);
+ if (arena_ != nullptr) return;
+ if (using_sso()) {
+ if (tagged_rep_or_elem_ == nullptr) return;
+ TypeHandler::Delete(cast<TypeHandler>(tagged_rep_or_elem_), nullptr);
+ return;
+ }
- rep_ = nullptr;
+ Rep* r = rep();
+ int n = r->allocated_size;
+ void* const* elems = r->elements;
+ for (int i = 0; i < n; i++) {
+ TypeHandler::Delete(cast<TypeHandler>(elems[i]), nullptr);
+ }
+ internal::SizedDelete(r, total_size_ * sizeof(elems[0]) + kRepHeaderSize);
}
- bool NeedsDestroy() const { return rep_ != nullptr && arena_ == nullptr; }
+ bool NeedsDestroy() const {
+ return tagged_rep_or_elem_ != nullptr && arena_ == nullptr;
+ }
void DestroyProtos(); // implemented in the cc file
public:
@@ -276,7 +292,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
const typename TypeHandler::Type& Get(int index) const {
ABSL_DCHECK_GE(index, 0);
ABSL_DCHECK_LT(index, current_size_);
- return *cast<TypeHandler>(rep_->elements[index]);
+ return *cast<TypeHandler>(element_at(index));
}
// Creates and adds an element using the given prototype, without introducing
@@ -317,31 +333,23 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
}
// Prepares the container for adding elements via `AddAllocatedForParse`.
- // It ensures some invariants to avoid checking then in the Add loop:
- // - rep_ is not null.
- // - there are no preallocated elements.
+ // It ensures we have no preallocated elements in the array.
// Returns true if the invariants hold and `AddAllocatedForParse` can be
// used.
- bool PrepareForParse() {
- if (current_size_ == total_size_) {
- InternalExtend(1);
- }
- return rep_->allocated_size == current_size_;
- }
+ bool PrepareForParse() { return allocated_size() == current_size_; }
// Similar to `AddAllocated` but faster.
// Can only be invoked after a call to `PrepareForParse` that returned `true`,
// or other calls to `AddAllocatedForParse`.
template <typename TypeHandler>
void AddAllocatedForParse(typename TypeHandler::Type* value) {
- PROTOBUF_ASSUME(rep_ != nullptr);
- PROTOBUF_ASSUME(current_size_ == rep_->allocated_size);
+ ABSL_DCHECK_EQ(current_size_, allocated_size());
if (current_size_ == total_size_) {
// The array is completely full with no cleared objects, so grow it.
InternalExtend(1);
}
- rep_->elements[current_size_++] = value;
- ++rep_->allocated_size;
+ element_at(current_size_++) = value;
+ if (!using_sso()) ++rep()->allocated_size;
}
protected:
@@ -349,7 +357,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
void RemoveLast() {
ABSL_DCHECK_GT(current_size_, 0);
ExchangeCurrentSize(current_size_ - 1);
- TypeHandler::Clear(cast<TypeHandler>(rep_->elements[current_size_]));
+ TypeHandler::Clear(cast<TypeHandler>(element_at(current_size_)));
}
template <typename TypeHandler>
@@ -372,10 +380,8 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
}
// Used for constructing iterators.
- void* const* raw_data() const { return rep_ ? rep_->elements : nullptr; }
- void** raw_mutable_data() const {
- return rep_ ? const_cast<void**>(rep_->elements) : nullptr;
- }
+ void* const* raw_data() const { return elements(); }
+ void** raw_mutable_data() { return elements(); }
template <typename TypeHandler>
typename TypeHandler::Type** mutable_data() {
@@ -409,18 +415,20 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
void SwapElements(int index1, int index2) {
using std::swap; // enable ADL with fallback
- swap(rep_->elements[index1], rep_->elements[index2]);
+ swap(element_at(index1), element_at(index2));
}
template <typename TypeHandler>
size_t SpaceUsedExcludingSelfLong() const {
- size_t allocated_bytes = static_cast<size_t>(total_size_) * sizeof(void*);
- if (rep_ != nullptr) {
- for (int i = 0; i < rep_->allocated_size; ++i) {
- allocated_bytes +=
- TypeHandler::SpaceUsedLong(*cast<TypeHandler>(rep_->elements[i]));
- }
- allocated_bytes += kRepHeaderSize;
+ size_t allocated_bytes =
+ using_sso()
+ ? 0
+ : static_cast<size_t>(total_size_) * sizeof(void*) + kRepHeaderSize;
+ const int n = allocated_size();
+ void* const* elems = elements();
+ for (int i = 0; i < n; ++i) {
+ allocated_bytes +=
+ TypeHandler::SpaceUsedLong(*cast<TypeHandler>(elems[i]));
+ }
return allocated_bytes;
}
@@ -430,9 +438,9 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
// Like Add(), but if there are no cleared objects to use, returns nullptr.
template <typename TypeHandler>
typename TypeHandler::Type* AddFromCleared() {
- if (rep_ != nullptr && current_size_ < rep_->allocated_size) {
+ if (current_size_ < allocated_size()) {
return cast<TypeHandler>(
- rep_->elements[ExchangeCurrentSize(current_size_ + 1)]);
+ element_at(ExchangeCurrentSize(current_size_ + 1)));
} else {
return nullptr;
}
@@ -447,28 +455,27 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
template <typename TypeHandler>
void UnsafeArenaAddAllocated(typename TypeHandler::Type* value) {
// Make room for the new pointer.
- if (!rep_ || current_size_ == total_size_) {
+ if (current_size_ == total_size_) {
// The array is completely full with no cleared objects, so grow it.
Reserve(total_size_ + 1);
- ++rep_->allocated_size;
- } else if (rep_->allocated_size == total_size_) {
+ ++rep()->allocated_size;
+ } else if (allocated_size() == total_size_) {
// There is no more space in the pointer array because it contains some
// cleared objects awaiting reuse. We don't want to grow the array in
// this case because otherwise a loop calling AddAllocated() followed by
// Clear() would leak memory.
- TypeHandler::Delete(cast<TypeHandler>(rep_->elements[current_size_]),
- arena_);
- } else if (current_size_ < rep_->allocated_size) {
+ TypeHandler::Delete(cast<TypeHandler>(element_at(current_size_)), arena_);
+ } else if (current_size_ < allocated_size()) {
// We have some cleared objects. We don't care about their order, so we
// can just move the first one to the end to make space.
- rep_->elements[rep_->allocated_size] = rep_->elements[current_size_];
- ++rep_->allocated_size;
+ element_at(allocated_size()) = element_at(current_size_);
+ ++rep()->allocated_size;
} else {
// There are no cleared objects.
- ++rep_->allocated_size;
+ if (!using_sso()) ++rep()->allocated_size;
}
- rep_->elements[ExchangeCurrentSize(current_size_ + 1)] = value;
+ element_at(ExchangeCurrentSize(current_size_ + 1)) = value;
}
template <typename TypeHandler>
@@ -485,19 +492,21 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
ABSL_DCHECK_GT(current_size_, 0);
ExchangeCurrentSize(current_size_ - 1);
typename TypeHandler::Type* result =
- cast<TypeHandler>(rep_->elements[current_size_]);
- --rep_->allocated_size;
- if (current_size_ < rep_->allocated_size) {
- // There are cleared elements on the end; replace the removed element
- // with the last allocated element.
- rep_->elements[current_size_] = rep_->elements[rep_->allocated_size];
+ cast<TypeHandler>(element_at(current_size_));
+ if (using_sso()) {
+ tagged_rep_or_elem_ = nullptr;
+ } else {
+ --rep()->allocated_size;
+ if (current_size_ < allocated_size()) {
+ // There are cleared elements on the end; replace the removed element
+ // with the last allocated element.
+ element_at(current_size_) = element_at(allocated_size());
+ }
+ }
return result;
}
- int ClearedCount() const {
- return rep_ ? (rep_->allocated_size - current_size_) : 0;
- }
+ int ClearedCount() const { return allocated_size() - current_size_; }
template <typename TypeHandler>
void AddCleared(typename TypeHandler::Type* value) {
@@ -506,10 +515,14 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
"RepeatedPtrField not on an arena.";
ABSL_DCHECK(TypeHandler::GetOwningArena(value) == nullptr)
<< "AddCleared() can only accept values not on an arena.";
- if (!rep_ || rep_->allocated_size == total_size_) {
+ if (allocated_size() == total_size_) {
Reserve(total_size_ + 1);
}
- rep_->elements[rep_->allocated_size++] = value;
+ if (using_sso()) {
+ tagged_rep_or_elem_ = value;
+ } else {
+ element_at(rep()->allocated_size++) = value;
+ }
}
template <typename TypeHandler>
@@ -517,9 +530,16 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
ABSL_DCHECK(GetOwningArena() == nullptr)
<< "ReleaseCleared() can only be used on a RepeatedPtrField not on "
<< "an arena.";
- ABSL_DCHECK(rep_ != nullptr);
- ABSL_DCHECK_GT(rep_->allocated_size, current_size_);
- return cast<TypeHandler>(rep_->elements[--rep_->allocated_size]);
+ ABSL_DCHECK(tagged_rep_or_elem_ != nullptr);
+ ABSL_DCHECK_GT(allocated_size(), current_size_);
+ if (using_sso()) {
+ auto* result =
+ reinterpret_cast<typename TypeHandler::Type*>(tagged_rep_or_elem_);
+ tagged_rep_or_elem_ = nullptr;
+ return result;
+ } else {
+ return cast<TypeHandler>(element_at(--rep()->allocated_size));
+ }
}
template <typename TypeHandler>
@@ -528,18 +548,18 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
Arena* element_arena =
reinterpret_cast<Arena*>(TypeHandler::GetOwningArena(value));
Arena* arena = GetOwningArena();
- if (arena == element_arena && rep_ && rep_->allocated_size < total_size_) {
+ if (arena == element_arena && allocated_size() < total_size_) {
// Fast path: underlying arena representation (tagged pointer) is equal to
// our arena pointer, and we can add to array without resizing it (at
// least one slot that is not allocated).
- void** elems = rep_->elements;
- if (current_size_ < rep_->allocated_size) {
+ void** elems = elements();
+ if (current_size_ < allocated_size()) {
// Make space at [current] by moving first allocated element to end of
// allocated list.
- elems[rep_->allocated_size] = elems[current_size_];
+ elems[allocated_size()] = elems[current_size_];
}
elems[ExchangeCurrentSize(current_size_ + 1)] = value;
- rep_->allocated_size = rep_->allocated_size + 1;
+ if (!using_sso()) ++rep()->allocated_size;
} else {
AddAllocatedSlowWithCopy<TypeHandler>(value, element_arena, arena);
}
@ -550,18 +570,18 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
// AddAllocated version that does not implement arena-safe copying
// behavior.
typename TypeHandler::Type* value, std::false_type) {
- if (rep_ && rep_->allocated_size < total_size_) {
+ if (allocated_size() < total_size_) {
// Fast path: underlying arena representation (tagged pointer) is equal to
// our arena pointer, and we can add to array without resizing it (at
// least one slot that is not allocated).
- void** elems = rep_->elements;
- if (current_size_ < rep_->allocated_size) {
+ void** elems = elements();
+ if (current_size_ < allocated_size()) {
// Make space at [current] by moving first allocated element to end of
// allocated list.
- elems[rep_->allocated_size] = elems[current_size_];
+ elems[allocated_size()] = elems[current_size_];
}
elems[ExchangeCurrentSize(current_size_ + 1)] = value;
- ++rep_->allocated_size;
+ if (!using_sso()) ++rep()->allocated_size;
} else {
UnsafeArenaAddAllocated<TypeHandler>(value);
}
@@ -685,7 +705,41 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
sizeof(void*)];
};
static constexpr size_t kRepHeaderSize = offsetof(Rep, elements);
- Rep* rep_;
+ void* const* elements() const {
+ return using_sso() ? &tagged_rep_or_elem_ : +rep()->elements;
+ }
+ void** elements() {
+ return using_sso() ? &tagged_rep_or_elem_ : +rep()->elements;
+ }
+ void*& element_at(int index) {
+ if (using_sso()) {
+ ABSL_DCHECK_EQ(index, 0);
+ return tagged_rep_or_elem_;
+ }
+ return rep()->elements[index];
+ }
+ const void* element_at(int index) const {
+ return const_cast<RepeatedPtrFieldBase*>(this)->element_at(index);
+ }
+ int allocated_size() const {
+ return using_sso() ? (tagged_rep_or_elem_ != nullptr ? 1 : 0)
+ : rep()->allocated_size;
+ }
+ Rep* rep() {
+ ABSL_DCHECK(!using_sso());
+ return reinterpret_cast<Rep*>(
+ reinterpret_cast<uintptr_t>(tagged_rep_or_elem_) - 1);
+ }
+ const Rep* rep() const {
+ return const_cast<RepeatedPtrFieldBase*>(this)->rep();
+ }
+ bool using_sso() const {
+ return (reinterpret_cast<uintptr_t>(tagged_rep_or_elem_) & 1) == 0;
+ }
+ void* tagged_rep_or_elem_;
template <typename TypeHandler>
static inline typename TypeHandler::Type* cast(void* element) {
@@ -701,13 +755,13 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
template <typename TypeHandler>
PROTOBUF_NOINLINE void ClearNonEmpty() {
const int n = current_size_;
- void* const* elements = rep_->elements;
+ void* const* elems = elements();
int i = 0;
ABSL_DCHECK_GT(
n,
0); // do/while loop to avoid initial test because we know n > 0
do {
- TypeHandler::Clear(cast<TypeHandler>(elements[i++]));
+ TypeHandler::Clear(cast<TypeHandler>(elems[i++]));
} while (i < n);
ExchangeCurrentSize(0);
}
@@ -716,28 +770,29 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
// pointer to the type-specific (templated) inner allocate/merge loop.
PROTOBUF_NOINLINE void MergeFromInternal(
const RepeatedPtrFieldBase& other,
- void (RepeatedPtrFieldBase::*inner_loop)(void**, void**, int, int)) {
+ void (RepeatedPtrFieldBase::*inner_loop)(void**, void* const*, int,
+ int)) {
// Note: wrapper has already guaranteed that other.rep_ != nullptr here.
int other_size = other.current_size_;
- void** other_elements = other.rep_->elements;
+ void* const* other_elements = other.elements();
void** new_elements = InternalExtend(other_size);
- int allocated_elems = rep_->allocated_size - current_size_;
+ int allocated_elems = allocated_size() - current_size_;
(this->*inner_loop)(new_elements, other_elements, other_size,
allocated_elems);
ExchangeCurrentSize(current_size_ + other_size);
- if (rep_->allocated_size < current_size_) {
- rep_->allocated_size = current_size_;
+ if (allocated_size() < current_size_) {
+ rep()->allocated_size = current_size_;
}
}
// Merges other_elems to our_elems.
template <typename TypeHandler>
PROTOBUF_NOINLINE void MergeFromInnerLoop(void** our_elems,
- void** other_elems, int length,
- int already_allocated) {
+ void* const* other_elems,
+ int length, int already_allocated) {
if (already_allocated < length) {
Arena* arena = GetOwningArena();
- typename TypeHandler::Type* elem_prototype =
+ auto* elem_prototype =
reinterpret_cast<typename TypeHandler::Type*>(other_elems[0]);
for (int i = already_allocated; i < length; i++) {
// Allocate a new empty element that we'll merge into below
