Remove tags for arena cleanup nodes now that we have dedicated string cleanup blocks.

PiperOrigin-RevId: 603491012
pull/15683/head
Authored by Protobuf Team Bot 10 months ago; committed by Copybara-Service
parent 0a8409b5f4
commit 202b106392
  1. src/google/protobuf/arena.cc (16)
  2. src/google/protobuf/arena_cleanup.h (164)
  3. src/google/protobuf/serial_arena.h (14)
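Before this change, a cleanup entry was either a two-word DynamicNode (object pointer plus destructor) or a one-word TaggedNode whose low two bits said "this is a std::string" or "this is an absl::Cord". With string fields now destroyed via dedicated string cleanup blocks, the tagged form no longer pays for itself, so every entry becomes the same fixed-size record. A rough sketch of the before/after layouts, paraphrasing the diff below (Tag, TaggedNode and CleanupNode are the types shown there):

    // Old encoding (sketch): the low 2 bits of `elem` carried a Tag, letting a
    // std::string / absl::Cord cleanup omit the destructor pointer entirely.
    //   uintptr_t elem = reinterpret_cast<uintptr_t>(obj) |
    //                    static_cast<uintptr_t>(Tag::kString);
    //
    // New encoding: one two-pointer record for every cleanup.
    struct CleanupNode {
      void* elem;                 // object to destroy
      void (*destructor)(void*);  // how to destroy it
    };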

src/google/protobuf/arena.cc

@@ -20,6 +20,7 @@
 #include "absl/container/internal/layout.h"
 #include "absl/synchronization/mutex.h"
 #include "google/protobuf/arena_allocation_policy.h"
+#include "google/protobuf/arena_cleanup.h"
 #include "google/protobuf/arenaz_sampler.h"
 #include "google/protobuf/port.h"
 #include "google/protobuf/serial_arena.h"

@@ -136,9 +137,9 @@ std::vector<void*> SerialArena::PeekCleanupListForTesting() {
   ArenaBlock* b = head();
   if (b->IsSentry()) return res;
-  const auto peek_list = [&](const char* pos, const char* end) {
-    while (pos != end) {
-      pos += cleanup::PeekNode(pos, res);
+  const auto peek_list = [&](char* pos, char* end) {
+    for (; pos != end; pos += cleanup::Size()) {
+      cleanup::PeekNode(pos, res);
     }
   };
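Because every node is now the same size, walking a cleanup region no longer asks each node how long it is; the iterator just advances by the constant cleanup::Size(). A minimal sketch of the new loop shape (region_begin/region_end are hypothetical bounds standing in for the pos/end arguments of the lambda above):

    // Old: pos += cleanup::PeekNode(pos, res);  // node reported its own size
    // New: fixed stride; PeekNode only records the element pointer into `res`.
    for (char* pos = region_begin; pos != region_end; pos += cleanup::Size()) {
      cleanup::PeekNode(pos, res);
    }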

@@ -222,15 +223,14 @@ void* SerialArena::AllocateFromStringBlockFallback() {
 PROTOBUF_NOINLINE
 void* SerialArena::AllocateAlignedWithCleanupFallback(
     size_t n, size_t align, void (*destructor)(void*)) {
-  size_t required = AlignUpTo(n, align) + cleanup::Size(destructor);
+  size_t required = AlignUpTo(n, align) + cleanup::Size();
   AllocateNewBlock(required);
   return AllocateAlignedWithCleanup(n, align, destructor);
 }
 PROTOBUF_NOINLINE
 void SerialArena::AddCleanupFallback(void* elem, void (*destructor)(void*)) {
-  size_t required = cleanup::Size(destructor);
-  AllocateNewBlock(required);
+  AllocateNewBlock(cleanup::Size());
   AddCleanupFromExisting(elem, destructor);
 }
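The fallback paths size the fresh block the same way as before; the only difference is that the node footprint no longer depends on the destructor. Roughly, using the names from the hunk above:

    // AllocateAlignedWithCleanupFallback: room for the object plus one node.
    size_t required = AlignUpTo(n, align) + cleanup::Size();
    // AddCleanupFallback: room for just the node, i.e. sizeof(CleanupNode).
    AllocateNewBlock(cleanup::Size());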

@@ -324,8 +324,8 @@ void SerialArena::CleanupList() {
     char* limit = b->Limit();
     char* it = reinterpret_cast<char*>(b->cleanup_nodes);
     ABSL_DCHECK(!b->IsSentry() || it == limit);
-    while (it < limit) {
-      it += cleanup::DestroyNode(it);
+    for (; it < limit; it += cleanup::Size()) {
+      cleanup::DestroyNode(it);
     }
     b = b->next;
   } while (b);
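Destruction follows the same fixed-stride pattern: each block's nodes sit between its cleanup_nodes pointer and its Limit(), and DestroyNode now simply invokes the stored destructor instead of also reporting a length. A condensed sketch of the walk in SerialArena::CleanupList, using the names from the hunk above:

    do {
      char* limit = b->Limit();
      for (char* it = reinterpret_cast<char*>(b->cleanup_nodes); it < limit;
           it += cleanup::Size()) {
        cleanup::DestroyNode(it);  // runs node->destructor(node->elem)
      }
      b = b->next;
    } while (b);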

src/google/protobuf/arena_cleanup.h

@@ -33,169 +33,43 @@ void arena_destruct_object(void* object) {
   reinterpret_cast<T*>(object)->~T();
 }
-// Tag defines the type of cleanup / cleanup object. This tag is stored in the
-// lowest 2 bits of the `elem` value identifying the type of node. All node
-// types must start with a `uintptr_t` that stores `Tag` in its low two bits.
-enum class Tag : uintptr_t {
-  kDynamic = 0,  // DynamicNode
-  kString = 1,   // TaggedNode (std::string)
-  kCord = 2,     // TaggedNode (absl::Cord)
-};
-// DynamicNode contains the object (`elem`) that needs to be
+// CleanupNode contains the object (`elem`) that needs to be
 // destroyed, and the function to destroy it (`destructor`)
 // elem must be aligned at minimum on a 4 byte boundary.
-struct DynamicNode {
-  uintptr_t elem;
+struct CleanupNode {
+  void* elem;
   void (*destructor)(void*);
 };
-// TaggedNode contains a `std::string` or `absl::Cord` object (`elem`) that
-// needs to be destroyed. The lowest 2 bits of `elem` contain the non-zero
-// `kString` or `kCord` tag.
-struct TaggedNode {
-  uintptr_t elem;
-};
-// EnableSpecializedTags() return true if the alignment of tagged objects
-// such as std::string allow us to poke tags in the 2 LSB bits.
-inline constexpr bool EnableSpecializedTags() {
-  // For now we require 2 bits
-  return alignof(std::string) >= 8 && alignof(absl::Cord) >= 8;
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE CleanupNode* ToCleanup(void* pos) {
+  return reinterpret_cast<CleanupNode*>(pos);
 }
-// Adds a cleanup entry identified by `tag` at memory location `pos`.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CreateNode(Tag tag, void* pos,
-                                                    const void* elem_raw,
+// Adds a cleanup entry at memory location `pos`.
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CreateNode(void* pos, void* elem,
                                                     void (*destructor)(void*)) {
-  auto elem = reinterpret_cast<uintptr_t>(elem_raw);
-  if (EnableSpecializedTags()) {
-    ABSL_DCHECK_EQ(elem & 3, 0ULL);  // Must be aligned
-    switch (tag) {
-      case Tag::kString: {
-        TaggedNode n = {elem | static_cast<uintptr_t>(Tag::kString)};
+  CleanupNode n = {elem, destructor};
   memcpy(pos, &n, sizeof(n));
-        return;
 }
-      case Tag::kCord: {
-        TaggedNode n = {elem | static_cast<uintptr_t>(Tag::kCord)};
-        memcpy(pos, &n, sizeof(n));
-        return;
-      }
-      case Tag::kDynamic:
-      default:
-        break;
-    }
-  }
-  DynamicNode n = {elem, destructor};
-  memcpy(pos, &n, sizeof(n));
+// Optimization: performs a prefetch on the elem for the cleanup node at `pos`.
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void PrefetchNode(void* pos) {
 }
-// Optimization: performs a prefetch on `elem_address`.
-// Returns the size of the cleanup (meta) data at this address, allowing the
-// caller to advance cleanup iterators without needing to examine or know
-// anything about the underlying cleanup node or cleanup meta data / tags.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t
-PrefetchNode(const void* elem_address) {
-  if (EnableSpecializedTags()) {
-    uintptr_t elem;
-    memcpy(&elem, elem_address, sizeof(elem));
-    if (static_cast<Tag>(elem & 3) != Tag::kDynamic) {
-      return sizeof(TaggedNode);
-    }
-  }
-  return sizeof(DynamicNode);
-}
-// Destroys the object referenced by the cleanup node at memory location `pos`.
-// Returns the size of the cleanup (meta) data at this address, allowing the
-// caller to advance cleanup iterators without needing to examine or know
-// anything about the underlying cleanup node or cleanup meta data / tags.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t DestroyNode(const void* pos) {
-  uintptr_t elem;
-  memcpy(&elem, pos, sizeof(elem));
-  if (EnableSpecializedTags()) {
-    switch (static_cast<Tag>(elem & 3)) {
-      case Tag::kString: {
-        // Some compilers don't like fully qualified explicit dtor calls,
-        // so use an alias to avoid having to type `::`.
-        using T = std::string;
-        reinterpret_cast<T*>(elem - static_cast<uintptr_t>(Tag::kString))->~T();
-        return sizeof(TaggedNode);
-      }
-      case Tag::kCord: {
-        using T = absl::Cord;
-        reinterpret_cast<T*>(elem - static_cast<uintptr_t>(Tag::kCord))->~T();
-        return sizeof(TaggedNode);
-      }
-      case Tag::kDynamic:
-      default:
-        break;
-    }
-  }
-  static_cast<const DynamicNode*>(pos)->destructor(
-      reinterpret_cast<void*>(elem - static_cast<uintptr_t>(Tag::kDynamic)));
-  return sizeof(DynamicNode);
+// Destroys the object referenced by the cleanup node.
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void DestroyNode(void* pos) {
+  CleanupNode* cleanup = ToCleanup(pos);
+  cleanup->destructor(cleanup->elem);
 }
 // Append in `out` the pointer to the to-be-cleaned object in `pos`.
-// Return the length of the cleanup node to allow the caller to advance the
-// position, like `DestroyNode` does.
-inline size_t PeekNode(const void* pos, std::vector<void*>& out) {
-  uintptr_t elem;
-  memcpy(&elem, pos, sizeof(elem));
-  out.push_back(reinterpret_cast<void*>(elem & ~3));
-  if (EnableSpecializedTags()) {
-    switch (static_cast<Tag>(elem & 3)) {
-      case Tag::kString:
-      case Tag::kCord:
-        return sizeof(TaggedNode);
-      case Tag::kDynamic:
-      default:
-        break;
-    }
-  }
-  return sizeof(DynamicNode);
-}
-// Returns the `tag` identifying the type of object for `destructor` or
-// kDynamic if `destructor` does not identify a well know object type.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE Tag Type(void (*destructor)(void*)) {
-  if (EnableSpecializedTags()) {
-    if (destructor == &arena_destruct_object<std::string>) {
-      return Tag::kString;
-    }
-    if (destructor == &arena_destruct_object<absl::Cord>) {
-      return Tag::kCord;
-    }
-  }
-  return Tag::kDynamic;
-}
-// Returns the required size in bytes off the node type identified by `tag`.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t Size(Tag tag) {
-  if (!EnableSpecializedTags()) return sizeof(DynamicNode);
-  switch (tag) {
-    case Tag::kDynamic:
-      return sizeof(DynamicNode);
-    case Tag::kString:
-      return sizeof(TaggedNode);
-    case Tag::kCord:
-      return sizeof(TaggedNode);
-    default:
-      ABSL_DCHECK(false) << "Corrupted cleanup tag: " << static_cast<int>(tag);
-      return sizeof(DynamicNode);
-  }
+inline void PeekNode(void* pos, std::vector<void*>& out) {
+  out.push_back(ToCleanup(pos)->elem);
 }
-// Returns the required size in bytes off the node type for `destructor`.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t Size(void (*destructor)(void*)) {
-  return destructor == nullptr ? 0 : Size(Type(destructor));
+// Returns the required size for a cleanup node.
+constexpr ABSL_ATTRIBUTE_ALWAYS_INLINE size_t Size() {
+  return sizeof(CleanupNode);
 }
 }  // namespace cleanup
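Taken together, the header shrinks to a plain record plus four helpers: CreateNode writes a node, DestroyNode replays it, PeekNode exposes the element for tests, and Size() is the constant stride. A small usage sketch of the call pattern (hypothetical stack buffers, not arena code; assumes this internal header plus <string> and <new>):

    alignas(std::string) char obj[sizeof(std::string)];
    auto* s = new (obj) std::string("temporary");

    // The arena would carve these Size() bytes off the top of a block.
    alignas(void*) char node[cleanup::Size()];
    cleanup::CreateNode(node, s, cleanup::arena_destruct_object<std::string>);

    // ... later, during cleanup, the node is replayed ...
    cleanup::DestroyNode(node);  // invokes s->~basic_string()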

src/google/protobuf/serial_arena.h

@@ -250,7 +250,7 @@ class PROTOBUF_EXPORT SerialArena {
     char* ret = ArenaAlignAs(align).CeilDefaultAligned(ptr());
     // See the comment in MaybeAllocateAligned re uintptr_t.
     if (PROTOBUF_PREDICT_FALSE(reinterpret_cast<uintptr_t>(ret) + n +
-                                   cleanup::Size(destructor) >
+                                   cleanup::Size() >
                                reinterpret_cast<uintptr_t>(limit_))) {
       return AllocateAlignedWithCleanupFallback(n, align, destructor);
     }

@@ -265,9 +265,8 @@
   PROTOBUF_ALWAYS_INLINE
   void AddCleanup(void* elem, void (*destructor)(void*)) {
-    size_t required = cleanup::Size(destructor);
     size_t has = static_cast<size_t>(limit_ - ptr());
-    if (PROTOBUF_PREDICT_FALSE(required > has)) {
+    if (PROTOBUF_PREDICT_FALSE(cleanup::Size() > has)) {
       return AddCleanupFallback(elem, destructor);
     }
     AddCleanupFromExisting(elem, destructor);
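The AddCleanup fast path now only has to check that one fixed-size node still fits in the current block: objects are bump-allocated upward from ptr() while cleanup nodes are carved downward from limit_, so the free gap is limit_ - ptr(). A sketch of that invariant (same names as the hunk above):

    // Block layout (sketch):  | objects ... | free gap | cleanup nodes ... |
    //                                       ^ptr()     ^limit_
    //
    // A node fits in this block iff the gap is at least sizeof(CleanupNode).
    if (PROTOBUF_PREDICT_FALSE(cleanup::Size() >
                               static_cast<size_t>(limit_ - ptr()))) {
      return AddCleanupFallback(elem, destructor);  // grab a new block first
    }
    AddCleanupFromExisting(elem, destructor);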

@@ -301,14 +300,13 @@
   PROTOBUF_ALWAYS_INLINE
   void AddCleanupFromExisting(void* elem, void (*destructor)(void*)) {
-    cleanup::Tag tag = cleanup::Type(destructor);
-    size_t n = cleanup::Size(tag);
+    const size_t cleanup_size = cleanup::Size();
 
-    PROTOBUF_UNPOISON_MEMORY_REGION(limit_ - n, n);
-    limit_ -= n;
+    PROTOBUF_UNPOISON_MEMORY_REGION(limit_ - cleanup_size, cleanup_size);
+    limit_ -= cleanup_size;
     MaybePrefetchBackwards(limit_);
     ABSL_DCHECK_GE(limit_, ptr());
-    cleanup::CreateNode(tag, limit_, elem, destructor);
+    cleanup::CreateNode(limit_, elem, destructor);
   }
 
   // Prefetch the next kPrefetchForwardsDegree bytes after `prefetch_ptr_` and
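AddCleanupFromExisting is where that downward growth happens: limit_ moves back by exactly one node, the newly exposed bytes are unpoisoned for sanitizer builds, and CreateNode writes the {elem, destructor} pair at the new limit_. A compact sketch with the same names as the hunk above (MaybePrefetchBackwards is the existing prefetch helper; its exact policy lives in serial_arena.h):

    const size_t cleanup_size = cleanup::Size();
    PROTOBUF_UNPOISON_MEMORY_REGION(limit_ - cleanup_size, cleanup_size);
    limit_ -= cleanup_size;           // the node now lives at limit_
    MaybePrefetchBackwards(limit_);   // opportunistic prefetch (see serial_arena.h)
    cleanup::CreateNode(limit_, elem, destructor);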
