diff --git a/src/google/protobuf/arena.cc b/src/google/protobuf/arena.cc
index 023e9c261d..5447b18846 100644
--- a/src/google/protobuf/arena.cc
+++ b/src/google/protobuf/arena.cc
@@ -20,6 +20,7 @@
 #include "absl/container/internal/layout.h"
 #include "absl/synchronization/mutex.h"
 #include "google/protobuf/arena_allocation_policy.h"
+#include "google/protobuf/arena_cleanup.h"
 #include "google/protobuf/arenaz_sampler.h"
 #include "google/protobuf/port.h"
 #include "google/protobuf/serial_arena.h"
@@ -136,9 +137,9 @@ std::vector<void*> SerialArena::PeekCleanupListForTesting() {
   ArenaBlock* b = head();
   if (b->IsSentry()) return res;
 
-  const auto peek_list = [&](const char* pos, const char* end) {
-    while (pos != end) {
-      pos += cleanup::PeekNode(pos, res);
+  const auto peek_list = [&](char* pos, char* end) {
+    for (; pos != end; pos += cleanup::Size()) {
+      cleanup::PeekNode(pos, res);
     }
   };
 
@@ -222,15 +223,14 @@ void* SerialArena::AllocateFromStringBlockFallback() {
 
 PROTOBUF_NOINLINE
 void* SerialArena::AllocateAlignedWithCleanupFallback(
     size_t n, size_t align, void (*destructor)(void*)) {
-  size_t required = AlignUpTo(n, align) + cleanup::Size(destructor);
+  size_t required = AlignUpTo(n, align) + cleanup::Size();
   AllocateNewBlock(required);
   return AllocateAlignedWithCleanup(n, align, destructor);
 }
 
 PROTOBUF_NOINLINE
 void SerialArena::AddCleanupFallback(void* elem, void (*destructor)(void*)) {
-  size_t required = cleanup::Size(destructor);
-  AllocateNewBlock(required);
+  AllocateNewBlock(cleanup::Size());
   AddCleanupFromExisting(elem, destructor);
 }
 
@@ -324,8 +324,8 @@ void SerialArena::CleanupList() {
     char* limit = b->Limit();
     char* it = reinterpret_cast<char*>(b->cleanup_nodes);
     ABSL_DCHECK(!b->IsSentry() || it == limit);
-    while (it < limit) {
-      it += cleanup::DestroyNode(it);
+    for (; it < limit; it += cleanup::Size()) {
+      cleanup::DestroyNode(it);
     }
     b = b->next;
   } while (b);
diff --git a/src/google/protobuf/arena_cleanup.h b/src/google/protobuf/arena_cleanup.h
index 0cd47d0b8c..5b718a1c85 100644
--- a/src/google/protobuf/arena_cleanup.h
+++ b/src/google/protobuf/arena_cleanup.h
@@ -33,169 +33,43 @@ void arena_destruct_object(void* object) {
   reinterpret_cast<T*>(object)->~T();
 }
 
-// Tag defines the type of cleanup / cleanup object. This tag is stored in the
-// lowest 2 bits of the `elem` value identifying the type of node. All node
-// types must start with a `uintptr_t` that stores `Tag` in its low two bits.
-enum class Tag : uintptr_t {
-  kDynamic = 0,  // DynamicNode
-  kString = 1,   // TaggedNode (std::string)
-  kCord = 2,     // TaggedNode (absl::Cord)
-};
-
-// DynamicNode contains the object (`elem`) that needs to be
+// CleanupNode contains the object (`elem`) that needs to be
 // destroyed, and the function to destroy it (`destructor`)
 // elem must be aligned at minimum on a 4 byte boundary.
-struct DynamicNode {
-  uintptr_t elem;
+struct CleanupNode {
+  void* elem;
   void (*destructor)(void*);
 };
 
-// TaggedNode contains a `std::string` or `absl::Cord` object (`elem`) that
-// needs to be destroyed. The lowest 2 bits of `elem` contain the non-zero
-// `kString` or `kCord` tag.
-struct TaggedNode {
-  uintptr_t elem;
-};
-
-// EnableSpecializedTags() return true if the alignment of tagged objects
-// such as std::string allow us to poke tags in the 2 LSB bits.
-inline constexpr bool EnableSpecializedTags() {
-  // For now we require 2 bits
-  return alignof(std::string) >= 8 && alignof(absl::Cord) >= 8;
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE CleanupNode* ToCleanup(void* pos) {
+  return reinterpret_cast<CleanupNode*>(pos);
 }
 
-// Adds a cleanup entry identified by `tag` at memory location `pos`.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CreateNode(Tag tag, void* pos,
-                                                    const void* elem_raw,
+// Adds a cleanup entry at memory location `pos`.
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CreateNode(void* pos, void* elem,
                                                     void (*destructor)(void*)) {
-  auto elem = reinterpret_cast<uintptr_t>(elem_raw);
-  if (EnableSpecializedTags()) {
-    ABSL_DCHECK_EQ(elem & 3, 0ULL);  // Must be aligned
-    switch (tag) {
-      case Tag::kString: {
-        TaggedNode n = {elem | static_cast<uintptr_t>(Tag::kString)};
-        memcpy(pos, &n, sizeof(n));
-        return;
-      }
-      case Tag::kCord: {
-        TaggedNode n = {elem | static_cast<uintptr_t>(Tag::kCord)};
-        memcpy(pos, &n, sizeof(n));
-        return;
-      }
-
-      case Tag::kDynamic:
-      default:
-        break;
-    }
-  }
-  DynamicNode n = {elem, destructor};
+  CleanupNode n = {elem, destructor};
   memcpy(pos, &n, sizeof(n));
 }
 
-// Optimization: performs a prefetch on `elem_address`.
-// Returns the size of the cleanup (meta) data at this address, allowing the
-// caller to advance cleanup iterators without needing to examine or know
-// anything about the underlying cleanup node or cleanup meta data / tags.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t
-PrefetchNode(const void* elem_address) {
-  if (EnableSpecializedTags()) {
-    uintptr_t elem;
-    memcpy(&elem, elem_address, sizeof(elem));
-    if (static_cast<Tag>(elem & 3) != Tag::kDynamic) {
-      return sizeof(TaggedNode);
-    }
-  }
-  return sizeof(DynamicNode);
+// Optimization: performs a prefetch on the elem for the cleanup node at `pos`.
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void PrefetchNode(void* pos) {
 }
 
-// Destroys the object referenced by the cleanup node at memory location `pos`.
-// Returns the size of the cleanup (meta) data at this address, allowing the
-// caller to advance cleanup iterators without needing to examine or know
-// anything about the underlying cleanup node or cleanup meta data / tags.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t DestroyNode(const void* pos) {
-  uintptr_t elem;
-  memcpy(&elem, pos, sizeof(elem));
-  if (EnableSpecializedTags()) {
-    switch (static_cast<Tag>(elem & 3)) {
-      case Tag::kString: {
-        // Some compilers don't like fully qualified explicit dtor calls,
-        // so use an alias to avoid having to type `::~basic_string()`.
-        using T = std::string;
-        reinterpret_cast<T*>(elem - static_cast<uintptr_t>(Tag::kString))->~T();
-        return sizeof(TaggedNode);
-      }
-      case Tag::kCord: {
-        using T = absl::Cord;
-        reinterpret_cast<T*>(elem - static_cast<uintptr_t>(Tag::kCord))->~T();
-        return sizeof(TaggedNode);
-      }
-
-      case Tag::kDynamic:
-
-      default:
-        break;
-    }
-  }
-  static_cast<const DynamicNode*>(pos)->destructor(
-      reinterpret_cast<void*>(elem - static_cast<uintptr_t>(Tag::kDynamic)));
-  return sizeof(DynamicNode);
+// Destroys the object referenced by the cleanup node.
+inline ABSL_ATTRIBUTE_ALWAYS_INLINE void DestroyNode(void* pos) {
+  CleanupNode* cleanup = ToCleanup(pos);
+  cleanup->destructor(cleanup->elem);
 }
 
 // Append in `out` the pointer to the to-be-cleaned object in `pos`.
-// Return the length of the cleanup node to allow the caller to advance the
-// position, like `DestroyNode` does.
-inline size_t PeekNode(const void* pos, std::vector<void*>& out) {
-  uintptr_t elem;
-  memcpy(&elem, pos, sizeof(elem));
-  out.push_back(reinterpret_cast<void*>(elem & ~3));
-  if (EnableSpecializedTags()) {
-    switch (static_cast<Tag>(elem & 3)) {
-      case Tag::kString:
-      case Tag::kCord:
-        return sizeof(TaggedNode);
-
-      case Tag::kDynamic:
-      default:
-        break;
-    }
-  }
-  return sizeof(DynamicNode);
-}
-
-// Returns the `tag` identifying the type of object for `destructor` or
-// kDynamic if `destructor` does not identify a well know object type.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE Tag Type(void (*destructor)(void*)) {
-  if (EnableSpecializedTags()) {
-    if (destructor == &arena_destruct_object<std::string>) {
-      return Tag::kString;
-    }
-    if (destructor == &arena_destruct_object<absl::Cord>) {
-      return Tag::kCord;
-    }
-  }
-  return Tag::kDynamic;
-}
-
-// Returns the required size in bytes off the node type identified by `tag`.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t Size(Tag tag) {
-  if (!EnableSpecializedTags()) return sizeof(DynamicNode);
-
-  switch (tag) {
-    case Tag::kDynamic:
-      return sizeof(DynamicNode);
-    case Tag::kString:
-      return sizeof(TaggedNode);
-    case Tag::kCord:
-      return sizeof(TaggedNode);
-    default:
-      ABSL_DCHECK(false) << "Corrupted cleanup tag: " << static_cast<uintptr_t>(tag);
-      return sizeof(DynamicNode);
-  }
+inline void PeekNode(void* pos, std::vector<void*>& out) {
+  out.push_back(ToCleanup(pos)->elem);
 }
 
-// Returns the required size in bytes off the node type for `destructor`.
-inline ABSL_ATTRIBUTE_ALWAYS_INLINE size_t Size(void (*destructor)(void*)) {
-  return destructor == nullptr ? 0 : Size(Type(destructor));
+// Returns the required size for a cleanup node.
+constexpr ABSL_ATTRIBUTE_ALWAYS_INLINE size_t Size() {
+  return sizeof(CleanupNode);
 }
 
 }  // namespace cleanup
diff --git a/src/google/protobuf/serial_arena.h b/src/google/protobuf/serial_arena.h
index c94ea9bb7d..0f5a9e87bc 100644
--- a/src/google/protobuf/serial_arena.h
+++ b/src/google/protobuf/serial_arena.h
@@ -250,7 +250,7 @@ class PROTOBUF_EXPORT SerialArena {
     char* ret = ArenaAlignAs(align).CeilDefaultAligned(ptr());
     // See the comment in MaybeAllocateAligned re uintptr_t.
     if (PROTOBUF_PREDICT_FALSE(reinterpret_cast<uintptr_t>(ret) + n +
-                                   cleanup::Size(destructor) >
+                                   cleanup::Size() >
                               reinterpret_cast<uintptr_t>(limit_))) {
       return AllocateAlignedWithCleanupFallback(n, align, destructor);
    }
@@ -265,9 +265,8 @@ class PROTOBUF_EXPORT SerialArena {
 
   PROTOBUF_ALWAYS_INLINE
   void AddCleanup(void* elem, void (*destructor)(void*)) {
-    size_t required = cleanup::Size(destructor);
     size_t has = static_cast<size_t>(limit_ - ptr());
-    if (PROTOBUF_PREDICT_FALSE(required > has)) {
+    if (PROTOBUF_PREDICT_FALSE(cleanup::Size() > has)) {
       return AddCleanupFallback(elem, destructor);
     }
     AddCleanupFromExisting(elem, destructor);
@@ -301,14 +300,13 @@ class PROTOBUF_EXPORT SerialArena {
   PROTOBUF_ALWAYS_INLINE
   void AddCleanupFromExisting(void* elem, void (*destructor)(void*)) {
-    cleanup::Tag tag = cleanup::Type(destructor);
-    size_t n = cleanup::Size(tag);
+    const size_t cleanup_size = cleanup::Size();
 
-    PROTOBUF_UNPOISON_MEMORY_REGION(limit_ - n, n);
-    limit_ -= n;
+    PROTOBUF_UNPOISON_MEMORY_REGION(limit_ - cleanup_size, cleanup_size);
+    limit_ -= cleanup_size;
     MaybePrefetchBackwards(limit_);
     ABSL_DCHECK_GE(limit_, ptr());
-    cleanup::CreateNode(tag, limit_, elem, destructor);
+    cleanup::CreateNode(limit_, elem, destructor);
   }
 
   // Prefetch the next kPrefetchForwardsDegree bytes after `prefetch_ptr_` and
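
Note: the shape of the change is easier to see outside the diff. Previously the cleanup list mixed two node layouts (an 8-byte TaggedNode for std::string and absl::Cord, a 16-byte DynamicNode for everything else), and every walk over the list had to decode the low tag bits of each entry just to learn that node's size. After the patch there is a single fixed-size {elem, destructor} node, so every loop advances by the constant cleanup::Size(). Below is a minimal, self-contained sketch of the new scheme; it is illustrative only, and the stack buffers and main() scaffolding are invented for the example rather than taken from protobuf's SerialArena:

// Sketch of the simplified cleanup list (names mirror the patch; the
// surrounding scaffolding is assumed for illustration).
#include <cstddef>
#include <cstring>
#include <new>
#include <string>

namespace cleanup {

// Generic destructor thunk, as in arena_cleanup.h.
template <typename T>
void arena_destruct_object(void* object) {
  reinterpret_cast<T*>(object)->~T();
}

// The single node type: a {pointer, destructor} pair.
struct CleanupNode {
  void* elem;
  void (*destructor)(void*);
};

// Every node has the same size, so iteration needs no per-node decoding.
constexpr size_t Size() { return sizeof(CleanupNode); }

// Writes a node at `pos`, as CreateNode does in the patch.
inline void CreateNode(void* pos, void* elem, void (*destructor)(void*)) {
  CleanupNode n = {elem, destructor};
  memcpy(pos, &n, sizeof(n));
}

// Runs the stored destructor on the stored object.
inline void DestroyNode(void* pos) {
  CleanupNode* node = reinterpret_cast<CleanupNode*>(pos);
  node->destructor(node->elem);
}

}  // namespace cleanup

int main() {
  // An arena would carve the object and its cleanup node out of its own
  // blocks; plain stack buffers stand in for that here.
  alignas(std::string) unsigned char object_mem[sizeof(std::string)];
  std::string* s = new (object_mem) std::string("arena-owned");

  alignas(cleanup::CleanupNode) char list[2 * sizeof(cleanup::CleanupNode)];
  char* const end = list + sizeof(list);

  // SerialArena grows the cleanup list downwards from `limit_`.
  char* limit = end - cleanup::Size();
  cleanup::CreateNode(limit, s, &cleanup::arena_destruct_object<std::string>);

  // The CleanupList() loop after this patch: constant stride, no tag
  // dispatch, replacing the old `it += cleanup::DestroyNode(it)` pattern.
  for (char* it = limit; it != end; it += cleanup::Size()) {
    cleanup::DestroyNode(it);
  }
  return 0;
}

The trade-off visible in the diff: string and cord cleanups grow from one word back to two per node, in exchange for a branch-free destroy loop and no tag decode on every entry.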