[arena+context] Begin merging the concepts of context and arenas for calls (#36773)

Add a dynamic registration mechanism for a new kind of context: one that lives in an arena.

An upcoming set of changes will move all of the legacy context types into this mechanism.
It's likely we'll also move other promise-based context types into this mechanism, until the only promise-based context type left is `Arena`.
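For a flavor of the API this introduces, here is a minimal usage sketch. It mirrors the `Foo` test added in arena_test.cc below; the `Use` function is purely illustrative:

  // A type becomes an arena context by specializing ArenaContextType<T>
  // with a Destroy function; instantiating the traits is what registers
  // a context slot id at static initialization time.
  struct Foo {
    explicit Foo(int x) : p(std::make_unique<int>(x)) {}
    std::unique_ptr<int> p;
  };

  template <>
  struct grpc_core::ArenaContextType<Foo> {
    // Arena memory is freed in bulk, so Destroy runs the destructor only.
    static void Destroy(Foo* foo) { foo->~Foo(); }
  };

  // Each arena then carries one pointer slot per registered context type.
  void Use(grpc_core::Arena* arena) {
    arena->SetContext(arena->New<Foo>(42));
    Foo* foo = arena->GetContext<Foo>();  // reads the slot; may be nullptr
  }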

Closes #36773

COPYBARA_INTEGRATE_REVIEW=https://github.com/grpc/grpc/pull/36773 from ctiller:transport-refs-7.8 e6892031ca
PiperOrigin-RevId: 638810722
Branch: pull/36777/head
Author: Craig Tiller (committed by Copybara-Service, 10 months ago)
Parent: 766813f164
Commit: 3562094389
Files changed:
  src/core/lib/resource_quota/arena.cc         (38 changes)
  src/core/lib/resource_quota/arena.h          (84 changes)
  src/core/lib/resource_quota/memory_quota.cc   (6 changes)
  test/core/resource_quota/arena_test.cc       (18 changes)

src/core/lib/resource_quota/arena.cc
@@ -21,18 +21,23 @@
 #include <atomic>
 #include <new>

+#include "absl/log/log.h"

 #include <grpc/support/alloc.h>
 #include <grpc/support/port_platform.h>

 #include "src/core/lib/resource_quota/resource_quota.h"
 #include "src/core/util/alloc.h"

+namespace grpc_core {
 namespace {

-void* ArenaStorage(size_t initial_size) {
+void* ArenaStorage(size_t& initial_size) {
   static constexpr size_t base_size =
-      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_core::Arena));
+      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Arena));
+  initial_size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(initial_size);
+  initial_size = std::max(initial_size,
+                          arena_detail::BaseArenaContextTraits::ContextSize());
   size_t alloc_size = base_size + initial_size;
   static constexpr size_t alignment =
       (GPR_CACHELINE_SIZE > GPR_MAX_ALIGNMENT &&
@@ -44,9 +49,11 @@ void* ArenaStorage(size_t initial_size) {
 }  // namespace

-namespace grpc_core {

 Arena::~Arena() {
+  for (size_t i = 0; i < arena_detail::BaseArenaContextTraits::NumContexts();
+       ++i) {
+    arena_detail::BaseArenaContextTraits::Destroy(i, contexts()[i]);
+  }
   DestroyManagedNewObjects();
   arena_factory_->FinalizeArena(this);
   arena_factory_->allocator().Release(
@@ -58,24 +65,25 @@ Arena::~Arena() {
     gpr_free_aligned(z);
     z = prev_z;
   }
-#ifdef GRPC_ARENA_TRACE_POOLED_ALLOCATIONS
-  gpr_log(GPR_ERROR, "DESTRUCT_ARENA %p", this);
-#endif
 }

 RefCountedPtr<Arena> Arena::Create(size_t initial_size,
                                    RefCountedPtr<ArenaFactory> arena_factory) {
-  return RefCountedPtr<Arena>(new (ArenaStorage(initial_size)) Arena(
-      initial_size, 0, std::move(arena_factory)));
+  void* p = ArenaStorage(initial_size);
+  return RefCountedPtr<Arena>(
+      new (p) Arena(initial_size, std::move(arena_factory)));
 }

-Arena::Arena(size_t initial_size, size_t initial_alloc,
-             RefCountedPtr<ArenaFactory> arena_factory)
-    : total_used_(GPR_ROUND_UP_TO_ALIGNMENT_SIZE(initial_alloc)),
-      initial_zone_size_(initial_size),
+Arena::Arena(size_t initial_size, RefCountedPtr<ArenaFactory> arena_factory)
+    : initial_zone_size_(initial_size),
+      total_used_(arena_detail::BaseArenaContextTraits::ContextSize()),
       arena_factory_(std::move(arena_factory)) {
-  arena_factory_->allocator().Reserve(
-      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(initial_alloc));
+  for (size_t i = 0; i < arena_detail::BaseArenaContextTraits::NumContexts();
+       ++i) {
+    contexts()[i] = nullptr;
+  }
+  CHECK_GE(initial_size, arena_detail::BaseArenaContextTraits::ContextSize());
+  arena_factory_->allocator().Reserve(initial_size);
 }

 void Arena::DestroyManagedNewObjects() {
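Taken together, the arena.cc changes above give each arena this layout (a sketch inferred from ArenaStorage() and contexts(); the context slots occupy the front of the initial zone, which is why total_used_ now starts at ContextSize() and why initial_size is clamped up to at least that value):

  [ Arena object | context slot 0 ... slot N-1 | remaining initial zone bytes ]
                  ^-- contexts() == reinterpret_cast<void**>(this + 1)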

src/core/lib/resource_quota/arena.h
@@ -44,8 +44,59 @@ namespace grpc_core {
 class Arena;
 template <typename T>
 struct ArenaContextType;

 namespace arena_detail {

+// Tracks all registered arena context types (these should only be registered
+// via ArenaContextTraits at static initialization time).
+class BaseArenaContextTraits {
+ public:
+  // Count of number of contexts that have been allocated.
+  static uint16_t NumContexts() {
+    return static_cast<uint16_t>(RegisteredTraits().size());
+  }
+
+  // Number of bytes required to store the context pointers on an arena.
+  static size_t ContextSize() { return NumContexts() * sizeof(void*); }
+
+  // Call the registered destruction function for a context.
+  static void Destroy(uint16_t id, void* ptr) {
+    if (ptr == nullptr) return;
+    RegisteredTraits()[id](ptr);
+  }
+
+ protected:
+  // Allocate a new context id and register the destruction function.
+  static uint16_t MakeId(void (*destroy)(void* ptr)) {
+    auto& traits = RegisteredTraits();
+    const uint16_t id = static_cast<uint16_t>(traits.size());
+    traits.push_back(destroy);
+    return id;
+  }
+
+ private:
+  static std::vector<void (*)(void*)>& RegisteredTraits() {
+    static NoDestruct<std::vector<void (*)(void*)>> registered_traits;
+    return *registered_traits;
+  }
+};
+
+// Traits for a specific context type.
+template <typename T>
+class ArenaContextTraits : public BaseArenaContextTraits {
+ public:
+  static uint16_t id() { return id_; }
+
+ private:
+  static const uint16_t id_;
+};
+
+template <typename T>
+const uint16_t ArenaContextTraits<T>::id_ = BaseArenaContextTraits::MakeId(
+    [](void* ptr) { ArenaContextType<T>::Destroy(static_cast<T*>(ptr)); });
+
 template <typename T, typename A, typename B>
 struct IfArray {
   using Result = A;
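The registration idiom above deserves a note: every ArenaContextTraits<T>::id_ is initialized before main() runs, so by the time the first arena is constructed, NumContexts() and ContextSize() have reached their final values (the CHECK_GE in the constructor leans on this). A minimal standalone sketch of the same idiom, simplified and not the gRPC code (`delete` stands in for the real Destroy hook):

  #include <cstdint>
  #include <vector>

  // A function-local static avoids static-initialization-order problems
  // across translation units (the real code uses NoDestruct for this).
  static std::vector<void (*)(void*)>& Registry() {
    static auto* r = new std::vector<void (*)(void*)>();  // intentionally leaked
    return *r;
  }

  template <typename T>
  struct Traits {
    static const uint16_t id;
  };

  // This definition runs before main() for each T that is ever used:
  // it appends a destroy thunk and records its index as the type's id.
  template <typename T>
  const uint16_t Traits<T>::id = [] {
    Registry().push_back([](void* p) { delete static_cast<T*>(p); });
    return static_cast<uint16_t>(Registry().size() - 1);
  }();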
@@ -215,6 +266,21 @@ class Arena final : public RefCounted<Arena, NonPolymorphicRefCount,
     delete p;
   }

+  template <typename T>
+  T* GetContext() {
+    return static_cast<T*>(
+        contexts()[arena_detail::ArenaContextTraits<T>::id()]);
+  }
+
+  template <typename T>
+  void SetContext(T* context) {
+    void*& slot = contexts()[arena_detail::ArenaContextTraits<T>::id()];
+    if (slot != nullptr) {
+      ArenaContextType<T>::Destroy(static_cast<T*>(slot));
+    }
+    slot = context;
+  }
+
  private:
  friend struct arena_detail::UnrefDestroy;
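One subtlety in SetContext above: installing a context over an existing one destroys the old value first. Illustratively (not from this diff, using the `Foo` type from the test below):

  arena->SetContext(arena->New<Foo>(1));
  arena->SetContext(arena->New<Foo>(2));  // Foo(1) destroyed here via ArenaContextType<Foo>::Destroy

Since both values came from arena->New<>, only the destructors run; the underlying bytes stay in the arena until the arena itself is destroyed.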
@@ -247,19 +313,20 @@ class Arena final : public RefCounted<Arena, NonPolymorphicRefCount,
   // quick optimization (avoiding an atomic fetch-add) for the common case
   // where we wish to create an arena and then perform an immediate
   // allocation.
-  explicit Arena(size_t initial_size, size_t initial_alloc,
+  explicit Arena(size_t initial_size,
                  RefCountedPtr<ArenaFactory> arena_factory);
   ~Arena();

   void* AllocZone(size_t size);
   void Destroy() const;

+  void** contexts() { return reinterpret_cast<void**>(this + 1); }
+
   // Keep track of the total used size. We use this in our call sizing
   // hysteresis.
-  std::atomic<size_t> total_used_{0};
-  std::atomic<size_t> total_allocated_{0};
   const size_t initial_zone_size_;
+  std::atomic<size_t> total_used_;
+  std::atomic<size_t> total_allocated_{initial_zone_size_};

   // If the initial arena allocation wasn't enough, we allocate additional zones
   // in a reverse linked list. Each additional zone consists of (1) a pointer to
   // the zone added before this zone (null if this is the first additional zone)
@@ -280,6 +347,17 @@ inline void UnrefDestroy::operator()(const Arena* arena) const {
 }

 }  // namespace arena_detail

+namespace promise_detail {
+
+template <typename T>
+class Context<T, absl::void_t<decltype(ArenaContextType<T>::Destroy)>> {
+ public:
+  static T* get() { return GetContext<Arena>()->GetContext<T>(); }
+  static void set(T* value) { GetContext<Arena>()->SetContext(value); }
+};
+
+}  // namespace promise_detail

 }  // namespace grpc_core

 #endif  // GRPC_SRC_CORE_LIB_RESOURCE_QUOTA_ARENA_H
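The upshot of that Context specialization: inside promise code that has an Arena installed as its context, a plain GetContext<T>() now resolves through the arena's slots rather than through a dedicated thread-local. A sketch of assumed usage (`Foo` registered as above; promise_detail::Context<Arena> is assumed here to be the existing RAII context installer from src/core/lib/promise/context.h):

  promise_detail::Context<Arena> installed(arena.get());
  Foo* foo = GetContext<Foo>();  // == GetContext<Arena>()->GetContext<Foo>()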

src/core/lib/resource_quota/memory_quota.cc
@@ -253,9 +253,9 @@ GrpcMemoryAllocatorImpl::GrpcMemoryAllocatorImpl(
 }

 GrpcMemoryAllocatorImpl::~GrpcMemoryAllocatorImpl() {
-  CHECK(free_bytes_.load(std::memory_order_acquire) +
-            sizeof(GrpcMemoryAllocatorImpl) ==
-        taken_bytes_.load(std::memory_order_relaxed));
+  CHECK_EQ(free_bytes_.load(std::memory_order_acquire) +
+               sizeof(GrpcMemoryAllocatorImpl),
+           taken_bytes_.load(std::memory_order_relaxed));
   memory_quota_->Return(taken_bytes_.load(std::memory_order_relaxed));
 }

test/core/resource_quota/arena_test.cc
@@ -274,6 +274,24 @@ TEST(ArenaTest, ConcurrentMakePooled) {
   }
 }

+struct Foo {
+  explicit Foo(int x) : p(std::make_unique<int>(x)) {}
+  std::unique_ptr<int> p;
+};
+
+template <>
+struct ArenaContextType<Foo> {
+  static void Destroy(Foo* p) { p->~Foo(); }
+};
+
+TEST(ArenaTest, FooContext) {
+  auto arena = SimpleArenaAllocator()->MakeArena();
+  EXPECT_EQ(arena->GetContext<Foo>(), nullptr);
+  arena->SetContext(arena->New<Foo>(42));
+  ASSERT_NE(arena->GetContext<Foo>(), nullptr);
+  EXPECT_EQ(*arena->GetContext<Foo>()->p, 42);
+}
+
 class MockArenaFactory : public ArenaFactory {
  public:
   MockArenaFactory()
