diff --git a/src/core/lib/gprpp/arena.cc b/src/core/lib/gprpp/arena.cc
index ab3a4865a12..5c344db4e35 100644
--- a/src/core/lib/gprpp/arena.cc
+++ b/src/core/lib/gprpp/arena.cc
@@ -31,11 +31,23 @@
 #include "src/core/lib/gpr/alloc.h"
 #include "src/core/lib/gprpp/memory.h"
 
-template <size_t alignment>
-static void* gpr_arena_malloc(size_t size) {
-  return gpr_malloc_aligned(size, alignment);
+namespace {
+
+void* ArenaStorage(size_t initial_size) {
+  static constexpr size_t base_size =
+      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_core::Arena));
+  initial_size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(initial_size);
+  size_t alloc_size = base_size + initial_size;
+  static constexpr size_t alignment =
+      (GPR_CACHELINE_SIZE > GPR_MAX_ALIGNMENT &&
+       GPR_CACHELINE_SIZE % GPR_MAX_ALIGNMENT == 0)
+          ? GPR_CACHELINE_SIZE
+          : GPR_MAX_ALIGNMENT;
+  return gpr_malloc_aligned(alloc_size, alignment);
 }
 
+}  // namespace
+
 namespace grpc_core {
 
 Arena::~Arena() {
@@ -49,16 +61,17 @@ Arena::~Arena() {
 }
 
 Arena* Arena::Create(size_t initial_size) {
+  return new (ArenaStorage(initial_size)) Arena(initial_size);
+}
+
+Pair<Arena*, void*> Arena::CreateWithAlloc(size_t initial_size,
+                                           size_t alloc_size) {
   static constexpr size_t base_size =
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Arena));
-  initial_size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(initial_size);
-  size_t alloc_size = base_size + initial_size;
-  static constexpr size_t alignment =
-      (GPR_CACHELINE_SIZE > GPR_MAX_ALIGNMENT &&
-       GPR_CACHELINE_SIZE % GPR_MAX_ALIGNMENT == 0)
-          ? GPR_CACHELINE_SIZE
-          : GPR_MAX_ALIGNMENT;
-  return new (gpr_arena_malloc<alignment>(alloc_size)) Arena(initial_size);
+  auto* new_arena =
+      new (ArenaStorage(initial_size)) Arena(initial_size, alloc_size);
+  void* first_alloc = reinterpret_cast<char*>(new_arena) + base_size;
+  return MakePair(new_arena, first_alloc);
 }
 
 size_t Arena::Destroy() {
@@ -77,7 +90,7 @@ void* Arena::AllocZone(size_t size) {
   static constexpr size_t zone_base_size =
       GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Zone));
   size_t alloc_size = zone_base_size + size;
-  Zone* z = new (gpr_arena_malloc<GPR_MAX_ALIGNMENT>(alloc_size)) Zone();
+  Zone* z = new (gpr_malloc_aligned(alloc_size, GPR_MAX_ALIGNMENT)) Zone();
   {
     gpr_spinlock_lock(&arena_growth_spinlock_);
     z->prev = last_zone_;
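The new ArenaStorage()/CreateWithAlloc() pair folds the arena header and the caller's first allocation into a single aligned block, so that first allocation never touches the arena's atomic counter. The sketch below is a toy illustration of that layout, not part of the patch: ToyArena, ToyCreateWithAlloc, and RoundUp are hypothetical stand-ins, and plain malloc plus max_align_t rounding stand in for gpr_malloc_aligned and GPR_ROUND_UP_TO_ALIGNMENT_SIZE.

```cpp
#include <cstddef>
#include <cstdlib>
#include <new>
#include <utility>

// Toy stand-in for grpc_core::Arena: just the header that sits at the front
// of the single allocation (hypothetical, for illustration only).
struct ToyArena {
  explicit ToyArena(std::size_t initial_size, std::size_t initial_alloc = 0)
      : total_used(initial_alloc), initial_zone_size(initial_size) {}
  std::size_t total_used;         // bytes already handed out of zone 0
  std::size_t initial_zone_size;  // size of zone 0 (the buffer after the header)
};

// Round n up to a power-of-two alignment boundary.
constexpr std::size_t RoundUp(std::size_t n, std::size_t align) {
  return (n + align - 1) & ~(align - 1);
}

// One malloc serves both the arena header and the first user allocation; the
// returned void* points just past the rounded-up header, at the start of zone 0.
std::pair<ToyArena*, void*> ToyCreateWithAlloc(std::size_t initial_size,
                                               std::size_t alloc_size) {
  const std::size_t align = alignof(std::max_align_t);
  const std::size_t base_size = RoundUp(sizeof(ToyArena), align);
  // Make sure zone 0 can hold the pre-reserved allocation (a simplification the
  // sketch adds; the patch sizes zone 0 from initial_size alone).
  std::size_t zone0_size = RoundUp(initial_size, align);
  if (zone0_size < RoundUp(alloc_size, align)) {
    zone0_size = RoundUp(alloc_size, align);
  }
  void* storage = std::malloc(base_size + zone0_size);
  auto* arena = new (storage) ToyArena(zone0_size, RoundUp(alloc_size, align));
  void* first_alloc = static_cast<char*>(storage) + base_size;
  return {arena, first_alloc};
}

int main() {
  // Reserve 64 bytes up front, as if Alloc(64) had already been called.
  auto arena_and_ptr = ToyCreateWithAlloc(/*initial_size=*/256, /*alloc_size=*/64);
  // ... arena_and_ptr.second is ordinary writable storage ...
  arena_and_ptr.first->~ToyArena();
  std::free(arena_and_ptr.first);
  return 0;
}
```

Because the first allocation's offset is fixed at construction time, the real Arena only needs to seed its usage counter with alloc_size (the constructor's initial_alloc), avoiding the atomic fetch-add that a separate Alloc() call would perform.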
diff --git a/src/core/lib/gprpp/arena.h b/src/core/lib/gprpp/arena.h
index 749f4e47ee8..b1b0c4a85cb 100644
--- a/src/core/lib/gprpp/arena.h
+++ b/src/core/lib/gprpp/arena.h
@@ -36,6 +36,7 @@
 #include "src/core/lib/gpr/alloc.h"
 #include "src/core/lib/gpr/spinlock.h"
 #include "src/core/lib/gprpp/atomic.h"
+#include "src/core/lib/gprpp/pair.h"
 
 #include <stddef.h>
 
@@ -45,6 +46,13 @@ class Arena {
  public:
   // Create an arena, with \a initial_size bytes in the first allocated buffer.
   static Arena* Create(size_t initial_size);
+
+  // Create an arena, with \a initial_size bytes in the first allocated buffer,
+  // and return both a pointer to the created arena and a void* pointing to the
+  // first \a alloc_size-byte allocation within that buffer.
+  static Pair<Arena*, void*> CreateWithAlloc(size_t initial_size,
+                                             size_t alloc_size);
+
   // Destroy an arena, returning the total number of bytes allocated.
   size_t Destroy();
   // Allocate \a size bytes from the arena.
@@ -76,7 +84,21 @@ class Arena {
     Zone* prev;
   };
 
-  explicit Arena(size_t initial_size) : initial_zone_size_(initial_size) {}
+  // Initialize an arena.
+  // Parameters:
+  //   initial_size: The initial size of the whole arena in bytes. These bytes
+  //   are contained within 'zone 0'. If the arena user ends up requiring more
+  //   memory than the arena contains in zone 0, subsequent zones are allocated
+  //   on demand and maintained in a tail-linked list.
+  //
+  //   initial_alloc: Optionally, construct the arena as though a call to
+  //   Alloc() had already been made for initial_alloc bytes. This provides a
+  //   quick optimization (avoiding an atomic fetch-add) for the common case
+  //   where we wish to create an arena and then perform an immediate
+  //   allocation.
+  explicit Arena(size_t initial_size, size_t initial_alloc = 0)
+      : total_used_(initial_alloc), initial_zone_size_(initial_size) {}
+
   ~Arena();
 
   void* AllocZone(size_t size);
diff --git a/src/core/lib/surface/call.cc b/src/core/lib/surface/call.cc
index 54fdacd847c..bd140021c96 100644
--- a/src/core/lib/surface/call.cc
+++ b/src/core/lib/surface/call.cc
@@ -321,16 +321,23 @@ grpc_error* grpc_call_create(const grpc_call_create_args* args,
 
   GRPC_CHANNEL_INTERNAL_REF(args->channel, "call");
 
+  grpc_core::Arena* arena;
+  grpc_call* call;
   grpc_error* error = GRPC_ERROR_NONE;
   grpc_channel_stack* channel_stack =
       grpc_channel_get_channel_stack(args->channel);
-  grpc_call* call;
   size_t initial_size = grpc_channel_get_call_size_estimate(args->channel);
   GRPC_STATS_INC_CALL_INITIAL_SIZE(initial_size);
-  grpc_core::Arena* arena = grpc_core::Arena::Create(initial_size);
-  call = new (arena->Alloc(GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call)) +
-                           channel_stack->call_stack_size))
-      grpc_call(arena, *args);
+  size_t call_and_stack_size =
+      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call)) +
+      channel_stack->call_stack_size;
+  size_t call_alloc_size =
+      call_and_stack_size + (args->parent ? sizeof(child_call) : 0);
+
+  std::pair<grpc_core::Arena*, void*> arena_with_call =
+      grpc_core::Arena::CreateWithAlloc(initial_size, call_alloc_size);
+  arena = arena_with_call.first;
+  call = new (arena_with_call.second) grpc_call(arena, *args);
   *out_call = call;
   grpc_slice path = grpc_empty_slice();
   if (call->is_client) {
@@ -362,7 +369,8 @@ grpc_error* grpc_call_create(const grpc_call_create_args* args,
   bool immediately_cancel = false;
 
   if (args->parent != nullptr) {
-    call->child = arena->New<child_call>(args->parent);
+    call->child = new (reinterpret_cast<char*>(arena_with_call.second) +
+                       call_and_stack_size) child_call(args->parent);
 
     GRPC_CALL_INTERNAL_REF(args->parent, "child");
     GPR_ASSERT(call->is_client);
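In call.cc the single pre-reserved allocation is sized to hold the grpc_call object, the channel's call stack, and (for child calls) the child_call object, which is then placement-new'd at a fixed offset past the call and stack. The standalone sketch below mirrors only that carving; FakeCall, FakeChildCall, stack_size, and has_parent are hypothetical placeholders, and plain malloc stands in for the arena's pre-reserved storage.

```cpp
#include <cstddef>
#include <cstdlib>
#include <new>

// Hypothetical stand-ins for grpc_call / child_call, used only to show the
// layout of the single pre-reserved allocation.
struct FakeCall { int id; };
struct FakeChildCall { FakeCall* parent; };

// Round n up to the platform's strictest (power-of-two) alignment.
constexpr std::size_t RoundUpToAlignment(std::size_t n) {
  return (n + alignof(std::max_align_t) - 1) & ~(alignof(std::max_align_t) - 1);
}

int main() {
  const std::size_t stack_size = 128;  // stand-in for call_stack_size
  const bool has_parent = true;        // stand-in for args->parent != nullptr

  // Size the block the way the patch sizes call_alloc_size: rounded-up call
  // object + call stack, plus an optional child_call tacked on the end.
  const std::size_t call_and_stack_size =
      RoundUpToAlignment(sizeof(FakeCall)) + stack_size;
  const std::size_t call_alloc_size =
      call_and_stack_size + (has_parent ? sizeof(FakeChildCall) : 0);

  void* block = std::malloc(call_alloc_size);  // the arena's first allocation
  auto* call = new (block) FakeCall{1};
  // The child object lives immediately after the call + stack region,
  // mirroring `new (reinterpret_cast<char*>(...) + call_and_stack_size)`.
  auto* child = has_parent
                    ? new (static_cast<char*>(block) + call_and_stack_size)
                          FakeChildCall{call}
                    : nullptr;

  if (child != nullptr) child->~FakeChildCall();
  call->~FakeCall();
  std::free(block);
  return 0;
}
```

The trade-off this layout implies is that the child_call's position is fixed when the block is sized, which is why call_alloc_size must account for args->parent before the arena is created.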