From b8829239b8112237d0064a90b841a7ddc97b73eb Mon Sep 17 00:00:00 2001
From: Craig Tiller
Date: Wed, 2 Aug 2023 07:20:30 -0700
Subject: [PATCH] [arena] Use malloc for pooled allocations (#33927)

There's *very* little difference in cost between a pooled arena allocation
and a tcmalloc allocation - but the pooled allocation strands memory for the
lifetime of the call, whereas the tcmalloc allocation allows that memory to
be shared between calls - leading to a much lower overall cost.

---------

Co-authored-by: ctiller
---
 src/core/BUILD                                    |  4 ----
 src/core/lib/resource_quota/arena.cc              |  2 ++
 src/core/lib/resource_quota/arena.h               | 14 ++++++++------
 .../lib/security/transport/client_auth_filter.cc  | 11 +++++++----
 test/core/resource_quota/arena_test.cc            |  6 ++++++
 5 files changed, 23 insertions(+), 14 deletions(-)

diff --git a/src/core/BUILD b/src/core/BUILD
index 68efe9d4737..6b90243d253 100644
--- a/src/core/BUILD
+++ b/src/core/BUILD
@@ -1124,10 +1124,6 @@ grpc_cc_library(
     hdrs = [
         "lib/resource_quota/arena.h",
     ],
-    external_deps = [
-        "absl/meta:type_traits",
-        "absl/utility",
-    ],
     visibility = [
         "@grpc:alt_grpc_base_legacy",
     ],
diff --git a/src/core/lib/resource_quota/arena.cc b/src/core/lib/resource_quota/arena.cc
index d37ef426490..332bdc4c85c 100644
--- a/src/core/lib/resource_quota/arena.cc
+++ b/src/core/lib/resource_quota/arena.cc
@@ -121,6 +121,7 @@ void Arena::ManagedNewObject::Link(std::atomic<ManagedNewObject*>* head) {
   }
 }
 
+#ifndef GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
 void* Arena::AllocPooled(size_t obj_size, size_t alloc_size,
                          std::atomic<FreePoolNode*>* head) {
   // ABA mitigation:
@@ -177,5 +178,6 @@ void Arena::FreePooled(void* p, std::atomic<FreePoolNode*>* head) {
       node->next, node, std::memory_order_acq_rel,
       std::memory_order_relaxed)) {
   }
 }
+#endif
 }  // namespace grpc_core
diff --git a/src/core/lib/resource_quota/arena.h b/src/core/lib/resource_quota/arena.h
index 1dcb530243e..9a072458ea0 100644
--- a/src/core/lib/resource_quota/arena.h
+++ b/src/core/lib/resource_quota/arena.h
@@ -30,14 +30,10 @@
 #include
 #include
 
-#include
+#include
 #include
-#include
 #include
 
-#include "absl/meta/type_traits.h"
-#include "absl/utility/utility.h"
-
 #include
 
 #include "src/core/lib/gpr/alloc.h"
@@ -45,13 +41,14 @@
 #include "src/core/lib/promise/context.h"
 #include "src/core/lib/resource_quota/memory_quota.h"
 
-// #define GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
+#define GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
 // #define GRPC_ARENA_TRACE_POOLED_ALLOCATIONS
 
 namespace grpc_core {
 
 namespace arena_detail {
 
+#ifndef GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
 struct PoolAndSize {
   size_t alloc_size;
   size_t pool_index;
 };
@@ -113,16 +110,19 @@ PoolAndSize ChoosePoolForAllocationSize(
     size_t n, absl::integer_sequence<size_t, kBucketSizes...>) {
   return ChoosePoolForAllocationSizeImpl<0, kBucketSizes...>::Fn(n);
 }
+#endif
 
 }  // namespace arena_detail
 
 class Arena {
+#ifndef GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
   // Selected pool sizes.
   // How to tune: see tools/codegen/core/optimize_arena_pool_sizes.py
   using PoolSizes = absl::integer_sequence;
   struct FreePoolNode {
     FreePoolNode* next;
   };
+#endif
 
  public:
   // Create an arena, with \a initial_size bytes in the first allocated buffer.
@@ -372,9 +372,11 @@ class Arena {
 
   void* AllocZone(size_t size);
 
+#ifndef GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
   void* AllocPooled(size_t obj_size, size_t alloc_size,
                     std::atomic<FreePoolNode*>* head);
   static void FreePooled(void* p, std::atomic<FreePoolNode*>* head);
+#endif
 
   void TracePoolAlloc(size_t size, void* ptr) {
     (void)size;
diff --git a/src/core/lib/security/transport/client_auth_filter.cc b/src/core/lib/security/transport/client_auth_filter.cc
index 8094ff429a5..57d10f9a0b7 100644
--- a/src/core/lib/security/transport/client_auth_filter.cc
+++ b/src/core/lib/security/transport/client_auth_filter.cc
@@ -196,10 +196,13 @@ ArenaPromise ClientAuthFilter::MakeCallPromise(
   if (host == nullptr) {
     return next_promise_factory(std::move(call_args));
   }
-  return TrySeq(args_.security_connector->CheckCallHost(
-                    host->as_string_view(), args_.auth_context.get()),
-                GetCallCredsMetadata(std::move(call_args)),
-                next_promise_factory);
+  return TrySeq(
+      args_.security_connector->CheckCallHost(host->as_string_view(),
+                                              args_.auth_context.get()),
+      [this, call_args = std::move(call_args)]() mutable {
+        return GetCallCredsMetadata(std::move(call_args));
+      },
+      next_promise_factory);
 }
 
 absl::StatusOr ClientAuthFilter::Create(
diff --git a/test/core/resource_quota/arena_test.cc b/test/core/resource_quota/arena_test.cc
index 665364305f3..1e35a105f6b 100644
--- a/test/core/resource_quota/arena_test.cc
+++ b/test/core/resource_quota/arena_test.cc
@@ -191,6 +191,7 @@ bool IsScribbled(Int* ints, int n, int offset) {
   return true;
 }
 
+#ifndef GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
 TEST_F(ArenaTest, PooledObjectsArePooled) {
   struct TestObj {
     char a[100];
@@ -208,6 +209,7 @@ TEST_F(ArenaTest, PooledObjectsArePooled) {
   Scribble(obj->a, 100, 2);
   EXPECT_TRUE(IsScribbled(obj->a, 100, 2));
 }
+#endif
 
 TEST_F(ArenaTest, CreateManyObjects) {
   struct TestObj {
@@ -238,7 +240,11 @@ TEST_F(ArenaTest, CreateManyObjectsWithDestructors) {
 TEST_F(ArenaTest, CreatePoolArray) {
   auto arena = MakeScopedArena(1024, &memory_allocator_);
   auto p = arena->MakePooledArray(1024);
+#ifndef GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
   EXPECT_FALSE(p.get_deleter().has_freelist());
+#else
+  EXPECT_TRUE(p.get_deleter().has_freelist());
+#endif
   p = arena->MakePooledArray(5);
   EXPECT_TRUE(p.get_deleter().has_freelist());
   Scribble(p.get(), 5, 1);
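The trade-off the commit message describes - a per-arena free list strands freed memory for the lifetime of the call that owns the arena, while plain malloc/free hands it straight back to a shared allocator such as tcmalloc - can be illustrated with a small standalone sketch. This is not the gRPC implementation; ToyArena and its members are hypothetical names used only for this example.

// Standalone illustration of the trade-off, not gRPC code.
#include <cstddef>
#include <cstdlib>
#include <vector>

// A per-arena free list: freed blocks are parked for reuse by *this* arena
// and only go back to the system when the arena is destroyed, i.e. at the
// end of the call. (A real pool keys its free lists by size bucket; one list
// is enough for this sketch.)
class ToyArena {
 public:
  void* AllocPooled(std::size_t size) {
    if (!free_list_.empty()) {
      void* p = free_list_.back();  // reuse a block freed earlier in the call
      free_list_.pop_back();
      return p;
    }
    return ::malloc(size);
  }
  // "Freeing" only parks the block on the arena's free list.
  void FreePooled(void* p) { free_list_.push_back(p); }
  ~ToyArena() {
    for (void* p : free_list_) ::free(p);  // stranded memory released here
  }

 private:
  std::vector<void*> free_list_;
};

// The malloc-backed alternative: every free returns the block to the
// process-wide allocator immediately, so other calls can reuse it.
void* AllocPooledMalloc(std::size_t size) { return ::malloc(size); }
void FreePooledMalloc(void* p) { ::free(p); }

int main() {
  ToyArena arena;
  void* a = arena.AllocPooled(128);
  arena.FreePooled(a);  // held by this arena until it is destroyed

  void* b = AllocPooledMalloc(128);
  FreePooledMalloc(b);  // returned to the shared allocator right away
  return 0;
}

With GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC defined, the free-list code guarded by the #ifndef blocks in the patch is compiled out, so pooled allocations take the second route and freed memory becomes shareable between calls.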