#include "src/core/lib/resource_quota/memory_quota.h"
#include "src/core/util/alloc.h"

#define GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
// #define GRPC_ARENA_TRACE_POOLED_ALLOCATIONS

namespace grpc_core {

class Arena;

namespace arena_detail {

#ifndef GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
struct PoolAndSize {
  size_t alloc_size;
  size_t pool_index;
};

template <typename Void, size_t kIndex, size_t kObjectSize,
          size_t... kBucketSize>
struct PoolIndexForSize;

template <size_t kObjectSize, size_t kIndex, size_t kSmallestRemainingBucket,
          size_t... kBucketSizes>
struct PoolIndexForSize<
    absl::enable_if_t<kObjectSize <= kSmallestRemainingBucket>, kIndex,
    kObjectSize, kSmallestRemainingBucket, kBucketSizes...> {
  static constexpr size_t kPool = kIndex;
  static constexpr size_t kSize = kSmallestRemainingBucket;
};

template <size_t kObjectSize, size_t kIndex, size_t kSmallestRemainingBucket,
          size_t... kBucketSizes>
struct PoolIndexForSize<
    absl::enable_if_t<(kObjectSize > kSmallestRemainingBucket)>, kIndex,
    kObjectSize, kSmallestRemainingBucket, kBucketSizes...>
    : public PoolIndexForSize<void, kIndex + 1, kObjectSize, kBucketSizes...> {
};

template <size_t kObjectSize, size_t... kBucketSizes>
constexpr size_t PoolFromObjectSize(
    absl::integer_sequence<size_t, kBucketSizes...>) {
  return PoolIndexForSize<void, 0, kObjectSize, kBucketSizes...>::kPool;
}

template <size_t kObjectSize, size_t... kBucketSizes>
constexpr size_t AllocationSizeFromObjectSize(
    absl::integer_sequence<size_t, kBucketSizes...>) {
  return PoolIndexForSize<void, 0, kObjectSize, kBucketSizes...>::kSize;
}
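
// Example (illustrative sketch only; `Buckets` is a stand-in for the
// Arena::PoolSizes list {80, 304, 528, 1024} declared below): an object of
// size 100 skips the 80-byte bucket and lands in the 304-byte one, so the
// compile-time selection yields pool index 1 and allocation size 304.
//   using Buckets = absl::integer_sequence<size_t, 80, 304, 528, 1024>;
//   static_assert(PoolFromObjectSize<100>(Buckets()) == 1, "");
//   static_assert(AllocationSizeFromObjectSize<100>(Buckets()) == 304, "");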

template <size_t kIndex, size_t... kBucketSizes>
struct ChoosePoolForAllocationSizeImpl;

template <size_t kIndex, size_t kBucketSize, size_t... kBucketSizes>
struct ChoosePoolForAllocationSizeImpl<kIndex, kBucketSize, kBucketSizes...> {
  static PoolAndSize Fn(size_t n) {
    if (n <= kBucketSize) return {kBucketSize, kIndex};
    return ChoosePoolForAllocationSizeImpl<kIndex + 1, kBucketSizes...>::Fn(n);
  }
};

template <size_t kIndex>
struct ChoosePoolForAllocationSizeImpl<kIndex> {
  static PoolAndSize Fn(size_t n) {
    return PoolAndSize{n, std::numeric_limits<size_t>::max()};
  }
};

template <size_t... kBucketSizes>
PoolAndSize ChoosePoolForAllocationSize(
    size_t n, absl::integer_sequence<size_t, kBucketSizes...>) {
  return ChoosePoolForAllocationSizeImpl<0, kBucketSizes...>::Fn(n);
}
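
// Example (illustrative, same buckets as above): at runtime a 600-byte
// request fits the 1024-byte bucket, so ChoosePoolForAllocationSize returns
// {1024, 3}; a 2000-byte request fits no bucket and returns
// {2000, std::numeric_limits<size_t>::max()}, telling the caller to fall
// back to an exact-sized arena allocation.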

#else

template <typename T, typename A, typename B>
struct IfArray {
  using Result = A;
};

template <typename T, typename A, typename B>
struct IfArray<T[], A, B> {
  using Result = B;
};

#endif

struct UnrefDestroy {
  void operator()(Arena* arena) const;
};

}  // namespace arena_detail

class ArenaFactory : public RefCounted<ArenaFactory> {
 public:
  virtual RefCountedPtr<Arena> MakeArena() = 0;
  virtual void FinalizeArena(Arena* arena) = 0;

  MemoryAllocator& allocator() { return allocator_; }

 protected:
  explicit ArenaFactory(MemoryAllocator allocator)
      : allocator_(std::move(allocator)) {}

 private:
  MemoryAllocator allocator_;
};
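
// A minimal factory sketch (hypothetical; SimpleArenaAllocator below is the
// stock implementation): MakeArena() hands out arenas that share this
// factory's MemoryAllocator, and FinalizeArena() is called back as each
// arena is torn down.
//
//   class MyArenaFactory final : public ArenaFactory {
//    public:
//     using ArenaFactory::ArenaFactory;
//     RefCountedPtr<Arena> MakeArena() override {
//       return Arena::Create(1024, Ref());
//     }
//     void FinalizeArena(Arena* /*arena*/) override {}  // e.g. record stats
//   };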

RefCountedPtr<ArenaFactory> SimpleArenaAllocator(size_t initial_size = 1024);

class Arena : public RefCounted<Arena, NonPolymorphicRefCount,
                                arena_detail::UnrefDestroy> {
#ifndef GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
  // Selected pool sizes.
  // How to tune: see tools/codegen/core/optimize_arena_pool_sizes.py
  using PoolSizes = absl::integer_sequence<size_t, 80, 304, 528, 1024>;
  struct FreePoolNode {
    FreePoolNode* next;
  };
#endif

 public:
  // Create an arena, with \a initial_size bytes in the first allocated buffer.
  static Arena* Create(size_t initial_size, MemoryAllocator* memory_allocator);

  // Create an arena, with \a initial_size bytes in the first allocated buffer,
  // and return both a pointer to the arena and a void* to the
  // first allocation.
  static std::pair<Arena*, void*> CreateWithAlloc(
      size_t initial_size, size_t alloc_size,
      MemoryAllocator* memory_allocator);

  // Create a refcounted arena whose lifetime is managed by \a arena_factory.
  static RefCountedPtr<Arena> Create(size_t initial_size,
                                     RefCountedPtr<ArenaFactory> arena_factory);
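
  // Example (sketch): the factory overload is the refcounted lifetime path;
  // dropping the last ref invokes arena_detail::UnrefDestroy, which destroys
  // the arena.
  //   RefCountedPtr<Arena> arena = Arena::Create(1024, SimpleArenaAllocator());
  //   arena.reset();  // memory is reclaimed here if this was the last ref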

  // Destroy all `ManagedNew` allocated objects.
  // Allows safe destruction of these objects even if they need context held by
  // the arena.
  // TODO(ctiller): eliminate ManagedNew.
  void DestroyManagedNewObjects();

  // Destroy an arena.
  void Destroy();

  // Return the total amount of memory allocated by this arena.
  size_t TotalUsedBytes() const {
    return total_used_.load(std::memory_order_relaxed);
  }

  // Allocate an object of type T on the arena and schedule its destructor to
  // run via DestroyManagedNewObjects(). (The New<> and ManagedNewImpl<>
  // helpers it uses are defined elsewhere in this header.)
  template <typename T, typename... Args>
  T* ManagedNew(Args&&... args) {
    auto* p = New<ManagedNewImpl<T>>(std::forward<Args>(args)...);
    p->Link(&managed_new_head_);
    return &p->t;
  }

#ifndef GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
  class PooledDeleter {
   public:
    explicit PooledDeleter(std::atomic<FreePoolNode*>* free_list)
        : free_list_(free_list) {}
    PooledDeleter() = default;
    template <typename T>
    void operator()(T* p) {
      // TODO(ctiller): promise based filter hijacks ownership of some pointers
      // to make them appear as PoolPtr without really transferring ownership,
      // by setting the arena to nullptr.
      // This is a transitional hack and should be removed once promise based
      // filter is removed.
      if (free_list_ != nullptr) {
        p->~T();
        FreePooled(p, free_list_);
      }
    }

    bool has_freelist() const { return free_list_ != nullptr; }

   private:
    std::atomic<FreePoolNode*>* free_list_;
  };

  template <typename T>
  using PoolPtr = std::unique_ptr<T, PooledDeleter>;

  // Make a unique_ptr to T that is allocated from the arena.
  // When the pointer is released, the memory may be reused for other
  // MakePooled(.*) calls.
  // CAUTION: The amount of memory allocated is rounded up to the nearest
  //          value in Arena::PoolSizes, and so this may pessimize total
  //          arena size.
  template <typename T, typename... Args>
  PoolPtr<T> MakePooled(Args&&... args) {
    auto* free_list =
        &pools_[arena_detail::PoolFromObjectSize<sizeof(T)>(PoolSizes())];
    return PoolPtr<T>(
        new (AllocPooled(
            sizeof(T),
            arena_detail::AllocationSizeFromObjectSize<sizeof(T)>(PoolSizes()),
            free_list)) T(std::forward<Args>(args)...),
        PooledDeleter(free_list));
  }
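
  // Example (sketch; `a` is an Arena* and Foo is a hypothetical type whose
  // size is <= 80 bytes, so it draws from the first pool bucket):
  //   Arena::PoolPtr<Foo> foo = a->MakePooled<Foo>();
  //   foo.reset();  // runs ~Foo and pushes the block onto the free list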

  // Make a unique_ptr to an array of T that is allocated from the arena.
  // When the pointer is released, the memory may be reused for other
  // MakePooled(.*) calls.
  // One can use MakePooledArray<char> to allocate a buffer of bytes.
  // CAUTION: The amount of memory allocated is rounded up to the nearest
  //          value in Arena::PoolSizes, and so this may pessimize total
  //          arena size.
  template <typename T>
  PoolPtr<T[]> MakePooledArray(size_t n) {
    auto where =
        arena_detail::ChoosePoolForAllocationSize(n * sizeof(T), PoolSizes());
    if (where.pool_index == std::numeric_limits<size_t>::max()) {
      return PoolPtr<T[]>(new (Alloc(where.alloc_size)) T[n],
                          PooledDeleter(nullptr));
    } else {
      return PoolPtr<T[]>(new (AllocPooled(where.alloc_size, where.alloc_size,
                                           &pools_[where.pool_index])) T[n],
                          PooledDeleter(&pools_[where.pool_index]));
    }
  }
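
  // Example (sketch): a 600-byte buffer rounds up to the 1024-byte bucket,
  // while a 2000-byte buffer exceeds every bucket and gets an exact-sized
  // arena allocation whose deleter is a no-op (the arena reclaims that memory
  // wholesale when it is destroyed):
  //   Arena::PoolPtr<char[]> buf = a->MakePooledArray<char>(600);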

  // Like MakePooled, but with manual memory management.
  // The caller is responsible for calling DeletePooled() on the returned
  // pointer, and expected to call it with the same type T as was passed to
  // this function (else the free list returned to the arena will be
  // corrupted).
  template <typename T, typename... Args>
  T* NewPooled(Args&&... args) {
    auto* free_list =
        &pools_[arena_detail::PoolFromObjectSize<sizeof(T)>(PoolSizes())];
    return new (AllocPooled(
        sizeof(T),
        arena_detail::AllocationSizeFromObjectSize<sizeof(T)>(PoolSizes()),
        free_list)) T(std::forward<Args>(args)...);
  }

  template <typename T>
  void DeletePooled(T* p) {
    auto* free_list =
        &pools_[arena_detail::PoolFromObjectSize<sizeof(T)>(PoolSizes())];
    p->~T();
    FreePooled(p, free_list);
  }
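
  // Example (sketch): NewPooled/DeletePooled must be paired with the exact
  // same T, per the comment above, so the block returns to the right pool:
  //   Foo* f = a->NewPooled<Foo>();
  //   a->DeletePooled(f);  // f must be Foo*, not a pointer to a base class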
#else
  class PooledDeleter {
   public:
    PooledDeleter() = default;
    template <typename T>
    void operator()(T* p) {
      delete p;
    }
    bool has_freelist() const { return true; }
  };

  template <typename T>
  using PoolPtr = std::unique_ptr<T, PooledDeleter>;

  // With GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC defined, pooled
  // allocations are plain heap allocations.
  template <typename T>
  void DeletePooled(T* p) {
    delete p;
  }
#endif

 private:
  friend struct arena_detail::UnrefDestroy;

  struct Zone {
    Zone* prev;
  };
  struct ManagedNewObject;  // defined with the ManagedNew() machinery

  // Initialize an arena, optionally as though a call to Alloc() had already
  // been made for initial_alloc bytes: this avoids an atomic fetch-add in the
  // common case where we wish to create an arena and then perform an
  // immediate allocation.
  explicit Arena(size_t initial_size, size_t initial_alloc,
                 MemoryAllocator* memory_allocator)
      : total_used_(GPR_ROUND_UP_TO_ALIGNMENT_SIZE(initial_alloc)),
        initial_zone_size_(initial_size),
        memory_allocator_(memory_allocator) {}
  explicit Arena(size_t initial_size, size_t initial_alloc,
                 RefCountedPtr<ArenaFactory> arena_factory);

  ~Arena();

  void* AllocZone(size_t size);

#ifndef GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
  void* AllocPooled(size_t obj_size, size_t alloc_size,
                    std::atomic<FreePoolNode*>* head);
  static void FreePooled(void* p, std::atomic<FreePoolNode*>* head);
#endif

  void TracePoolAlloc(size_t size, void* ptr) {
    (void)size;
    (void)ptr;
#ifdef GRPC_ARENA_TRACE_POOLED_ALLOCATIONS
    gpr_log(GPR_ERROR, "ARENA %p ALLOC %" PRIdPTR " @ %p", this, size, ptr);
#endif
  }
  static void TracePoolFree(void* ptr) {
    (void)ptr;
#ifdef GRPC_ARENA_TRACE_POOLED_ALLOCATIONS
    gpr_log(GPR_ERROR, "FREE %p", ptr);
#endif
  }

  // Keep track of the total used size. We use this in our call sizing
  // hysteresis.
  std::atomic<size_t> total_used_{0};
  const size_t initial_zone_size_;
  // Additional zones are kept in a reverse-linked list anchored at the
  // last zone; the zone list is reverse-walked during arena destruction only.
  std::atomic<Zone*> last_zone_{nullptr};
  std::atomic<ManagedNewObject*> managed_new_head_{nullptr};
#ifndef GRPC_ARENA_POOLED_ALLOCATIONS_USE_MALLOC
  std::atomic<FreePoolNode*> pools_[PoolSizes::size()]{};
#endif
  // The backing memory allocator, drawn from the memory quota.
  MemoryAllocator* const memory_allocator_;
  RefCountedPtr<ArenaFactory> arena_factory_;
};

// Smart pointer for arenas when the final size is not required.
struct ScopedArenaDeleter {
  void operator()(Arena* arena) { arena->Destroy(); }
};
using ScopedArenaPtr = std::unique_ptr<Arena, ScopedArenaDeleter>;
inline ScopedArenaPtr MakeScopedArena(size_t initial_size,
                                      MemoryAllocator* memory_allocator) {
  return ScopedArenaPtr(Arena::Create(initial_size, memory_allocator));
}
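
// Example (sketch): a scoped arena for a short-lived operation; the memory
// is reclaimed wholesale when `arena` goes out of scope.
//   ScopedArenaPtr arena = MakeScopedArena(1024, &memory_allocator);
//   void* buf = arena->Alloc(64);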

// Arenas form a context for activities
template <>
struct ContextType<Arena> {};

namespace arena_detail {
inline void UnrefDestroy::operator()(Arena* arena) const { arena->Destroy(); }
}  // namespace arena_detail

}  // namespace grpc_core

#endif  // GRPC_SRC_CORE_LIB_RESOURCE_QUOTA_ARENA_H