@@ -5,8 +5,13 @@
 // license that can be found in the LICENSE file or at
 // https://developers.google.com/open-source/licenses/bsd
 
+#include "upb/mem/internal/arena.h"
+
 #include "upb/mem/arena.h"
 
+#include <stddef.h>
+#include <stdint.h>
+
 #include "upb/mem/alloc.h"
-#include "upb/mem/internal/arena.h"
 #include "upb/port/atomic.h"
 
 // Must be last.
@@ -19,15 +24,59 @@ struct _upb_MemBlock {
   // Data follows.
 };
 
-static const size_t memblock_reserve =
+static const size_t kUpb_MemblockReserve =
     UPB_ALIGN_UP(sizeof(_upb_MemBlock), UPB_MALLOC_ALIGN);
 
-typedef struct _upb_ArenaRoot {
+typedef struct {
   upb_Arena* root;
   uintptr_t tagged_count;
-} _upb_ArenaRoot;
+} upb_ArenaRoot;
+
+static bool _upb_Arena_IsTaggedRefcount(uintptr_t parent_or_count) {
+  return (parent_or_count & 1) == 1;
+}
+
+static bool _upb_Arena_IsTaggedPointer(uintptr_t parent_or_count) {
+  return (parent_or_count & 1) == 0;
+}
+
+static uintptr_t _upb_Arena_RefCountFromTagged(uintptr_t parent_or_count) {
+  UPB_ASSERT(_upb_Arena_IsTaggedRefcount(parent_or_count));
+  return parent_or_count >> 1;
+}
+
+static uintptr_t _upb_Arena_TaggedFromRefcount(uintptr_t refcount) {
+  uintptr_t parent_or_count = (refcount << 1) | 1;
+  UPB_ASSERT(_upb_Arena_IsTaggedRefcount(parent_or_count));
+  return parent_or_count;
+}
 
-static _upb_ArenaRoot _upb_Arena_FindRoot(upb_Arena* a) {
+static upb_Arena* _upb_Arena_PointerFromTagged(uintptr_t parent_or_count) {
+  UPB_ASSERT(_upb_Arena_IsTaggedPointer(parent_or_count));
+  return (upb_Arena*)parent_or_count;
+}
+
+static uintptr_t _upb_Arena_TaggedFromPointer(upb_Arena* a) {
+  uintptr_t parent_or_count = (uintptr_t)a;
+  UPB_ASSERT(_upb_Arena_IsTaggedPointer(parent_or_count));
+  return parent_or_count;
+}
+
+static upb_alloc* _upb_Arena_BlockAlloc(upb_Arena* arena) {
+  return (upb_alloc*)(arena->block_alloc & ~0x1);
+}
+
+static uintptr_t _upb_Arena_MakeBlockAlloc(upb_alloc* alloc, bool has_initial) {
+  uintptr_t alloc_uint = (uintptr_t)alloc;
+  UPB_ASSERT((alloc_uint & 1) == 0);
+  return alloc_uint | (has_initial ? 1 : 0);
+}
+
+static bool _upb_Arena_HasInitialBlock(upb_Arena* arena) {
+  return arena->block_alloc & 0x1;
+}
+
+static upb_ArenaRoot _upb_Arena_FindRoot(upb_Arena* a) {
   uintptr_t poc = upb_Atomic_Load(&a->parent_or_count, memory_order_acquire);
   while (_upb_Arena_IsTaggedPointer(poc)) {
     upb_Arena* next = _upb_Arena_PointerFromTagged(poc);
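Note: the helpers moved into this file implement the tagged `parent_or_count` word: an odd value stores a refcount in its upper bits, an even value is a parent pointer (safe because arenas are always at least 2-byte aligned). A minimal standalone sketch of the same encoding; `tag_refcount`/`untag_refcount` are illustrative names, not upb API:

```c
#include <assert.h>
#include <stdint.h>

static uintptr_t tag_refcount(uintptr_t n) { return (n << 1) | 1; }
static uintptr_t untag_refcount(uintptr_t v) { return v >> 1; }

int main(void) {
  long arena;  // stand-in for a suitably aligned upb_Arena
  uintptr_t as_count = tag_refcount(3);
  uintptr_t as_parent = (uintptr_t)&arena;

  assert((as_count & 1) == 1);            // odd  => tagged refcount
  assert((as_parent & 1) == 0);           // even => tagged parent pointer
  assert(untag_refcount(as_count) == 3);  // payload survives the round trip
  return 0;
}
```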
@@ -61,7 +110,7 @@ static _upb_ArenaRoot _upb_Arena_FindRoot(upb_Arena* a) {
     a = next;
     poc = next_poc;
   }
-  return (_upb_ArenaRoot){.root = a, .tagged_count = poc};
+  return (upb_ArenaRoot){.root = a, .tagged_count = poc};
 }
 
 size_t upb_Arena_SpaceAllocated(upb_Arena* arena) {
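Note: `_upb_Arena_FindRoot` is the find half of a lock-free union-find: chase even (pointer) words until an odd (refcount) word identifies the root of the fused group. A single-threaded sketch of that loop over a toy node type; the atomics and the CAS-based path compression of the real code are omitted here:

```c
#include <stdint.h>

typedef struct node {
  uintptr_t parent_or_count;  // odd: tagged refcount (root); even: node*
} node;

// Toy, single-threaded version of the FindRoot walk.
static node* find_root(node* n) {
  while ((n->parent_or_count & 1) == 0) {
    n = (node*)n->parent_or_count;  // follow the parent pointer toward the root
  }
  return n;  // the root holds the refcount for the whole fused group
}
```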
@@ -92,7 +141,7 @@ uint32_t upb_Arena_DebugRefCount(upb_Arena* a) {
   return _upb_Arena_RefCountFromTagged(poc);
 }
 
-static void upb_Arena_AddBlock(upb_Arena* a, void* ptr, size_t size) {
+static void _upb_Arena_AddBlock(upb_Arena* a, void* ptr, size_t size) {
   _upb_MemBlock* block = ptr;
 
   // Insert into linked list.
@@ -100,37 +149,36 @@ static void upb_Arena_AddBlock(upb_Arena* a, void* ptr, size_t size) {
   upb_Atomic_Init(&block->next, a->blocks);
   upb_Atomic_Store(&a->blocks, block, memory_order_release);
 
-  a->head.ptr = UPB_PTR_AT(block, memblock_reserve, char);
-  a->head.end = UPB_PTR_AT(block, size, char);
+  a->head.UPB_PRIVATE(ptr) = UPB_PTR_AT(block, kUpb_MemblockReserve, char);
+  a->head.UPB_PRIVATE(end) = UPB_PTR_AT(block, size, char);
 
-  UPB_POISON_MEMORY_REGION(a->head.ptr, a->head.end - a->head.ptr);
+  UPB_POISON_MEMORY_REGION(a->head.UPB_PRIVATE(ptr),
+                           a->head.UPB_PRIVATE(end) - a->head.UPB_PRIVATE(ptr));
 }
 
-static bool upb_Arena_AllocBlock(upb_Arena* a, size_t size) {
+static bool _upb_Arena_AllocBlock(upb_Arena* a, size_t size) {
   if (!a->block_alloc) return false;
   _upb_MemBlock* last_block = upb_Atomic_Load(&a->blocks, memory_order_acquire);
   size_t last_size = last_block != NULL ? last_block->size : 128;
-  size_t block_size = UPB_MAX(size, last_size * 2) + memblock_reserve;
-  _upb_MemBlock* block = upb_malloc(upb_Arena_BlockAlloc(a), block_size);
+  size_t block_size = UPB_MAX(size, last_size * 2) + kUpb_MemblockReserve;
+  _upb_MemBlock* block = upb_malloc(_upb_Arena_BlockAlloc(a), block_size);
 
   if (!block) return false;
-  upb_Arena_AddBlock(a, block, block_size);
+  _upb_Arena_AddBlock(a, block, block_size);
+  UPB_ASSERT(UPB_PRIVATE(_upb_ArenaHas)(a) >= size);
   return true;
 }
 
-void* _upb_Arena_SlowMalloc(upb_Arena* a, size_t size) {
-  if (!upb_Arena_AllocBlock(a, size)) return NULL; /* Out of memory. */
-  UPB_ASSERT(_upb_ArenaHas(a) >= size);
-  return upb_Arena_Malloc(a, size);
+void* UPB_PRIVATE(_upb_Arena_SlowMalloc)(upb_Arena* a, size_t size) {
+  if (!_upb_Arena_AllocBlock(a, size)) return NULL;  // OOM
+  return upb_Arena_Malloc(a, size - UPB_ASAN_GUARD_SIZE);
 }
 
 /* Public Arena API ***********************************************************/
 
-static upb_Arena* upb_Arena_InitSlow(upb_alloc* alloc) {
-  const size_t first_block_overhead = sizeof(upb_Arena) + memblock_reserve;
+static upb_Arena* _upb_Arena_InitSlow(upb_alloc* alloc) {
+  const size_t first_block_overhead = sizeof(upb_Arena) + kUpb_MemblockReserve;
   upb_Arena* a;
 
-  /* We need to malloc the initial block. */
+  // We need to malloc the initial block.
   char* mem;
   size_t n = first_block_overhead + 256;
   if (!alloc || !(mem = upb_malloc(alloc, n))) {
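Note: `_upb_Arena_AllocBlock` doubles the previous block size (seeded at 128) and adds `kUpb_MemblockReserve` for the `_upb_MemBlock` header, taking the max with the request so oversized allocations still fit in one block. A quick standalone check of that arithmetic; the header size of 40 below is an illustrative stand-in, not the real value:

```c
#include <stddef.h>
#include <stdio.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))

int main(void) {
  const size_t reserve = 40;  // stand-in for kUpb_MemblockReserve
  size_t last_size = 128;
  size_t requests[] = {16, 16, 5000, 16};
  for (int i = 0; i < 4; i++) {
    size_t block_size = MAX(requests[i], last_size * 2) + reserve;
    printf("request %4zu -> new block %5zu\n", requests[i], block_size);
    last_size = block_size;  // the stored size feeds the next doubling
  }
  return 0;
}
```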
@@ -140,13 +188,13 @@ static upb_Arena* upb_Arena_InitSlow(upb_alloc* alloc) {
   a = UPB_PTR_AT(mem, n - sizeof(*a), upb_Arena);
   n -= sizeof(*a);
 
-  a->block_alloc = upb_Arena_MakeBlockAlloc(alloc, 0);
+  a->block_alloc = _upb_Arena_MakeBlockAlloc(alloc, 0);
   upb_Atomic_Init(&a->parent_or_count, _upb_Arena_TaggedFromRefcount(1));
   upb_Atomic_Init(&a->next, NULL);
   upb_Atomic_Init(&a->tail, a);
   upb_Atomic_Init(&a->blocks, NULL);
 
-  upb_Arena_AddBlock(a, mem, n);
+  _upb_Arena_AddBlock(a, mem, n);
 
   return a;
 }
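Note: `block_alloc`, set here via `_upb_Arena_MakeBlockAlloc(alloc, 0)`, packs the `upb_alloc*` and a has-initial-block flag into one word using the same low-bit trick as `parent_or_count`. A hedged standalone sketch with toy names, not upb API:

```c
#include <assert.h>
#include <stdint.h>

typedef struct my_alloc my_alloc;  // opaque allocator, like upb_alloc

// Heap pointers are at least 2-byte aligned, so bit 0 is free for a flag.
static uintptr_t pack(my_alloc* a, int has_initial) {
  assert(((uintptr_t)a & 1) == 0);
  return (uintptr_t)a | (has_initial ? 1 : 0);
}
static my_alloc* unpack(uintptr_t v) { return (my_alloc*)(v & ~(uintptr_t)1); }
static int has_initial(uintptr_t v) { return (int)(v & 1); }
```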
@@ -167,7 +215,7 @@ upb_Arena* upb_Arena_Init(void* mem, size_t n, upb_alloc* alloc) {
   n = UPB_ALIGN_DOWN(n, UPB_ALIGN_OF(upb_Arena));
 
   if (UPB_UNLIKELY(n < sizeof(upb_Arena))) {
-    return upb_Arena_InitSlow(alloc);
+    return _upb_Arena_InitSlow(alloc);
   }
 
   a = UPB_PTR_AT(mem, n - sizeof(*a), upb_Arena);
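Note: the caller-visible contract of these two paths: if the user's buffer can hold the arena struct, the arena lives in (and allocates from) that buffer; otherwise `_upb_Arena_InitSlow` mallocs a first block. A usage sketch; the buffer size is arbitrary:

```c
#include "upb/mem/arena.h"

void example(void) {
  char buf[4096];
  // Allocations come from buf until it is exhausted, then from the fallback
  // allocator; a too-small buf simply takes the InitSlow path instead.
  upb_Arena* a = upb_Arena_Init(buf, sizeof(buf), &upb_alloc_global);
  void* p = upb_Arena_Malloc(a, 128);
  (void)p;
  upb_Arena_Free(a);  // releases malloc'd blocks; buf stays caller-owned
}
```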
@@ -176,21 +224,21 @@ upb_Arena* upb_Arena_Init(void* mem, size_t n, upb_alloc* alloc) {
   upb_Atomic_Init(&a->next, NULL);
   upb_Atomic_Init(&a->tail, a);
   upb_Atomic_Init(&a->blocks, NULL);
-  a->block_alloc = upb_Arena_MakeBlockAlloc(alloc, 1);
-  a->head.ptr = mem;
-  a->head.end = UPB_PTR_AT(mem, n - sizeof(*a), char);
+  a->block_alloc = _upb_Arena_MakeBlockAlloc(alloc, 1);
+  a->head.UPB_PRIVATE(ptr) = mem;
+  a->head.UPB_PRIVATE(end) = UPB_PTR_AT(mem, n - sizeof(*a), char);
 
   return a;
 }
 
-static void arena_dofree(upb_Arena* a) {
+static void _upb_Arena_DoFree(upb_Arena* a) {
   UPB_ASSERT(_upb_Arena_RefCountFromTagged(a->parent_or_count) == 1);
 
   while (a != NULL) {
     // Load first since arena itself is likely from one of its blocks.
     upb_Arena* next_arena =
         (upb_Arena*)upb_Atomic_Load(&a->next, memory_order_acquire);
-    upb_alloc* block_alloc = upb_Arena_BlockAlloc(a);
+    upb_alloc* block_alloc = _upb_Arena_BlockAlloc(a);
     _upb_MemBlock* block = upb_Atomic_Load(&a->blocks, memory_order_acquire);
     while (block != NULL) {
       // Load first since we are deleting block.
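Note: both "Load first" comments in `_upb_Arena_DoFree` matter because the arena struct and the list links live inside the memory being freed, so `next` must be read before the free. The same rule applies to any intrusive-list teardown; a standalone sketch:

```c
#include <stdlib.h>

struct block {
  struct block* next;
  // data follows
};

static void free_all(struct block* b) {
  while (b != NULL) {
    struct block* next = b->next;  // read the link BEFORE freeing the node
    free(b);
    b = next;  // reading b->next after free(b) would be a use-after-free
  }
}
```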
@@ -215,7 +263,7 @@ retry:
   // expensive than direct loads. As an optimization, we only do RMW ops
   // when we need to update things for other threads to see.
   if (poc == _upb_Arena_TaggedFromRefcount(1)) {
-    arena_dofree(a);
+    _upb_Arena_DoFree(a);
     return;
   }
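Note: this fast path skips the atomic RMW entirely: if the acquire load already shows a tagged refcount of exactly 1, this thread is the sole owner and may free at once. A sketch of the pattern in C11 atomics, reusing the `(n << 1) | 1` tagging; toy code, not the upb implementation:

```c
#include <stdatomic.h>
#include <stdint.h>

extern void destroy(void);  // stand-in for _upb_Arena_DoFree

static void release(_Atomic uintptr_t* parent_or_count) {
  uintptr_t poc = atomic_load_explicit(parent_or_count, memory_order_acquire);
  if (poc == (((uintptr_t)1 << 1) | 1)) {  // tagged refcount of 1: sole owner
    destroy();  // the plain load sufficed; no RMW needed
    return;
  }
  // Shared arena: pay for the RMW. Subtracting 2 drops the tagged count by 1.
  atomic_fetch_sub_explicit(parent_or_count, 2, memory_order_acq_rel);
}
```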
@@ -266,14 +314,14 @@ static upb_Arena* _upb_Arena_DoFuse(upb_Arena* a1, upb_Arena* a2,
   // In parent pointer mode, it may change what pointer it refers to in the
   // tree, but it will always approach a root. Any operation that walks the
   // tree to the root may collapse levels of the tree concurrently.
-  _upb_ArenaRoot r1 = _upb_Arena_FindRoot(a1);
-  _upb_ArenaRoot r2 = _upb_Arena_FindRoot(a2);
+  upb_ArenaRoot r1 = _upb_Arena_FindRoot(a1);
+  upb_ArenaRoot r2 = _upb_Arena_FindRoot(a2);
 
   if (r1.root == r2.root) return r1.root;  // Already fused.
 
   // Avoid cycles by always fusing into the root with the lower address.
   if ((uintptr_t)r1.root > (uintptr_t)r2.root) {
-    _upb_ArenaRoot tmp = r1;
+    upb_ArenaRoot tmp = r1;
     r1 = r2;
     r2 = tmp;
   }
@@ -333,7 +381,7 @@ bool upb_Arena_Fuse(upb_Arena* a1, upb_Arena* a2) {
 
   // Do not fuse initial blocks since we cannot lifetime extend them.
   // Any other fuse scenario is allowed.
-  if (upb_Arena_HasInitialBlock(a1) || upb_Arena_HasInitialBlock(a2)) {
+  if (_upb_Arena_HasInitialBlock(a1) || _upb_Arena_HasInitialBlock(a2)) {
     return false;
   }
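Note: from the caller's side, this check means `upb_Arena_Fuse` can fail, and arenas built on user-provided initial blocks never fuse. A usage sketch:

```c
#include "upb/mem/arena.h"

void fuse_example(void) {
  upb_Arena* a1 = upb_Arena_New();  // heap-backed, so fusable
  upb_Arena* a2 = upb_Arena_New();

  if (upb_Arena_Fuse(a1, a2)) {
    // Lifetimes are now joined: memory allocated from either arena stays
    // valid until BOTH arenas have been freed.
  }

  upb_Arena_Free(a1);
  upb_Arena_Free(a2);  // group memory is returned when the last ref drops
}
```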
@@ -348,8 +396,8 @@ bool upb_Arena_Fuse(upb_Arena* a1, upb_Arena* a2) {
 }
 
 bool upb_Arena_IncRefFor(upb_Arena* arena, const void* owner) {
-  _upb_ArenaRoot r;
-  if (upb_Arena_HasInitialBlock(arena)) return false;
+  upb_ArenaRoot r;
+  if (_upb_Arena_HasInitialBlock(arena)) return false;
 
 retry:
   r = _upb_Arena_FindRoot(arena);