clean up the arena code a bit more:

- _upb_Arena_FastMalloc() has been directly inlined
- Arena::allocator() has been removed
- mem_block is now _upb_MemBlock
- some calls to upb_Arena_Alloc() (which is going away) have been changed to use upb_Arena_Malloc() instead

PiperOrigin-RevId: 489874377
pull/13171/head
Eric Salo 2 years ago committed by Copybara-Service
parent f3a0cc49da
commit e932f7931a
  1. 3
      protos/protos.h
  2. 20
      upb/mem/arena.c
  3. 48
      upb/mem/arena.h
  4. 12
      upb/mem/arena_internal.h
  5. 7
      upb/upb.hpp

@@ -103,9 +103,8 @@ inline absl::string_view UpbStrToStringView(upb_StringView str) {
// TODO: update bzl and move to upb runtime / protos.cc.
inline upb_StringView UpbStrFromStringView(absl::string_view str,
upb_Arena* arena) {
upb_alloc* alloc = upb_Arena_Alloc(arena);
const size_t str_size = str.size();
char* buffer = static_cast<char*>(upb_malloc(alloc, str_size));
char* buffer = static_cast<char*>(upb_Arena_Malloc(arena, str_size));
memcpy(buffer, str.data(), str_size);
return upb_StringView_FromDataAndSize(buffer, str_size);
}

@@ -43,11 +43,11 @@ static uintptr_t upb_cleanup_metadata(uint32_t* cleanup,
return (uintptr_t)cleanup | has_initial_block;
}
struct mem_block {
struct mem_block* next;
struct _upb_MemBlock {
struct _upb_MemBlock* next;
uint32_t size;
uint32_t cleanups;
/* Data follows. */
// Data follows.
};
typedef struct cleanup_ent {
@@ -56,7 +56,7 @@ typedef struct cleanup_ent {
} cleanup_ent;
static const size_t memblock_reserve =
UPB_ALIGN_UP(sizeof(mem_block), UPB_MALLOC_ALIGN);
UPB_ALIGN_UP(sizeof(_upb_MemBlock), UPB_MALLOC_ALIGN);
static upb_Arena* arena_findroot(upb_Arena* a) {
/* Path splitting keeps time complexity down, see:
@@ -73,10 +73,10 @@ size_t upb_Arena_SpaceAllocated(upb_Arena* arena) {
arena = arena_findroot(arena);
size_t memsize = 0;
mem_block* block = arena->freelist;
_upb_MemBlock* block = arena->freelist;
while (block) {
memsize += sizeof(mem_block) + block->size;
memsize += sizeof(_upb_MemBlock) + block->size;
block = block->next;
}
@@ -89,7 +89,7 @@ uint32_t upb_Arena_DebugRefCount(upb_Arena* arena) {
static void upb_Arena_addblock(upb_Arena* a, upb_Arena* root, void* ptr,
size_t size) {
mem_block* block = ptr;
_upb_MemBlock* block = ptr;
/* The block is for arena |a|, but should appear in the freelist of |root|. */
block->next = root->freelist;
@@ -110,7 +110,7 @@ static void upb_Arena_addblock(upb_Arena* a, upb_Arena* root, void* ptr,
static bool upb_Arena_Allocblock(upb_Arena* a, size_t size) {
upb_Arena* root = arena_findroot(a);
size_t block_size = UPB_MAX(size, a->last_size * 2) + memblock_reserve;
mem_block* block = upb_malloc(root->block_alloc, block_size);
_upb_MemBlock* block = upb_malloc(root->block_alloc, block_size);
if (!block) return false;
upb_Arena_addblock(a, root, block, block_size);
@@ -193,13 +193,13 @@ upb_Arena* upb_Arena_Init(void* mem, size_t n, upb_alloc* alloc) {
}
static void arena_dofree(upb_Arena* a) {
mem_block* block = a->freelist;
_upb_MemBlock* block = a->freelist;
UPB_ASSERT(a->parent == a);
UPB_ASSERT(a->refcount == 0);
while (block) {
/* Load first since we are deleting block. */
mem_block* next = block->next;
_upb_MemBlock* next = block->next;
if (block->cleanups > 0) {
cleanup_ent* end = UPB_PTR_AT(block, block->size, void);

@@ -25,20 +25,6 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef UPB_MEM_ARENA_H_
#define UPB_MEM_ARENA_H_
#include <string.h>
#include "upb/mem/alloc.h"
// Must be last.
#include "upb/port/def.inc"
#ifdef __cplusplus
extern "C" {
#endif
/* upb_Arena is a specific allocator implementation that uses arena allocation.
* The user provides an allocator that will be used to allocate the underlying
* arena blocks. Arenas by nature do not require the individual allocations
@@ -51,10 +37,20 @@ extern "C" {
* upb_alloc interface, but it would not be as efficient for the
* single-threaded case. */
typedef void upb_CleanupFunc(void* ud);
#ifndef UPB_MEM_ARENA_H_
#define UPB_MEM_ARENA_H_
#include <string.h>
#include "upb/mem/alloc.h"
// Must be last.
#include "upb/port/def.inc"
typedef struct upb_Arena upb_Arena;
typedef void upb_CleanupFunc(void* context);
typedef struct {
/* We implement the allocator interface.
* This must be the first member of upb_Arena!
@@ -64,6 +60,10 @@ typedef struct {
char *ptr, *end;
} _upb_ArenaHead;
#ifdef __cplusplus
extern "C" {
#endif
/* Creates an arena from the given initial block (if any -- n may be 0).
* Additional blocks will be allocated from |alloc|. If |alloc| is NULL, this
* is a fixed-size arena and cannot grow. */
@@ -82,7 +82,13 @@ UPB_INLINE size_t _upb_ArenaHas(upb_Arena* a) {
return (size_t)(h->end - h->ptr);
}
UPB_INLINE void* _upb_Arena_FastMalloc(upb_Arena* a, size_t size) {
UPB_INLINE void* upb_Arena_Malloc(upb_Arena* a, size_t size) {
size = UPB_ALIGN_MALLOC(size);
if (UPB_UNLIKELY(_upb_ArenaHas(a) < size)) {
return _upb_Arena_SlowMalloc(a, size);
}
// We have enough space to do a fast malloc.
_upb_ArenaHead* h = (_upb_ArenaHead*)a;
void* ret = h->ptr;
UPB_ASSERT(UPB_ALIGN_MALLOC((uintptr_t)ret) == (uintptr_t)ret);
@@ -105,16 +111,6 @@ UPB_INLINE void* _upb_Arena_FastMalloc(upb_Arena* a, size_t size) {
return ret;
}
UPB_INLINE void* upb_Arena_Malloc(upb_Arena* a, size_t size) {
size = UPB_ALIGN_MALLOC(size);
if (UPB_UNLIKELY(_upb_ArenaHas(a) < size)) {
return _upb_Arena_SlowMalloc(a, size);
}
return _upb_Arena_FastMalloc(a, size);
}
// Shrinks the last alloc from arena.
// REQUIRES: (ptr, oldsize) was the last malloc/realloc from this arena.
// We could also add a upb_Arena_TryShrinkLast() which is simply a no-op if

@@ -33,11 +33,7 @@
// Must be last.
#include "upb/port/def.inc"
#ifdef __cplusplus
extern "C" {
#endif
typedef struct mem_block mem_block;
typedef struct _upb_MemBlock _upb_MemBlock;
struct upb_Arena {
_upb_ArenaHead head;
@@ -58,9 +54,13 @@ struct upb_Arena {
struct upb_Arena* parent;
/* Linked list of blocks to free/cleanup. */
mem_block *freelist, *freelist_tail;
_upb_MemBlock *freelist, *freelist_tail;
};
#ifdef __cplusplus
extern "C" {
#endif
#ifdef __cplusplus
} /* extern "C" */
#endif

@@ -77,13 +77,6 @@ class Arena {
upb_Arena* ptr() const { return ptr_.get(); }
// Allows this arena to be used as a generic allocator.
//
// The arena does not need free() calls so when using Arena as an allocator
// it is safe to skip them. However they are no-ops so there is no harm in
// calling free() either.
upb_alloc* allocator() { return upb_Arena_Alloc(ptr_.get()); }
// Add a cleanup function to run when the arena is destroyed.
// Returns false on out-of-memory.
template <class T>

Loading…
Cancel
Save