commit 5a132b8245
179 changed files with 5373 additions and 2126 deletions
@@ -1,30 +0,0 @@
{
    "version": "0.2.0",
    "configurations": [
        {
            "type": "node",
            "request": "launch",
            "name": "Mocha Tests",
            "cwd": "${workspaceRoot}",
            "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/mocha",
            "windows": {
                "runtimeExecutable": "${workspaceRoot}/node_modules/.bin/mocha.cmd"
            },
            "runtimeArgs": [
                "-u",
                "tdd",
                "--timeout",
                "999999",
                "--colors",
                "${workspaceRoot}/src/node/test"
            ],
            "internalConsoleOptions": "openOnSessionStart"
        },
        {
            "type": "node",
            "request": "attach",
            "name": "Attach to Process",
            "port": 5858
        }
    ]
}
File diff suppressed because it is too large
@@ -1,152 +0,0 @@
/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#include <grpc/support/port_platform.h>

#include "src/core/lib/gpr/arena.h"

#include <string.h>
#include <new>

#include <grpc/support/alloc.h>
#include <grpc/support/atm.h>
#include <grpc/support/log.h>
#include <grpc/support/sync.h>

#include "src/core/lib/gpr/alloc.h"
#include "src/core/lib/gprpp/memory.h"

static void* gpr_arena_malloc(size_t size) {
  return gpr_malloc_aligned(size, GPR_MAX_ALIGNMENT);
}

// Uncomment this to use a simple arena that simply allocates the
// requested amount of memory for each call to gpr_arena_alloc(). This
// effectively eliminates the efficiency gain of using an arena, but it
// may be useful for debugging purposes.
//#define SIMPLE_ARENA_FOR_DEBUGGING

#ifdef SIMPLE_ARENA_FOR_DEBUGGING

struct gpr_arena {
  gpr_arena() { gpr_mu_init(&mu); }
  ~gpr_arena() {
    gpr_mu_destroy(&mu);
    for (size_t i = 0; i < num_ptrs; ++i) {
      gpr_free_aligned(ptrs[i]);
    }
    gpr_free(ptrs);
  }

  gpr_mu mu;
  void** ptrs = nullptr;
  size_t num_ptrs = 0;
};

gpr_arena* gpr_arena_create(size_t ignored_initial_size) {
  return grpc_core::New<gpr_arena>();
}

size_t gpr_arena_destroy(gpr_arena* arena) {
  grpc_core::Delete(arena);
  return 1;  // Value doesn't matter, since it won't be used.
}

void* gpr_arena_alloc(gpr_arena* arena, size_t size) {
  gpr_mu_lock(&arena->mu);
  arena->ptrs =
      (void**)gpr_realloc(arena->ptrs, sizeof(void*) * (arena->num_ptrs + 1));
  void* retval = arena->ptrs[arena->num_ptrs++] = gpr_arena_malloc(size);
  gpr_mu_unlock(&arena->mu);
  return retval;
}

#else  // SIMPLE_ARENA_FOR_DEBUGGING

// TODO(roth): We currently assume that all callers need alignment of 16
// bytes, which may be wrong in some cases. As part of converting the
// arena API to C++, we should consider replacing gpr_arena_alloc() with a
// template that takes the type of the value being allocated, which
// would allow us to use the alignment actually needed by the caller.

typedef struct zone {
  zone* next = nullptr;
} zone;

struct gpr_arena {
  gpr_arena(size_t initial_size)
      : initial_zone_size(initial_size), last_zone(&initial_zone) {
    gpr_mu_init(&arena_growth_mutex);
  }
  ~gpr_arena() {
    gpr_mu_destroy(&arena_growth_mutex);
    zone* z = initial_zone.next;
    while (z) {
      zone* next_z = z->next;
      z->~zone();
      gpr_free_aligned(z);
      z = next_z;
    }
  }

  // Keep track of the total used size. We use this in our call sizing
  // historesis.
  gpr_atm total_used = 0;
  size_t initial_zone_size;
  zone initial_zone;
  zone* last_zone;
  gpr_mu arena_growth_mutex;
};

gpr_arena* gpr_arena_create(size_t initial_size) {
  initial_size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(initial_size);
  return new (gpr_arena_malloc(
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(gpr_arena)) + initial_size))
      gpr_arena(initial_size);
}

size_t gpr_arena_destroy(gpr_arena* arena) {
  const gpr_atm size = gpr_atm_no_barrier_load(&arena->total_used);
  arena->~gpr_arena();
  gpr_free_aligned(arena);
  return static_cast<size_t>(size);
}

void* gpr_arena_alloc(gpr_arena* arena, size_t size) {
  size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(size);
  size_t begin = gpr_atm_no_barrier_fetch_add(&arena->total_used, size);
  if (begin + size <= arena->initial_zone_size) {
    return reinterpret_cast<char*>(arena) +
           GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(gpr_arena)) + begin;
  } else {
    // If the allocation isn't able to end in the initial zone, create a new
    // zone for this allocation, and any unused space in the initial zone is
    // wasted. This overflowing and wasting is uncommon because of our arena
    // sizing historesis (that is, most calls should have a large enough initial
    // zone and will not need to grow the arena).
    gpr_mu_lock(&arena->arena_growth_mutex);
    zone* z = new (gpr_arena_malloc(
        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone)) + size)) zone();
    arena->last_zone->next = z;
    arena->last_zone = z;
    gpr_mu_unlock(&arena->arena_growth_mutex);
    return reinterpret_cast<char*>(z) +
           GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(zone));
  }
}

#endif  // SIMPLE_ARENA_FOR_DEBUGGING
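For context, the file removed above is the C-style gpr arena implementation (it includes its own header, src/core/lib/gpr/arena.h). Below is a minimal sketch of how that removed C API was typically driven; the surrounding function, sizes, and variable names are illustrative assumptions, not taken from this commit.

#include <stddef.h>

#include "src/core/lib/gpr/arena.h"  // declares gpr_arena_create/alloc/destroy

// Illustrative caller, not part of the diff.
void use_old_arena_api() {
  // One up-front zone; 1024 is an arbitrary example size.
  gpr_arena* arena = gpr_arena_create(1024);
  // Fast bump allocation served out of the initial zone.
  void* small = gpr_arena_alloc(arena, 64);
  // A request that overflows the initial zone lands in a newly chained zone.
  void* large = gpr_arena_alloc(arena, 4096);
  (void)small;
  (void)large;
  // Individual allocations are never freed; destroying the arena releases all
  // zones and reports the total bytes handed out (used for call sizing).
  size_t total_used = gpr_arena_destroy(arena);
  (void)total_used;
}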
@@ -0,0 +1,103 @@
/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#include <grpc/support/port_platform.h>

#include "src/core/lib/gprpp/arena.h"

#include <string.h>
#include <new>

#include <grpc/support/alloc.h>
#include <grpc/support/atm.h>
#include <grpc/support/log.h>
#include <grpc/support/sync.h>

#include "src/core/lib/gpr/alloc.h"
#include "src/core/lib/gprpp/memory.h"

namespace {

void* ArenaStorage(size_t initial_size) {
  static constexpr size_t base_size =
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_core::Arena));
  initial_size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(initial_size);
  size_t alloc_size = base_size + initial_size;
  static constexpr size_t alignment =
      (GPR_CACHELINE_SIZE > GPR_MAX_ALIGNMENT &&
       GPR_CACHELINE_SIZE % GPR_MAX_ALIGNMENT == 0)
          ? GPR_CACHELINE_SIZE
          : GPR_MAX_ALIGNMENT;
  return gpr_malloc_aligned(alloc_size, alignment);
}

}  // namespace

namespace grpc_core {

Arena::~Arena() {
  Zone* z = last_zone_;
  while (z) {
    Zone* prev_z = z->prev;
    z->~Zone();
    gpr_free_aligned(z);
    z = prev_z;
  }
}

Arena* Arena::Create(size_t initial_size) {
  return new (ArenaStorage(initial_size)) Arena(initial_size);
}

Pair<Arena*, void*> Arena::CreateWithAlloc(size_t initial_size,
                                           size_t alloc_size) {
  static constexpr size_t base_size =
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Arena));
  auto* new_arena =
      new (ArenaStorage(initial_size)) Arena(initial_size, alloc_size);
  void* first_alloc = reinterpret_cast<char*>(new_arena) + base_size;
  return MakePair(new_arena, first_alloc);
}

size_t Arena::Destroy() {
  size_t size = total_used_.Load(MemoryOrder::RELAXED);
  this->~Arena();
  gpr_free_aligned(this);
  return size;
}

void* Arena::AllocZone(size_t size) {
  // If the allocation isn't able to end in the initial zone, create a new
  // zone for this allocation, and any unused space in the initial zone is
  // wasted. This overflowing and wasting is uncommon because of our arena
  // sizing hysteresis (that is, most calls should have a large enough initial
  // zone and will not need to grow the arena).
  static constexpr size_t zone_base_size =
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Zone));
  size_t alloc_size = zone_base_size + size;
  Zone* z = new (gpr_malloc_aligned(alloc_size, GPR_MAX_ALIGNMENT)) Zone();
  {
    gpr_spinlock_lock(&arena_growth_spinlock_);
    z->prev = last_zone_;
    last_zone_ = z;
    gpr_spinlock_unlock(&arena_growth_spinlock_);
  }
  return reinterpret_cast<char*>(z) + zone_base_size;
}

}  // namespace grpc_core
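The replacement exposes the same lifecycle through the grpc_core::Arena class declared in the header added below. A minimal sketch of the equivalent call pattern; the caller name and sizes are illustrative assumptions, not part of this commit.

#include <stddef.h>

#include "src/core/lib/gprpp/arena.h"  // declares grpc_core::Arena

// Illustrative caller, not part of the diff.
void use_new_arena_api() {
  // The arena object is placement-new'd into one aligned block that also
  // holds the initial zone (see ArenaStorage in the new .cc file above).
  grpc_core::Arena* arena = grpc_core::Arena::Create(1024);
  void* small = arena->Alloc(64);    // served from the initial zone
  void* large = arena->Alloc(4096);  // overflows into a chained Zone
  (void)small;
  (void)large;
  // Destroy() runs the destructor in place, frees the storage, and returns
  // the total bytes requested, mirroring the old gpr_arena_destroy().
  size_t total_used = arena->Destroy();
  (void)total_used;
}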
@@ -0,0 +1,121 @@
/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

// \file Arena based allocator
// Allows very fast allocation of memory, but that memory cannot be freed until
// the arena as a whole is freed
// Tracks the total memory allocated against it, so that future arenas can
// pre-allocate the right amount of memory

#ifndef GRPC_CORE_LIB_GPRPP_ARENA_H
#define GRPC_CORE_LIB_GPRPP_ARENA_H

#include <grpc/support/port_platform.h>

#include <new>
#include <utility>

#include <grpc/support/alloc.h>
#include <grpc/support/sync.h>

#include "src/core/lib/gpr/alloc.h"
#include "src/core/lib/gpr/spinlock.h"
#include "src/core/lib/gprpp/atomic.h"
#include "src/core/lib/gprpp/pair.h"

#include <stddef.h>

namespace grpc_core {

class Arena {
 public:
  // Create an arena, with \a initial_size bytes in the first allocated buffer.
  static Arena* Create(size_t initial_size);

  // Create an arena, with \a initial_size bytes in the first allocated buffer,
  // and return both a void pointer to the returned arena and a void* with the
  // first allocation.
  static Pair<Arena*, void*> CreateWithAlloc(size_t initial_size,
                                             size_t alloc_size);

  // Destroy an arena, returning the total number of bytes allocated.
  size_t Destroy();

  // Allocate \a size bytes from the arena.
  void* Alloc(size_t size) {
    static constexpr size_t base_size =
        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Arena));
    size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(size);
    size_t begin = total_used_.FetchAdd(size, MemoryOrder::RELAXED);
    if (begin + size <= initial_zone_size_) {
      return reinterpret_cast<char*>(this) + base_size + begin;
    } else {
      return AllocZone(size);
    }
  }

  // TODO(roth): We currently assume that all callers need alignment of 16
  // bytes, which may be wrong in some cases. When we have time, we should
  // change this to instead use the alignment of the type being allocated by
  // this method.
  template <typename T, typename... Args>
  T* New(Args&&... args) {
    T* t = static_cast<T*>(Alloc(sizeof(T)));
    new (t) T(std::forward<Args>(args)...);
    return t;
  }

 private:
  struct Zone {
    Zone* prev;
  };

  // Initialize an arena.
  // Parameters:
  //   initial_size: The initial size of the whole arena in bytes. These bytes
  //   are contained within 'zone 0'. If the arena user ends up requiring more
  //   memory than the arena contains in zone 0, subsequent zones are allocated
  //   on demand and maintained in a tail-linked list.
  //
  //   initial_alloc: Optionally, construct the arena as though a call to
  //   Alloc() had already been made for initial_alloc bytes. This provides a
  //   quick optimization (avoiding an atomic fetch-add) for the common case
  //   where we wish to create an arena and then perform an immediate
  //   allocation.
  explicit Arena(size_t initial_size, size_t initial_alloc = 0)
      : total_used_(initial_alloc), initial_zone_size_(initial_size) {}

  ~Arena();

  void* AllocZone(size_t size);

  // Keep track of the total used size. We use this in our call sizing
  // hysteresis.
  Atomic<size_t> total_used_;
  size_t initial_zone_size_;
  gpr_spinlock arena_growth_spinlock_ = GPR_SPINLOCK_STATIC_INITIALIZER;
  // If the initial arena allocation wasn't enough, we allocate additional zones
  // in a reverse linked list. Each additional zone consists of (1) a pointer to
  // the zone added before this zone (null if this is the first additional zone)
  // and (2) the allocated memory. The arena itself maintains a pointer to the
  // last zone; the zone list is reverse-walked during arena destruction only.
  Zone* last_zone_ = nullptr;
};

}  // namespace grpc_core

#endif /* GRPC_CORE_LIB_GPRPP_ARENA_H */
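Two additions beyond the old C API are CreateWithAlloc() and the New<T>() template shown in the header above. The following is a hedged sketch of how they might be used: CallData and its field are hypothetical, and it assumes grpc_core::Pair (from pair.h, not shown in this diff) behaves like std::pair with first/second members.

#include <stddef.h>
#include <new>

#include "src/core/lib/gprpp/arena.h"

// Hypothetical type used only for illustration.
struct CallData {
  explicit CallData(int id) : call_id(id) {}
  int call_id;
};

void use_arena_extensions() {
  // Create the arena and reserve its first allocation in one step, skipping
  // the atomic FetchAdd in Alloc() for that first block.
  auto arena_and_block =
      grpc_core::Arena::CreateWithAlloc(1024, sizeof(CallData));
  grpc_core::Arena* arena = arena_and_block.first;  // assumes pair-like API
  void* block = arena_and_block.second;
  CallData* cd = new (block) CallData(42);
  // New<T>() combines Alloc() with placement construction.
  CallData* cd2 = arena->New<CallData>(43);
  (void)cd;
  (void)cd2;
  // Note: per the implementation above, Destroy() frees the zones but does
  // not run destructors of objects created with New<T>() or placement new.
  arena->Destroy();
}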
Some files were not shown because too many files have changed in this diff.