Merge pull request #18407 from arjunroy/slice_ref_devirt

grpc_slice_refcount devirtualization
Arjun Roy authored 6 years ago, committed via GitHub
commit ffb0b63b59
  1. include/grpc/impl/codegen/slice.h (22 changed lines)
  2. src/core/ext/transport/chttp2/transport/hpack_encoder.cc (2 changed lines)
  3. src/core/ext/transport/chttp2/transport/writing.cc (11 changed lines)
  4. src/core/lib/gprpp/ref_counted.h (16 changed lines)
  5. src/core/lib/iomgr/resource_quota.cc (57 changed lines)
  6. src/core/lib/slice/slice.cc (215 changed lines)
  7. src/core/lib/slice/slice_intern.cc (133 changed lines)
  8. src/core/lib/slice/slice_internal.h (204 changed lines)
  9. src/core/lib/transport/static_metadata.cc (224 changed lines)
  10. src/core/lib/transport/static_metadata.h (3 changed lines)
  11. src/core/lib/transport/transport.cc (90 changed lines)
  12. src/core/lib/transport/transport.h (37 changed lines)
  13. test/core/slice/slice_test.cc (7 changed lines)
  14. test/core/transport/stream_owned_slice_test.cc (3 changed lines)
  15. tools/codegen/core/gen_static_metadata.py (22 changed lines)

@ -40,27 +40,6 @@ typedef struct grpc_slice grpc_slice;
reference ownership semantics (who should call unref?) and mutability
constraints (is the callee allowed to modify the slice?) */
typedef struct grpc_slice_refcount_vtable {
void (*ref)(void*);
void (*unref)(void*);
int (*eq)(grpc_slice a, grpc_slice b);
uint32_t (*hash)(grpc_slice slice);
} grpc_slice_refcount_vtable;
/** Reference count container for grpc_slice. Contains function pointers to
increment and decrement reference counts. Implementations should cleanup
when the reference count drops to zero.
Typically client code should not touch this, and use grpc_slice_malloc,
grpc_slice_new, or grpc_slice_new_with_len instead. */
typedef struct grpc_slice_refcount {
const grpc_slice_refcount_vtable* vtable;
/** If a subset of this slice is taken, use this pointer for the refcount.
Typically points back to the refcount itself, however interning
implementations can use this to avoid a verification step on each hash
or equality check */
struct grpc_slice_refcount* sub_refcount;
} grpc_slice_refcount;
/* Inlined half of grpc_slice is allowed to expand the size of the overall type
by this many bytes */
#define GRPC_SLICE_INLINE_EXTRA_SIZE sizeof(void*)
@ -68,6 +47,7 @@ typedef struct grpc_slice_refcount {
#define GRPC_SLICE_INLINED_SIZE \
(sizeof(size_t) + sizeof(uint8_t*) - 1 + GRPC_SLICE_INLINE_EXTRA_SIZE)
struct grpc_slice_refcount;
/** A grpc_slice s, if initialized, represents the byte range
s.bytes[0..s.length-1].
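
Note (not part of the diff): the public header now only forward-declares struct grpc_slice_refcount; the concrete layout moves to src/core/lib/slice/slice_internal.h. The inlined-slice macros above are unchanged. As a quick standalone sketch of what they evaluate to (plain C++, no gRPC dependency; on a typical LP64 target this prints 23, i.e. 8 + 8 - 1 + 8):

// Standalone sketch: evaluates the inlined-slice capacity defined by the
// macros kept in slice.h.
#include <cstddef>
#include <cstdint>
#include <cstdio>

#define GRPC_SLICE_INLINE_EXTRA_SIZE sizeof(void*)
#define GRPC_SLICE_INLINED_SIZE \
  (sizeof(size_t) + sizeof(uint8_t*) - 1 + GRPC_SLICE_INLINE_EXTRA_SIZE)

int main() {
  std::printf("inlined slice capacity: %zu bytes\n",
              static_cast<size_t>(GRPC_SLICE_INLINED_SIZE));
}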

@ -56,7 +56,7 @@
/* don't consider adding anything bigger than this to the hpack table */
#define MAX_DECODER_SPACE_USAGE 512
static grpc_slice_refcount terminal_slice_refcount = {nullptr, nullptr};
static grpc_slice_refcount terminal_slice_refcount;
static const grpc_slice terminal_slice = {
&terminal_slice_refcount, /* refcount */
{{0, nullptr}} /* data.refcounted */

@ -163,15 +163,6 @@ static void report_stall(grpc_chttp2_transport* t, grpc_chttp2_stream* s,
}
}
static bool stream_ref_if_not_destroyed(gpr_refcount* r) {
gpr_atm count;
do {
count = gpr_atm_acq_load(&r->count);
if (count == 0) return false;
} while (!gpr_atm_rel_cas(&r->count, count, count + 1));
return true;
}
/* How many bytes would we like to put on the wire during a single syscall */
static uint32_t target_write_size(grpc_chttp2_transport* t) {
return 1024 * 1024;
@ -254,7 +245,7 @@ class WriteContext {
while (grpc_chttp2_list_pop_stalled_by_transport(t_, &s)) {
if (t_->closed_with_error == GRPC_ERROR_NONE &&
grpc_chttp2_list_add_writable_stream(t_, s)) {
if (!stream_ref_if_not_destroyed(&s->refcount->refs)) {
if (!s->refcount->refs.RefIfNonZero()) {
grpc_chttp2_list_remove_writable_stream(t_, s);
}
}
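
Note (not part of the diff): the removed stream_ref_if_not_destroyed() CAS loop is replaced by grpc_core::RefCount::RefIfNonZero(). A standalone sketch of the equivalent operation on a std::atomic counter, assuming RefIfNonZero() has these increment-only-if-nonzero semantics:

// Standalone sketch: the compare-exchange loop that RefIfNonZero()
// encapsulates. Increments the counter only if it is currently non-zero,
// so a stream that has already dropped to zero refs is never revived.
#include <atomic>
#include <cassert>
#include <cstdint>

bool IncrementIfNonzero(std::atomic<intptr_t>* count) {
  intptr_t cur = count->load(std::memory_order_acquire);
  do {
    if (cur == 0) return false;  // already destroyed (or being destroyed)
  } while (!count->compare_exchange_weak(cur, cur + 1,
                                         std::memory_order_acq_rel,
                                         std::memory_order_acquire));
  return true;
}

int main() {
  std::atomic<intptr_t> refs{1};
  assert(IncrementIfNonzero(&refs));   // 1 -> 2
  refs.store(0);
  assert(!IncrementIfNonzero(&refs));  // stays at 0
}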

@ -123,6 +123,22 @@ class RefCount {
RefNonZero();
}
bool RefIfNonZero() { return value_.IncrementIfNonzero(); }
bool RefIfNonZero(const DebugLocation& location, const char* reason) {
#ifndef NDEBUG
if (location.Log() && trace_flag_ != nullptr && trace_flag_->enabled()) {
const RefCount::Value old_refs = get();
gpr_log(GPR_INFO,
"%s:%p %s:%d ref_if_non_zero "
"%" PRIdPTR " -> %" PRIdPTR " %s",
trace_flag_->name(), this, location.file(), location.line(),
old_refs, old_refs + 1, reason);
}
#endif
return RefIfNonZero();
}
// Decrements the ref-count and returns true if the ref-count reaches 0.
bool Unref() {
const Value prior = value_.FetchSub(1, MemoryOrder::ACQ_REL);
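
Note (not part of the diff): a hypothetical call site for the new traced overload, assuming gRPC's DEBUG_LOCATION macro from src/core/lib/gprpp/debug_location.h; the logging only fires when NDEBUG is not defined and the associated trace flag is enabled, otherwise the call forwards straight to the plain RefIfNonZero():

// Hypothetical call site (assumes the gRPC internal headers); names such as
// TryRefStream and the "revive" reason are made up for illustration.
#include "src/core/lib/gprpp/debug_location.h"
#include "src/core/lib/gprpp/ref_counted.h"

bool TryRefStream(grpc_core::RefCount* refs) {
  // Logs "<trace_name>:<ptr> file:line ref_if_non_zero N -> N+1 revive"
  // when tracing is enabled; in release builds it is just the atomic op.
  return refs->RefIfNonZero(DEBUG_LOCATION, "revive");
}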

@ -32,6 +32,7 @@
#include "src/core/lib/gpr/useful.h"
#include "src/core/lib/iomgr/combiner.h"
#include "src/core/lib/slice/slice_internal.h"
grpc_core::TraceFlag grpc_resource_quota_trace(false, "resource_quota");
@ -430,41 +431,43 @@ static bool rq_reclaim(grpc_resource_quota* resource_quota, bool destructive) {
* ru_slice: a slice implementation that is backed by a grpc_resource_user
*/
typedef struct {
grpc_slice_refcount base;
gpr_refcount refs;
grpc_resource_user* resource_user;
size_t size;
} ru_slice_refcount;
namespace grpc_core {
static void ru_slice_ref(void* p) {
ru_slice_refcount* rc = static_cast<ru_slice_refcount*>(p);
gpr_ref(&rc->refs);
}
static void ru_slice_unref(void* p) {
ru_slice_refcount* rc = static_cast<ru_slice_refcount*>(p);
if (gpr_unref(&rc->refs)) {
grpc_resource_user_free(rc->resource_user, rc->size);
class RuSliceRefcount {
public:
static void Destroy(void* p) {
auto* rc = static_cast<RuSliceRefcount*>(p);
rc->~RuSliceRefcount();
gpr_free(rc);
}
}
RuSliceRefcount(grpc_resource_user* resource_user, size_t size)
: base_(grpc_slice_refcount::Type::REGULAR, &refs_, Destroy, this,
&base_),
resource_user_(resource_user),
size_(size) {
// Nothing to do here.
}
~RuSliceRefcount() { grpc_resource_user_free(resource_user_, size_); }
grpc_slice_refcount* base_refcount() { return &base_; }
static const grpc_slice_refcount_vtable ru_slice_vtable = {
ru_slice_ref, ru_slice_unref, grpc_slice_default_eq_impl,
grpc_slice_default_hash_impl};
private:
grpc_slice_refcount base_;
RefCount refs_;
grpc_resource_user* resource_user_;
size_t size_;
};
} // namespace grpc_core
static grpc_slice ru_slice_create(grpc_resource_user* resource_user,
size_t size) {
ru_slice_refcount* rc = static_cast<ru_slice_refcount*>(
gpr_malloc(sizeof(ru_slice_refcount) + size));
rc->base.vtable = &ru_slice_vtable;
rc->base.sub_refcount = &rc->base;
gpr_ref_init(&rc->refs, 1);
rc->resource_user = resource_user;
rc->size = size;
auto* rc = static_cast<grpc_core::RuSliceRefcount*>(
gpr_malloc(sizeof(grpc_core::RuSliceRefcount) + size));
new (rc) grpc_core::RuSliceRefcount(resource_user, size);
grpc_slice slice;
slice.refcount = &rc->base;
slice.refcount = rc->base_refcount();
slice.data.refcounted.bytes = reinterpret_cast<uint8_t*>(rc + 1);
slice.data.refcounted.length = size;
return slice;
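
Note (not part of the diff): ru_slice_create() keeps the old single-allocation layout: the refcount header and the payload bytes live in one gpr_malloc block, and the header is initialized with placement new. A standalone sketch of that pattern with hypothetical names and plain malloc:

// Standalone sketch of the header-plus-payload allocation used by
// ru_slice_create(): one malloc block, placement-new for the header,
// payload bytes start at (header + 1).
#include <cstdint>
#include <cstdlib>
#include <new>

struct PayloadHeader {
  explicit PayloadHeader(size_t size) : size(size) {}
  size_t size;
};

uint8_t* AllocWithHeader(size_t size, PayloadHeader** out_header) {
  void* block = std::malloc(sizeof(PayloadHeader) + size);
  auto* header = new (block) PayloadHeader(size);   // placement new
  *out_header = header;
  return reinterpret_cast<uint8_t*>(header + 1);    // payload follows header
}

int main() {
  PayloadHeader* header = nullptr;
  uint8_t* bytes = AllocWithHeader(64, &header);
  bytes[0] = 42;  // payload is writable
  header->~PayloadHeader();
  std::free(header);  // header is the start of the block
}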

@ -26,6 +26,7 @@
#include <string.h>
#include "src/core/lib/gprpp/memory.h"
#include "src/core/lib/gprpp/ref_counted.h"
#include "src/core/lib/iomgr/exec_ctx.h"
@ -67,17 +68,10 @@ void grpc_slice_unref(grpc_slice slice) {
/* grpc_slice_from_static_string support structure - a refcount that does
nothing */
static void noop_ref(void* unused) {}
static void noop_unref(void* unused) {}
static const grpc_slice_refcount_vtable noop_refcount_vtable = {
noop_ref, noop_unref, grpc_slice_default_eq_impl,
grpc_slice_default_hash_impl};
static grpc_slice_refcount noop_refcount = {&noop_refcount_vtable,
&noop_refcount};
static grpc_slice_refcount NoopRefcount;
size_t grpc_slice_memory_usage(grpc_slice s) {
if (s.refcount == nullptr || s.refcount == &noop_refcount) {
if (s.refcount == nullptr || s.refcount == &NoopRefcount) {
return 0;
} else {
return s.data.refcounted.length;
@ -86,7 +80,7 @@ size_t grpc_slice_memory_usage(grpc_slice s) {
grpc_slice grpc_slice_from_static_buffer(const void* s, size_t len) {
grpc_slice slice;
slice.refcount = &noop_refcount;
slice.refcount = &NoopRefcount;
slice.data.refcounted.bytes = (uint8_t*)s;
slice.data.refcounted.length = len;
return slice;
@ -96,45 +90,43 @@ grpc_slice grpc_slice_from_static_string(const char* s) {
return grpc_slice_from_static_buffer(s, strlen(s));
}
namespace grpc_core {
/* grpc_slice_new support structures - we create a refcount object extended
with the user provided data pointer & destroy function */
typedef struct new_slice_refcount {
grpc_slice_refcount rc;
gpr_refcount refs;
void (*user_destroy)(void*);
void* user_data;
} new_slice_refcount;
static void new_slice_ref(void* p) {
new_slice_refcount* r = static_cast<new_slice_refcount*>(p);
gpr_ref(&r->refs);
}
static void new_slice_unref(void* p) {
new_slice_refcount* r = static_cast<new_slice_refcount*>(p);
if (gpr_unref(&r->refs)) {
r->user_destroy(r->user_data);
gpr_free(r);
class NewSliceRefcount {
public:
static void Destroy(void* arg) {
Delete(static_cast<NewSliceRefcount*>(arg));
}
}
static const grpc_slice_refcount_vtable new_slice_vtable = {
new_slice_ref, new_slice_unref, grpc_slice_default_eq_impl,
grpc_slice_default_hash_impl};
NewSliceRefcount(void (*destroy)(void*), void* user_data)
: rc_(grpc_slice_refcount::Type::REGULAR, &refs_, Destroy, this, &rc_),
user_destroy_(destroy),
user_data_(user_data) {}
GPRC_ALLOW_CLASS_TO_USE_NON_PUBLIC_DELETE
grpc_slice_refcount* base_refcount() { return &rc_; }
private:
~NewSliceRefcount() { user_destroy_(user_data_); }
grpc_slice_refcount rc_;
RefCount refs_;
void (*user_destroy_)(void*);
void* user_data_;
};
} // namespace grpc_core
grpc_slice grpc_slice_new_with_user_data(void* p, size_t len,
void (*destroy)(void*),
void* user_data) {
grpc_slice slice;
new_slice_refcount* rc =
static_cast<new_slice_refcount*>(gpr_malloc(sizeof(new_slice_refcount)));
gpr_ref_init(&rc->refs, 1);
rc->rc.vtable = &new_slice_vtable;
rc->rc.sub_refcount = &rc->rc;
rc->user_destroy = destroy;
rc->user_data = user_data;
slice.refcount = &rc->rc;
slice.refcount =
grpc_core::New<grpc_core::NewSliceRefcount>(destroy, user_data)
->base_refcount();
slice.data.refcounted.bytes = static_cast<uint8_t*>(p);
slice.data.refcounted.length = len;
return slice;
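
Note (not part of the diff): the public signature of grpc_slice_new_with_user_data() is unchanged; only the refcount plumbing behind it is. A usage example, assuming the gRPC public headers and library are available:

// Usage example: the destroy callback runs when the last ref on the slice
// is dropped.
#include <grpc/grpc.h>
#include <grpc/slice.h>

#include <cstdio>
#include <cstdlib>
#include <cstring>

static void destroy_buffer(void* p) {
  std::printf("freeing user buffer\n");
  std::free(p);
}

int main() {
  grpc_init();
  char* buf = static_cast<char*>(std::malloc(11));
  std::memcpy(buf, "hello slice", 11);
  // The slice refers to buf directly; destroy_buffer(buf) is called when the
  // refcount reaches zero.
  grpc_slice s = grpc_slice_new_with_user_data(buf, 11, destroy_buffer, buf);
  grpc_slice_unref(s);  // last ref: destroy_buffer runs here
  grpc_shutdown();
}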
@ -145,46 +137,45 @@ grpc_slice grpc_slice_new(void* p, size_t len, void (*destroy)(void*)) {
return grpc_slice_new_with_user_data(p, len, destroy, p);
}
namespace grpc_core {
/* grpc_slice_new_with_len support structures - we create a refcount object
extended with the user provided data pointer & destroy function */
typedef struct new_with_len_slice_refcount {
grpc_slice_refcount rc;
gpr_refcount refs;
void* user_data;
size_t user_length;
void (*user_destroy)(void*, size_t);
} new_with_len_slice_refcount;
static void new_with_len_ref(void* p) {
new_with_len_slice_refcount* r = static_cast<new_with_len_slice_refcount*>(p);
gpr_ref(&r->refs);
}
static void new_with_len_unref(void* p) {
new_with_len_slice_refcount* r = static_cast<new_with_len_slice_refcount*>(p);
if (gpr_unref(&r->refs)) {
r->user_destroy(r->user_data, r->user_length);
gpr_free(r);
class NewWithLenSliceRefcount {
public:
static void Destroy(void* arg) {
Delete(static_cast<NewWithLenSliceRefcount*>(arg));
}
}
static const grpc_slice_refcount_vtable new_with_len_vtable = {
new_with_len_ref, new_with_len_unref, grpc_slice_default_eq_impl,
grpc_slice_default_hash_impl};
NewWithLenSliceRefcount(void (*destroy)(void*, size_t), void* user_data,
size_t user_length)
: rc_(grpc_slice_refcount::Type::REGULAR, &refs_, Destroy, this, &rc_),
user_data_(user_data),
user_length_(user_length),
user_destroy_(destroy) {}
GPRC_ALLOW_CLASS_TO_USE_NON_PUBLIC_DELETE
grpc_slice_refcount* base_refcount() { return &rc_; }
private:
~NewWithLenSliceRefcount() { user_destroy_(user_data_, user_length_); }
grpc_slice_refcount rc_;
RefCount refs_;
void* user_data_;
size_t user_length_;
void (*user_destroy_)(void*, size_t);
};
} // namespace grpc_core
grpc_slice grpc_slice_new_with_len(void* p, size_t len,
void (*destroy)(void*, size_t)) {
grpc_slice slice;
new_with_len_slice_refcount* rc = static_cast<new_with_len_slice_refcount*>(
gpr_malloc(sizeof(new_with_len_slice_refcount)));
gpr_ref_init(&rc->refs, 1);
rc->rc.vtable = &new_with_len_vtable;
rc->rc.sub_refcount = &rc->rc;
rc->user_destroy = destroy;
rc->user_data = p;
rc->user_length = len;
slice.refcount = &rc->rc;
slice.refcount =
grpc_core::New<grpc_core::NewWithLenSliceRefcount>(destroy, p, len)
->base_refcount();
slice.data.refcounted.bytes = static_cast<uint8_t*>(p);
slice.data.refcounted.length = len;
return slice;
@ -203,39 +194,28 @@ grpc_slice grpc_slice_from_copied_string(const char* source) {
namespace {
struct MallocRefCount {
MallocRefCount(const grpc_slice_refcount_vtable* vtable) {
base.vtable = vtable;
base.sub_refcount = &base;
class MallocRefCount {
public:
static void Destroy(void* arg) {
MallocRefCount* r = static_cast<MallocRefCount*>(arg);
r->~MallocRefCount();
gpr_free(r);
}
void Ref() { refs.Ref(); }
void Unref() {
if (refs.Unref()) {
gpr_free(this);
}
}
MallocRefCount()
: base_(grpc_slice_refcount::Type::REGULAR, &refs_, Destroy, this,
&base_) {}
~MallocRefCount() = default;
grpc_slice_refcount* base_refcount() { return &base_; }
grpc_slice_refcount base;
grpc_core::RefCount refs;
private:
grpc_slice_refcount base_;
grpc_core::RefCount refs_;
};
} // namespace
static void malloc_ref(void* p) {
MallocRefCount* r = static_cast<MallocRefCount*>(p);
r->Ref();
}
static void malloc_unref(void* p) {
MallocRefCount* r = static_cast<MallocRefCount*>(p);
r->Unref();
}
static const grpc_slice_refcount_vtable malloc_vtable = {
malloc_ref, malloc_unref, grpc_slice_default_eq_impl,
grpc_slice_default_hash_impl};
grpc_slice grpc_slice_malloc_large(size_t length) {
grpc_slice slice;
@ -248,14 +228,16 @@ grpc_slice grpc_slice_malloc_large(size_t length) {
refcount is a malloc_refcount
bytes is an array of bytes of the requested length
Both parts are placed in the same allocation returned from gpr_malloc */
void* data =
auto* rc =
static_cast<MallocRefCount*>(gpr_malloc(sizeof(MallocRefCount) + length));
auto* rc = new (data) MallocRefCount(&malloc_vtable);
/* Initial refcount on rc is 1 - and it's up to the caller to release
this reference. */
new (rc) MallocRefCount();
/* Build up the slice to be returned. */
/* The slices refcount points back to the allocated block. */
slice.refcount = &rc->base;
slice.refcount = rc->base_refcount();
/* The data bytes are placed immediately after the refcount struct */
slice.data.refcounted.bytes = reinterpret_cast<uint8_t*>(rc + 1);
/* And the length of the block is set to the requested length */
@ -286,7 +268,7 @@ grpc_slice grpc_slice_sub_no_ref(grpc_slice source, size_t begin, size_t end) {
GPR_ASSERT(source.data.refcounted.length >= end);
/* Build the result */
subset.refcount = source.refcount->sub_refcount;
subset.refcount = source.refcount->sub_refcount();
/* Point into the source array */
subset.data.refcounted.bytes = source.data.refcounted.bytes + begin;
subset.data.refcounted.length = end - begin;
@ -312,7 +294,7 @@ grpc_slice grpc_slice_sub(grpc_slice source, size_t begin, size_t end) {
} else {
subset = grpc_slice_sub_no_ref(source, begin, end);
/* Bump the refcount */
subset.refcount->vtable->ref(subset.refcount);
subset.refcount->Ref();
}
return subset;
}
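
Note (not part of the diff): sub-slices still point into the parent's storage and take a reference through the parent's sub-refcount, now reached via the sub_refcount() accessor instead of a struct field. A usage example with the public API, assuming gRPC headers/library; the sub-slice is kept longer than the inlining threshold so it actually shares storage rather than being copied inline:

// Usage example: a long enough sub-slice of a refcounted slice shares the
// parent's backing bytes; no data is copied.
#include <grpc/grpc.h>
#include <grpc/slice.h>
#include <cstdio>

int main() {
  grpc_init();
  grpc_slice parent =
      grpc_slice_from_copied_string("devirtualized slice refcounts");
  // First 25 bytes: above the inline threshold, so the sub-slice stays
  // refcounted and points into parent's allocation.
  grpc_slice sub = grpc_slice_sub(parent, 0, 25);
  std::printf("sub length: %zu\n", GRPC_SLICE_LENGTH(sub));
  grpc_slice_unref(sub);     // parent storage stays alive until...
  grpc_slice_unref(parent);  // ...the last reference is dropped
  grpc_shutdown();
}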
@ -340,23 +322,23 @@ grpc_slice grpc_slice_split_tail_maybe_ref(grpc_slice* source, size_t split,
tail.data.inlined.length = static_cast<uint8_t>(tail_length);
memcpy(tail.data.inlined.bytes, source->data.refcounted.bytes + split,
tail_length);
source->refcount = source->refcount->sub_refcount;
source->refcount = source->refcount->sub_refcount();
} else {
/* Build the result */
switch (ref_whom) {
case GRPC_SLICE_REF_TAIL:
tail.refcount = source->refcount->sub_refcount;
source->refcount = &noop_refcount;
tail.refcount = source->refcount->sub_refcount();
source->refcount = &NoopRefcount;
break;
case GRPC_SLICE_REF_HEAD:
tail.refcount = &noop_refcount;
source->refcount = source->refcount->sub_refcount;
tail.refcount = &NoopRefcount;
source->refcount = source->refcount->sub_refcount();
break;
case GRPC_SLICE_REF_BOTH:
tail.refcount = source->refcount->sub_refcount;
source->refcount = source->refcount->sub_refcount;
tail.refcount = source->refcount->sub_refcount();
source->refcount = source->refcount->sub_refcount();
/* Bump the refcount */
tail.refcount->vtable->ref(tail.refcount);
tail.refcount->Ref();
break;
}
/* Point into the source array */
@ -392,20 +374,20 @@ grpc_slice grpc_slice_split_head(grpc_slice* source, size_t split) {
head.refcount = nullptr;
head.data.inlined.length = static_cast<uint8_t>(split);
memcpy(head.data.inlined.bytes, source->data.refcounted.bytes, split);
source->refcount = source->refcount->sub_refcount;
source->refcount = source->refcount->sub_refcount();
source->data.refcounted.bytes += split;
source->data.refcounted.length -= split;
} else {
GPR_ASSERT(source->data.refcounted.length >= split);
/* Build the result */
head.refcount = source->refcount->sub_refcount;
head.refcount = source->refcount->sub_refcount();
/* Bump the refcount */
head.refcount->vtable->ref(head.refcount);
head.refcount->Ref();
/* Point into the source array */
head.data.refcounted.bytes = source->data.refcounted.bytes;
head.data.refcounted.length = split;
source->refcount = source->refcount->sub_refcount;
source->refcount = source->refcount->sub_refcount();
source->data.refcounted.bytes += split;
source->data.refcounted.length -= split;
}
@ -421,8 +403,9 @@ int grpc_slice_default_eq_impl(grpc_slice a, grpc_slice b) {
}
int grpc_slice_eq(grpc_slice a, grpc_slice b) {
if (a.refcount && b.refcount && a.refcount->vtable == b.refcount->vtable) {
return a.refcount->vtable->eq(a, b);
if (a.refcount && b.refcount &&
a.refcount->GetType() == b.refcount->GetType()) {
return a.refcount->Eq(a, b);
}
return grpc_slice_default_eq_impl(a, b);
}
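
Note (not part of the diff): grpc_slice_eq() now dispatches on the refcount Type rather than comparing vtable pointers; slices whose refcount types differ, or that have no refcount, fall back to the default length-plus-memcmp comparison. A usage example, assuming the gRPC public headers/library:

// Usage example: for plain static-buffer slices the REGULAR path is the
// default byte comparison.
#include <grpc/grpc.h>
#include <grpc/slice.h>
#include <cassert>

int main() {
  grpc_init();
  grpc_slice a = grpc_slice_from_static_string("grpc-encoding");
  grpc_slice b = grpc_slice_from_static_string("grpc-encoding");
  grpc_slice c = grpc_slice_from_static_string("grpc-status");
  assert(grpc_slice_eq(a, b));   // same bytes -> equal
  assert(!grpc_slice_eq(a, c));  // different bytes -> not equal
  // Static-buffer slices need no unref.
  grpc_shutdown();
}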

@ -27,6 +27,7 @@
#include <grpc/support/log.h>
#include "src/core/lib/gpr/murmur_hash.h"
#include "src/core/lib/gprpp/sync.h"
#include "src/core/lib/iomgr/iomgr_internal.h" /* for iomgr_abort_on_leaks() */
#include "src/core/lib/profiling/timers.h"
#include "src/core/lib/slice/slice_string_helpers.h"
@ -39,24 +40,17 @@
#define TABLE_IDX(hash, capacity) (((hash) >> LOG2_SHARD_COUNT) % (capacity))
#define SHARD_IDX(hash) ((hash) & ((1 << LOG2_SHARD_COUNT) - 1))
typedef struct interned_slice_refcount {
grpc_slice_refcount base;
grpc_slice_refcount sub;
size_t length;
gpr_atm refcnt;
uint32_t hash;
struct interned_slice_refcount* bucket_next;
} interned_slice_refcount;
using grpc_core::InternedSliceRefcount;
typedef struct slice_shard {
gpr_mu mu;
interned_slice_refcount** strs;
InternedSliceRefcount** strs;
size_t count;
size_t capacity;
} slice_shard;
/* hash seed: decided at initialization time */
static uint32_t g_hash_seed;
uint32_t g_hash_seed;
static int g_forced_hash_seed = 0;
static slice_shard g_shards[SHARD_COUNT];
@ -69,73 +63,35 @@ typedef struct {
static static_metadata_hash_ent
static_metadata_hash[4 * GRPC_STATIC_MDSTR_COUNT];
static uint32_t max_static_metadata_hash_probe;
static uint32_t static_metadata_hash_values[GRPC_STATIC_MDSTR_COUNT];
uint32_t grpc_static_metadata_hash_values[GRPC_STATIC_MDSTR_COUNT];
static void interned_slice_ref(void* p) {
interned_slice_refcount* s = static_cast<interned_slice_refcount*>(p);
GPR_ASSERT(gpr_atm_no_barrier_fetch_add(&s->refcnt, 1) > 0);
}
namespace grpc_core {
static void interned_slice_destroy(interned_slice_refcount* s) {
slice_shard* shard = &g_shards[SHARD_IDX(s->hash)];
gpr_mu_lock(&shard->mu);
GPR_ASSERT(0 == gpr_atm_no_barrier_load(&s->refcnt));
interned_slice_refcount** prev_next;
interned_slice_refcount* cur;
for (prev_next = &shard->strs[TABLE_IDX(s->hash, shard->capacity)],
InternedSliceRefcount::~InternedSliceRefcount() {
slice_shard* shard = &g_shards[SHARD_IDX(this->hash)];
MutexLock lock(&shard->mu);
InternedSliceRefcount** prev_next;
InternedSliceRefcount* cur;
for (prev_next = &shard->strs[TABLE_IDX(this->hash, shard->capacity)],
cur = *prev_next;
cur != s; prev_next = &cur->bucket_next, cur = cur->bucket_next)
cur != this; prev_next = &cur->bucket_next, cur = cur->bucket_next)
;
*prev_next = cur->bucket_next;
shard->count--;
gpr_free(s);
gpr_mu_unlock(&shard->mu);
}
static void interned_slice_unref(void* p) {
interned_slice_refcount* s = static_cast<interned_slice_refcount*>(p);
if (1 == gpr_atm_full_fetch_add(&s->refcnt, -1)) {
interned_slice_destroy(s);
}
}
static void interned_slice_sub_ref(void* p) {
interned_slice_ref((static_cast<char*>(p)) -
offsetof(interned_slice_refcount, sub));
}
static void interned_slice_sub_unref(void* p) {
interned_slice_unref((static_cast<char*>(p)) -
offsetof(interned_slice_refcount, sub));
}
static uint32_t interned_slice_hash(grpc_slice slice) {
interned_slice_refcount* s =
reinterpret_cast<interned_slice_refcount*>(slice.refcount);
return s->hash;
}
static int interned_slice_eq(grpc_slice a, grpc_slice b) {
return a.refcount == b.refcount;
}
static const grpc_slice_refcount_vtable interned_slice_vtable = {
interned_slice_ref, interned_slice_unref, interned_slice_eq,
interned_slice_hash};
static const grpc_slice_refcount_vtable interned_slice_sub_vtable = {
interned_slice_sub_ref, interned_slice_sub_unref,
grpc_slice_default_eq_impl, grpc_slice_default_hash_impl};
} // namespace grpc_core
static void grow_shard(slice_shard* shard) {
GPR_TIMER_SCOPE("grow_strtab", 0);
size_t capacity = shard->capacity * 2;
size_t i;
interned_slice_refcount** strtab;
interned_slice_refcount *s, *next;
InternedSliceRefcount** strtab;
InternedSliceRefcount *s, *next;
strtab = static_cast<interned_slice_refcount**>(
gpr_zalloc(sizeof(interned_slice_refcount*) * capacity));
strtab = static_cast<InternedSliceRefcount**>(
gpr_zalloc(sizeof(InternedSliceRefcount*) * capacity));
for (i = 0; i < shard->capacity; i++) {
for (s = shard->strs[i]; s; s = next) {
@ -150,7 +106,7 @@ static void grow_shard(slice_shard* shard) {
shard->capacity = capacity;
}
static grpc_slice materialize(interned_slice_refcount* s) {
static grpc_slice materialize(InternedSliceRefcount* s) {
grpc_slice slice;
slice.refcount = &s->base;
slice.data.refcounted.bytes = reinterpret_cast<uint8_t*>(s + 1);
@ -164,7 +120,7 @@ uint32_t grpc_slice_default_hash_impl(grpc_slice s) {
}
uint32_t grpc_static_slice_hash(grpc_slice s) {
return static_metadata_hash_values[GRPC_STATIC_METADATA_INDEX(s)];
return grpc_static_metadata_hash_values[GRPC_STATIC_METADATA_INDEX(s)];
}
int grpc_static_slice_eq(grpc_slice a, grpc_slice b) {
@ -173,7 +129,7 @@ int grpc_static_slice_eq(grpc_slice a, grpc_slice b) {
uint32_t grpc_slice_hash(grpc_slice s) {
return s.refcount == nullptr ? grpc_slice_default_hash_impl(s)
: s.refcount->vtable->hash(s);
: s.refcount->Hash(s);
}
grpc_slice grpc_slice_maybe_static_intern(grpc_slice slice,
@ -197,8 +153,9 @@ grpc_slice grpc_slice_maybe_static_intern(grpc_slice slice,
}
bool grpc_slice_is_interned(const grpc_slice& slice) {
return (slice.refcount && slice.refcount->vtable == &interned_slice_vtable) ||
GRPC_IS_STATIC_METADATA_STRING(slice);
return (slice.refcount &&
(slice.refcount->GetType() == grpc_slice_refcount::Type::INTERNED ||
GRPC_IS_STATIC_METADATA_STRING(slice)));
}
grpc_slice grpc_slice_intern(grpc_slice slice) {
@ -208,6 +165,7 @@ grpc_slice grpc_slice_intern(grpc_slice slice) {
}
uint32_t hash = grpc_slice_hash(slice);
for (uint32_t i = 0; i <= max_static_metadata_hash_probe; i++) {
static_metadata_hash_ent ent =
static_metadata_hash[(hash + i) % GPR_ARRAY_SIZE(static_metadata_hash)];
@ -217,7 +175,7 @@ grpc_slice grpc_slice_intern(grpc_slice slice) {
}
}
interned_slice_refcount* s;
InternedSliceRefcount* s;
slice_shard* shard = &g_shards[SHARD_IDX(hash)];
gpr_mu_lock(&shard->mu);
@ -226,14 +184,7 @@ grpc_slice grpc_slice_intern(grpc_slice slice) {
size_t idx = TABLE_IDX(hash, shard->capacity);
for (s = shard->strs[idx]; s; s = s->bucket_next) {
if (s->hash == hash && grpc_slice_eq(slice, materialize(s))) {
if (gpr_atm_no_barrier_fetch_add(&s->refcnt, 1) == 0) {
/* If we get here, we've added a ref to something that was about to
* die - drop it immediately.
* The *only* possible path here (given the shard mutex) should be to
* drop from one ref back to zero - assert that with a CAS */
GPR_ASSERT(gpr_atm_rel_cas(&s->refcnt, 1, 0));
/* and treat this as if we were never here... sshhh */
} else {
if (s->refcnt.RefIfNonZero()) {
gpr_mu_unlock(&shard->mu);
return materialize(s);
}
@ -242,27 +193,20 @@ grpc_slice grpc_slice_intern(grpc_slice slice) {
/* not found: create a new string */
/* string data goes after the internal_string header */
s = static_cast<interned_slice_refcount*>(
s = static_cast<InternedSliceRefcount*>(
gpr_malloc(sizeof(*s) + GRPC_SLICE_LENGTH(slice)));
gpr_atm_rel_store(&s->refcnt, 1);
s->length = GRPC_SLICE_LENGTH(slice);
s->hash = hash;
s->base.vtable = &interned_slice_vtable;
s->base.sub_refcount = &s->sub;
s->sub.vtable = &interned_slice_sub_vtable;
s->sub.sub_refcount = &s->sub;
s->bucket_next = shard->strs[idx];
shard->strs[idx] = s;
memcpy(s + 1, GRPC_SLICE_START_PTR(slice), GRPC_SLICE_LENGTH(slice));
new (s) grpc_core::InternedSliceRefcount(GRPC_SLICE_LENGTH(slice), hash,
shard->strs[idx]);
memcpy(reinterpret_cast<char*>(s + 1), GRPC_SLICE_START_PTR(slice),
GRPC_SLICE_LENGTH(slice));
shard->strs[idx] = s;
shard->count++;
if (shard->count > shard->capacity * 2) {
grow_shard(shard);
}
gpr_mu_unlock(&shard->mu);
return materialize(s);
}
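
Note (not part of the diff): interning the same bytes twice resolves to the same InternedSliceRefcount, which is what makes the INTERNED equality fast path a pointer compare. A usage example, assuming the gRPC public headers/library; interning requires grpc_init():

// Usage example: two interned copies of the same bytes share one refcount
// object, so equality reduces to comparing refcount pointers.
#include <grpc/grpc.h>
#include <grpc/slice.h>
#include <cassert>

int main() {
  grpc_init();
  grpc_slice a =
      grpc_slice_intern(grpc_slice_from_static_string("x-my-header"));
  grpc_slice b =
      grpc_slice_intern(grpc_slice_from_static_string("x-my-header"));
  assert(a.refcount == b.refcount);  // same interned object
  assert(grpc_slice_eq(a, b));       // INTERNED fast path: pointer compare
  grpc_slice_unref(a);
  grpc_slice_unref(b);
  grpc_shutdown();
}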
@ -280,7 +224,7 @@ void grpc_slice_intern_init(void) {
gpr_mu_init(&shard->mu);
shard->count = 0;
shard->capacity = INITIAL_SHARD_CAPACITY;
shard->strs = static_cast<interned_slice_refcount**>(
shard->strs = static_cast<InternedSliceRefcount**>(
gpr_zalloc(sizeof(*shard->strs) * shard->capacity));
}
for (size_t i = 0; i < GPR_ARRAY_SIZE(static_metadata_hash); i++) {
@ -289,13 +233,13 @@ void grpc_slice_intern_init(void) {
}
max_static_metadata_hash_probe = 0;
for (size_t i = 0; i < GRPC_STATIC_MDSTR_COUNT; i++) {
static_metadata_hash_values[i] =
grpc_static_metadata_hash_values[i] =
grpc_slice_default_hash_impl(grpc_static_slice_table[i]);
for (size_t j = 0; j < GPR_ARRAY_SIZE(static_metadata_hash); j++) {
size_t slot = (static_metadata_hash_values[i] + j) %
size_t slot = (grpc_static_metadata_hash_values[i] + j) %
GPR_ARRAY_SIZE(static_metadata_hash);
if (static_metadata_hash[slot].idx == GRPC_STATIC_MDSTR_COUNT) {
static_metadata_hash[slot].hash = static_metadata_hash_values[i];
static_metadata_hash[slot].hash = grpc_static_metadata_hash_values[i];
static_metadata_hash[slot].idx = static_cast<uint32_t>(i);
if (j > max_static_metadata_hash_probe) {
max_static_metadata_hash_probe = static_cast<uint32_t>(j);
@ -315,8 +259,7 @@ void grpc_slice_intern_shutdown(void) {
gpr_log(GPR_DEBUG, "WARNING: %" PRIuPTR " metadata strings were leaked",
shard->count);
for (size_t j = 0; j < shard->capacity; j++) {
for (interned_slice_refcount* s = shard->strs[j]; s;
s = s->bucket_next) {
for (InternedSliceRefcount* s = shard->strs[j]; s; s = s->bucket_next) {
char* text =
grpc_dump_slice(materialize(s), GPR_DUMP_HEX | GPR_DUMP_ASCII);
gpr_log(GPR_DEBUG, "LEAKED: %s", text);

@ -23,17 +23,215 @@
#include <grpc/slice.h>
#include <grpc/slice_buffer.h>
#include <string.h>
inline const grpc_slice& grpc_slice_ref_internal(const grpc_slice& slice) {
#include "src/core/lib/gpr/murmur_hash.h"
#include "src/core/lib/gprpp/ref_counted.h"
#include "src/core/lib/transport/static_metadata.h"
// Interned slices have specific fast-path operations for hashing. To inline
// these operations, we need to forward declare them here.
extern uint32_t grpc_static_metadata_hash_values[GRPC_STATIC_MDSTR_COUNT];
extern uint32_t g_hash_seed;
// grpc_slice_refcount : A reference count for grpc_slice.
//
// Non-inlined grpc_slice objects are refcounted. Historically this was
// implemented via grpc_slice_refcount, a C-style polymorphic class using a
// manually managed vtable of operations. Subclasses would define their own
// vtable; the 'virtual' methods (ref, unref, equals and hash) would simply call
// the function pointers in the vtable as necessary.
//
// Unfortunately, this leads to some inefficiencies in the generated code that
// can be improved upon. For example, equality checking for interned slices is a
// simple equality check on the refcount pointer. With the vtable approach, this
// would translate to roughly the following (high-level) instructions:
//
// grpc_slice_equals(slice1, slice2):
// load vtable->eq -> eq_func
// call eq_func(slice1, slice2)
//
// interned_slice_equals(slice1, slice2)
// load slice1.ref -> r1
// load slice2.ref -> r2
// cmp r1, r2 -> retval
// ret retval
//
// This leads to a function call for a function defined in another translation
// unit, which imposes memory barriers, which reduces the compiler's ability to
// optimize (in addition to the added overhead of call/ret). Additionally, it
// may be harder to reason about branch prediction when we're jumping to
// essentially arbitrarily provided function pointers.
//
// In addition, it is arguable that while virtualization was helpful for the
// Equals()/Hash() methods, it was fundamentally unnecessary for
// Ref()/Unref().
//
// Instead, grpc_slice_refcount provides the same functionality as the C-style
// virtual class, but in a de-virtualized manner - Eq(), Hash(), Ref() and
// Unref() are provided within this header file. Fastpaths for Eq()/Hash()
// (interned and static metadata slices), as well as the Ref() operation, can
// all be inlined without any memory barriers.
//
// It does this by:
// 1. Using grpc_core::RefCount<> (header-only) for Ref/Unref. Two special cases
// need support: No-op ref/unref (eg. static metadata slices) and stream
// slice references (where all the slices share the streamref). This is in
// addition to the normal case of '1 slice, 1 ref'.
// To support these cases, we explicitly track a nullable pointer to the
// underlying RefCount<>. No-op ref/unref is used by checking the pointer for
// null, and doing nothing if it is. Both stream slice refs and 'normal'
// slices use the same path for Ref/Unref (by targeting the non-null
// pointer).
//
// 2. introducing the notion of grpc_slice_refcount::Type. This describes if a
// slice ref is used by a static metadata slice, an interned slice, or other
// slices. We switch on the slice ref type in order to provide fastpaths for
// Equals() and Hash().
//
// In total, this saves us roughly 1-2% latency for unary calls, with smaller
// calls benefitting. The effect is present, but not as useful, for larger calls
// where the cost of sending the data dominates.
struct grpc_slice_refcount {
public:
enum class Type {
STATIC, // Refcount for a static metadata slice.
INTERNED, // Refcount for an interned slice.
REGULAR // Refcount for non-static-metadata, non-interned slices.
};
typedef void (*DestroyerFn)(void*);
grpc_slice_refcount() = default;
explicit grpc_slice_refcount(grpc_slice_refcount* sub) : sub_refcount_(sub) {}
// Regular constructor for grpc_slice_refcount.
//
// Parameters:
// 1. grpc_slice_refcount::Type type
// Whether we are the refcount for a static
// metadata slice, an interned slice, or any other kind of slice.
//
// 2. RefCount* ref
// The pointer to the actual underlying grpc_core::RefCount. Rather than
// performing struct offset computations as in the original implementation to
// get to the refcount, which requires a virtual method, we devirtualize by
// using a nullable pointer to allow a single pair of Ref/Unref methods.
//
// 3. DestroyerFn destroyer_fn
// Called when the refcount goes to 0, with destroyer_arg as parameter.
//
// 4. void* destroyer_arg
// Argument for the virtualized destructor.
//
// 5. grpc_slice_refcount* sub
// Argument used for interned slices.
grpc_slice_refcount(grpc_slice_refcount::Type type, grpc_core::RefCount* ref,
DestroyerFn destroyer_fn, void* destroyer_arg,
grpc_slice_refcount* sub)
: ref_(ref),
ref_type_(type),
sub_refcount_(sub),
dest_fn_(destroyer_fn),
destroy_fn_arg_(destroyer_arg) {}
// Initializer for static refcounts.
grpc_slice_refcount(grpc_slice_refcount* sub, Type type)
: ref_type_(type), sub_refcount_(sub) {}
Type GetType() const { return ref_type_; }
int Eq(const grpc_slice& a, const grpc_slice& b);
uint32_t Hash(const grpc_slice& slice);
void Ref() {
if (ref_ == nullptr) return;
ref_->RefNonZero();
}
void Unref() {
if (ref_ == nullptr) return;
if (ref_->Unref()) {
dest_fn_(destroy_fn_arg_);
}
}
grpc_slice_refcount* sub_refcount() const { return sub_refcount_; }
private:
grpc_core::RefCount* ref_ = nullptr;
const Type ref_type_ = Type::REGULAR;
grpc_slice_refcount* sub_refcount_ = this;
DestroyerFn dest_fn_ = nullptr;
void* destroy_fn_arg_ = nullptr;
};
namespace grpc_core {
struct InternedSliceRefcount {
static void Destroy(void* arg) {
auto* rc = static_cast<InternedSliceRefcount*>(arg);
rc->~InternedSliceRefcount();
gpr_free(rc);
}
InternedSliceRefcount(size_t length, uint32_t hash,
InternedSliceRefcount* bucket_next)
: base(grpc_slice_refcount::Type::INTERNED, &refcnt, Destroy, this, &sub),
sub(grpc_slice_refcount::Type::REGULAR, &refcnt, Destroy, this, &sub),
length(length),
hash(hash),
bucket_next(bucket_next) {}
~InternedSliceRefcount();
grpc_slice_refcount base;
grpc_slice_refcount sub;
const size_t length;
RefCount refcnt;
const uint32_t hash;
InternedSliceRefcount* bucket_next;
};
} // namespace grpc_core
inline int grpc_slice_refcount::Eq(const grpc_slice& a, const grpc_slice& b) {
switch (ref_type_) {
case Type::STATIC:
return GRPC_STATIC_METADATA_INDEX(a) == GRPC_STATIC_METADATA_INDEX(b);
case Type::INTERNED:
return a.refcount == b.refcount;
case Type::REGULAR:
break;
}
if (GRPC_SLICE_LENGTH(a) != GRPC_SLICE_LENGTH(b)) return false;
if (GRPC_SLICE_LENGTH(a) == 0) return true;
return 0 == memcmp(GRPC_SLICE_START_PTR(a), GRPC_SLICE_START_PTR(b),
GRPC_SLICE_LENGTH(a));
}
inline uint32_t grpc_slice_refcount::Hash(const grpc_slice& slice) {
switch (ref_type_) {
case Type::STATIC:
return ::grpc_static_metadata_hash_values[GRPC_STATIC_METADATA_INDEX(
slice)];
case Type::INTERNED:
return reinterpret_cast<grpc_core::InternedSliceRefcount*>(slice.refcount)
->hash;
case Type::REGULAR:
break;
}
return gpr_murmur_hash3(GRPC_SLICE_START_PTR(slice), GRPC_SLICE_LENGTH(slice),
g_hash_seed);
}
inline grpc_slice grpc_slice_ref_internal(const grpc_slice& slice) {
if (slice.refcount) {
slice.refcount->vtable->ref(slice.refcount);
slice.refcount->Ref();
}
return slice;
}
inline void grpc_slice_unref_internal(const grpc_slice& slice) {
if (slice.refcount) {
slice.refcount->vtable->unref(slice.refcount);
slice.refcount->Unref();
}
}
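
Note (not part of the diff): the comment block above is the core of the change: replace per-slice vtables with a Type tag plus a nullable RefCount pointer so Ref/Unref/Eq/Hash can be inlined. A compact standalone model of that devirtualization, using hypothetical type names rather than gRPC's actual classes:

// Standalone model: instead of calling through a vtable of function
// pointers, the refcount stores a type tag and a nullable counter pointer,
// so the hot paths inline to a branch or a pointer compare.
#include <atomic>
#include <cassert>

struct SliceRef {
  enum class Type { STATIC, INTERNED, REGULAR };
  using DestroyFn = void (*)(void*);

  SliceRef() = default;  // no counter: Ref()/Unref() are no-ops
  SliceRef(Type t, std::atomic<long>* counter, DestroyFn destroy, void* arg)
      : type(t), counter(counter), destroy(destroy), arg(arg) {}

  void Ref() {
    if (counter == nullptr) return;  // static / no-op refs
    counter->fetch_add(1, std::memory_order_relaxed);
  }
  void Unref() {
    if (counter == nullptr) return;
    if (counter->fetch_sub(1, std::memory_order_acq_rel) == 1) destroy(arg);
  }
  // Interned slices compare by identity; everything else falls back to the
  // caller's byte comparison.
  bool EqFastPath(const SliceRef* other) const {
    return type == Type::INTERNED && this == other;
  }

  Type type = Type::REGULAR;
  std::atomic<long>* counter = nullptr;
  DestroyFn destroy = nullptr;
  void* arg = nullptr;
};

int main() {
  std::atomic<long> refs{1};
  bool destroyed = false;
  SliceRef rc(SliceRef::Type::REGULAR, &refs,
              [](void* d) { *static_cast<bool*>(d) = true; }, &destroyed);
  rc.Ref();    // 1 -> 2
  rc.Unref();  // 2 -> 1
  rc.Unref();  // 1 -> 0: destroyer runs
  assert(destroyed);

  SliceRef noop;  // default-constructed: Ref/Unref do nothing
  noop.Ref();
  noop.Unref();
}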

@ -116,123 +116,115 @@ static uint8_t g_bytes[] = {
103, 122, 105, 112, 105, 100, 101, 110, 116, 105, 116, 121, 44, 100, 101,
102, 108, 97, 116, 101, 44, 103, 122, 105, 112};
static void static_ref(void* unused) {}
static void static_unref(void* unused) {}
static const grpc_slice_refcount_vtable static_sub_vtable = {
static_ref, static_unref, grpc_slice_default_eq_impl,
grpc_slice_default_hash_impl};
const grpc_slice_refcount_vtable grpc_static_metadata_vtable = {
static_ref, static_unref, grpc_static_slice_eq, grpc_static_slice_hash};
static grpc_slice_refcount static_sub_refcnt = {&static_sub_vtable,
&static_sub_refcnt};
static grpc_slice_refcount static_sub_refcnt;
grpc_slice_refcount grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT] = {
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
{&grpc_static_metadata_vtable, &static_sub_refcnt},
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
grpc_slice_refcount(&static_sub_refcnt, grpc_slice_refcount::Type::STATIC),
};
const grpc_slice grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT] = {

@ -256,12 +256,11 @@ extern const grpc_slice grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT];
#define GRPC_MDSTR_IDENTITY_COMMA_DEFLATE_COMMA_GZIP \
(grpc_static_slice_table[106])
extern const grpc_slice_refcount_vtable grpc_static_metadata_vtable;
extern grpc_slice_refcount
grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT];
#define GRPC_IS_STATIC_METADATA_STRING(slice) \
((slice).refcount != NULL && \
(slice).refcount->vtable == &grpc_static_metadata_vtable)
(slice).refcount->GetType() == grpc_slice_refcount::Type::STATIC)
#define GRPC_STATIC_METADATA_INDEX(static_slice) \
((int)((static_slice).refcount - grpc_static_metadata_refcounts))
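
Note (not part of the diff): GRPC_STATIC_METADATA_INDEX relies on static slices pointing their refcount field into the grpc_static_metadata_refcounts array, so the index falls out of pointer subtraction. A standalone sketch with hypothetical types:

// Standalone sketch of the index-by-pointer-subtraction trick used by
// GRPC_STATIC_METADATA_INDEX (the real array is
// grpc_static_metadata_refcounts with GRPC_STATIC_MDSTR_COUNT entries).
#include <cassert>

struct RefcountStub {};

static RefcountStub static_refcounts[8];

struct SliceStub {
  RefcountStub* refcount;
};

static int StaticIndex(const SliceStub& s) {
  // Valid only when s.refcount actually points into static_refcounts.
  return static_cast<int>(s.refcount - static_refcounts);
}

int main() {
  SliceStub s{&static_refcounts[5]};
  assert(StaticIndex(s) == 5);
}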

@ -39,72 +39,39 @@
grpc_core::DebugOnlyTraceFlag grpc_trace_stream_refcount(false,
"stream_refcount");
#ifndef NDEBUG
void grpc_stream_ref(grpc_stream_refcount* refcount, const char* reason) {
if (grpc_trace_stream_refcount.enabled()) {
gpr_atm val = gpr_atm_no_barrier_load(&refcount->refs.count);
gpr_log(GPR_DEBUG, "%s %p:%p REF %" PRIdPTR "->%" PRIdPTR " %s",
refcount->object_type, refcount, refcount->destroy.cb_arg, val,
val + 1, reason);
void grpc_stream_destroy(grpc_stream_refcount* refcount) {
if (!grpc_iomgr_is_any_background_poller_thread() &&
(grpc_core::ExecCtx::Get()->flags() &
GRPC_EXEC_CTX_FLAG_THREAD_RESOURCE_LOOP)) {
/* Ick.
The thread we're running on MAY be owned (indirectly) by a call-stack.
If that's the case, destroying the call-stack MAY try to destroy the
thread, which is a tangled mess that we just don't want to ever have to
cope with.
Throw this over to the executor (on a core-owned thread) and process it
there. */
refcount->destroy.scheduler =
grpc_core::Executor::Scheduler(grpc_core::ExecutorJobType::SHORT);
}
#else
void grpc_stream_ref(grpc_stream_refcount* refcount) {
#endif
gpr_ref_non_zero(&refcount->refs);
GRPC_CLOSURE_SCHED(&refcount->destroy, GRPC_ERROR_NONE);
}
#ifndef NDEBUG
void grpc_stream_unref(grpc_stream_refcount* refcount, const char* reason) {
if (grpc_trace_stream_refcount.enabled()) {
gpr_atm val = gpr_atm_no_barrier_load(&refcount->refs.count);
gpr_log(GPR_DEBUG, "%s %p:%p UNREF %" PRIdPTR "->%" PRIdPTR " %s",
refcount->object_type, refcount, refcount->destroy.cb_arg, val,
val - 1, reason);
}
#else
void grpc_stream_unref(grpc_stream_refcount* refcount) {
#endif
if (gpr_unref(&refcount->refs)) {
if (!grpc_iomgr_is_any_background_poller_thread() &&
(grpc_core::ExecCtx::Get()->flags() &
GRPC_EXEC_CTX_FLAG_THREAD_RESOURCE_LOOP)) {
/* Ick.
The thread we're running on MAY be owned (indirectly) by a call-stack.
If that's the case, destroying the call-stack MAY try to destroy the
thread, which is a tangled mess that we just don't want to ever have to
cope with.
Throw this over to the executor (on a core-owned thread) and process it
there. */
refcount->destroy.scheduler =
grpc_core::Executor::Scheduler(grpc_core::ExecutorJobType::SHORT);
}
GRPC_CLOSURE_SCHED(&refcount->destroy, GRPC_ERROR_NONE);
}
void slice_stream_destroy(void* arg) {
grpc_stream_destroy(static_cast<grpc_stream_refcount*>(arg));
}
#define STREAM_REF_FROM_SLICE_REF(p) \
((grpc_stream_refcount*)(((uint8_t*)p) - \
offsetof(grpc_stream_refcount, slice_refcount)))
static void slice_stream_ref(void* p) {
#ifndef NDEBUG
grpc_stream_ref(STREAM_REF_FROM_SLICE_REF(p), "slice");
#else
grpc_stream_ref(STREAM_REF_FROM_SLICE_REF(p));
#endif
}
static void slice_stream_unref(void* p) {
grpc_slice grpc_slice_from_stream_owned_buffer(grpc_stream_refcount* refcount,
void* buffer, size_t length) {
#ifndef NDEBUG
grpc_stream_unref(STREAM_REF_FROM_SLICE_REF(p), "slice");
grpc_stream_ref(STREAM_REF_FROM_SLICE_REF(&refcount->slice_refcount),
"slice");
#else
grpc_stream_unref(STREAM_REF_FROM_SLICE_REF(p));
grpc_stream_ref(STREAM_REF_FROM_SLICE_REF(&refcount->slice_refcount));
#endif
}
grpc_slice grpc_slice_from_stream_owned_buffer(grpc_stream_refcount* refcount,
void* buffer, size_t length) {
slice_stream_ref(&refcount->slice_refcount);
grpc_slice res;
res.refcount = &refcount->slice_refcount;
res.data.refcounted.bytes = static_cast<uint8_t*>(buffer);
@ -112,13 +79,6 @@ grpc_slice grpc_slice_from_stream_owned_buffer(grpc_stream_refcount* refcount,
return res;
}
static const grpc_slice_refcount_vtable stream_ref_slice_vtable = {
slice_stream_ref, /* ref */
slice_stream_unref, /* unref */
grpc_slice_default_eq_impl, /* eq */
grpc_slice_default_hash_impl /* hash */
};
#ifndef NDEBUG
void grpc_stream_ref_init(grpc_stream_refcount* refcount, int initial_refs,
grpc_iomgr_cb_func cb, void* cb_arg,
@ -128,10 +88,12 @@ void grpc_stream_ref_init(grpc_stream_refcount* refcount, int initial_refs,
void grpc_stream_ref_init(grpc_stream_refcount* refcount, int initial_refs,
grpc_iomgr_cb_func cb, void* cb_arg) {
#endif
gpr_ref_init(&refcount->refs, initial_refs);
GRPC_CLOSURE_INIT(&refcount->destroy, cb, cb_arg, grpc_schedule_on_exec_ctx);
refcount->slice_refcount.vtable = &stream_ref_slice_vtable;
refcount->slice_refcount.sub_refcount = &refcount->slice_refcount;
new (&refcount->refs) grpc_core::RefCount();
new (&refcount->slice_refcount) grpc_slice_refcount(
grpc_slice_refcount::Type::REGULAR, &refcount->refs, slice_stream_destroy,
refcount, &refcount->slice_refcount);
}
static void move64(uint64_t* from, uint64_t* to) {

@ -30,6 +30,7 @@
#include "src/core/lib/iomgr/polling_entity.h"
#include "src/core/lib/iomgr/pollset.h"
#include "src/core/lib/iomgr/pollset_set.h"
#include "src/core/lib/slice/slice_internal.h"
#include "src/core/lib/transport/byte_stream.h"
#include "src/core/lib/transport/metadata_batch.h"
@ -51,7 +52,7 @@ typedef struct grpc_stream grpc_stream;
extern grpc_core::DebugOnlyTraceFlag grpc_trace_stream_refcount;
typedef struct grpc_stream_refcount {
gpr_refcount refs;
grpc_core::RefCount refs;
grpc_closure destroy;
#ifndef NDEBUG
const char* object_type;
@ -63,19 +64,45 @@ typedef struct grpc_stream_refcount {
void grpc_stream_ref_init(grpc_stream_refcount* refcount, int initial_refs,
grpc_iomgr_cb_func cb, void* cb_arg,
const char* object_type);
void grpc_stream_ref(grpc_stream_refcount* refcount, const char* reason);
void grpc_stream_unref(grpc_stream_refcount* refcount, const char* reason);
#define GRPC_STREAM_REF_INIT(rc, ir, cb, cb_arg, objtype) \
grpc_stream_ref_init(rc, ir, cb, cb_arg, objtype)
#else
void grpc_stream_ref_init(grpc_stream_refcount* refcount, int initial_refs,
grpc_iomgr_cb_func cb, void* cb_arg);
void grpc_stream_ref(grpc_stream_refcount* refcount);
void grpc_stream_unref(grpc_stream_refcount* refcount);
#define GRPC_STREAM_REF_INIT(rc, ir, cb, cb_arg, objtype) \
grpc_stream_ref_init(rc, ir, cb, cb_arg)
#endif
#ifndef NDEBUG
inline void grpc_stream_ref(grpc_stream_refcount* refcount,
const char* reason) {
if (grpc_trace_stream_refcount.enabled()) {
gpr_log(GPR_DEBUG, "%s %p:%p REF %s", refcount->object_type, refcount,
refcount->destroy.cb_arg, reason);
}
#else
inline void grpc_stream_ref(grpc_stream_refcount* refcount) {
#endif
refcount->refs.RefNonZero();
}
void grpc_stream_destroy(grpc_stream_refcount* refcount);
#ifndef NDEBUG
inline void grpc_stream_unref(grpc_stream_refcount* refcount,
const char* reason) {
if (grpc_trace_stream_refcount.enabled()) {
gpr_log(GPR_DEBUG, "%s %p:%p UNREF %s", refcount->object_type, refcount,
refcount->destroy.cb_arg, reason);
}
#else
inline void grpc_stream_unref(grpc_stream_refcount* refcount) {
#endif
if (refcount->refs.Unref()) {
grpc_stream_destroy(refcount);
}
}
/* Wrap a buffer that is owned by some stream object into a slice that shares
the same refcount */
grpc_slice grpc_slice_from_stream_owned_buffer(grpc_stream_refcount* refcount,
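
Note (not part of the diff): the stream's RefCount now doubles as the slice refcount: grpc_stream_ref_init placement-constructs a grpc_slice_refcount that targets refcount->refs, so every slice returned by grpc_slice_from_stream_owned_buffer keeps the stream alive. A standalone model of that sharing, with hypothetical types:

// Standalone model: the stream holds one counter, and each buffer view takes
// a ref on that same counter, so the stream is torn down only after the last
// view goes away.
#include <atomic>
#include <cassert>
#include <cstddef>
#include <cstdint>

struct Stream {
  explicit Stream(long initial_refs) : refs(initial_refs) {}
  std::atomic<long> refs;
  uint8_t buffer[64] = {};
  bool destroyed = false;
  void Unref() {
    if (refs.fetch_sub(1, std::memory_order_acq_rel) == 1) destroyed = true;
  }
};

struct BufferView {
  Stream* owner;
  uint8_t* bytes;
  size_t length;
};

BufferView ViewOf(Stream* s, size_t offset, size_t length) {
  s->refs.fetch_add(1, std::memory_order_relaxed);  // view shares the ref
  return BufferView{s, s->buffer + offset, length};
}

void ReleaseView(BufferView* v) { v->owner->Unref(); }

int main() {
  Stream stream(1);                       // like GRPC_STREAM_REF_INIT(&r, 1, ...)
  BufferView v = ViewOf(&stream, 0, 16);  // like grpc_slice_from_stream_owned_buffer
  stream.Unref();                         // stream's own ref dropped
  assert(!stream.destroyed);              // view still holds a ref
  ReleaseView(&v);
  assert(stream.destroyed);               // last ref gone
}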

@ -51,13 +51,6 @@ static void test_slice_malloc_returns_something_sensible(void) {
}
/* Returned slice length must be what was requested. */
GPR_ASSERT(GRPC_SLICE_LENGTH(slice) == length);
/* If the slice has a refcount, it must be destroyable. */
if (slice.refcount) {
GPR_ASSERT(slice.refcount->vtable != nullptr);
GPR_ASSERT(slice.refcount->vtable->ref != nullptr);
GPR_ASSERT(slice.refcount->vtable->unref != nullptr);
GPR_ASSERT(slice.refcount->vtable->hash != nullptr);
}
/* We must be able to write to every byte of the data */
for (i = 0; i < length; i++) {
GRPC_SLICE_START_PTR(slice)[i] = static_cast<uint8_t>(i);

@ -32,14 +32,11 @@ int main(int argc, char** argv) {
uint8_t buffer[] = "abc123";
grpc_stream_refcount r;
GRPC_STREAM_REF_INIT(&r, 1, do_nothing, nullptr, "test");
GPR_ASSERT(r.refs.count == 1);
grpc_slice slice =
grpc_slice_from_stream_owned_buffer(&r, buffer, sizeof(buffer));
GPR_ASSERT(GRPC_SLICE_START_PTR(slice) == buffer);
GPR_ASSERT(GRPC_SLICE_LENGTH(slice) == sizeof(buffer));
GPR_ASSERT(r.refs.count == 2);
grpc_slice_unref(slice);
GPR_ASSERT(r.refs.count == 1);
grpc_shutdown();
return 0;

@ -394,7 +394,7 @@ for i, elem in enumerate(all_strs):
def slice_def(i):
return ('{&grpc_static_metadata_refcounts[%d],'
' {{g_bytes+%d, %d}}}') % (i, id2strofs[i], len(all_strs[i]))
' {{%d, g_bytes+%d}}}') % (i, len(all_strs[i]), id2strofs[i])
# validate configuration
@ -412,29 +412,19 @@ print >> H
print >> C, 'static uint8_t g_bytes[] = {%s};' % (','.join(
'%d' % ord(c) for c in ''.join(all_strs)))
print >> C
print >> C, 'static void static_ref(void *unused) {}'
print >> C, 'static void static_unref(void *unused) {}'
print >> C, ('static const grpc_slice_refcount_vtable static_sub_vtable = '
'{static_ref, static_unref, grpc_slice_default_eq_impl, '
'grpc_slice_default_hash_impl};')
print >> H, ('extern const grpc_slice_refcount_vtable '
'grpc_static_metadata_vtable;')
print >> C, ('const grpc_slice_refcount_vtable grpc_static_metadata_vtable = '
'{static_ref, static_unref, grpc_static_slice_eq, '
'grpc_static_slice_hash};')
print >> C, ('static grpc_slice_refcount static_sub_refcnt = '
'{&static_sub_vtable, &static_sub_refcnt};')
print >> C, ('static grpc_slice_refcount static_sub_refcnt;')
print >> H, ('extern grpc_slice_refcount '
'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT];')
print >> C, ('grpc_slice_refcount '
'grpc_static_metadata_refcounts[GRPC_STATIC_MDSTR_COUNT] = {')
for i, elem in enumerate(all_strs):
print >> C, ' {&grpc_static_metadata_vtable, &static_sub_refcnt},'
print >> C, (' grpc_slice_refcount(&static_sub_refcnt, '
'grpc_slice_refcount::Type::STATIC), ')
print >> C, '};'
print >> C
print >> H, '#define GRPC_IS_STATIC_METADATA_STRING(slice) \\'
print >> H, (' ((slice).refcount != NULL && (slice).refcount->vtable == '
'&grpc_static_metadata_vtable)')
print >> H, (' ((slice).refcount != NULL && (slice).refcount->GetType() == '
'grpc_slice_refcount::Type::STATIC)')
print >> H
print >> C, ('const grpc_slice grpc_static_slice_table[GRPC_STATIC_MDSTR_COUNT]'
' = {')
