Export of internal Abseil changes

--
51e9291d9bc385082b4061fe760b236ba59c79c3 by Abseil Team <absl-team@google.com>:

Cast to uint64_t using braces instead of parentheses.

PiperOrigin-RevId: 372475909

--
939fc409855da2639dcaf2d1d4971ca0e0568d03 by Martijn Vels <mvels@google.com>:

Expand # flat nodes into size-detailed counters.

PiperOrigin-RevId: 372474608

--
54b158c99b32f8a14821ce74fed0f9f836525ce7 by Martijn Vels <mvels@google.com>:

Make copies of sampled cords sampled as well

This applies to the following methods (a short sketch follows below):
- Cord(const Cord&)
- operator=(const Cord&)
- Subcord(...)

PiperOrigin-RevId: 372468160
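
A minimal sketch of the resulting behavior, assuming Cordz sampling picked up
`src` (names and sizes hypothetical):

    #include <string>
    #include "absl/strings/cord.h"

    void SampledCopySketch() {
      absl::Cord src(std::string(5000, 'x'));  // suppose src got sampled
      absl::Cord copy(src);                    // sampled too (kConstructorCord)
      absl::Cord assigned;
      assigned = src;                          // sampled too (kAssignCord)
      absl::Cord sub = src.Subcord(0, 2000);   // sampled too (kSubCord)
    }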

--
876c2581ce008871464e8b471efbb967d150f83b by Andy Getzendanner <durandal@google.com>:

Document the type of an ABSL_FLAG.OnUpdate() callback.

PiperOrigin-RevId: 372406390
GitOrigin-RevId: 51e9291d9bc385082b4061fe760b236ba59c79c3
Change-Id: Ifb75122cae56b66c28128aee90a63bbb28d93817
pull/963/head
Authored by Abseil Team, committed by Andy Getzendanner.
parent 70b29fe5a5
commit 079cf66254
14 changed files (lines changed):

    absl/flags/flag.h (3)
    absl/strings/charconv.cc (6)
    absl/strings/cord.cc (21)
    absl/strings/cord.h (12)
    absl/strings/cordz_test.cc (88)
    absl/strings/internal/charconv_parse.cc (2)
    absl/strings/internal/cord_internal.h (10)
    absl/strings/internal/cordz_handle_test.cc (113)
    absl/strings/internal/cordz_info.cc (39)
    absl/strings/internal/cordz_info.h (16)
    absl/strings/internal/cordz_info_statistics_test.cc (61)
    absl/strings/internal/cordz_info_test.cc (27)
    absl/strings/internal/cordz_statistics.h (15)
    absl/strings/internal/cordz_update_tracker.h (10)

@@ -265,6 +265,8 @@ ABSL_NAMESPACE_END
//
// ABSL_FLAG(T, name, default_value, help).OnUpdate(callback);
//
// `callback` should be convertible to `void (*)()`.
//
// After any setting of the flag value, the callback will be called at least
// once. A rapid sequence of changes may be merged together into the same
// callback. No concurrent calls to the callback will be made for the same
@@ -279,7 +281,6 @@ ABSL_NAMESPACE_END
// Note: ABSL_FLAG.OnUpdate() does not have a public definition. Hence, this
// comment serves as its API documentation.
// -----------------------------------------------------------------------------
// Implementation details below this section
// -----------------------------------------------------------------------------
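
For reference, a minimal sketch of the documented pattern; the flag and
callback here are hypothetical, and since OnUpdate() has no public definition
this mirrors only the usage documented above:

    #include "absl/flags/flag.h"

    // The callback must be convertible to `void (*)()`; a captureless lambda
    // qualifies.
    ABSL_FLAG(bool, hypothetical_verbose, false, "Enable verbose output")
        .OnUpdate([]() {
          // Called at least once after any setting of the flag; a rapid
          // sequence of changes may be merged into a single invocation.
        });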

@@ -111,7 +111,7 @@ struct FloatTraits<double> {
return sign ? -ldexp(mantissa, exponent) : ldexp(mantissa, exponent);
#else
constexpr uint64_t kMantissaMask =
(uint64_t(1) << (kTargetMantissaBits - 1)) - 1;
(uint64_t{1} << (kTargetMantissaBits - 1)) - 1;
uint64_t dbl = static_cast<uint64_t>(sign) << 63;
if (mantissa > kMantissaMask) {
// Normal value.
@@ -151,7 +151,7 @@ struct FloatTraits<float> {
return sign ? -ldexpf(mantissa, exponent) : ldexpf(mantissa, exponent);
#else
constexpr uint32_t kMantissaMask =
(uint32_t(1) << (kTargetMantissaBits - 1)) - 1;
(uint32_t{1} << (kTargetMantissaBits - 1)) - 1;
uint32_t flt = static_cast<uint32_t>(sign) << 31;
if (mantissa > kMantissaMask) {
// Normal value.
@@ -499,7 +499,7 @@ bool MustRoundUp(uint64_t guess_mantissa, int guess_exponent,
template <typename FloatType>
CalculatedFloat CalculatedFloatFromRawValues(uint64_t mantissa, int exponent) {
CalculatedFloat result;
if (mantissa == uint64_t(1) << FloatTraits<FloatType>::kTargetMantissaBits) {
if (mantissa == uint64_t{1} << FloatTraits<FloatType>::kTargetMantissaBits) {
mantissa >>= 1;
exponent += 1;
}
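
The brace form buys a compile-time narrowing check that the functional cast
lacks; a minimal standalone sketch:

    #include <cstdint>

    void NarrowingSketch(uint64_t wide) {
      uint32_t a = uint32_t(wide);    // functional cast: silently truncates
      // uint32_t b = uint32_t{wide}; // braced init: ill-formed (narrowing)
      uint64_t one = uint64_t{1};     // same value as uint64_t(1), but checked
      static_cast<void>(a);
      static_cast<void>(one);
    }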

@@ -531,18 +531,19 @@ void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
assert(&src != this);
assert(is_tree() || src.is_tree());
auto constexpr method = CordzUpdateTracker::kAssignCord;
if (CordRep* tree = this->tree()) {
CordzUpdateScope scope(data_.cordz_info(), method);
CordRep::Unref(tree);
if (CordRep* src_tree = src.tree()) {
SetTree(CordRep::Ref(src_tree), scope);
} else {
scope.SetCordRep(nullptr);
data_ = src.data_;
}
if (ABSL_PREDICT_TRUE(!is_tree())) {
EmplaceTree(CordRep::Ref(src.as_tree()), src.data_, method);
return;
}
CordRep* tree = as_tree();
if (CordRep* src_tree = src.tree()) {
data_.set_tree(CordRep::Ref(src_tree));
CordzInfo::MaybeTrackCord(data_, src.data_, method);
} else {
EmplaceTree(CordRep::Ref(src.as_tree()), method);
CordzInfo::MaybeUntrackCord(data_.cordz_info());
data_ = src.data_;
}
CordRep::Unref(tree);
}
void Cord::InlineRep::UnrefTree() {

@@ -1000,12 +1000,12 @@ constexpr Cord::InlineRep::InlineRep(cord_internal::InlineData data)
: data_(data) {}
inline Cord::InlineRep::InlineRep(const Cord::InlineRep& src)
: data_(src.data_) {
if (is_tree()) {
data_.clear_cordz_info();
absl::cord_internal::CordRep::Ref(as_tree());
CordzInfo::MaybeTrackCord(data_, src.data_,
CordzUpdateTracker::kConstructorCord);
: data_(InlineData::kDefaultInit) {
if (CordRep* tree = src.tree()) {
EmplaceTree(CordRep::Ref(tree), src.data_,
CordzUpdateTracker::kConstructorCord);
} else {
data_ = src.data_;
}
}

@@ -67,6 +67,13 @@ absl::string_view MakeString(TestCordSize size) {
return MakeString(Length(size));
}
// Returns a cord with a sampled method of kAppendString.
absl::Cord MakeAppendStringCord(TestCordSize size) {
absl::Cord cord;
cord.Append(MakeString(size));
return cord;
}
std::string TestParamToString(::testing::TestParamInfo<TestCordSize> size) {
return absl::StrCat("On", ToString(size.param), "Cord");
}
@@ -136,6 +143,16 @@ TEST(CordzTest, CopyConstruct) {
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorCord));
}
TEST(CordzTest, CopyConstructFromSampled) {
CordzSamplingIntervalHelper sample_every{1};
Cord src(MakeString(TestCordSize::kLarge));
Cord cord(src);
ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorCord));
CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
EXPECT_THAT(stats.parent_method, Eq(Method::kConstructorString));
EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(1));
}
TEST(CordzTest, MoveConstruct) {
CordzSamplingIntervalHelper sample_every{1};
Cord src(MakeString(TestCordSize::kLarge));
@@ -146,7 +163,43 @@ TEST(CordzTest, MoveConstruct) {
TEST_P(CordzUpdateTest, AssignCord) {
Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
cord() = src;
EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAssignCord)));
EXPECT_THAT(cord(), HasValidCordzInfoOf(Method::kAssignCord));
CordzStatistics stats = GetCordzInfoForTesting(cord())->GetCordzStatistics();
EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
}
TEST_P(CordzUpdateTest, AssignSampledCord) {
Cord src = MakeAppendStringCord(TestCordSize::kLarge);
cord() = src;
ASSERT_THAT(cord(), HasValidCordzInfoOf(Method::kAssignCord));
CordzStatistics stats = GetCordzInfoForTesting(cord())->GetCordzStatistics();
EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
}
TEST(CordzUpdateTest, AssignSampledCordToUnsampledCord) {
CordzSamplingIntervalHelper sample_every{1};
Cord src = MakeAppendStringCord(TestCordSize::kLarge);
Cord cord = UnsampledCord(MakeString(TestCordSize::kLarge));
cord = src;
ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kAssignCord));
CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
}
TEST(CordzUpdateTest, AssignSampledCordToSampledCord) {
CordzSamplingIntervalHelper sample_every{1};
Cord src = MakeAppendStringCord(TestCordSize::kLarge);
Cord cord(MakeString(TestCordSize::kLarge));
cord = src;
ASSERT_THAT(cord, HasValidCordzInfoOf(Method::kAssignCord));
CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
EXPECT_THAT(stats.parent_method, Eq(Method::kAppendString));
EXPECT_THAT(stats.update_tracker.Value(Method::kAppendString), Eq(1));
EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(0));
}
TEST(CordzTest, AssignInlinedCord) {
@@ -160,7 +213,7 @@ TEST(CordzTest, AssignInlinedCord) {
EXPECT_FALSE(CordzInfoIsListed(info));
}
TEST(CordzTest, MoveAssignCord) {
TEST(CordzUpdateTest, MoveAssignCord) {
CordzSamplingIntervalHelper sample_every{1};
Cord cord;
Cord src(MakeString(TestCordSize::kLarge));
@@ -168,6 +221,11 @@ TEST(CordzTest, MoveAssignCord) {
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
}
TEST_P(CordzUpdateTest, AssignLargeArray) {
cord() = MakeString(TestCordSize::kSmall);
EXPECT_THAT(cord(), HasValidCordzInfoOf(Method::kAssignString));
}
TEST_P(CordzUpdateTest, AssignSmallArray) {
cord() = MakeString(TestCordSize::kSmall);
EXPECT_THAT(cord(), HasValidCordzInfoOf(Method::kAssignString));
@@ -333,16 +391,26 @@ TEST(CordzTest, RemoveSuffix) {
TEST(CordzTest, SubCord) {
CordzSamplingIntervalHelper sample_every{1};
Cord src(MakeString(TestCordSize::kLarge));
Cord cord1 = src.Subcord(10, src.size() / 2);
EXPECT_THAT(cord1, HasValidCordzInfoOf(Method::kSubCord));
Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
Cord cord = src.Subcord(10, src.size() / 2);
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kSubCord));
}
Cord cord2 = src.Subcord(10, kMaxInline + 1);
EXPECT_THAT(cord2, HasValidCordzInfoOf(Method::kSubCord));
TEST(CordzTest, SmallSubCord) {
CordzSamplingIntervalHelper sample_every{1};
Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
Cord cord = src.Subcord(10, kMaxInline + 1);
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kSubCord));
}
Cord cord3 = src.Subcord(10, kMaxInline);
EXPECT_THAT(GetCordzInfoForTesting(cord3), Eq(nullptr));
TEST(CordzTest, SubCordFromSampledCord) {
CordzSamplingIntervalHelper sample_every{1};
Cord src(MakeString(TestCordSize::kLarge));
Cord cord = src.Subcord(10, src.size() / 2);
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kSubCord));
CordzStatistics stats = GetCordzInfoForTesting(cord)->GetCordzStatistics();
EXPECT_THAT(stats.parent_method, Eq(Method::kConstructorString));
EXPECT_THAT(stats.update_tracker.Value(Method::kConstructorString), Eq(1));
}
} // namespace

@@ -52,7 +52,7 @@ static_assert(std::numeric_limits<double>::digits == 53, "IEEE double fact");
// The lowest valued 19-digit decimal mantissa we can read still contains
// sufficient information to reconstruct a binary mantissa.
static_assert(1000000000000000000u > (uint64_t(1) << (53 + 3)), "(b) above");
static_assert(1000000000000000000u > (uint64_t{1} << (53 + 3)), "(b) above");
// ParseFloat<16> will read the first 15 significant digits of the mantissa.
//
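
As a quick sanity check of the asserted inequality: 2^(53+3) = 2^56 =
72,057,594,037,927,936, which is indeed below the lowest 19-digit value, 10^18:

    #include <cstdint>

    static_assert((uint64_t{1} << 56) == 72057594037927936u, "2^56");
    static_assert(1000000000000000000u > 72057594037927936u, "10^18 > 2^56");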

@@ -366,6 +366,16 @@ class InlineData {
return as_tree_.cordz_info != kNullCordzInfo;
}
// Returns true if either of the provided instances holds a cordz_info value.
// This method is more efficient than the equivalent `data1.is_profiled() ||
// data2.is_profiled()`. Requires both arguments to hold a tree.
static bool is_either_profiled(const InlineData& data1,
const InlineData& data2) {
assert(data1.is_tree() && data2.is_tree());
return (data1.as_tree_.cordz_info | data2.as_tree_.cordz_info) !=
kNullCordzInfo;
}
// Returns the cordz_info sampling instance for this instance, or nullptr
// if the current instance is not sampled and does not have CordzInfo data.
// Requires the current instance to hold a tree value.
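
The efficiency claim rests on folding two tests into one branch: OR-ing the
two words first needs a single compare, while the short-circuiting form may
need two. A simplified standalone sketch of the pattern, using a zero sentinel
in place of kNullCordzInfo:

    #include <cstdint>

    // One OR, one compare, as in is_either_profiled().
    inline bool EitherNonZero(uintptr_t a, uintptr_t b) {
      return (a | b) != 0;
    }

    // Naive equivalent: short-circuit evaluation may cost an extra branch.
    inline bool EitherNonZeroNaive(uintptr_t a, uintptr_t b) {
      return a != 0 || b != 0;
    }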

@@ -191,65 +191,72 @@ TEST(CordzHandleTest, MultiThreaded) {
// manipulating a CordzHandle that might be operated upon in another thread.
std::vector<std::atomic<CordzHandle*>> handles(kNumHandles);
absl::synchronization_internal::ThreadPool pool(kNumThreads);
for (int i = 0; i < kNumThreads; ++i) {
pool.Schedule([&stop, &handles]() {
std::minstd_rand gen;
std::uniform_int_distribution<int> dist_type(0, 2);
std::uniform_int_distribution<int> dist_handle(0, kNumHandles - 1);
size_t max_safe_to_inspect = 0;
while (!stop.HasBeenNotified()) {
CordzHandle* handle;
switch (dist_type(gen)) {
case 0:
handle = new CordzHandle();
break;
case 1:
handle = new CordzSnapshot();
break;
default:
handle = nullptr;
break;
}
CordzHandle* old_handle = handles[dist_handle(gen)].exchange(handle);
if (old_handle != nullptr) {
std::vector<const CordzHandle*> safe_to_inspect =
old_handle->DiagnosticsGetSafeToInspectDeletedHandles();
for (const CordzHandle* handle : safe_to_inspect) {
// We're in a tight loop, so don't generate too many error messages.
ASSERT_FALSE(handle->is_snapshot());
// A global bool that is set when any thread obtained some 'safe to inspect'
// handles. On some platforms and OSS tests, we might risk that some pool
// threads are starved, stalled, or just got a few unlikely random 'handle'
// coin tosses, so we satisfy this test with simply observing that 'some'
// thread did something meaningful, which should minimize the potential for
// flakes.
std::atomic<bool> found_safe_to_inspect(false);
{
absl::synchronization_internal::ThreadPool pool(kNumThreads);
for (int i = 0; i < kNumThreads; ++i) {
pool.Schedule([&stop, &handles, &found_safe_to_inspect]() {
std::minstd_rand gen;
std::uniform_int_distribution<int> dist_type(0, 2);
std::uniform_int_distribution<int> dist_handle(0, kNumHandles - 1);
while (!stop.HasBeenNotified()) {
CordzHandle* handle;
switch (dist_type(gen)) {
case 0:
handle = new CordzHandle();
break;
case 1:
handle = new CordzSnapshot();
break;
default:
handle = nullptr;
break;
}
if (safe_to_inspect.size() > max_safe_to_inspect) {
max_safe_to_inspect = safe_to_inspect.size();
CordzHandle* old_handle = handles[dist_handle(gen)].exchange(handle);
if (old_handle != nullptr) {
std::vector<const CordzHandle*> safe_to_inspect =
old_handle->DiagnosticsGetSafeToInspectDeletedHandles();
for (const CordzHandle* handle : safe_to_inspect) {
// We're in a tight loop, so don't generate too many error
// messages.
ASSERT_FALSE(handle->is_snapshot());
}
if (!safe_to_inspect.empty()) {
found_safe_to_inspect.store(true);
}
CordzHandle::Delete(old_handle);
}
CordzHandle::Delete(old_handle);
}
}
// Confirm that the test did *something*. This check will be satisfied as
// long as this thread has deleted a CordzSnapshot object and a
// non-snapshot CordzHandle was deleted after the CordzSnapshot was
// created. This max_safe_to_inspect count will often reach around 30
// (assuming 4 threads and 10 slots for live handles). Don't use a strict
// bound to avoid making this test flaky.
EXPECT_THAT(max_safe_to_inspect, Gt(0));
// Have each thread attempt to clean up everything. Some thread will be
// the last to reach this cleanup code, and it will be guaranteed to clean
// up everything because nothing remains to create new handles.
for (auto& h : handles) {
if (CordzHandle* handle = h.exchange(nullptr)) {
CordzHandle::Delete(handle);
// Have each thread attempt to clean up everything. Some thread will be
// the last to reach this cleanup code, and it will be guaranteed to
// clean up everything because nothing remains to create new handles.
for (auto& h : handles) {
if (CordzHandle* handle = h.exchange(nullptr)) {
CordzHandle::Delete(handle);
}
}
}
});
});
}
// The threads will hammer away. Give it a little bit of time for tsan to
// spot errors.
absl::SleepFor(absl::Seconds(3));
stop.Notify();
}
// The threads will hammer away. Give it a little bit of time for tsan to
// spot errors.
absl::SleepFor(absl::Seconds(3));
stop.Notify();
// Confirm that the test did *something*. This check will be satisfied as
// long as any thread has deleted a CordzSnapshot object and a non-snapshot
// CordzHandle was deleted after the CordzSnapshot was created.
// See also comments on `found_safe_to_inspect`.
EXPECT_TRUE(found_safe_to_inspect.load());
}
} // namespace

@@ -135,6 +135,23 @@ class CordRepAnalyzer {
return (rep != nullptr && rep->tag == CONCAT) ? repref : RepRef{nullptr, 0};
}
// Counts a flat of the provided allocated size.
void CountFlat(size_t size) {
statistics_.node_count++;
statistics_.node_counts.flat++;
if (size <= 64) {
statistics_.node_counts.flat_64++;
} else if (size <= 128) {
statistics_.node_counts.flat_128++;
} else if (size <= 256) {
statistics_.node_counts.flat_256++;
} else if (size <= 512) {
statistics_.node_counts.flat_512++;
} else if (size <= 1024) {
statistics_.node_counts.flat_1k++;
}
}
// Processes 'linear' reps (substring, flat, external) not requiring iteration
// or recursion. Returns RepRef{null} if all reps were processed, else returns
// the top-most non-linear concat or ring cordrep.
@@ -152,9 +169,9 @@ class CordRepAnalyzer {
// Consume possible FLAT
if (rep.rep->tag >= FLAT) {
statistics_.node_count++;
statistics_.node_counts.flat++;
memory_usage.Add(rep.rep->flat()->AllocatedSize(), rep.refcount);
size_t size = rep.rep->flat()->AllocatedSize();
CountFlat(size);
memory_usage.Add(size, rep.refcount);
return RepRef{nullptr, 0};
}
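
To make the bucketing in CountFlat() concrete (hypothetical sizes): every flat
increments node_counts.flat, plus exactly one size bucket when its allocated
size is at most 1024 bytes:

    CountFlat(64);    // flat++, flat_64++
    CountFlat(65);    // flat++, flat_128++
    CountFlat(512);   // flat++, flat_512++
    CountFlat(4096);  // flat++ only: above 1k, no size bucket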
@@ -263,9 +280,15 @@ void CordzInfo::TrackCord(InlineData& cord, MethodIdentifier method) {
void CordzInfo::TrackCord(InlineData& cord, const InlineData& src,
MethodIdentifier method) {
assert(cord.is_tree());
assert(!cord.is_profiled());
auto* info = src.is_tree() && src.is_profiled() ? src.cordz_info() : nullptr;
CordzInfo* cordz_info = new CordzInfo(cord.as_tree(), info, method);
assert(src.is_tree());
// Unsample the current cord, as it is being replaced with 'src', so any
// method history is no longer relevant.
CordzInfo* cordz_info = cord.cordz_info();
if (cordz_info != nullptr) cordz_info->Untrack();
// Start new cord sample
cordz_info = new CordzInfo(cord.as_tree(), src.cordz_info(), method);
cord.set_cordz_info(cordz_info);
cordz_info->Track();
}
@@ -297,6 +320,10 @@ CordzInfo::CordzInfo(CordRep* rep, const CordzInfo* src,
parent_method_(GetParentMethod(src)),
create_time_(absl::Now()) {
update_tracker_.LossyAdd(method);
if (src) {
// Copy parent counters.
update_tracker_.LossyAdd(src->update_tracker_);
}
}
CordzInfo::~CordzInfo() {

@@ -65,7 +65,9 @@ class ABSL_LOCKABLE CordzInfo : public CordzHandle {
// Identical to TrackCord(), except that this function fills the
// `parent_stack` and `parent_method` properties of the returned CordzInfo
// instance from the provided `src` instance if `src` is sampled.
// This function should be used for sampling 'copy constructed' cords.
// This function should be used for sampling 'copy constructed' and 'copy
// assigned' cords. This function allows `cord` to already be sampled, in
// which case the CordzInfo will be newly created from `src`.
static void TrackCord(InlineData& cord, const InlineData& src,
MethodIdentifier method);
@@ -73,6 +75,15 @@ class ABSL_LOCKABLE CordzInfo : public CordzHandle {
// Uses `cordz_should_profile` to randomly pick cords to be sampled, and if
// so, invokes `TrackCord` to start sampling `cord`.
static void MaybeTrackCord(InlineData& cord, MethodIdentifier method);
// Maybe sample the cord identified by 'cord' for method 'method'.
// `src` identifies a 'parent' cord whose content is copied into the current
// cord, typically the input cord for an assign method or copy constructor.
// Invokes the corresponding `TrackCord` method if either cord is sampled, or
// if `cord` is randomly picked for sampling. Possible scenarios:
// * `src` is sampled: `cord` will be set to sampled if not already sampled.
//   Parent stack and update stats of `src` are copied into `cord`.
// * `src` is not sampled: `cord` may be randomly picked for sampling.
static void MaybeTrackCord(InlineData& cord, const InlineData& src,
MethodIdentifier method);
@@ -221,7 +232,8 @@ inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeTrackCord(
inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeTrackCord(
InlineData& cord, const InlineData& src, MethodIdentifier method) {
if (ABSL_PREDICT_FALSE(cordz_should_profile())) {
if (ABSL_PREDICT_FALSE(InlineData::is_either_profiled(cord, src)) ||
ABSL_PREDICT_FALSE(cordz_should_profile())) {
TrackCord(cord, src, method);
}
}

@@ -42,10 +42,18 @@ inline void PrintTo(const CordzStatistics& stats, std::ostream* s) {
namespace {
// Creates a flat of the specified length
CordRepFlat* Flat(int length = 512) {
auto* flat = CordRepFlat::New(length);
flat->length = length;
// Creates a flat of the specified allocated size
CordRepFlat* Flat(size_t size) {
// Round up to a tag size, as we are going to poke an exact tag size back into
// the allocated flat. 'size returning allocators' could grant us more than we
// wanted, but we are ok to poke the 'requested' size in the tag, even in the
// presence of sized deletes, so we need to make sure the size rounds
// perfectly to a tag value.
assert(size >= kMinFlatSize);
size = RoundUpForTag(size);
CordRepFlat* flat = CordRepFlat::New(size - kFlatOverhead);
flat->tag = AllocatedSizeToTag(size);
flat->length = size - kFlatOverhead;
return flat;
}
@@ -174,6 +182,11 @@ MATCHER_P(EqStatistics, stats, "Statistics equal expected values") {
STATS_MATCHER_EXPECT_EQ(size);
STATS_MATCHER_EXPECT_EQ(node_count);
STATS_MATCHER_EXPECT_EQ(node_counts.flat);
STATS_MATCHER_EXPECT_EQ(node_counts.flat_64);
STATS_MATCHER_EXPECT_EQ(node_counts.flat_128);
STATS_MATCHER_EXPECT_EQ(node_counts.flat_256);
STATS_MATCHER_EXPECT_EQ(node_counts.flat_512);
STATS_MATCHER_EXPECT_EQ(node_counts.flat_1k);
STATS_MATCHER_EXPECT_EQ(node_counts.external);
STATS_MATCHER_EXPECT_EQ(node_counts.concat);
STATS_MATCHER_EXPECT_EQ(node_counts.substring);
@@ -188,7 +201,7 @@ MATCHER_P(EqStatistics, stats, "Statistics equal expected values") {
TEST(CordzInfoStatisticsTest, Flat) {
RefHelper ref;
auto* flat = ref.NeedsUnref(Flat());
auto* flat = ref.NeedsUnref(Flat(512));
CordzStatistics expected;
expected.size = flat->length;
@@ -196,13 +209,14 @@ TEST(CordzInfoStatisticsTest, Flat) {
expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
expected.node_count = 1;
expected.node_counts.flat = 1;
expected.node_counts.flat_512 = 1;
EXPECT_THAT(SampleCord(flat), EqStatistics(expected));
}
TEST(CordzInfoStatisticsTest, SharedFlat) {
RefHelper ref;
auto* flat = ref.Ref(ref.NeedsUnref(Flat()));
auto* flat = ref.Ref(ref.NeedsUnref(Flat(64)));
CordzStatistics expected;
expected.size = flat->length;
@@ -210,6 +224,7 @@ TEST(CordzInfoStatisticsTest, SharedFlat) {
expected.estimated_fair_share_memory_usage = SizeOf(flat) / 2;
expected.node_count = 1;
expected.node_counts.flat = 1;
expected.node_counts.flat_64 = 1;
EXPECT_THAT(SampleCord(flat), EqStatistics(expected));
}
@@ -244,7 +259,7 @@ TEST(CordzInfoStatisticsTest, SharedExternal) {
TEST(CordzInfoStatisticsTest, Substring) {
RefHelper ref;
auto* flat = Flat();
auto* flat = Flat(1024);
auto* substring = ref.NeedsUnref(Substring(flat));
CordzStatistics expected;
@@ -253,6 +268,7 @@ TEST(CordzInfoStatisticsTest, Substring) {
expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
expected.node_count = 2;
expected.node_counts.flat = 1;
expected.node_counts.flat_1k = 1;
expected.node_counts.substring = 1;
EXPECT_THAT(SampleCord(substring), EqStatistics(expected));
@@ -260,7 +276,7 @@ TEST(CordzInfoStatisticsTest, Substring) {
TEST(CordzInfoStatisticsTest, SharedSubstring) {
RefHelper ref;
auto* flat = ref.Ref(Flat(), 2);
auto* flat = ref.Ref(Flat(511), 2);
auto* substring = ref.Ref(ref.NeedsUnref(Substring(flat)));
CordzStatistics expected;
@@ -270,6 +286,7 @@ TEST(CordzInfoStatisticsTest, SharedSubstring) {
SizeOf(substring) / 2 + SizeOf(flat) / 6;
expected.node_count = 2;
expected.node_counts.flat = 1;
expected.node_counts.flat_512 = 1;
expected.node_counts.substring = 1;
EXPECT_THAT(SampleCord(substring), EqStatistics(expected));
@@ -277,7 +294,7 @@ TEST(CordzInfoStatisticsTest, SharedSubstring) {
TEST(CordzInfoStatisticsTest, Concat) {
RefHelper ref;
auto* flat1 = Flat(20);
auto* flat1 = Flat(300);
auto* flat2 = Flat(2000);
auto* concat = ref.NeedsUnref(Concat(flat1, flat2));
@@ -288,6 +305,7 @@ TEST(CordzInfoStatisticsTest, Concat) {
expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
expected.node_count = 3;
expected.node_counts.flat = 2;
expected.node_counts.flat_512 = 1;
expected.node_counts.concat = 1;
EXPECT_THAT(SampleCord(concat), EqStatistics(expected));
@@ -295,9 +313,9 @@ TEST(CordzInfoStatisticsTest, Concat) {
TEST(CordzInfoStatisticsTest, DeepConcat) {
RefHelper ref;
auto* flat1 = Flat(20);
auto* flat1 = Flat(300);
auto* flat2 = Flat(2000);
auto* flat3 = Flat(30);
auto* flat3 = Flat(400);
auto* external = External(3000);
auto* substring = Substring(external);
auto* concat1 = Concat(flat1, flat2);
@@ -313,6 +331,7 @@ TEST(CordzInfoStatisticsTest, DeepConcat) {
expected.node_count = 8;
expected.node_counts.flat = 3;
expected.node_counts.flat_512 = 2;
expected.node_counts.external = 1;
expected.node_counts.concat = 3;
expected.node_counts.substring = 1;
@@ -322,9 +341,9 @@ TEST(CordzInfoStatisticsTest, DeepConcat) {
TEST(CordzInfoStatisticsTest, DeepSharedConcat) {
RefHelper ref;
auto* flat1 = Flat(20);
auto* flat1 = Flat(40);
auto* flat2 = ref.Ref(Flat(2000), 4);
auto* flat3 = Flat(30);
auto* flat3 = Flat(70);
auto* external = ref.Ref(External(3000));
auto* substring = ref.Ref(Substring(external), 3);
auto* concat1 = Concat(flat1, flat2);
@@ -339,6 +358,8 @@ TEST(CordzInfoStatisticsTest, DeepSharedConcat) {
expected.estimated_fair_share_memory_usage = FairShare(concat);
expected.node_count = 8;
expected.node_counts.flat = 3;
expected.node_counts.flat_64 = 1;
expected.node_counts.flat_128 = 1;
expected.node_counts.external = 1;
expected.node_counts.concat = 3;
expected.node_counts.substring = 1;
@@ -348,9 +369,9 @@ TEST(CordzInfoStatisticsTest, DeepSharedConcat) {
TEST(CordzInfoStatisticsTest, Ring) {
RefHelper ref;
auto* flat1 = Flat(20);
auto* flat1 = Flat(240);
auto* flat2 = Flat(2000);
auto* flat3 = Flat(30);
auto* flat3 = Flat(70);
auto* external = External(3000);
CordRepRing* ring = CordRepRing::Create(flat1);
ring = CordRepRing::Append(ring, flat2);
@@ -365,6 +386,8 @@ TEST(CordzInfoStatisticsTest, Ring) {
expected.estimated_fair_share_memory_usage = expected.estimated_memory_usage;
expected.node_count = 5;
expected.node_counts.flat = 3;
expected.node_counts.flat_128 = 1;
expected.node_counts.flat_256 = 1;
expected.node_counts.external = 1;
expected.node_counts.ring = 1;
@@ -373,9 +396,9 @@ TEST(CordzInfoStatisticsTest, SharedSubstringRing) {
TEST(CordzInfoStatisticsTest, SharedSubstringRing) {
RefHelper ref;
auto* flat1 = ref.Ref(Flat(20));
auto* flat1 = ref.Ref(Flat(240));
auto* flat2 = Flat(200);
auto* flat3 = Flat(30);
auto* flat3 = Flat(70);
auto* external = ref.Ref(External(3000), 5);
CordRepRing* ring = CordRepRing::Create(flat1);
ring = CordRepRing::Append(ring, flat2);
@@ -392,6 +415,8 @@ TEST(CordzInfoStatisticsTest, SharedSubstringRing) {
expected.estimated_fair_share_memory_usage = FairShare(substring);
expected.node_count = 6;
expected.node_counts.flat = 3;
expected.node_counts.flat_128 = 1;
expected.node_counts.flat_256 = 2;
expected.node_counts.external = 1;
expected.node_counts.ring = 1;
expected.node_counts.substring = 1;
@@ -447,7 +472,7 @@ TEST(CordzInfoStatisticsTest, ThreadSafety) {
cord.set_inline_size(0);
} else {
// 50/50 Ring or Flat coin toss
CordRep* rep = Flat();
CordRep* rep = Flat(256);
rep = (coin_toss(gen) != 0) ? CordRepRing::Create(rep) : rep;
cord.make_tree(rep);

@@ -74,6 +74,20 @@ TEST(CordzInfoTest, TrackCord) {
info->Untrack();
}
TEST(CordzInfoTest, MaybeTrackCordOnSampledCord) {
TestCordData data1;
CordzInfo::TrackCord(data1.data, kTrackCordMethod);
CordzInfo* info1 = data1.data.cordz_info();
TestCordData data2;
CordzInfo::MaybeTrackCord(data2.data, data1.data, kTrackCordMethod);
CordzInfo* info2 = data2.data.cordz_info();
ASSERT_THAT(info2, Ne(nullptr));
EXPECT_THAT(info2->GetCordRepForTesting(), Eq(data2.rep.rep));
info2->Untrack();
info1->Untrack();
}
TEST(CordzInfoTest, UntrackCord) {
TestCordData data;
CordzInfo::TrackCord(data.data, kTrackCordMethod);
@@ -291,19 +305,6 @@ TEST(CordzInfoTest, FromParent) {
info_child->Untrack();
}
TEST(CordzInfoTest, FromParentInlined) {
InlineData parent;
TestCordData child;
CordzInfo* info = TrackChildCord(child.data, parent);
EXPECT_TRUE(info->GetParentStack().empty());
CordzStatistics statistics = info->GetCordzStatistics();
EXPECT_THAT(statistics.size, Eq(child.rep.rep->length));
EXPECT_THAT(statistics.method, Eq(kChildMethod));
EXPECT_THAT(statistics.parent_method, Eq(kUnknownMethod));
EXPECT_THAT(statistics.update_tracker.Value(kChildMethod), Eq(1));
info->Untrack();
}
} // namespace
} // namespace cord_internal
ABSL_NAMESPACE_END

@@ -30,11 +30,16 @@ struct CordzStatistics {
// Node counts information
struct NodeCounts {
size_t flat = 0;
size_t external = 0;
size_t substring = 0;
size_t concat = 0;
size_t ring = 0;
size_t flat = 0; // #flats
size_t flat_64 = 0; // #flats up to 64 bytes
size_t flat_128 = 0; // #flats up to 128 bytes
size_t flat_256 = 0; // #flats up to 256 bytes
size_t flat_512 = 0; // #flats up to 512 bytes
size_t flat_1k = 0; // #flats up to 1K bytes
size_t external = 0; // #external reps
size_t substring = 0; // #substring reps
size_t concat = 0; // #concat reps
size_t ring = 0; // #ring buffer reps
};
// The size of the cord in bytes. This matches the result of Cord::size().

@@ -91,6 +91,16 @@ class CordzUpdateTracker {
std::memory_order_relaxed);
}
// Adds all the values from `src` to this instance
void LossyAdd(const CordzUpdateTracker& src) {
for (int i = 0; i < kNumMethods; ++i) {
MethodIdentifier method = static_cast<MethodIdentifier>(i);
if (int64_t value = src.Value(method)) {
LossyAdd(method, value);
}
}
}
private:
// Until C++20 std::atomic is not constexpr default-constructible, so we need
// a wrapper for this class to be constexpr constructible.
