Export of internal Abseil changes

--
f6fbb03bff276e72123e8590519079e87732ae62 by Abseil Team <absl-team@google.com>:

Replace static absl::Mutex with SpinLock in absl::Cord

This avoids the static initializers and exit-time destructors that static absl::Mutex instances would otherwise register.
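
A minimal sketch of the pattern (the `cord_lock` guard and `TouchSharedState()` are hypothetical): `absl::base_internal::SpinLock` is constexpr-constructible with `absl::kConstInit`, so the global needs no dynamic initialization and is never destroyed:

```cpp
#include "absl/base/attributes.h"
#include "absl/base/const_init.h"
#include "absl/base/internal/spinlock.h"

// Hypothetical guard: constant-initialized, no exit-time destructor.
ABSL_CONST_INIT static absl::base_internal::SpinLock cord_lock(
    absl::kConstInit, absl::base_internal::SCHEDULE_COOPERATIVE_AND_KERNEL);

void TouchSharedState() {
  absl::base_internal::SpinLockHolder lock(&cord_lock);
  // ... mutate state guarded by cord_lock ...
}
```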

PiperOrigin-RevId: 370694199

--
654b7d9edfdc24f226990b2b46cbf91451a1d92a by Martijn Vels <mvels@google.com>:

Implement global data for CordzInfo in an ODR hardened way

This change puts the global data into a global list structure, and stores a reference to the global list in the handle itself. This hardens the implementation against ODR violations where info pointers are crossing dynamic library boundaries which are privately loaded.
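
A condensed sketch of the hardening pattern; the names `List` and `Node` are illustrative, while the real types are `CordzInfo::List` and `CordzInfo` in the diff below:

```cpp
#include <atomic>

#include "absl/base/attributes.h"
#include "absl/base/internal/raw_logging.h"

class Node;

// One `List` instance exists per linked-in copy of the library.
struct List {
  std::atomic<Node*> head{nullptr};
};

ABSL_CONST_INIT static List global_list;

class Node {
 public:
  void ODRCheck() const {
#ifndef NDEBUG
    // Fires if `this` was created by a privately loaded copy of the
    // library whose `global_list` is a distinct object.
    ABSL_RAW_CHECK(list_ == &global_list, "ODR violation");
#endif
  }

 private:
  // Captured at construction: the list this instance registered with.
  List* const list_ = &global_list;
};
```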

PiperOrigin-RevId: 370673045

--
712dba768e66ee2ba85d6010829c617cd2af6ba7 by Martijn Vels <mvels@google.com>:

Instrument Cord::operator= for Cordz

PiperOrigin-RevId: 370659149

--
c0b347a2289e151b72680269332e264b8fa989c0 by Matt Kulukundis <kfm@google.com>:

Fix test guards for ABSL_ATTRIBUTE_RETURNS_NONNULL

PiperOrigin-RevId: 370594807

--
c2bedaa3472ef223f907de2604f9b9b58852ec5f by Martijn Vels <mvels@google.com>:

Add new Cordz instrumentation on GetAppendRegion.

PiperOrigin-RevId: 370587761

--
84fbfcc852697d509f6094482b86e84743a6b331 by Martijn Vels <mvels@google.com>:

Add instrumentation on Cord::Append(string_view)

PiperOrigin-RevId: 370576590

--
9e077390b8ca2239e1cb7bfbe1d5a04f2fc11d30 by Abseil Team <absl-team@google.com>:

Google-internal changes only.

PiperOrigin-RevId: 370558424

--
fb53c149eb2364ea34e3a67235f873866618b8ac by Matt Kulukundis <kfm@google.com>:

Update config.h macros with a few useful helpers to simplify version checking
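
For example, a consumer can now write the following (`MY_FALLTHROUGH` is a hypothetical macro; the helpers expand to 0 when the relevant compiler macros are absent, so bare use in `#if` is safe, as the definitions in the diff below show):

```cpp
#include "absl/base/config.h"

#if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(7, 0)
#define MY_FALLTHROUGH [[gnu::fallthrough]]
#elif ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(3, 6)
#define MY_FALLTHROUGH [[clang::fallthrough]]
#else
#define MY_FALLTHROUGH
#endif
```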

PiperOrigin-RevId: 370557684

--
abf8142e99b9ff7e15f6528a357f1005461950b0 by Martijn Vels <mvels@google.com>:

clang-format cord

PiperOrigin-RevId: 370549371

--
e555985eabe63fcf0e980e9c433dd84caffec191 by Martijn Vels <mvels@google.com>:

Add MaybeUntrackCord() function

This function is near identical to the old UntrackCord() but allows info to be null, moving the cord.is_profiled() branch into CordzInfo.
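
The effect at call sites, taken from the changes below, is that the profiled-branch collapses into the helper:

```cpp
// Before: callers branched on is_profiled() themselves.
//   if (ABSL_PREDICT_FALSE(is_profiled())) {
//     CordzInfo::UntrackCord(cordz_info());
//   }
//
// After: the null check lives in CordzInfo (from the diff below).
inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeUntrackCord(
    CordzInfo* info) {
  if (ABSL_PREDICT_FALSE(info)) {
    info->Untrack();
  }
}
```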

PiperOrigin-RevId: 370528447

--
3883538efe4601f7864bda70a50d868bb383c63b by Derek Mauro <dmauro@google.com>:

Internal change

PiperOrigin-RevId: 370503186

--
a9514b65542fde1bc73584e6f3c1c4b3a05f215f by Derek Mauro <dmauro@google.com>:

Add -Winvalid-constexpr to warning options for LLVM

PiperOrigin-RevId: 370455171

--
d8a3966de2cf15a2dc28e17e49a3d27d205eca92 by Martijn Vels <mvels@google.com>:

Add naive UniqueGenerator<T, kMaxValues, ...> to avoid test flakes from duplicate random values.
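
Typical use inside a typed container test, mirroring the diff below; deduplication happens within a single generator instance, so one generator call site yields ten distinct values:

```cpp
// Fragment from a TYPED_TEST body (TypeParam is the container under test).
using T = hash_internal::GeneratedType<TypeParam>;
std::vector<T> values;
std::generate_n(std::back_inserter(values), 10,
                hash_internal::UniqueGenerator<T>());
// `values` now holds 10 distinct elements; duplicate random values would
// previously cause flaky failures in unordered-element assertions.
```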

PiperOrigin-RevId: 370179772

--
46d0caa1a12b68a5998d4f919e20f0f83b9286f8 by Martijn Vels <mvels@google.com>:

Add new Cordz instrumentation on PrependTree.

PiperOrigin-RevId: 370138969
GitOrigin-RevId: f6fbb03bff276e72123e8590519079e87732ae62
Change-Id: Ifa4c00a5c7b01198ee367a3253bea6b66612135e
pull/963/head
commit a5167f986b (parent d96e287417)
Authored by Abseil Team, committed by Derek Mauro (4 years ago)
  1. absl/base/attributes.h (7 lines changed)
  2. absl/base/config.h (46 lines changed)
  3. absl/container/internal/hash_generator_testing.h (21 lines changed)
  4. absl/container/internal/unordered_map_constructor_test.h (38 lines changed)
  5. absl/copts/GENERATED_AbseilCopts.cmake (1 line changed)
  6. absl/copts/GENERATED_copts.bzl (1 line changed)
  7. absl/copts/copts.py (1 line changed)
  8. absl/copts/generate_copts.py (2 lines changed)
  9. absl/strings/BUILD.bazel (5 lines changed)
  10. absl/strings/CMakeLists.txt (5 lines changed)
  11. absl/strings/cord.cc (295 lines changed)
  12. absl/strings/cord.h (54 lines changed)
  13. absl/strings/cord_test_helpers.h (38 lines changed)
  14. absl/strings/cordz_test.cc (130 lines changed)
  15. absl/strings/cordz_test_helpers.h (7 lines changed)
  16. absl/strings/internal/cordz_handle.cc (15 lines changed)
  17. absl/strings/internal/cordz_handle.h (7 lines changed)
  18. absl/strings/internal/cordz_info.cc (78 lines changed)
  19. absl/strings/internal/cordz_info.h (59 lines changed)
  20. absl/strings/internal/cordz_info_test.cc (30 lines changed)
  21. absl/strings/internal/cordz_sample_token_test.cc (18 lines changed)
  22. absl/strings/internal/cordz_update_scope.h (8 lines changed)
  23. absl/synchronization/internal/waiter.cc (2 lines changed)

@ -281,10 +281,7 @@
// ABSL_ATTRIBUTE_RETURNS_NONNULL
//
// Tells the compiler that a particular function never returns a null pointer.
#if ABSL_HAVE_ATTRIBUTE(returns_nonnull) || \
(defined(__GNUC__) && \
(__GNUC__ > 5 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 9)) && \
!defined(__clang__))
#if ABSL_HAVE_ATTRIBUTE(returns_nonnull)
#define ABSL_ATTRIBUTE_RETURNS_NONNULL __attribute__((returns_nonnull))
#else
#define ABSL_ATTRIBUTE_RETURNS_NONNULL
@ -622,7 +619,7 @@
#if __has_feature(cxx_attributes) && __has_warning("-Wimplicit-fallthrough")
#define ABSL_FALLTHROUGH_INTENDED [[clang::fallthrough]]
#endif
#elif defined(__GNUC__) && __GNUC__ >= 7
#elif ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(7, 0)
#define ABSL_FALLTHROUGH_INTENDED [[gnu::fallthrough]]
#endif

@ -166,6 +166,22 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
#define ABSL_HAVE_FEATURE(f) 0
#endif
// Portable check for GCC minimum version:
// https://gcc.gnu.org/onlinedocs/cpp/Common-Predefined-Macros.html
#if defined(__GNUC__) && defined(__GNUC_MINOR__)
#define ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(x, y) \
(__GNUC__ > (x) || __GNUC__ == (x) && __GNUC_MINOR__ >= (y))
#else
#define ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(x, y) 0
#endif
#if defined(__clang__) && defined(__clang_major__) && defined(__clang_minor__)
#define ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(x, y) \
(__clang_major__ > (x) || __clang_major__ == (x) && __clang_minor__ >= (y))
#else
#define ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(x, y) 0
#endif
// ABSL_HAVE_TLS is defined to 1 when __thread should be supported.
// We assume __thread is supported on Linux when compiled with Clang or compiled
// against libstdc++ with _GLIBCXX_HAVE_TLS defined.
@ -183,10 +199,9 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// gcc >= 4.8.1 using libstdc++, and Visual Studio.
#ifdef ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE
#error ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE cannot be directly set
#elif defined(_LIBCPP_VERSION) || \
(!defined(__clang__) && defined(__GNUC__) && defined(__GLIBCXX__) && \
(__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8))) || \
defined(_MSC_VER)
#elif defined(_LIBCPP_VERSION) || defined(_MSC_VER) || \
(!defined(__clang__) && defined(__GLIBCXX__) && \
ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(4, 8))
#define ABSL_HAVE_STD_IS_TRIVIALLY_DESTRUCTIBLE 1
#endif
@ -205,10 +220,9 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
#error ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE cannot be directly set
#elif defined(ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE)
#error ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE cannot directly set
#elif (defined(__clang__) && defined(_LIBCPP_VERSION)) || \
(!defined(__clang__) && defined(__GNUC__) && \
(__GNUC__ > 7 || (__GNUC__ == 7 && __GNUC_MINOR__ >= 4)) && \
(defined(_LIBCPP_VERSION) || defined(__GLIBCXX__))) || \
#elif (defined(__clang__) && defined(_LIBCPP_VERSION)) || \
(!defined(__clang__) && ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(7, 4) && \
(defined(_LIBCPP_VERSION) || defined(__GLIBCXX__))) || \
(defined(_MSC_VER) && !defined(__NVCC__))
#define ABSL_HAVE_STD_IS_TRIVIALLY_CONSTRUCTIBLE 1
#define ABSL_HAVE_STD_IS_TRIVIALLY_ASSIGNABLE 1
@ -222,7 +236,7 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
#if ABSL_INTERNAL_HAS_KEYWORD(__builtin_LINE) && \
ABSL_INTERNAL_HAS_KEYWORD(__builtin_FILE)
#define ABSL_HAVE_SOURCE_LOCATION_CURRENT 1
#elif defined(__GNUC__) && __GNUC__ >= 5
#elif ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(5, 0)
#define ABSL_HAVE_SOURCE_LOCATION_CURRENT 1
#endif
#endif
@ -319,25 +333,21 @@ static_assert(ABSL_INTERNAL_INLINE_NAMESPACE_STR[0] != 'h' ||
// For further details, consult the compiler's documentation.
#ifdef ABSL_HAVE_EXCEPTIONS
#error ABSL_HAVE_EXCEPTIONS cannot be directly set.
#elif defined(__clang__)
#if __clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 6)
#elif ABSL_INTERNAL_HAVE_MIN_CLANG_VERSION(3, 6)
// Clang >= 3.6
#if ABSL_HAVE_FEATURE(cxx_exceptions)
#define ABSL_HAVE_EXCEPTIONS 1
#endif // ABSL_HAVE_FEATURE(cxx_exceptions)
#else
#elif defined(__clang__)
// Clang < 3.6
// http://releases.llvm.org/3.6.0/tools/clang/docs/ReleaseNotes.html#the-exceptions-macro
#if defined(__EXCEPTIONS) && ABSL_HAVE_FEATURE(cxx_exceptions)
#define ABSL_HAVE_EXCEPTIONS 1
#endif // defined(__EXCEPTIONS) && ABSL_HAVE_FEATURE(cxx_exceptions)
#endif // __clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 6)
// Handle remaining special cases and default to exceptions being supported.
#elif !(defined(__GNUC__) && (__GNUC__ < 5) && !defined(__EXCEPTIONS)) && \
!(defined(__GNUC__) && (__GNUC__ >= 5) && !defined(__cpp_exceptions)) && \
#elif !(defined(__GNUC__) && (__GNUC__ < 5) && !defined(__EXCEPTIONS)) && \
!(ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(5, 0) && \
!defined(__cpp_exceptions)) && \
!(defined(_MSC_VER) && !defined(_CPPUNWIND))
#define ABSL_HAVE_EXCEPTIONS 1
#endif

@ -21,11 +21,13 @@
#include <stdint.h>
#include <algorithm>
#include <cassert>
#include <iosfwd>
#include <random>
#include <tuple>
#include <type_traits>
#include <utility>
#include <vector>
#include "absl/container/internal/hash_policy_testing.h"
#include "absl/memory/memory.h"
@ -153,6 +155,25 @@ using GeneratedType = decltype(
typename Container::value_type,
typename Container::key_type>::type>&>()());
// Naive wrapper that performs a linear search of previous values.
// Beware this is O(n^2), which is reasonable for smaller kMaxValues.
template <class T, size_t kMaxValues = 64, class E = void>
struct UniqueGenerator {
Generator<T, E> gen;
std::vector<T> values;
T operator()() {
assert(values.size() < kMaxValues);
for (;;) {
T value = gen();
if (std::find(values.begin(), values.end(), value) == values.end()) {
values.push_back(value);
return value;
}
}
}
};
} // namespace hash_internal
} // namespace container_internal
ABSL_NAMESPACE_END

@ -179,7 +179,7 @@ TYPED_TEST_P(ConstructorTest, InputIteratorBucketHashEqualAlloc) {
A alloc(0);
std::vector<T> values;
std::generate_n(std::back_inserter(values), 10,
hash_internal::Generator<T>());
hash_internal::UniqueGenerator<T>());
TypeParam m(values.begin(), values.end(), 123, hasher, equal, alloc);
EXPECT_EQ(m.hash_function(), hasher);
EXPECT_EQ(m.key_eq(), equal);
@ -198,7 +198,7 @@ void InputIteratorBucketAllocTest(std::true_type) {
A alloc(0);
std::vector<T> values;
std::generate_n(std::back_inserter(values), 10,
hash_internal::Generator<T>());
hash_internal::UniqueGenerator<T>());
TypeParam m(values.begin(), values.end(), 123, alloc);
EXPECT_EQ(m.get_allocator(), alloc);
EXPECT_THAT(items(m), ::testing::UnorderedElementsAreArray(values));
@ -221,7 +221,7 @@ void InputIteratorBucketHashAllocTest(std::true_type) {
A alloc(0);
std::vector<T> values;
std::generate_n(std::back_inserter(values), 10,
hash_internal::Generator<T>());
hash_internal::UniqueGenerator<T>());
TypeParam m(values.begin(), values.end(), 123, hasher, alloc);
EXPECT_EQ(m.hash_function(), hasher);
EXPECT_EQ(m.get_allocator(), alloc);
@ -241,8 +241,9 @@ TYPED_TEST_P(ConstructorTest, CopyConstructor) {
H hasher;
E equal;
A alloc(0);
hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam n(m);
EXPECT_EQ(m.hash_function(), n.hash_function());
EXPECT_EQ(m.key_eq(), n.key_eq());
@ -262,8 +263,9 @@ void CopyConstructorAllocTest(std::true_type) {
H hasher;
E equal;
A alloc(0);
hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam n(m, A(11));
EXPECT_EQ(m.hash_function(), n.hash_function());
EXPECT_EQ(m.key_eq(), n.key_eq());
@ -285,8 +287,9 @@ TYPED_TEST_P(ConstructorTest, MoveConstructor) {
H hasher;
E equal;
A alloc(0);
hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam t(m);
TypeParam n(std::move(t));
EXPECT_EQ(m.hash_function(), n.hash_function());
@ -307,8 +310,9 @@ void MoveConstructorAllocTest(std::true_type) {
H hasher;
E equal;
A alloc(0);
hash_internal::UniqueGenerator<T> gen;
TypeParam m(123, hasher, equal, alloc);
for (size_t i = 0; i != 10; ++i) m.insert(hash_internal::Generator<T>()());
for (size_t i = 0; i != 10; ++i) m.insert(gen());
TypeParam t(m);
TypeParam n(std::move(t), A(1));
EXPECT_EQ(m.hash_function(), n.hash_function());
@ -325,7 +329,7 @@ TYPED_TEST_P(ConstructorTest, MoveConstructorAlloc) {
TYPED_TEST_P(ConstructorTest, InitializerListBucketHashEqualAlloc) {
using T = hash_internal::GeneratedType<TypeParam>;
hash_internal::Generator<T> gen;
hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
using H = typename TypeParam::hasher;
using E = typename TypeParam::key_equal;
@ -348,7 +352,7 @@ template <typename TypeParam>
void InitializerListBucketAllocTest(std::true_type) {
using T = hash_internal::GeneratedType<TypeParam>;
using A = typename TypeParam::allocator_type;
hash_internal::Generator<T> gen;
hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
A alloc(0);
TypeParam m(values, 123, alloc);
@ -371,7 +375,7 @@ void InitializerListBucketHashAllocTest(std::true_type) {
using A = typename TypeParam::allocator_type;
H hasher;
A alloc(0);
hash_internal::Generator<T> gen;
hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m(values, 123, hasher, alloc);
EXPECT_EQ(m.hash_function(), hasher);
@ -392,7 +396,7 @@ TYPED_TEST_P(ConstructorTest, Assignment) {
H hasher;
E equal;
A alloc(0);
hash_internal::Generator<T> gen;
hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()}, 123, hasher, equal, alloc);
TypeParam n;
n = m;
@ -412,7 +416,7 @@ TYPED_TEST_P(ConstructorTest, MoveAssignment) {
H hasher;
E equal;
A alloc(0);
hash_internal::Generator<T> gen;
hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()}, 123, hasher, equal, alloc);
TypeParam t(m);
TypeParam n;
@ -424,7 +428,7 @@ TYPED_TEST_P(ConstructorTest, MoveAssignment) {
TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerList) {
using T = hash_internal::GeneratedType<TypeParam>;
hash_internal::Generator<T> gen;
hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m;
m = values;
@ -433,7 +437,7 @@ TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerList) {
TYPED_TEST_P(ConstructorTest, AssignmentOverwritesExisting) {
using T = hash_internal::GeneratedType<TypeParam>;
hash_internal::Generator<T> gen;
hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()});
TypeParam n({gen()});
n = m;
@ -442,7 +446,7 @@ TYPED_TEST_P(ConstructorTest, AssignmentOverwritesExisting) {
TYPED_TEST_P(ConstructorTest, MoveAssignmentOverwritesExisting) {
using T = hash_internal::GeneratedType<TypeParam>;
hash_internal::Generator<T> gen;
hash_internal::UniqueGenerator<T> gen;
TypeParam m({gen(), gen(), gen()});
TypeParam t(m);
TypeParam n({gen()});
@ -452,7 +456,7 @@ TYPED_TEST_P(ConstructorTest, MoveAssignmentOverwritesExisting) {
TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerListOverwritesExisting) {
using T = hash_internal::GeneratedType<TypeParam>;
hash_internal::Generator<T> gen;
hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m;
m = values;
@ -461,7 +465,7 @@ TYPED_TEST_P(ConstructorTest, AssignmentFromInitializerListOverwritesExisting) {
TYPED_TEST_P(ConstructorTest, AssignmentOnSelf) {
using T = hash_internal::GeneratedType<TypeParam>;
hash_internal::Generator<T> gen;
hash_internal::UniqueGenerator<T> gen;
std::initializer_list<T> values = {gen(), gen(), gen(), gen(), gen()};
TypeParam m(values);
m = *&m; // Avoid -Wself-assign

@ -71,6 +71,7 @@ list(APPEND ABSL_LLVM_FLAGS
"-Wformat-security"
"-Wgnu-redeclared-enum"
"-Winfinite-recursion"
"-Winvalid-constexpr"
"-Wliteral-conversion"
"-Wmissing-declarations"
"-Woverlength-strings"

@ -72,6 +72,7 @@ ABSL_LLVM_FLAGS = [
"-Wformat-security",
"-Wgnu-redeclared-enum",
"-Winfinite-recursion",
"-Winvalid-constexpr",
"-Wliteral-conversion",
"-Wmissing-declarations",
"-Woverlength-strings",

@ -87,6 +87,7 @@ COPT_VARS = {
"-Wformat-security",
"-Wgnu-redeclared-enum",
"-Winfinite-recursion",
"-Winvalid-constexpr",
"-Wliteral-conversion",
"-Wmissing-declarations",
"-Woverlength-strings",

@ -1,4 +1,4 @@
#!/usr/bin/python
#!/usr/bin/env python3
"""Generate Abseil compile compile option configs.
Usage: <path_to_absl>/copts/generate_copts.py

@ -354,6 +354,7 @@ cc_library(
hdrs = ["internal/cordz_handle.h"],
copts = ABSL_DEFAULT_COPTS,
deps = [
"//absl/base",
"//absl/base:config",
"//absl/base:raw_logging_internal",
"//absl/synchronization",
@ -371,8 +372,10 @@ cc_library(
":cordz_handle",
":cordz_statistics",
":cordz_update_tracker",
"//absl/base",
"//absl/base:config",
"//absl/base:core_headers",
"//absl/base:raw_logging_internal",
"//absl/debugging:stacktrace",
"//absl/synchronization",
"//absl/types:span",
@ -525,6 +528,7 @@ cc_library(
deps = [
":cord",
":cord_internal",
":strings",
],
)
@ -587,6 +591,7 @@ cc_test(
visibility = ["//visibility:private"],
deps = [
":cord",
":cord_test_helpers",
":cordz_functions",
":cordz_info",
":cordz_sample_token",

@ -655,6 +655,7 @@ absl_cc_library(
COPTS
${ABSL_DEFAULT_COPTS}
DEPS
absl::base
absl::config
absl::raw_logging_internal
absl::synchronization
@ -687,6 +688,7 @@ absl_cc_library(
COPTS
${ABSL_DEFAULT_COPTS}
DEPS
absl::base
absl::config
absl::cord_internal
absl::cordz_functions
@ -695,6 +697,7 @@ absl_cc_library(
absl::cordz_update_tracker
absl::core_headers
absl::span
absl::raw_logging_internal
absl::stacktrace
absl::synchronization
)
@ -829,6 +832,7 @@ absl_cc_library(
DEPS
absl::cord
absl::cord_internal
absl::strings
TESTONLY
)
@ -914,6 +918,7 @@ absl_cc_test(
${ABSL_TEST_COPTS}
DEPS
absl::cord
absl::cord_test_helpers
absl::cordz_test_helpers
absl::cordz_functions
absl::cordz_info

@ -210,7 +210,7 @@ static CordRep* MakeBalancedTree(CordRep** reps, size_t n) {
}
static CordRepFlat* CreateFlat(const char* data, size_t length,
size_t alloc_hint) {
size_t alloc_hint) {
CordRepFlat* flat = CordRepFlat::New(length + alloc_hint);
flat->length = length;
memcpy(flat->Data(), data, length);
@ -234,9 +234,7 @@ static CordRep* RingNewTree(const char* data, size_t length,
// Create a new tree out of the specified array.
// The returned node has a refcount of 1.
static CordRep* NewTree(const char* data,
size_t length,
size_t alloc_hint) {
static CordRep* NewTree(const char* data, size_t length, size_t alloc_hint) {
if (length == 0) return nullptr;
if (cord_ring_enabled()) {
return RingNewTree(data, length, alloc_hint);
@ -303,24 +301,6 @@ inline char* Cord::InlineRep::set_data(size_t n) {
return data_.as_chars();
}
static inline CordRepFlat* MakeFlat(const InlineData& data, size_t extra = 0) {
static_assert(kMinFlatLength >= sizeof(data), "");
size_t len = data.inline_size();
CordRepFlat* result = CordRepFlat::New(len + extra);
result->length = len;
memcpy(result->Data(), data.as_chars(), sizeof(data));
return result;
}
inline CordRep* Cord::InlineRep::force_tree(size_t extra_hint) {
if (data_.is_tree()) {
return data_.as_tree();
}
CordRepFlat* result = MakeFlat(data_, extra_hint);
set_tree(result);
return result;
}
inline void Cord::InlineRep::reduce_size(size_t n) {
size_t tag = inline_size();
assert(tag <= kMaxInline);
@ -346,7 +326,7 @@ void Cord::InlineRep::AppendTreeToInlined(CordRep* tree,
MethodIdentifier method) {
assert(!is_tree());
if (!data_.is_empty()) {
CordRepFlat* flat = MakeFlat(data_);
CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
if (cord_ring_enabled()) {
tree = CordRepRing::Append(CordRepRing::Create(flat, 1), tree);
} else {
@ -376,14 +356,38 @@ void Cord::InlineRep::AppendTree(CordRep* tree, MethodIdentifier method) {
}
}
void Cord::InlineRep::PrependTree(CordRep* tree) {
void Cord::InlineRep::PrependTreeToInlined(CordRep* tree,
MethodIdentifier method) {
assert(!is_tree());
if (!data_.is_empty()) {
CordRepFlat* flat = MakeFlatWithExtraCapacity(0);
if (cord_ring_enabled()) {
tree = CordRepRing::Prepend(CordRepRing::Create(flat, 1), tree);
} else {
tree = Concat(tree, flat);
}
}
EmplaceTree(tree, method);
}
void Cord::InlineRep::PrependTreeToTree(CordRep* tree,
MethodIdentifier method) {
assert(is_tree());
const CordzUpdateScope scope(data_.cordz_info(), method);
if (cord_ring_enabled()) {
tree = CordRepRing::Prepend(ForceRing(data_.as_tree(), 1), tree);
} else {
tree = Concat(tree, data_.as_tree());
}
SetTree(tree, scope);
}
void Cord::InlineRep::PrependTree(CordRep* tree, MethodIdentifier method) {
assert(tree != nullptr);
if (data_.is_empty()) {
set_tree(tree);
} else if (cord_ring_enabled()) {
set_tree(CordRepRing::Prepend(ForceRing(force_tree(0), 1), tree));
if (data_.is_tree()) {
PrependTreeToTree(tree, method);
} else {
set_tree(Concat(tree, force_tree(0)));
PrependTreeToInlined(tree, method);
}
}
@ -435,78 +439,43 @@ static inline bool PrepareAppendRegion(CordRep* root, char** region,
return true;
}
template <bool has_length>
void Cord::InlineRep::GetAppendRegion(char** region, size_t* size,
size_t max_length) {
if (max_length == 0) {
*region = nullptr;
*size = 0;
return;
}
// Try to fit in the inline buffer if possible.
if (!is_tree()) {
size_t inline_length = inline_size();
if (max_length <= kMaxInline - inline_length) {
*region = data_.as_chars() + inline_length;
*size = max_length;
set_inline_size(inline_length + max_length);
size_t length) {
auto constexpr method = CordzUpdateTracker::kGetAppendRegion;
CordRep* root = tree();
size_t sz = root ? root->length : inline_size();
if (root == nullptr) {
size_t available = kMaxInline - sz;
if (available >= (has_length ? length : 1)) {
*region = data_.as_chars() + sz;
*size = has_length ? length : available;
set_inline_size(has_length ? sz + length : kMaxInline);
return;
}
}
CordRep* root = force_tree(max_length);
if (PrepareAppendRegion(root, region, size, max_length)) {
UpdateCordzStatistics();
size_t extra = has_length ? length : (std::max)(sz, kMinFlatLength);
CordRep* rep = root ? root : MakeFlatWithExtraCapacity(extra);
CordzUpdateScope scope(root ? data_.cordz_info() : nullptr, method);
if (PrepareAppendRegion(rep, region, size, length)) {
CommitTree(root, rep, scope, method);
return;
}
// Allocate new node.
CordRepFlat* new_node =
CordRepFlat::New(std::max(static_cast<size_t>(root->length), max_length));
new_node->length = std::min(new_node->Capacity(), max_length);
CordRepFlat* new_node = CordRepFlat::New(extra);
new_node->length = std::min(new_node->Capacity(), length);
*region = new_node->Data();
*size = new_node->length;
if (cord_ring_enabled()) {
replace_tree(CordRepRing::Append(ForceRing(root, 1), new_node));
return;
}
replace_tree(Concat(root, new_node));
}
void Cord::InlineRep::GetAppendRegion(char** region, size_t* size) {
const size_t max_length = std::numeric_limits<size_t>::max();
// Try to fit in the inline buffer if possible.
if (!data_.is_tree()) {
size_t inline_length = inline_size();
if (inline_length < kMaxInline) {
*region = data_.as_chars() + inline_length;
*size = kMaxInline - inline_length;
set_inline_size(kMaxInline);
return;
}
}
CordRep* root = force_tree(max_length);
if (PrepareAppendRegion(root, region, size, max_length)) {
UpdateCordzStatistics();
return;
}
// Allocate new node.
CordRepFlat* new_node = CordRepFlat::New(root->length);
new_node->length = new_node->Capacity();
*region = new_node->Data();
*size = new_node->length;
if (cord_ring_enabled()) {
replace_tree(CordRepRing::Append(ForceRing(root, 1), new_node));
return;
rep = CordRepRing::Append(ForceRing(rep, 1), new_node);
} else {
rep = Concat(rep, new_node);
}
replace_tree(Concat(root, new_node));
CommitTree(root, rep, scope, method);
}
// If the rep is a leaf, this will increment the value at total_mem_usage and
@ -529,21 +498,26 @@ void Cord::InlineRep::UpdateCordzStatisticsSlow() {
}
void Cord::InlineRep::AssignSlow(const Cord::InlineRep& src) {
UnrefTree();
data_ = src.data_;
if (is_tree()) {
CordRep::Ref(tree());
clear_cordz_info();
CordzInfo::MaybeTrackCord(data_, CordzUpdateTracker::kAssignCord);
assert(&src != this);
assert(is_tree() || src.is_tree());
auto constexpr method = CordzUpdateTracker::kAssignCord;
if (CordRep* tree = this->tree()) {
CordzUpdateScope scope(data_.cordz_info(), method);
CordRep::Unref(tree);
if (CordRep* src_tree = src.tree()) {
SetTree(CordRep::Ref(src_tree), scope);
} else {
scope.SetCordRep(nullptr);
data_ = src.data_;
}
} else {
EmplaceTree(CordRep::Ref(src.as_tree()), method);
}
}
void Cord::InlineRep::UnrefTree() {
if (is_tree()) {
if (ABSL_PREDICT_FALSE(is_profiled())) {
absl::cord_internal::CordzInfo::UntrackCord(cordz_info());
}
CordzInfo::MaybeUntrackCord(data_.cordz_info());
CordRep::Unref(tree());
}
}
@ -595,12 +569,9 @@ template Cord::Cord(std::string&& src);
// The destruction code is separate so that the compiler can determine
// that it does not need to call the destructor on a moved-from Cord.
void Cord::DestroyCordSlow() {
if (ABSL_PREDICT_FALSE(contents_.is_profiled())) {
absl::cord_internal::CordzInfo::UntrackCord(contents_.cordz_info());
}
if (CordRep* tree = contents_.tree()) {
CordRep::Unref(VerifyTree(tree));
}
assert(contents_.is_tree());
CordzInfo::MaybeUntrackCord(contents_.cordz_info());
CordRep::Unref(VerifyTree(contents_.as_tree()));
}
// --------------------------------------------------------------------
@ -613,23 +584,18 @@ void Cord::Clear() {
}
Cord& Cord::operator=(absl::string_view src) {
if (ABSL_PREDICT_FALSE(contents_.is_profiled())) {
absl::cord_internal::CordzInfo::UntrackCord(contents_.cordz_info());
contents_.clear_cordz_info();
}
const char* data = src.data();
size_t length = src.size();
CordRep* tree = contents_.tree();
if (length <= InlineRep::kMaxInline) {
// Embed into this->contents_
if (tree) CordzInfo::MaybeUntrackCord(contents_.cordz_info());
contents_.set_data(data, length, true);
if (tree) CordRep::Unref(tree);
return *this;
}
if (tree != nullptr && tree->tag >= FLAT &&
tree->flat()->Capacity() >= length &&
tree->refcount.IsOne()) {
tree->flat()->Capacity() >= length && tree->refcount.IsOne()) {
// Copy in place if the existing FLAT node is reusable.
memmove(tree->flat()->Data(), data, length);
tree->length = length;
@ -655,71 +621,70 @@ template Cord& Cord::operator=(std::string&& src);
// TODO(sanjay): Move to Cord::InlineRep section of file. For now,
// we keep it here to make diffs easier.
void Cord::InlineRep::AppendArray(const char* src_data, size_t src_size) {
if (src_size == 0) return; // memcpy(_, nullptr, 0) is undefined.
void Cord::InlineRep::AppendArray(absl::string_view src,
MethodIdentifier method) {
if (src.empty()) return; // memcpy(_, nullptr, 0) is undefined.
size_t appended = 0;
CordRep* root = nullptr;
if (is_tree()) {
root = data_.as_tree();
CordRep* rep = tree();
const CordRep* const root = rep;
CordzUpdateScope scope(root ? cordz_info() : nullptr, method);
if (root != nullptr) {
char* region;
if (PrepareAppendRegion(root, &region, &appended, src_size)) {
memcpy(region, src_data, appended);
UpdateCordzStatistics();
if (PrepareAppendRegion(rep, &region, &appended, src.size())) {
memcpy(region, src.data(), appended);
}
} else {
// Try to fit in the inline buffer if possible.
size_t inline_length = inline_size();
if (src_size <= kMaxInline - inline_length) {
if (src.size() <= kMaxInline - inline_length) {
// Append new data to embedded array
memcpy(data_.as_chars() + inline_length, src_data, src_size);
set_inline_size(inline_length + src_size);
memcpy(data_.as_chars() + inline_length, src.data(), src.size());
set_inline_size(inline_length + src.size());
return;
}
// It is possible that src_data == data_, but when we transition from an
// It is possible that src.data() == data_, but when we transition from an
// InlineRep to a tree we need to assign data_ = root via set_tree. To
// avoid corrupting the source data before we copy it, delay calling
// set_tree until after we've copied data.
// We are going from an inline size to beyond inline size. Make the new size
// either double the inlined size, or the added size + 10%.
const size_t size1 = inline_length * 2 + src_size;
const size_t size2 = inline_length + src_size / 10;
root = CordRepFlat::New(std::max<size_t>(size1, size2));
appended = std::min(
src_size, root->flat()->Capacity() - inline_length);
memcpy(root->flat()->Data(), data_.as_chars(), inline_length);
memcpy(root->flat()->Data() + inline_length, src_data, appended);
root->length = inline_length + appended;
set_tree(root);
}
src_data += appended;
src_size -= appended;
if (src_size == 0) {
const size_t size1 = inline_length * 2 + src.size();
const size_t size2 = inline_length + src.size() / 10;
rep = CordRepFlat::New(std::max<size_t>(size1, size2));
appended = std::min(src.size(), rep->flat()->Capacity() - inline_length);
memcpy(rep->flat()->Data(), data_.as_chars(), inline_length);
memcpy(rep->flat()->Data() + inline_length, src.data(), appended);
rep->length = inline_length + appended;
}
src.remove_prefix(appended);
if (src.empty()) {
CommitTree(root, rep, scope, method);
return;
}
if (cord_ring_enabled()) {
absl::string_view data(src_data, src_size);
root = ForceRing(root, (data.size() - 1) / kMaxFlatLength + 1);
replace_tree(CordRepRing::Append(root->ring(), data));
return;
}
// Use new block(s) for any remaining bytes that were not handled above.
// Alloc extra memory only if the right child of the root of the new tree is
// going to be a FLAT node, which will permit further inplace appends.
size_t length = src_size;
if (src_size < kMaxFlatLength) {
// The new length is either
// - old size + 10%
// - old_size + src_size
// This will cause a reasonable conservative step-up in size that is still
// large enough to avoid excessive amounts of small fragments being added.
length = std::max<size_t>(root->length / 10, src_size);
rep = ForceRing(rep, (src.size() - 1) / kMaxFlatLength + 1);
rep = CordRepRing::Append(rep->ring(), src);
} else {
// Use new block(s) for any remaining bytes that were not handled above.
// Alloc extra memory only if the right child of the root of the new tree
// is going to be a FLAT node, which will permit further inplace appends.
size_t length = src.size();
if (src.size() < kMaxFlatLength) {
// The new length is either
// - old size + 10%
// - old_size + src.size()
// This will cause a reasonable conservative step-up in size that is
// still large enough to avoid excessive amounts of small fragments
// being added.
length = std::max<size_t>(rep->length / 10, src.size());
}
rep = Concat(rep, NewTree(src.data(), src.size(), length - src.size()));
}
set_tree(Concat(root, NewTree(src_data, src_size, length - src_size)));
CommitTree(root, rep, scope, method);
}
inline CordRep* Cord::TakeRep() const& {
@ -734,12 +699,13 @@ inline CordRep* Cord::TakeRep() && {
template <typename C>
inline void Cord::AppendImpl(C&& src) {
auto constexpr method = CordzUpdateTracker::kAppendCord;
if (empty()) {
// Since destination is empty, we can avoid allocating a node,
if (src.contents_.is_tree()) {
// by taking the tree directly
CordRep* rep = std::forward<C>(src).TakeRep();
contents_.EmplaceTree(rep, CordzUpdateTracker::kAppendCord);
contents_.EmplaceTree(rep, method);
} else {
// or copying over inline data
contents_.data_ = src.contents_.data_;
@ -753,12 +719,12 @@ inline void Cord::AppendImpl(C&& src) {
CordRep* src_tree = src.contents_.tree();
if (src_tree == nullptr) {
// src has embedded data.
contents_.AppendArray(src.contents_.data(), src_size);
contents_.AppendArray({src.contents_.data(), src_size}, method);
return;
}
if (src_tree->tag >= FLAT) {
// src tree just has one flat node.
contents_.AppendArray(src_tree->flat()->Data(), src_size);
contents_.AppendArray({src_tree->flat()->Data(), src_size}, method);
return;
}
if (&src == this) {
@ -797,7 +763,7 @@ void Cord::Prepend(const Cord& src) {
CordRep* src_tree = src.contents_.tree();
if (src_tree != nullptr) {
CordRep::Ref(src_tree);
contents_.PrependTree(src_tree);
contents_.PrependTree(src_tree, CordzUpdateTracker::kPrependCord);
return;
}
@ -820,7 +786,8 @@ void Cord::Prepend(absl::string_view src) {
return;
}
}
contents_.PrependTree(NewTree(src.data(), src.size(), 0));
CordRep* rep = NewTree(src.data(), src.size(), 0);
contents_.PrependTree(rep, CordzUpdateTracker::kPrependString);
}
template <typename T, Cord::EnableIfString<T>>
@ -1847,8 +1814,7 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os,
*os << absl::CEscape(std::string(rep->external()->base, rep->length));
*os << "]\n";
} else if (rep->tag >= FLAT) {
*os << "FLAT cap=" << rep->flat()->Capacity()
<< " [";
*os << "FLAT cap=" << rep->flat()->Capacity() << " [";
if (include_data)
*os << absl::CEscape(std::string(rep->flat()->Data(), rep->length));
*os << "]\n";
@ -1860,7 +1826,7 @@ static void DumpNode(CordRep* rep, bool include_data, std::ostream* os,
do {
DumpNode(ring->entry_child(head), include_data, os,
indent + kIndentStep);
head = ring->advance(head);;
head = ring->advance(head);
} while (head != ring->tail());
}
if (stack.empty()) break;
@ -1906,9 +1872,8 @@ static bool VerifyNode(CordRep* root, CordRep* start_node,
worklist.push_back(node->concat()->left);
}
} else if (node->tag >= FLAT) {
ABSL_INTERNAL_CHECK(
node->length <= node->flat()->Capacity(),
ReportError(root, node));
ABSL_INTERNAL_CHECK(node->length <= node->flat()->Capacity(),
ReportError(root, node));
} else if (node->tag == EXTERNAL) {
ABSL_INTERNAL_CHECK(node->external()->base != nullptr,
ReportError(root, node));

@ -671,6 +671,7 @@ class Cord {
private:
using CordRep = absl::cord_internal::CordRep;
using CordRepFlat = absl::cord_internal::CordRepFlat;
using CordzInfo = cord_internal::CordzInfo;
using CordzUpdateScope = cord_internal::CordzUpdateScope;
using CordzUpdateTracker = cord_internal::CordzUpdateTracker;
@ -728,12 +729,15 @@ class Cord {
// Returns non-null iff was holding a pointer
absl::cord_internal::CordRep* clear();
// Converts to pointer if necessary.
absl::cord_internal::CordRep* force_tree(size_t extra_hint);
void reduce_size(size_t n); // REQUIRES: holding data
void remove_prefix(size_t n); // REQUIRES: holding data
void AppendArray(const char* src_data, size_t src_size);
void AppendArray(absl::string_view src, MethodIdentifier method);
absl::string_view FindFlatStartPiece() const;
// Creates a CordRepFlat instance from the current inlined data with `extra'
// bytes of desired additional capacity.
CordRepFlat* MakeFlatWithExtraCapacity(size_t extra);
// Sets the tree value for this instance. `rep` must not be null.
// Requires the current instance to hold a tree, and a lock to be held on
// any CordzInfo referenced by this instance. The latter is enforced through
@ -747,12 +751,22 @@ class Cord {
// value to a non-inlined (tree / ring) value.
void EmplaceTree(CordRep* rep, MethodIdentifier method);
// Commits the change of a newly created, or updated `rep` root value into
// this cord. `old_rep` indicates the old (inlined or tree) value of the
// cord, and determines if the commit invokes SetTree() or EmplaceTree().
void CommitTree(const CordRep* old_rep, CordRep* rep,
const CordzUpdateScope& scope, MethodIdentifier method);
void AppendTreeToInlined(CordRep* tree, MethodIdentifier method);
void AppendTreeToTree(CordRep* tree, MethodIdentifier method);
void AppendTree(CordRep* tree, MethodIdentifier method);
void PrependTree(absl::cord_internal::CordRep* tree);
void GetAppendRegion(char** region, size_t* size, size_t max_length);
void GetAppendRegion(char** region, size_t* size);
void PrependTreeToInlined(CordRep* tree, MethodIdentifier method);
void PrependTreeToTree(CordRep* tree, MethodIdentifier method);
void PrependTree(CordRep* tree, MethodIdentifier method);
template <bool has_length>
void GetAppendRegion(char** region, size_t* size, size_t length);
bool IsSame(const InlineRep& other) const {
return memcmp(&data_, &other.data_, sizeof(data_)) == 0;
}
@ -1041,6 +1055,16 @@ inline size_t Cord::InlineRep::size() const {
return is_tree() ? as_tree()->length : inline_size();
}
inline cord_internal::CordRepFlat* Cord::InlineRep::MakeFlatWithExtraCapacity(
size_t extra) {
static_assert(cord_internal::kMinFlatLength >= sizeof(data_), "");
size_t len = data_.inline_size();
auto* result = CordRepFlat::New(len + extra);
result->length = len;
memcpy(result->Data(), data_.as_chars(), sizeof(data_));
return result;
}
inline void Cord::InlineRep::EmplaceTree(CordRep* rep,
MethodIdentifier method) {
data_.make_tree(rep);
@ -1055,10 +1079,20 @@ inline void Cord::InlineRep::SetTree(CordRep* rep,
scope.SetCordRep(rep);
}
inline void Cord::InlineRep::CommitTree(const CordRep* old_rep, CordRep* rep,
const CordzUpdateScope& scope,
MethodIdentifier method) {
if (old_rep) {
SetTree(rep, scope);
} else {
EmplaceTree(rep, method);
}
}
inline void Cord::InlineRep::set_tree(absl::cord_internal::CordRep* rep) {
if (rep == nullptr) {
if (ABSL_PREDICT_FALSE(is_profiled())) {
absl::cord_internal::CordzInfo::UntrackCord(cordz_info());
if (data_.is_tree()) {
CordzInfo::MaybeUntrackCord(data_.cordz_info());
}
ResetToEmpty();
} else {
@ -1086,8 +1120,8 @@ inline void Cord::InlineRep::replace_tree(absl::cord_internal::CordRep* rep) {
}
inline absl::cord_internal::CordRep* Cord::InlineRep::clear() {
if (ABSL_PREDICT_FALSE(is_profiled())) {
absl::cord_internal::CordzInfo::UntrackCord(cordz_info());
if (is_tree()) {
CordzInfo::MaybeUntrackCord(cordz_info());
}
absl::cord_internal::CordRep* result = tree();
ResetToEmpty();
@ -1180,7 +1214,7 @@ inline absl::string_view Cord::Flatten() {
}
inline void Cord::Append(absl::string_view src) {
contents_.AppendArray(src.data(), src.size());
contents_.AppendArray(src, CordzUpdateTracker::kAppendString);
}
extern template void Cord::Append(std::string&& src);

@ -17,12 +17,50 @@
#ifndef ABSL_STRINGS_CORD_TEST_HELPERS_H_
#define ABSL_STRINGS_CORD_TEST_HELPERS_H_
#include <cstdint>
#include <iostream>
#include "absl/strings/cord.h"
#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/string_view.h"
namespace absl {
ABSL_NAMESPACE_BEGIN
// Cord sizes relevant for testing
enum class TestCordSize {
kEmpty = 0,
kInlined = cord_internal::kMaxInline / 2 + 1,
kSmall = cord_internal::kMaxBytesToCopy / 2 + 1,
kMedium = cord_internal::kMaxFlatLength / 2 + 1,
kLarge = cord_internal::kMaxFlatLength * 4
};
// To string helper
inline absl::string_view ToString(TestCordSize size) {
switch (size) {
case TestCordSize::kEmpty:
return "Empty";
case TestCordSize::kInlined:
return "Inlined";
case TestCordSize::kSmall:
return "Small";
case TestCordSize::kMedium:
return "Medium";
case TestCordSize::kLarge:
return "Large";
}
return "???";
}
// Returns the length matching the specified size
inline size_t Length(TestCordSize size) { return static_cast<size_t>(size); }
// Stream output helper
inline std::ostream& operator<<(std::ostream& stream, TestCordSize size) {
return stream << ToString(size);
}
// Creates a multi-segment Cord from an iterable container of strings. The
// resulting Cord is guaranteed to have one segment for every string in the
// container. This allows code to be unit tested with multi-segment Cord

@ -12,6 +12,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
#include <cstdint>
#include <string>
#include "gmock/gmock.h"
@ -20,6 +21,7 @@
#include "absl/base/internal/raw_logging.h"
#include "absl/base/macros.h"
#include "absl/strings/cord.h"
#include "absl/strings/cord_test_helpers.h"
#include "absl/strings/cordz_test_helpers.h"
#include "absl/strings/internal/cordz_functions.h"
#include "absl/strings/internal/cordz_info.h"
@ -31,6 +33,8 @@
#ifdef ABSL_INTERNAL_CORDZ_ENABLED
using testing::Eq;
namespace absl {
ABSL_NAMESPACE_BEGIN
@ -40,48 +44,126 @@ using cord_internal::CordzStatistics;
using cord_internal::CordzUpdateTracker;
using Method = CordzUpdateTracker::MethodIdentifier;
// Do not print cord contents, we only care about 'size' perhaps.
// Note that this method must be inside the named namespace.
inline void PrintTo(const Cord& cord, std::ostream* s) {
if (s) *s << "Cord[" << cord.size() << "]";
}
namespace {
std::string MakeString(int length) {
std::string s(length, '.');
for (int i = 4; i < length; i += 2) {
s[i] = '\b';
}
return s;
// Returns a string_view value of the specified length
// We do this to avoid 'consuming' large strings in Cord by default.
absl::string_view MakeString(size_t size) {
thread_local std::string str;
str = std::string(size, '.');
return str;
}
absl::string_view MakeString(TestCordSize size) {
return MakeString(Length(size));
}
std::string TestParamToString(::testing::TestParamInfo<TestCordSize> size) {
return absl::StrCat("On", ToString(size.param), "Cord");
}
TEST(CordzTest, ConstructSmallStringView) {
CordzSamplingIntervalHelper sample_every(1);
Cord cord(absl::string_view(MakeString(50)));
class CordzUpdateTest : public testing::TestWithParam<TestCordSize> {
public:
Cord& cord() { return cord_; }
Method InitialOr(Method method) const {
return (GetParam() > TestCordSize::kInlined) ? Method::kConstructorString
: method;
}
private:
CordzSamplingIntervalHelper sample_every_{1};
Cord cord_{MakeString(GetParam())};
};
INSTANTIATE_TEST_SUITE_P(WithParam, CordzUpdateTest,
testing::Values(TestCordSize::kEmpty,
TestCordSize::kInlined,
TestCordSize::kLarge),
TestParamToString);
TEST(CordzTest, ConstructSmallString) {
CordzSamplingIntervalHelper sample_every{1};
Cord cord(MakeString(TestCordSize::kSmall));
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
}
TEST(CordzTest, ConstructLargeStringView) {
CordzSamplingIntervalHelper sample_every(1);
Cord cord(absl::string_view(MakeString(5000)));
TEST(CordzTest, ConstructLargeString) {
CordzSamplingIntervalHelper sample_every{1};
Cord cord(MakeString(TestCordSize::kLarge));
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
}
TEST(CordzTest, CopyConstruct) {
CordzSamplingIntervalHelper sample_every(1);
Cord src = UnsampledCord(MakeString(5000));
CordzSamplingIntervalHelper sample_every{1};
Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
Cord cord(src);
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorCord));
}
TEST(CordzTest, AppendLargeCordToEmpty) {
CordzSamplingIntervalHelper sample_every(1);
Cord cord;
Cord src = UnsampledCord(MakeString(5000));
cord.Append(src);
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kAppendCord));
TEST(CordzTest, MoveConstruct) {
CordzSamplingIntervalHelper sample_every{1};
Cord src(MakeString(TestCordSize::kLarge));
Cord cord(std::move(src));
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
}
TEST(CordzTest, MoveAppendLargeCordToEmpty) {
CordzSamplingIntervalHelper sample_every(1);
TEST_P(CordzUpdateTest, AssignCord) {
Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
cord() = src;
EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAssignCord)));
}
TEST(CordzTest, AssignInlinedCord) {
CordzSampleToken token;
CordzSamplingIntervalHelper sample_every{1};
Cord cord(MakeString(TestCordSize::kLarge));
const CordzInfo* info = GetCordzInfoForTesting(cord);
Cord src = UnsampledCord(MakeString(TestCordSize::kInlined));
cord = src;
EXPECT_THAT(GetCordzInfoForTesting(cord), Eq(nullptr));
EXPECT_FALSE(CordzInfoIsListed(info));
}
TEST(CordzTest, MoveAssignCord) {
CordzSamplingIntervalHelper sample_every{1};
Cord cord;
cord.Append(UnsampledCord(MakeString(5000)));
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kAppendCord));
Cord src(MakeString(TestCordSize::kLarge));
cord = std::move(src);
EXPECT_THAT(cord, HasValidCordzInfoOf(Method::kConstructorString));
}
TEST_P(CordzUpdateTest, AppendCord) {
Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
cord().Append(src);
EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendCord)));
}
TEST_P(CordzUpdateTest, MoveAppendCord) {
cord().Append(UnsampledCord(MakeString(TestCordSize::kLarge)));
EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendCord)));
}
TEST_P(CordzUpdateTest, PrependCord) {
Cord src = UnsampledCord(MakeString(TestCordSize::kLarge));
cord().Prepend(src);
EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kPrependCord)));
}
TEST_P(CordzUpdateTest, AppendSmallArray) {
cord().Append(MakeString(TestCordSize::kSmall));
EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendString)));
}
TEST_P(CordzUpdateTest, AppendLargeArray) {
cord().Append(MakeString(TestCordSize::kLarge));
EXPECT_THAT(cord(), HasValidCordzInfoOf(InitialOr(Method::kAppendString)));
}
} // namespace

@ -15,6 +15,8 @@
#ifndef ABSL_STRINGS_CORDZ_TEST_HELPERS_H_
#define ABSL_STRINGS_CORDZ_TEST_HELPERS_H_
#include <utility>
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "absl/base/config.h"
@ -32,6 +34,7 @@ ABSL_NAMESPACE_BEGIN
// Returns the CordzInfo for the cord, or nullptr if the cord is not sampled.
inline const cord_internal::CordzInfo* GetCordzInfoForTesting(
const Cord& cord) {
if (cord.size() <= cord_internal::kMaxInline) return nullptr;
return cord.contents_.cordz_info();
}
@ -44,11 +47,13 @@ inline bool CordzInfoIsListed(const cord_internal::CordzInfo* cordz_info,
return false;
}
// Matcher on Cord that verifies all of:
// Matcher on Cord* that verifies all of:
// - the cord is sampled
// - the CordzInfo of the cord is listed / discoverable.
// - the reported CordzStatistics match the cord's actual properties
// - the cord has an (initial) UpdateTracker count of 1 for `method`
// This matcher accepts a const Cord* to avoid having the matcher dump
// copious amounts of cord data on failures.
MATCHER_P(HasValidCordzInfoOf, method, "CordzInfo matches cord") {
const cord_internal::CordzInfo* cord_info = GetCordzInfoForTesting(arg);
if (cord_info == nullptr) {

@ -16,16 +16,19 @@
#include <atomic>
#include "absl/base/internal/raw_logging.h" // For ABSL_RAW_CHECK
#include "absl/base/internal/spinlock.h"
namespace absl {
ABSL_NAMESPACE_BEGIN
namespace cord_internal {
using ::absl::base_internal::SpinLockHolder;
ABSL_CONST_INIT CordzHandle::Queue CordzHandle::global_queue_(absl::kConstInit);
CordzHandle::CordzHandle(bool is_snapshot) : is_snapshot_(is_snapshot) {
if (is_snapshot) {
MutexLock lock(&queue_->mutex);
SpinLockHolder lock(&queue_->mutex);
CordzHandle* dq_tail = queue_->dq_tail.load(std::memory_order_acquire);
if (dq_tail != nullptr) {
dq_prev_ = dq_tail;
@ -40,7 +43,7 @@ CordzHandle::~CordzHandle() {
if (is_snapshot_) {
std::vector<CordzHandle*> to_delete;
{
absl::MutexLock lock(&queue_->mutex);
SpinLockHolder lock(&queue_->mutex);
CordzHandle* next = dq_next_;
if (dq_prev_ == nullptr) {
// We were head of the queue, delete every CordzHandle until we reach
@ -70,7 +73,7 @@ void CordzHandle::Delete(CordzHandle* handle) {
handle->ODRCheck();
Queue* const queue = handle->queue_;
if (!handle->is_snapshot_ && !queue->IsEmpty()) {
MutexLock lock(&queue->mutex);
SpinLockHolder lock(&queue->mutex);
CordzHandle* dq_tail = queue->dq_tail.load(std::memory_order_acquire);
if (dq_tail != nullptr) {
handle->dq_prev_ = dq_tail;
@ -85,7 +88,7 @@ void CordzHandle::Delete(CordzHandle* handle) {
std::vector<const CordzHandle*> CordzHandle::DiagnosticsGetDeleteQueue() {
std::vector<const CordzHandle*> handles;
MutexLock lock(&global_queue_.mutex);
SpinLockHolder lock(&global_queue_.mutex);
CordzHandle* dq_tail = global_queue_.dq_tail.load(std::memory_order_acquire);
for (const CordzHandle* p = dq_tail; p; p = p->dq_prev_) {
handles.push_back(p);
@ -100,7 +103,7 @@ bool CordzHandle::DiagnosticsHandleIsSafeToInspect(
if (handle == nullptr) return true;
if (handle->is_snapshot_) return false;
bool snapshot_found = false;
MutexLock lock(&queue_->mutex);
SpinLockHolder lock(&queue_->mutex);
for (const CordzHandle* p = queue_->dq_tail; p; p = p->dq_prev_) {
if (p == handle) return !snapshot_found;
if (p == this) snapshot_found = true;
@ -117,7 +120,7 @@ CordzHandle::DiagnosticsGetSafeToInspectDeletedHandles() {
return handles;
}
MutexLock lock(&queue_->mutex);
SpinLockHolder lock(&queue_->mutex);
for (const CordzHandle* p = dq_next_; p != nullptr; p = p->dq_next_) {
if (!p->is_snapshot()) {
handles.push_back(p);

@ -20,6 +20,7 @@
#include "absl/base/config.h"
#include "absl/base/internal/raw_logging.h"
#include "absl/base/internal/spinlock.h"
#include "absl/synchronization/mutex.h"
namespace absl {
@ -70,9 +71,11 @@ class CordzHandle {
// Global queue data. CordzHandle stores a pointer to the global queue
// instance to harden against ODR violations.
struct Queue {
constexpr explicit Queue(absl::ConstInitType) : mutex(absl::kConstInit) {}
constexpr explicit Queue(absl::ConstInitType)
: mutex(absl::kConstInit,
absl::base_internal::SCHEDULE_COOPERATIVE_AND_KERNEL) {}
absl::Mutex mutex;
absl::base_internal::SpinLock mutex;
std::atomic<CordzHandle*> dq_tail ABSL_GUARDED_BY(mutex){nullptr};
// Returns true if this delete queue is empty. This method does not acquire

@ -15,6 +15,7 @@
#include "absl/strings/internal/cordz_info.h"
#include "absl/base/config.h"
#include "absl/base/internal/spinlock.h"
#include "absl/debugging/stacktrace.h"
#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/internal/cordz_handle.h"
@ -27,22 +28,34 @@ namespace absl {
ABSL_NAMESPACE_BEGIN
namespace cord_internal {
using ::absl::base_internal::SpinLockHolder;
constexpr int CordzInfo::kMaxStackDepth;
ABSL_CONST_INIT std::atomic<CordzInfo*> CordzInfo::ci_head_{nullptr};
ABSL_CONST_INIT absl::Mutex CordzInfo::ci_mutex_(absl::kConstInit);
ABSL_CONST_INIT CordzInfo::List CordzInfo::global_list_{absl::kConstInit};
CordzInfo* CordzInfo::Head(const CordzSnapshot& snapshot) {
ABSL_ASSERT(snapshot.is_snapshot());
ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(ci_head_unsafe()));
return ci_head_unsafe();
// We can do an 'unsafe' load of 'head', as we are guaranteed that the
// instance it points to is kept alive by the provided CordzSnapshot, so we
// can simply return the current value using an acquire load.
// We do enforce in DEBUG builds that the 'head' value is present in the
// delete queue: ODR violations may lead to 'snapshot' and 'global_list_'
// being in different libraries / modules.
CordzInfo* head = global_list_.head.load(std::memory_order_acquire);
ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(head));
return head;
}
CordzInfo* CordzInfo::Next(const CordzSnapshot& snapshot) const {
ABSL_ASSERT(snapshot.is_snapshot());
// Similar to the 'Head()' function, we do not need a mutex here.
CordzInfo* next = ci_next_.load(std::memory_order_acquire);
ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(this));
ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(ci_next_unsafe()));
return ci_next_unsafe();
ABSL_ASSERT(snapshot.DiagnosticsHandleIsSafeToInspect(next));
return next;
}
void CordzInfo::TrackCord(InlineData& cord, MethodIdentifier method) {
@ -63,14 +76,6 @@ void CordzInfo::TrackCord(InlineData& cord, const InlineData& src,
cordz_info->Track();
}
void CordzInfo::UntrackCord(CordzInfo* cordz_info) {
assert(cordz_info);
if (cordz_info) {
cordz_info->Untrack();
CordzHandle::Delete(cordz_info);
}
}
CordzInfo::MethodIdentifier CordzInfo::GetParentMethod(const CordzInfo* src) {
if (src == nullptr) return MethodIdentifier::kUnknown;
return src->parent_method_ != MethodIdentifier::kUnknown ? src->parent_method_
@ -110,14 +115,14 @@ CordzInfo::~CordzInfo() {
}
void CordzInfo::Track() {
absl::MutexLock l(&ci_mutex_);
SpinLockHolder l(&list_->mutex);
CordzInfo* const head = ci_head_.load(std::memory_order_acquire);
CordzInfo* const head = list_->head.load(std::memory_order_acquire);
if (head != nullptr) {
head->ci_prev_.store(this, std::memory_order_release);
}
ci_next_.store(head, std::memory_order_release);
ci_head_.store(this, std::memory_order_release);
list_->head.store(this, std::memory_order_release);
}
void CordzInfo::Untrack() {
@ -128,24 +133,28 @@ void CordzInfo::Untrack() {
rep_ = nullptr;
}
absl::MutexLock l(&ci_mutex_);
CordzInfo* const head = ci_head_.load(std::memory_order_acquire);
CordzInfo* const next = ci_next_.load(std::memory_order_acquire);
CordzInfo* const prev = ci_prev_.load(std::memory_order_acquire);
if (next) {
ABSL_ASSERT(next->ci_prev_.load(std::memory_order_acquire) == this);
next->ci_prev_.store(prev, std::memory_order_release);
}
if (prev) {
ABSL_ASSERT(head != this);
ABSL_ASSERT(prev->ci_next_.load(std::memory_order_acquire) == this);
prev->ci_next_.store(next, std::memory_order_release);
} else {
ABSL_ASSERT(head == this);
ci_head_.store(next, std::memory_order_release);
ODRCheck();
{
SpinLockHolder l(&list_->mutex);
CordzInfo* const head = list_->head.load(std::memory_order_acquire);
CordzInfo* const next = ci_next_.load(std::memory_order_acquire);
CordzInfo* const prev = ci_prev_.load(std::memory_order_acquire);
if (next) {
ABSL_ASSERT(next->ci_prev_.load(std::memory_order_acquire) == this);
next->ci_prev_.store(prev, std::memory_order_release);
}
if (prev) {
ABSL_ASSERT(head != this);
ABSL_ASSERT(prev->ci_next_.load(std::memory_order_acquire) == this);
prev->ci_next_.store(next, std::memory_order_release);
} else {
ABSL_ASSERT(head == this);
list_->head.store(next, std::memory_order_release);
}
}
CordzHandle::Delete(this);
}
void CordzInfo::Lock(MethodIdentifier method)
@ -163,7 +172,6 @@ void CordzInfo::Unlock() ABSL_UNLOCK_FUNCTION(mutex_) {
mutex_.Unlock();
if (!tracked) {
Untrack();
CordzHandle::Delete(this);
}
}

@ -20,6 +20,8 @@
#include <functional>
#include "absl/base/config.h"
#include "absl/base/internal/raw_logging.h"
#include "absl/base/internal/spinlock.h"
#include "absl/base/thread_annotations.h"
#include "absl/strings/internal/cord_internal.h"
#include "absl/strings/internal/cordz_functions.h"
@ -79,17 +81,22 @@ class ABSL_LOCKABLE CordzInfo : public CordzHandle {
// and before the root cordrep of the sampled cord is unreffed.
// This function may extend the lifetime of the cordrep in cases where the
// CordInfo instance is being held by a concurrent collection thread.
static void UntrackCord(CordzInfo* cordz_info);
void Untrack();
// Invokes UntrackCord() on `info` if `info` is not null.
static void MaybeUntrackCord(CordzInfo* info);
CordzInfo() = delete;
CordzInfo(const CordzInfo&) = delete;
CordzInfo& operator=(const CordzInfo&) = delete;
// Retrieves the oldest existing CordzInfo.
static CordzInfo* Head(const CordzSnapshot& snapshot);
static CordzInfo* Head(const CordzSnapshot& snapshot)
ABSL_NO_THREAD_SAFETY_ANALYSIS;
// Retrieves the next oldest existing CordzInfo older than 'this' instance.
CordzInfo* Next(const CordzSnapshot& snapshot) const;
CordzInfo* Next(const CordzSnapshot& snapshot) const
ABSL_NO_THREAD_SAFETY_ANALYSIS;
// Locks this instance for the update identified by `method`.
// Increases the count for `method` in `update_tracker`.
@ -141,6 +148,17 @@ class ABSL_LOCKABLE CordzInfo : public CordzHandle {
}
private:
// Global cordz info list. CordzInfo stores a pointer to the global list
// instance to harden against ODR violations.
struct List {
constexpr explicit List(absl::ConstInitType)
: mutex(absl::kConstInit,
absl::base_internal::SCHEDULE_COOPERATIVE_AND_KERNEL) {}
absl::base_internal::SpinLock mutex;
std::atomic<CordzInfo*> head ABSL_GUARDED_BY(mutex){nullptr};
};
static constexpr int kMaxStackDepth = 64;
explicit CordzInfo(CordRep* rep, const CordzInfo* src,
@ -148,7 +166,6 @@ class ABSL_LOCKABLE CordzInfo : public CordzHandle {
~CordzInfo() override;
void Track();
void Untrack();
// Returns the parent method from `src`, which is either `parent_method_` or
// `method_` depending on `parent_method_` being kUnknown.
@ -161,23 +178,20 @@ class ABSL_LOCKABLE CordzInfo : public CordzHandle {
// Returns 0 if `src` is null.
static int FillParentStack(const CordzInfo* src, void** stack);
// 'Unsafe' head/next/prev accessors not requiring the lock being held.
// These are used exclusively for iterations (Head / Next) where we enforce
// a token being held, so reading an 'old' / deleted pointer is fine.
static CordzInfo* ci_head_unsafe() ABSL_NO_THREAD_SAFETY_ANALYSIS {
return ci_head_.load(std::memory_order_acquire);
}
CordzInfo* ci_next_unsafe() const ABSL_NO_THREAD_SAFETY_ANALYSIS {
return ci_next_.load(std::memory_order_acquire);
}
CordzInfo* ci_prev_unsafe() const ABSL_NO_THREAD_SAFETY_ANALYSIS {
return ci_prev_.load(std::memory_order_acquire);
void ODRCheck() const {
#ifndef NDEBUG
ABSL_RAW_CHECK(list_ == &global_list_, "ODR violation in Cord");
#endif
}
static absl::Mutex ci_mutex_;
static std::atomic<CordzInfo*> ci_head_ ABSL_GUARDED_BY(ci_mutex_);
std::atomic<CordzInfo*> ci_prev_ ABSL_GUARDED_BY(ci_mutex_){nullptr};
std::atomic<CordzInfo*> ci_next_ ABSL_GUARDED_BY(ci_mutex_){nullptr};
ABSL_CONST_INIT static List global_list_;
List* const list_ = &global_list_;
// ci_prev_ and ci_next_ require the global list mutex to be held.
// Unfortunately we can't use thread annotations such that the thread safety
// analysis understands that list_ and global_list_ are one and the same.
std::atomic<CordzInfo*> ci_prev_{nullptr};
std::atomic<CordzInfo*> ci_next_{nullptr};
mutable absl::Mutex mutex_;
CordRep* rep_ ABSL_GUARDED_BY(mutex_);
@ -209,6 +223,13 @@ inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeTrackCord(
}
}
inline ABSL_ATTRIBUTE_ALWAYS_INLINE void CordzInfo::MaybeUntrackCord(
CordzInfo* info) {
if (ABSL_PREDICT_FALSE(info)) {
info->Untrack();
}
}
inline void CordzInfo::AssertHeld() ABSL_ASSERT_EXCLUSIVE_LOCK(mutex_) {
#ifndef NDEBUG
mutex_.AssertHeld();

@ -70,7 +70,7 @@ TEST(CordzInfoTest, TrackCord) {
EXPECT_FALSE(info->is_snapshot());
EXPECT_THAT(CordzInfo::Head(CordzSnapshot()), Eq(info));
EXPECT_THAT(info->GetCordRepForTesting(), Eq(data.rep.rep));
CordzInfo::UntrackCord(info);
info->Untrack();
}
TEST(CordzInfoTest, UntrackCord) {
@ -79,7 +79,7 @@ TEST(CordzInfoTest, UntrackCord) {
CordzInfo* info = data.data.cordz_info();
CordzSnapshot snapshot;
CordzInfo::UntrackCord(info);
info->Untrack();
EXPECT_THAT(CordzInfo::Head(CordzSnapshot()), Eq(nullptr));
EXPECT_THAT(info->GetCordRepForTesting(), Eq(nullptr));
EXPECT_THAT(DeleteQueue(), ElementsAre(info, &snapshot));
@ -96,7 +96,7 @@ TEST(CordzInfoTest, SetCordRep) {
info->Unlock();
EXPECT_THAT(info->GetCordRepForTesting(), Eq(rep.rep));
CordzInfo::UntrackCord(info);
info->Untrack();
}
TEST(CordzInfoTest, SetCordRepNullUntracksCordOnUnlock) {
@ -121,7 +121,7 @@ TEST(CordzInfoTest, SetCordRepRequiresMutex) {
CordzInfo* info = data.data.cordz_info();
TestCordRep rep;
EXPECT_DEBUG_DEATH(info->SetCordRep(rep.rep), ".*");
CordzInfo::UntrackCord(info);
info->Untrack();
}
#endif // GTEST_HAS_DEATH_TEST
@ -143,11 +143,11 @@ TEST(CordzInfoTest, TrackUntrackHeadFirstV2) {
EXPECT_THAT(info2->Next(snapshot), Eq(info1));
EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
CordzInfo::UntrackCord(info2);
info2->Untrack();
ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info1));
EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
CordzInfo::UntrackCord(info1);
info1->Untrack();
ASSERT_THAT(CordzInfo::Head(snapshot), Eq(nullptr));
}
@ -168,11 +168,11 @@ TEST(CordzInfoTest, TrackUntrackTailFirstV2) {
EXPECT_THAT(info2->Next(snapshot), Eq(info1));
EXPECT_THAT(info1->Next(snapshot), Eq(nullptr));
CordzInfo::UntrackCord(info1);
info1->Untrack();
ASSERT_THAT(CordzInfo::Head(snapshot), Eq(info2));
EXPECT_THAT(info2->Next(snapshot), Eq(nullptr));
CordzInfo::UntrackCord(info2);
info2->Untrack();
ASSERT_THAT(CordzInfo::Head(snapshot), Eq(nullptr));
}
@ -208,7 +208,7 @@ TEST(CordzInfoTest, StackV2) {
// got_stack.
EXPECT_THAT(got_stack, HasSubstr(expected_stack));
CordzInfo::UntrackCord(info);
info->Untrack();
}
// Local helper functions to get different stacks for child and parent.
@ -231,7 +231,7 @@ TEST(CordzInfoTest, GetStatistics) {
EXPECT_THAT(statistics.parent_method, Eq(kUnknownMethod));
EXPECT_THAT(statistics.update_tracker.Value(kTrackCordMethod), Eq(1));
CordzInfo::UntrackCord(info);
info->Untrack();
}
TEST(CordzInfoTest, LockCountsMethod) {
@ -246,7 +246,7 @@ TEST(CordzInfoTest, LockCountsMethod) {
CordzStatistics statistics = info->GetCordzStatistics();
EXPECT_THAT(statistics.update_tracker.Value(kUpdateMethod), Eq(2));
CordzInfo::UntrackCord(info);
info->Untrack();
}
TEST(CordzInfoTest, FromParent) {
@ -265,8 +265,8 @@ TEST(CordzInfoTest, FromParent) {
EXPECT_THAT(statistics.parent_method, Eq(kTrackCordMethod));
EXPECT_THAT(statistics.update_tracker.Value(kChildMethod), Eq(1));
CordzInfo::UntrackCord(info_parent);
CordzInfo::UntrackCord(info_child);
info_parent->Untrack();
info_child->Untrack();
}
TEST(CordzInfoTest, FromParentInlined) {
@ -279,7 +279,7 @@ TEST(CordzInfoTest, FromParentInlined) {
EXPECT_THAT(statistics.method, Eq(kChildMethod));
EXPECT_THAT(statistics.parent_method, Eq(kUnknownMethod));
EXPECT_THAT(statistics.update_tracker.Value(kChildMethod), Eq(1));
CordzInfo::UntrackCord(info);
info->Untrack();
}
TEST(CordzInfoTest, RecordMetrics) {
@ -293,7 +293,7 @@ TEST(CordzInfoTest, RecordMetrics) {
CordzStatistics actual = info->GetCordzStatistics();
EXPECT_EQ(actual.size, expected.size);
CordzInfo::UntrackCord(info);
info->Untrack();
}
} // namespace

@ -96,9 +96,9 @@ TEST(CordzSampleTokenTest, Iterator) {
EXPECT_THAT(found, ElementsAre(info3, info2, info1));
CordzInfo::UntrackCord(info1);
CordzInfo::UntrackCord(info2);
CordzInfo::UntrackCord(info3);
info1->Untrack();
info2->Untrack();
info3->Untrack();
}
TEST(CordzSampleTokenTest, IteratorEquality) {
@ -135,9 +135,9 @@ TEST(CordzSampleTokenTest, IteratorEquality) {
// Both lhs and rhs are done, so they are on nullptr.
EXPECT_THAT(lhs, Eq(rhs));
CordzInfo::UntrackCord(info1);
CordzInfo::UntrackCord(info2);
CordzInfo::UntrackCord(info3);
info1->Untrack();
info2->Untrack();
info3->Untrack();
}
TEST(CordzSampleTokenTest, MultiThreaded) {
@ -166,7 +166,7 @@ TEST(CordzSampleTokenTest, MultiThreaded) {
// Track/untrack.
if (cord.data.is_profiled()) {
// 1) Untrack
CordzInfo::UntrackCord(cord.data.cordz_info());
cord.data.cordz_info()->Untrack();
cord.data.clear_cordz_info();;
} else {
// 2) Track
@ -192,9 +192,7 @@ TEST(CordzSampleTokenTest, MultiThreaded) {
}
}
for (TestCordData& cord : cords) {
if (cord.data.is_profiled()) {
CordzInfo::UntrackCord(cord.data.cordz_info());
}
CordzInfo::MaybeUntrackCord(cord.data.cordz_info());
}
});
}

@ -40,6 +40,12 @@ class ABSL_SCOPED_LOCKABLE CordzUpdateScope {
}
}
// CordzUpdateScope can not be copied or assigned to.
CordzUpdateScope(CordzUpdateScope&& rhs) = delete;
CordzUpdateScope(const CordzUpdateScope&) = delete;
CordzUpdateScope& operator=(CordzUpdateScope&& rhs) = delete;
CordzUpdateScope& operator=(const CordzUpdateScope&) = delete;
~CordzUpdateScope() ABSL_UNLOCK_FUNCTION() {
if (ABSL_PREDICT_FALSE(info_)) {
info_->Unlock();
@ -55,7 +61,7 @@ class ABSL_SCOPED_LOCKABLE CordzUpdateScope {
CordzInfo* info() const { return info_; }
private:
CordzInfo* const info_;
CordzInfo* info_;
};
} // namespace cord_internal

@ -79,6 +79,7 @@ bool Waiter::Wait(KernelTimeout t) {
// Note that, since the thread ticker is just reset, we don't need to check
// whether the thread is idle on the very first pass of the loop.
bool first_pass = true;
while (true) {
int32_t x = futex_.load(std::memory_order_relaxed);
while (x != 0) {
@ -90,7 +91,6 @@ bool Waiter::Wait(KernelTimeout t) {
return true; // Consumed a wakeup, we are done.
}
if (!first_pass) MaybeBecomeIdle();
const int err = Futex::WaitUntil(&futex_, 0, t);
if (err != 0) {
