From d4607b3f05f3f22dda383efcb0aba7e7d9359823 Mon Sep 17 00:00:00 2001
From: Abseil Team <absl-team@google.com>
Date: Mon, 19 Sep 2022 09:30:00 -0700
Subject: [PATCH] Make BTrees work with custom allocators that recycle memory.

The btree data structure poisons regions of memory it's not using. It
leaves these regions poisoned when it frees memory. This means that a
custom memory allocator that tries to reuse freed memory will trigger
an ASAN use-after-poison error.

The fix is to unpoison each memory region right before freeing it.

PiperOrigin-RevId: 475309671
Change-Id: I29d55c298d3d89a83e1f960deb6e93118891ff83
---
 absl/container/btree_test.cc    | 71 +++++++++++++++++++++++++++++++++
 absl/container/internal/btree.h |  1 +
 2 files changed, 72 insertions(+)

diff --git a/absl/container/btree_test.cc b/absl/container/btree_test.cc
index 4dae02c3..9386a6b1 100644
--- a/absl/container/btree_test.cc
+++ b/absl/container/btree_test.cc
@@ -3249,6 +3249,77 @@ TEST(Btree, NotAssignableType) {
   }
 }
 
+struct ArenaLike {
+  void* recycled = nullptr;
+  size_t recycled_size = 0;
+};
+
+// A very simple implementation of arena allocation.
+template <typename T>
+class ArenaLikeAllocator : public std::allocator<T> {
+ public:
+  // Standard library containers require the ability to allocate objects of
+  // different types which they can do so via rebind.other.
+  template <typename U>
+  struct rebind {
+    using other = ArenaLikeAllocator<U>;
+  };
+
+  explicit ArenaLikeAllocator(ArenaLike* arena) noexcept : arena_(arena) {}
+
+  ~ArenaLikeAllocator() {
+    if (arena_->recycled != nullptr) {
+      delete [] static_cast<T*>(arena_->recycled);
+      arena_->recycled = nullptr;
+    }
+  }
+
+  template <typename U>
+  explicit ArenaLikeAllocator(const ArenaLikeAllocator<U>& other) noexcept
+      : arena_(other.arena_) {}
+
+  T* allocate(size_t num_objects, const void* = nullptr) {
+    size_t size = num_objects * sizeof(T);
+    if (arena_->recycled != nullptr && arena_->recycled_size == size) {
+      T* result = static_cast<T*>(arena_->recycled);
+      arena_->recycled = nullptr;
+      return result;
+    }
+    return new T[num_objects];
+  }
+
+  void deallocate(T* p, size_t num_objects) {
+    size_t size = num_objects * sizeof(T);
+
+    // Simulate writing to the freed memory as an actual arena allocator might
+    // do. This triggers an error report if the memory is poisoned.
+    memset(p, 0xde, size);
+
+    if (arena_->recycled == nullptr) {
+      arena_->recycled = p;
+      arena_->recycled_size = size;
+    } else {
+      delete [] p;
+    }
+  }
+
+  ArenaLike* arena_;
+};
+
+// This test verifies that an arena allocator that reuses memory will not be
+// asked to free poisoned BTree memory.
+TEST(Btree, ReusePoisonMemory) {
+  using Alloc = ArenaLikeAllocator<int64_t>;
+  using Set = absl::btree_set<int64_t, std::less<int64_t>, Alloc>;
+  ArenaLike arena;
+  Alloc alloc(&arena);
+  Set set(alloc);
+
+  set.insert(0);
+  set.erase(0);
+  set.insert(0);
+}
+
 }  // namespace
 }  // namespace container_internal
 ABSL_NAMESPACE_END
diff --git a/absl/container/internal/btree.h b/absl/container/internal/btree.h
index bdf2446e..e559bece 100644
--- a/absl/container/internal/btree.h
+++ b/absl/container/internal/btree.h
@@ -958,6 +958,7 @@ class btree_node {
   static void deallocate(const size_type size, btree_node *node,
                          allocator_type *alloc) {
+    absl::container_internal::SanitizerUnpoisonMemoryRegion(node, size);
     absl::container_internal::Deallocate<alignment()>(alloc, node, size);
   }