diff --git a/java/core/src/test/proto/com/google/protobuf/map_for_proto2_lite_test.proto b/java/core/src/test/proto/com/google/protobuf/map_for_proto2_lite_test.proto
index 1ab94eb3b7..fd479bb1ff 100644
--- a/java/core/src/test/proto/com/google/protobuf/map_for_proto2_lite_test.proto
+++ b/java/core/src/test/proto/com/google/protobuf/map_for_proto2_lite_test.proto
@@ -128,6 +128,3 @@ message ReservedAsMapFieldWithEnumValue {
   // null is not a 'reserved word' per se but as a literal needs similar care
   map null = 10;
 }
-package map_for_proto2_lite_test;
-option java_package = "map_lite_test";
-option optimize_for = LITE_RUNTIME;
diff --git a/src/google/protobuf/arena.cc b/src/google/protobuf/arena.cc
index 6ba81f87ab..7a0503792d 100644
--- a/src/google/protobuf/arena.cc
+++ b/src/google/protobuf/arena.cc
@@ -53,6 +53,17 @@ namespace google {
 namespace protobuf {
 namespace internal {
 
+namespace {
+
+PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT ArenaBlock
+    kSentryArenaBlock = {};
+
+ArenaBlock* SentryArenaBlock() {
+  // const_cast<> is okay as kSentryArenaBlock will never be mutated.
+  return const_cast<ArenaBlock*>(&kSentryArenaBlock);
+}
+
+}  // namespace
+
 static SerialArena::Memory AllocateMemory(const AllocationPolicy* policy_ptr,
                                           size_t last_size, size_t min_bytes) {
@@ -105,8 +116,6 @@ class GetDeallocator {
   size_t* space_allocated_;
 };
 
-constexpr ArenaBlock SerialArena::kSentryBlock;
-
 // It is guaranteed that this is constructed in `b`. IOW, this is not the first
 // arena and `b` cannot be sentry.
 SerialArena::SerialArena(ArenaBlock* b, ThreadSafeArena& parent)
@@ -120,7 +129,7 @@ SerialArena::SerialArena(ArenaBlock* b, ThreadSafeArena& parent)
 
 // It is guaranteed that this is the first SerialArena. Use sentry block.
 SerialArena::SerialArena(ThreadSafeArena& parent)
-    : head_{SentryBlock()}, parent_{parent} {}
+    : head_{SentryArenaBlock()}, parent_{parent} {}
 
 // It is guaranteed that this is the first SerialArena but `b` may be user
 // provided or newly allocated to store AllocationPolicy.
@@ -308,7 +317,7 @@ void SerialArena::CleanupList() {
 // where the size of "ids" and "arenas" is determined at runtime; hence the use
 // of Layout.
 struct SerialArenaChunkHeader {
-  constexpr SerialArenaChunkHeader(uint32_t capacity, uint32_t size)
+  PROTOBUF_CONSTEXPR SerialArenaChunkHeader(uint32_t capacity, uint32_t size)
       : next_chunk(nullptr), capacity(capacity), size(size) {}
 
   ThreadSafeArena::SerialArenaChunk* next_chunk;
@@ -433,7 +442,7 @@ class ThreadSafeArena::SerialArenaChunk {
   }
 };
 
-constexpr SerialArenaChunkHeader kSentryArenaChunk = {0, 0};
+PROTOBUF_CONSTEXPR SerialArenaChunkHeader kSentryArenaChunk = {0, 0};
 
 ThreadSafeArena::SerialArenaChunk* ThreadSafeArena::SentrySerialArenaChunk() {
   // const_cast is okay because the sentry chunk is never mutated. Also,
@@ -487,7 +496,7 @@ ThreadSafeArena::ThreadSafeArena(void* mem, size_t size,
 ArenaBlock* ThreadSafeArena::FirstBlock(void* buf, size_t size) {
   GOOGLE_DCHECK_EQ(reinterpret_cast<uintptr_t>(buf) & 7, 0u);
   if (buf == nullptr || size <= kBlockHeaderSize) {
-    return SerialArena::SentryBlock();
+    return SentryArenaBlock();
   }
   // Record user-owned block.
   alloc_policy_.set_is_user_owned_initial_block(true);
@@ -702,7 +711,7 @@ uint64_t ThreadSafeArena::Reset() {
                         : kBlockHeaderSize + kAllocPolicySize;
     first_arena_.Init(new (mem.ptr) ArenaBlock{nullptr, mem.size}, offset);
   } else {
-    first_arena_.Init(SerialArena::SentryBlock(), 0);
+    first_arena_.Init(SentryArenaBlock(), 0);
   }
 
   // Since the first block and potential alloc_policy on the first block is
diff --git a/src/google/protobuf/arena_impl.h b/src/google/protobuf/arena_impl.h
index 43056f539f..fa4d6f50c8 100644
--- a/src/google/protobuf/arena_impl.h
+++ b/src/google/protobuf/arena_impl.h
@@ -91,7 +91,7 @@ inline PROTOBUF_ALWAYS_INLINE void* AlignTo(void* p, size_t a) {
 // a default memory order (std::memory_order_seq_cst).
 template <typename T>
 struct Atomic {
-  constexpr explicit Atomic(T v) : val(v) {}
+  PROTOBUF_CONSTEXPR explicit Atomic(T v) : val(v) {}
 
   T relaxed_get() const { return val.load(std::memory_order_relaxed); }
   T relaxed_get() { return val.load(std::memory_order_relaxed); }
@@ -114,7 +114,7 @@ struct Atomic {
 struct ArenaBlock {
   // For the sentry block with zero-size where ptr_, limit_, cleanup_nodes all
   // point to "this".
-  constexpr ArenaBlock()
+  PROTOBUF_CONSTEXPR ArenaBlock()
       : next(nullptr), cleanup_nodes(this), relaxed_size(0) {}
 
   ArenaBlock(ArenaBlock* next, size_t size)
@@ -577,13 +577,6 @@ class PROTOBUF_EXPORT SerialArena {
  private:
   friend class ThreadSafeArena;
 
-  static constexpr ArenaBlock kSentryBlock = {};
-
-  static ArenaBlock* SentryBlock() {
-    // const_cast<> is okay as kSentryBlock will never be mutated.
-    return const_cast<ArenaBlock*>(&kSentryBlock);
-  }
-
   // Creates a new SerialArena inside mem using the remaining memory as for
   // future allocations.
   // The `parent` arena must outlive the serial arena, which is guaranteed
@@ -872,7 +865,7 @@ class PROTOBUF_EXPORT ThreadSafeArena {
 #pragma warning(disable : 4324)
 #endif
   struct alignas(kCacheAlignment) CacheAlignedLifecycleIdGenerator {
-    constexpr CacheAlignedLifecycleIdGenerator() : id{0} {}
+    PROTOBUF_CONSTEXPR CacheAlignedLifecycleIdGenerator() : id{0} {}
 
     Atomic<LifecycleIdAtomic> id;
   };
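
Reviewer note: the arena change swaps the class-scoped `static constexpr` sentry member (whose out-of-line definition is deleted above) for a file-local constant-initialized global plus an accessor, and replaces bare `constexpr` with the `PROTOBUF_CONSTEXPR` portability macro, presumably so these types still build where plain `constexpr` initialization is not usable. A minimal standalone sketch of the sentry pattern, assuming C++20 `constinit` in place of `PROTOBUF_CONSTINIT`; the names `Block`, `kSentry`, and `Sentry()` are illustrative, not the protobuf API:

    // sentry_sketch.cc -- build with: c++ -std=c++20 sentry_sketch.cc
    #include <cassert>
    #include <cstddef>

    struct Block {
      constexpr Block() = default;  // constant-initializable, like ArenaBlock()
      Block* next = nullptr;
      std::size_t size = 0;
    };

    // Statically initialized and never destroyed: every arena can start from
    // this shared zero-size block instead of allocating a real first block.
    constinit const Block kSentry{};

    Block* Sentry() {
      // const_cast<> is okay as kSentry is never written through this pointer;
      // callers treat a zero-size block as read-only.
      return const_cast<Block*>(&kSentry);
    }

    int main() {
      Block* head = Sentry();  // an empty arena's head points at the sentry
      assert(head->size == 0 && head->next == nullptr);
      return 0;
    }

Moving the sentry out of the class also frees it from having to be a `constexpr` static data member, which is what allows the real code to use `PROTOBUF_CONSTINIT`/`PROTOBUF_ATTRIBUTE_NO_DESTROY` at namespace scope instead.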