@@ -177,12 +177,12 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
   constexpr RepeatedPtrFieldBase()
       : tagged_rep_or_elem_(nullptr),
         current_size_(0),
-        total_size_(kSSOCapacity),
+        capacity_proxy_(0),
         arena_(nullptr) {}
   explicit RepeatedPtrFieldBase(Arena* arena)
       : tagged_rep_or_elem_(nullptr),
         current_size_(0),
-        total_size_(kSSOCapacity),
+        capacity_proxy_(0),
         arena_(arena) {}

   RepeatedPtrFieldBase(const RepeatedPtrFieldBase&) = delete;
@@ -198,7 +198,13 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
   bool empty() const { return current_size_ == 0; }
   int size() const { return current_size_; }
-  int Capacity() const { return total_size_; }
+
+  // Returns the size of the buffer with pointers to elements.
+  //
+  // Note:
+  //
+  //   * prefer `SizeAtCapacity()` to `size() == Capacity()`;
+  //   * prefer `AllocatedSizeAtCapacity()` to `allocated_size() == Capacity()`.
+  int Capacity() const { return capacity_proxy_ + kSSOCapacity; }

   template <typename TypeHandler>
   const Value<TypeHandler>& at(int index) const {
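For orientation, here is a minimal sketch (not protobuf itself) of the encoding this hunk introduces: `capacity_proxy_` stores `capacity - kSSOCapacity`, so a default-constructed object with a zero proxy reports exactly the inline (SSO) capacity, matching the old `total_size_(kSSOCapacity)` initializer. The `kSSOCapacity` value of 1 below is an assumption for illustration.

```cpp
#include <cassert>

constexpr int kSSOCapacity = 1;  // assumed value, for illustration only

struct CapacityProxyDemo {
  int capacity_proxy_ = 0;  // stores capacity - kSSOCapacity

  // Recovers the real capacity; a zero proxy means "just the SSO slot",
  // which is what the rewritten constructors rely on.
  int Capacity() const { return capacity_proxy_ + kSSOCapacity; }
};

int main() {
  CapacityProxyDemo d;
  assert(d.Capacity() == kSSOCapacity);  // default state, all-zero members
  d.capacity_proxy_ = 8 - kSSOCapacity;  // after growing to 8 slots
  assert(d.Capacity() == 8);
  return 0;
}
```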
@@ -271,7 +277,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
     }
     if (!using_sso()) {
       internal::SizedDelete(rep(),
-                            total_size_ * sizeof(elems[0]) + kRepHeaderSize);
+                            Capacity() * sizeof(elems[0]) + kRepHeaderSize);
     }
   }
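The byte count handed to `internal::SizedDelete` must match the original allocation exactly, which is why the destructor now derives it from `Capacity()` rather than a raw field. A hedged sketch of that symmetry using plain sized `operator delete`; the header size and layout here are illustrative stand-ins, not protobuf's:

```cpp
#include <cstddef>
#include <new>

constexpr std::size_t kRepHeaderSize = 2 * sizeof(int);  // illustrative only

void* AllocateRep(int capacity) {
  // Room for the header plus `capacity` element pointers.
  return ::operator new(capacity * sizeof(void*) + kRepHeaderSize);
}

void DeleteRep(void* rep, int capacity) {
  // Sized deallocation (C++14): the formula must match AllocateRep exactly,
  // so both sides should compute it from the same source of truth.
  ::operator delete(rep, capacity * sizeof(void*) + kRepHeaderSize);
}

int main() {
  void* rep = AllocateRep(8);
  DeleteRep(rep, 8);
  return 0;
}
```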
@@ -417,7 +423,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
     size_t allocated_bytes =
         using_sso()
             ? 0
-            : static_cast<size_t>(total_size_) * sizeof(void*) + kRepHeaderSize;
+            : static_cast<size_t>(Capacity()) * sizeof(void*) + kRepHeaderSize;
     const int n = allocated_size();
     void* const* elems = elements();
     for (int i = 0; i < n; ++i) {
@@ -451,11 +457,11 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
   void UnsafeArenaAddAllocated(Value<TypeHandler>* value) {
     ABSL_DCHECK_NE(value, nullptr);
     // Make room for the new pointer.
-    if (current_size_ == total_size_) {
+    if (SizeAtCapacity()) {
       // The array is completely full with no cleared objects, so grow it.
-      Reserve(total_size_ + 1);
+      InternalExtend(1);
       ++rep()->allocated_size;
-    } else if (allocated_size() == total_size_) {
+    } else if (AllocatedSizeAtCapacity()) {
       // There is no more space in the pointer array because it contains some
       // cleared objects awaiting reuse. We don't want to grow the array in
       // this case because otherwise a loop calling AddAllocated() followed by
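The two rewritten conditions distinguish three states of the pointer array. A sketch of the decision with illustrative names (the real code tracks this through the tagged SSO/heap representation):

```cpp
#include <cassert>

enum class AddPath { kGrow, kReuseClearedSlot, kFreeSlot };

// Assumes the hardened invariant: size <= allocated <= capacity.
AddPath ChoosePath(int size, int allocated, int capacity) {
  if (size >= capacity) return AddPath::kGrow;  // SizeAtCapacity(): truly full
  if (allocated >= capacity)                    // AllocatedSizeAtCapacity():
    return AddPath::kReuseClearedSlot;          // cleared objects fill the rest
  return AddPath::kFreeSlot;                    // an unallocated slot is free
}

int main() {
  assert(ChoosePath(4, 4, 4) == AddPath::kGrow);             // must grow
  assert(ChoosePath(2, 4, 4) == AddPath::kReuseClearedSlot); // don't grow
  assert(ChoosePath(2, 3, 4) == AddPath::kFreeSlot);         // free slot left
  return 0;
}
```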
@@ -539,41 +545,41 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
   void AddAllocatedInternal(Value<TypeHandler>* value, std::true_type) {
     Arena* element_arena = TypeHandler::GetArena(value);
     Arena* arena = GetArena();
-    if (arena == element_arena && allocated_size() < total_size_) {
-      // Fast path: underlying arena representation (tagged pointer) is equal to
-      // our arena pointer, and we can add to array without resizing it (at
-      // least one slot that is not allocated).
-      void** elems = elements();
-      if (current_size_ < allocated_size()) {
-        // Make space at [current] by moving first allocated element to end of
-        // allocated list.
-        elems[allocated_size()] = elems[current_size_];
-      }
-      elems[ExchangeCurrentSize(current_size_ + 1)] = value;
-      if (!using_sso()) ++rep()->allocated_size;
-    } else {
+    if (arena != element_arena || AllocatedSizeAtCapacity()) {
       AddAllocatedSlowWithCopy<TypeHandler>(value, element_arena, arena);
+      return;
     }
+    // Fast path: underlying arena representation (tagged pointer) is equal to
+    // our arena pointer, and we can add to array without resizing it (at
+    // least one slot that is not allocated).
+    void** elems = elements();
+    if (current_size_ < allocated_size()) {
+      // Make space at [current] by moving first allocated element to end of
+      // allocated list.
+      elems[allocated_size()] = elems[current_size_];
+    }
+    elems[ExchangeCurrentSize(current_size_ + 1)] = value;
+    if (!using_sso()) ++rep()->allocated_size;
   }

   // AddAllocated version that does not implement arena-safe copying behavior.
   template <typename TypeHandler>
   void AddAllocatedInternal(Value<TypeHandler>* value, std::false_type) {
-    if (allocated_size() < total_size_) {
-      // Fast path: underlying arena representation (tagged pointer) is equal to
-      // our arena pointer, and we can add to array without resizing it (at
-      // least one slot that is not allocated).
-      void** elems = elements();
-      if (current_size_ < allocated_size()) {
-        // Make space at [current] by moving first allocated element to end of
-        // allocated list.
-        elems[allocated_size()] = elems[current_size_];
-      }
-      elems[ExchangeCurrentSize(current_size_ + 1)] = value;
-      if (!using_sso()) ++rep()->allocated_size;
-    } else {
+    if (AllocatedSizeAtCapacity()) {
       UnsafeArenaAddAllocated<TypeHandler>(value);
+      return;
     }
+    // Fast path: underlying arena representation (tagged pointer) is equal to
+    // our arena pointer, and we can add to array without resizing it (at
+    // least one slot that is not allocated).
+    void** elems = elements();
+    if (current_size_ < allocated_size()) {
+      // Make space at [current] by moving first allocated element to end of
+      // allocated list.
+      elems[allocated_size()] = elems[current_size_];
+    }
+    elems[ExchangeCurrentSize(current_size_ + 1)] = value;
+    if (!using_sso()) ++rep()->allocated_size;
   }

   // Slowpath handles all cases, copying if necessary.
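Both `AddAllocatedInternal` overloads are restructured the same way: the condition is inverted so the slow path exits early and the fast path falls through without an `else`. A hedged sketch of that pattern with illustrative types:

```cpp
#include <vector>

// Slow-path stand-in: grows the buffer, then inserts.
void AddSlow(std::vector<int*>& v, int* p) {
  v.reserve(2 * v.capacity() + 1);
  v.push_back(p);
}

inline void Add(std::vector<int*>& v, int* p) {
  if (v.size() == v.capacity()) {  // slow path first, with an early return...
    AddSlow(v, p);
    return;
  }
  v.push_back(p);  // ...so the fast path reads straight through, no else block
}

int main() {
  std::vector<int*> v;
  int x = 0;
  for (int i = 0; i < 10; ++i) Add(v, &x);
  return static_cast<int>(v.size()) == 10 ? 0 : 1;
}
```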
@@ -715,6 +721,25 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
   inline int ExchangeCurrentSize(int new_size) {
     return std::exchange(current_size_, new_size);
   }
+  inline bool SizeAtCapacity() const {
+    // Harden the invariant size() <= allocated_size() <= Capacity().
+    ABSL_DCHECK_LE(size(), allocated_size());
+    ABSL_DCHECK_LE(allocated_size(), Capacity());
+    // This is equivalent to `current_size_ == Capacity()`. Assuming
+    // `Capacity()` is inlined, the compiler is likely to optimize away
+    // "+ kSSOCapacity" and reduce this to "current_size_ > capacity_proxy_",
+    // which is one instruction shorter than "current_size_ == capacity_proxy_ + 1".
+    return current_size_ >= Capacity();
+  }
+  inline bool AllocatedSizeAtCapacity() const {
+    // Harden the invariant size() <= allocated_size() <= Capacity().
+    ABSL_DCHECK_LE(size(), allocated_size());
+    ABSL_DCHECK_LE(allocated_size(), Capacity());
+    // This combines the optimization mentioned in `SizeAtCapacity()` with a
+    // simplification of `allocated_size()` in the SSO case.
+    return using_sso() ? (tagged_rep_or_elem_ != nullptr)
+                       : rep()->allocated_size >= Capacity();
+  }

   void* const* elements() const {
     return using_sso() ? &tagged_rep_or_elem_ : +rep()->elements;
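The comment in `SizeAtCapacity()` claims that with `kSSOCapacity == 1` (assumed below) the compiler can fold `current_size_ >= capacity_proxy_ + kSSOCapacity` into `current_size_ > capacity_proxy_`. The two forms are indeed equivalent for integers barring overflow, which a quick check confirms:

```cpp
#include <cassert>

constexpr int kSSOCapacity = 1;  // assumed, matching the SSO layout here

bool AtCapacityWithAdd(int current_size, int capacity_proxy) {
  return current_size >= capacity_proxy + kSSOCapacity;  // as written
}

bool AtCapacityFolded(int current_size, int capacity_proxy) {
  return current_size > capacity_proxy;  // comparison only, no addition
}

int main() {
  // x >= y + 1  <=>  x > y for ints, as long as y + 1 does not overflow.
  for (int proxy = 0; proxy < 64; ++proxy) {
    for (int size = 0; size <= proxy + 2; ++size) {
      assert(AtCapacityWithAdd(size, proxy) == AtCapacityFolded(size, proxy));
    }
  }
  return 0;
}
```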
@@ -800,8 +825,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
   // Ensures that capacity is big enough to store one more allocated element.
   inline void MaybeExtend() {
-    if (using_sso() ? (tagged_rep_or_elem_ != nullptr)
-                    : (rep()->allocated_size == total_size_)) {
+    if (AllocatedSizeAtCapacity()) {
       ABSL_DCHECK_EQ(allocated_size(), Capacity());
       InternalExtend(1);
     } else {
@@ -812,11 +836,11 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
   // Ensures that capacity is at least `n` elements.
   // Returns a pointer to the element directly beyond the last element.
   inline void** InternalReserve(int n) {
-    if (n <= total_size_) {
+    if (n <= Capacity()) {
       void** elements = using_sso() ? &tagged_rep_or_elem_ : rep()->elements;
       return elements + current_size_;
     }
-    return InternalExtend(n - total_size_);
+    return InternalExtend(n - Capacity());
   }
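The `InternalReserve` contract (return a pointer one past the last used element, extending only by the shortfall) can be sketched against a `std::vector` stand-in; the real version juggles the tagged SSO/heap pointer instead:

```cpp
#include <vector>

// Ensures capacity for at least `n` pointers and returns the position
// directly beyond the last used element, mirroring InternalReserve's contract.
int** ReserveSketch(std::vector<int*>& v, int n) {
  if (n <= static_cast<int>(v.capacity())) {
    return v.data() + v.size();  // already big enough: no reallocation
  }
  v.reserve(n);  // extend by at least the shortfall
  return v.data() + v.size();
}

int main() {
  std::vector<int*> v(2, nullptr);
  int** end = ReserveSketch(v, 8);
  return (end == v.data() + 2 && v.capacity() >= 8) ? 0 : 1;
}
```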
// Internal helper for Add: adds "obj" as the next element in the
// Internal helper for Add: adds "obj" as the next element in the
@ -838,7 +862,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
// significant performance for memory-sensitive workloads.
// significant performance for memory-sensitive workloads.
void * tagged_rep_or_elem_ ;
void * tagged_rep_or_elem_ ;
int current_size_ ;
int current_size_ ;
int total_size_ ;
int capacity_proxy_ ; // we store `capacity - kSSOCapacity` as an optimization
Arena * arena_ ;
Arena * arena_ ;
} ;
} ;
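A plausible reading of the trailing comment (hedged; the diff does not spell it out): storing `capacity - kSSOCapacity` makes the default-constructed state all-zero bits, which pairs well with the `constexpr` constructors at the top of the patch, since no member needs a non-zero initializer anymore. Illustrative layouts only; field order and types are simplified:

```cpp
struct OldLayout {
  void* tagged_rep_or_elem_;
  int current_size_;
  int total_size_;  // kSSOCapacity (non-zero) in the default state
  void* arena_;
};

struct NewLayout {
  void* tagged_rep_or_elem_;
  int current_size_;
  int capacity_proxy_;  // capacity - kSSOCapacity: zero in the default state
  void* arena_;
};

int main() {
  // With the new encoding, value-initialization (all members zero) already
  // *is* the default state, so {nullptr, 0, 0, nullptr} needs no fix-ups.
  NewLayout n{};
  return n.capacity_proxy_;  // 0
}
```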