Support split repeated fields.

Design details:
- We store the split repeated fields inline in the Split struct by pointer. We use pointers so that we can save memory when the fields are absent and so that the split struct will still be trivially copyable.
- If we had instead stored the repeated fields by value, then we would need to add virtual functions to each Message to construct the repeated fields; parsing would invoke those virtual functions after allocating a non-default split struct.

PiperOrigin-RevId: 549673919
pull/13354/head
Protobuf Team Bot 2 years ago committed by Copybara-Service
parent 4bdd53a55c
commit 5b5e5bf544
  1. 134
      src/google/protobuf/compiler/cpp/field_generators/enum_field.cc
  2. 99
      src/google/protobuf/compiler/cpp/field_generators/message_field.cc
  3. 126
      src/google/protobuf/compiler/cpp/field_generators/primitive_field.cc
  4. 93
      src/google/protobuf/compiler/cpp/field_generators/string_field.cc
  5. 34
      src/google/protobuf/compiler/cpp/helpers.cc
  6. 11
      src/google/protobuf/compiler/cpp/helpers.h
  7. 9
      src/google/protobuf/compiler/plugin.pb.cc
  8. 99
      src/google/protobuf/descriptor.pb.cc
  9. 20
      src/google/protobuf/descriptor.pb.h
  10. 69
      src/google/protobuf/generated_message_reflection.cc
  11. 5
      src/google/protobuf/generated_message_reflection.h
  12. 38
      src/google/protobuf/generated_message_tctable_impl.h
  13. 111
      src/google/protobuf/generated_message_tctable_lite.cc
  14. 25
      src/google/protobuf/message.h
  15. 3
      src/google/protobuf/raw_ptr.h
  16. 14
      src/google/protobuf/raw_ptr_test.cc
  17. 7
      src/google/protobuf/repeated_field.h
  18. 4
      src/google/protobuf/repeated_ptr_field.h

@ -242,15 +242,22 @@ class RepeatedEnum : public FieldGeneratorBase {
field_(field),
opts_(&opts),
has_cached_size_(field_->is_packed() &&
HasGeneratedMethods(field_->file(), opts)) {}
HasGeneratedMethods(field_->file(), opts) &&
!ShouldSplit(descriptor_, options_)) {}
~RepeatedEnum() override = default;
std::vector<Sub> MakeVars() const override { return Vars(field_, *opts_); }
void GeneratePrivateMembers(io::Printer* p) const override {
p->Emit(R"cc(
$pb$::RepeatedField<int> $name$_;
)cc");
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
$pbi$::RawPtr<$pb$::RepeatedField<int>> $name$_;
)cc");
} else {
p->Emit(R"cc(
$pb$::RepeatedField<int> $name$_;
)cc");
}
if (has_cached_size_) {
p->Emit(R"cc(
@ -266,9 +273,22 @@ class RepeatedEnum : public FieldGeneratorBase {
}
void GenerateMergingCode(io::Printer* p) const override {
p->Emit(R"cc(
_this->_internal_mutable_$name$()->MergeFrom(from._internal_$name$());
)cc");
// TODO(b/239716377): experiment with simplifying this to be
// `if (!from.empty()) { body(); }` for both split and non-split cases.
auto body = [&] {
p->Emit(R"cc(
_this->_internal_mutable_$name$()->MergeFrom(from._internal_$name$());
)cc");
};
if (!ShouldSplit(descriptor_, options_)) {
body();
} else {
p->Emit({{"body", body}}, R"cc(
if (!from.$field_$.IsDefault()) {
$body$;
}
)cc");
}
}
void GenerateSwappingCode(io::Printer* p) const override {
@ -279,9 +299,15 @@ class RepeatedEnum : public FieldGeneratorBase {
}
void GenerateDestructorCode(io::Printer* p) const override {
p->Emit(R"cc(
_internal_mutable_$name$()->~RepeatedField();
)cc");
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
$field_$.DeleteIfNotDefault();
)cc");
} else {
p->Emit(R"cc(
_internal_mutable_$name$()->~RepeatedField();
)cc");
}
}
void GenerateConstexprAggregateInitializer(io::Printer* p) const override {
@ -321,7 +347,13 @@ class RepeatedEnum : public FieldGeneratorBase {
}
void GenerateCopyConstructorCode(io::Printer* p) const override {
ABSL_CHECK(!ShouldSplit(field_, *opts_));
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
if (!from._internal_$name$().empty()) {
_internal_mutable_$name$()->MergeFrom(from._internal_$name$());
}
)cc");
}
}
void GenerateConstructorCode(io::Printer* p) const override {}
@ -392,29 +424,69 @@ void RepeatedEnum::GenerateInlineAccessorDefinitions(io::Printer* p) const {
$TsanDetectConcurrentMutation$;
return _internal_mutable_$name$();
}
inline const $pb$::RepeatedField<int>& $Msg$::_internal_$name$() const {
$TsanDetectConcurrentRead$;
return $field_$;
}
inline $pb$::RepeatedField<int>* $Msg$::_internal_mutable_$name$() {
$TsanDetectConcurrentRead$;
return &$field_$;
}
)cc");
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
inline const $pb$::RepeatedField<int>& $Msg$::_internal_$name$() const {
$TsanDetectConcurrentRead$;
return *$field_$;
}
inline $pb$::RepeatedField<int>* $Msg$::_internal_mutable_$name$() {
$TsanDetectConcurrentRead$;
$PrepareSplitMessageForWrite$;
if ($field_$.IsDefault()) {
$field_$.Set($pb$::Arena::CreateMessage<$pb$::RepeatedField<int>>(
GetArenaForAllocation()));
}
return $field_$.Get();
}
)cc");
} else {
p->Emit(R"cc(
inline const $pb$::RepeatedField<int>& $Msg$::_internal_$name$() const {
$TsanDetectConcurrentRead$;
return $field_$;
}
inline $pb$::RepeatedField<int>* $Msg$::_internal_mutable_$name$() {
$TsanDetectConcurrentRead$;
return &$field_$;
}
)cc");
}
}
void RepeatedEnum::GenerateSerializeWithCachedSizesToArray(
io::Printer* p) const {
if (field_->is_packed()) {
p->Emit(R"cc(
{
int byte_size = $cached_size_$.Get();
if (byte_size > 0) {
target = stream->WriteEnumPacked($number$, _internal_$name$(),
byte_size, target);
}
}
)cc");
p->Emit(
{
{"byte_size",
[&] {
if (has_cached_size_) {
p->Emit(
R"cc(std::size_t byte_size = $cached_size_$.Get();)cc");
} else {
p->Emit(R"cc(
std::size_t byte_size = 0;
auto count = static_cast<std::size_t>(this->_internal_$name$_size());
for (std::size_t i = 0; i < count; ++i) {
byte_size += ::_pbi::WireFormatLite::EnumSize(
this->_internal_$name$().Get(static_cast<int>(i)));
}
)cc");
}
}},
},
R"cc(
{
$byte_size$;
if (byte_size > 0) {
target = stream->WriteEnumPacked($number$, _internal_$name$(),
byte_size, target);
}
}
)cc");
return;
}
p->Emit(R"cc(
@ -445,8 +517,12 @@ void RepeatedEnum::GenerateByteSize(io::Printer* p) const {
total_size += ::_pbi::WireFormatLite::Int32Size(
static_cast<int32_t>(data_size));
}
$cached_size_$.Set(::_pbi::ToCachedSize(data_size));
)cc");
if (has_cached_size_) {
p->Emit(R"cc(
$cached_size_$.Set(::_pbi::ToCachedSize(data_size));
)cc");
}
}},
},
R"cc(

@ -720,6 +720,7 @@ class RepeatedMessage : public FieldGeneratorBase {
field_(field),
opts_(&opts),
weak_(IsImplicitWeakField(field, opts, scc)),
split_(ShouldSplit(field, opts)),
has_required_(scc->HasRequiredFields(field->message_type())) {}
~RepeatedMessage() override = default;
@ -735,7 +736,7 @@ class RepeatedMessage : public FieldGeneratorBase {
void GenerateMergingCode(io::Printer* p) const override;
void GenerateSwappingCode(io::Printer* p) const override;
void GenerateConstructorCode(io::Printer* p) const override;
void GenerateCopyConstructorCode(io::Printer* p) const override {}
void GenerateCopyConstructorCode(io::Printer* p) const override;
void GenerateDestructorCode(io::Printer* p) const override;
void GenerateSerializeWithCachedSizesToArray(io::Printer* p) const override;
void GenerateByteSize(io::Printer* p) const override;
@ -745,11 +746,18 @@ class RepeatedMessage : public FieldGeneratorBase {
const FieldDescriptor* field_;
const Options* opts_;
bool weak_;
bool split_;
bool has_required_;
};
void RepeatedMessage::GeneratePrivateMembers(io::Printer* p) const {
p->Emit("$pb$::$Weak$RepeatedPtrField< $Submsg$ > $name$_;\n");
if (split_) {
p->Emit(R"cc(
$pbi$::RawPtr<$pb$::$Weak$RepeatedPtrField<$Submsg$>> $name$_;
)cc");
} else {
p->Emit("$pb$::$Weak$RepeatedPtrField< $Submsg$ > $name$_;\n");
}
}
void RepeatedMessage::GenerateAccessorDeclarations(io::Printer* p) const {
@ -831,18 +839,39 @@ void RepeatedMessage::GenerateInlineAccessorDefinitions(io::Printer* p) const {
" return _internal_$name$();\n"
"}\n");
p->Emit(R"cc(
inline const $pb$::$Weak$RepeatedPtrField<$Submsg$>&
$Msg$::_internal$_weak$_$name$() const {
$TsanDetectConcurrentRead$;
return $field_$;
}
inline $pb$::$Weak$RepeatedPtrField<$Submsg$>*
$Msg$::_internal_mutable$_weak$_$name$() {
$TsanDetectConcurrentRead$;
return &$field_$;
}
)cc");
if (split_) {
p->Emit(R"cc(
inline const $pb$::$Weak$RepeatedPtrField<$Submsg$>&
$Msg$::_internal$_weak$_$name$() const {
$TsanDetectConcurrentRead$;
return *$field_$;
}
inline $pb$::$Weak$RepeatedPtrField<$Submsg$>*
$Msg$::_internal_mutable$_weak$_$name$() {
$TsanDetectConcurrentRead$;
$PrepareSplitMessageForWrite$;
if ($field_$.IsDefault()) {
$field_$.Set(
CreateMaybeMessage<$pb$::$Weak$RepeatedPtrField<$Submsg$>>(
GetArenaForAllocation()));
}
return $field_$.Get();
}
)cc");
} else {
p->Emit(R"cc(
inline const $pb$::$Weak$RepeatedPtrField<$Submsg$>&
$Msg$::_internal$_weak$_$name$() const {
$TsanDetectConcurrentRead$;
return $field_$;
}
inline $pb$::$Weak$RepeatedPtrField<$Submsg$>*
$Msg$::_internal_mutable$_weak$_$name$() {
$TsanDetectConcurrentRead$;
return &$field_$;
}
)cc");
}
if (weak_) {
p->Emit(R"cc(
inline const $pb$::RepeatedPtrField<$Submsg$>& $Msg$::_internal_$name$()
@ -861,13 +890,27 @@ void RepeatedMessage::GenerateClearingCode(io::Printer* p) const {
}
void RepeatedMessage::GenerateMergingCode(io::Printer* p) const {
p->Emit(
"_this->_internal_mutable$_weak$_$name$()->MergeFrom(from._internal"
"$_weak$_$name$());\n");
// TODO(b/239716377): experiment with simplifying this to be
// `if (!from.empty()) { body(); }` for both split and non-split cases.
auto body = [&] {
p->Emit(R"cc(
_this->_internal_mutable$_weak$_$name$()->MergeFrom(
from._internal$_weak$_$name$());
)cc");
};
if (!split_) {
body();
} else {
p->Emit({{"body", body}}, R"cc(
if (!from.$field_$.IsDefault()) {
$body$;
}
)cc");
}
}
void RepeatedMessage::GenerateSwappingCode(io::Printer* p) const {
ABSL_CHECK(!ShouldSplit(descriptor_, options_));
ABSL_CHECK(!split_);
p->Emit(R"cc(
$field_$.InternalSwap(&other->$field_$);
)cc");
@ -877,8 +920,26 @@ void RepeatedMessage::GenerateConstructorCode(io::Printer* p) const {
// Not needed for repeated fields.
}
void RepeatedMessage::GenerateCopyConstructorCode(io::Printer* p) const {
// TODO(b/291633281): For split repeated fields we might want to use type
// erasure to reduce binary size costs.
if (split_) {
p->Emit(R"cc(
if (!from._internal$_weak$_$name$().empty()) {
_internal_mutable$_weak$_$name$()->MergeFrom(from._internal$_weak$_$name$());
}
)cc");
}
}
void RepeatedMessage::GenerateDestructorCode(io::Printer* p) const {
p->Emit("$field_$.~$Weak$RepeatedPtrField();\n");
if (split_) {
p->Emit(R"cc(
$field_$.DeleteIfNotDefault();
)cc");
} else {
p->Emit("$field_$.~$Weak$RepeatedPtrField();\n");
}
}
void RepeatedMessage::GenerateSerializeWithCachedSizesToArray(

@ -322,9 +322,22 @@ class RepeatedPrimitive final : public FieldGeneratorBase {
}
void GenerateMergingCode(io::Printer* p) const override {
p->Emit(R"cc(
_this->_internal_mutable_$name$()->MergeFrom(from._internal_$name$());
)cc");
// TODO(b/239716377): experiment with simplifying this to be
// `if (!from.empty()) { body(); }` for both split and non-split cases.
auto body = [&] {
p->Emit(R"cc(
_this->_internal_mutable_$name$()->MergeFrom(from._internal_$name$());
)cc");
};
if (!ShouldSplit(descriptor_, options_)) {
body();
} else {
p->Emit({{"body", body}}, R"cc(
if (!from.$field_$.IsDefault()) {
$body$;
}
)cc");
}
}
void GenerateSwappingCode(io::Printer* p) const override {
@ -335,14 +348,28 @@ class RepeatedPrimitive final : public FieldGeneratorBase {
}
void GenerateDestructorCode(io::Printer* p) const override {
p->Emit(R"cc(
$field_$.~RepeatedField();
)cc");
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
$field_$.DeleteIfNotDefault();
)cc");
} else {
p->Emit(R"cc(
$field_$.~RepeatedField();
)cc");
}
}
void GenerateConstructorCode(io::Printer* p) const override {}
void GenerateCopyConstructorCode(io::Printer* p) const override {}
void GenerateCopyConstructorCode(io::Printer* p) const override {
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
if (!from._internal_$name$().empty()) {
_internal_mutable_$name$()->MergeFrom(from._internal_$name$());
}
)cc");
}
}
void GenerateConstexprAggregateInitializer(io::Printer* p) const override {
p->Emit(R"cc(
@ -377,7 +404,8 @@ class RepeatedPrimitive final : public FieldGeneratorBase {
bool HasCachedSize() const {
bool is_packed_varint =
field_->is_packed() && !FixedSize(field_->type()).has_value();
return is_packed_varint && HasGeneratedMethods(field_->file(), *opts_);
return is_packed_varint && HasGeneratedMethods(field_->file(), *opts_) &&
!ShouldSplit(descriptor_, options_);
}
void GenerateCacheSizeInitializer(io::Printer* p) const {
@ -394,9 +422,15 @@ class RepeatedPrimitive final : public FieldGeneratorBase {
};
void RepeatedPrimitive::GeneratePrivateMembers(io::Printer* p) const {
p->Emit(R"cc(
$pb$::RepeatedField<$Type$> $name$_;
)cc");
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
$pbi$::RawPtr<$pb$::RepeatedField<$Type$>> $name$_;
)cc");
} else {
p->Emit(R"cc(
$pb$::RepeatedField<$Type$> $name$_;
)cc");
}
if (HasCachedSize()) {
p->Emit({{"_cached_size_", MakeVarintCachedSizeName(field_)}},
@ -459,15 +493,37 @@ void RepeatedPrimitive::GenerateInlineAccessorDefinitions(
return _internal_mutable_$name$();
}
inline const $pb$::RepeatedField<$Type$>& $Msg$::_internal_$name$() const {
$TsanDetectConcurrentRead$;
return $field_$;
}
inline $pb$::RepeatedField<$Type$>* $Msg$::_internal_mutable_$name$() {
$TsanDetectConcurrentRead$;
return &$field_$;
}
)cc");
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
inline const $pb$::RepeatedField<$Type$>& $Msg$::_internal_$name$()
const {
$TsanDetectConcurrentRead$;
return *$field_$;
}
inline $pb$::RepeatedField<$Type$>* $Msg$::_internal_mutable_$name$() {
$TsanDetectConcurrentRead$;
$PrepareSplitMessageForWrite$;
if ($field_$.IsDefault()) {
$field_$.Set($pb$::Arena::CreateMessage<$pb$::RepeatedField<$Type$>>(
GetArenaForAllocation()));
}
return $field_$.Get();
}
)cc");
} else {
p->Emit(R"cc(
inline const $pb$::RepeatedField<$Type$>& $Msg$::_internal_$name$()
const {
$TsanDetectConcurrentRead$;
return $field_$;
}
inline $pb$::RepeatedField<$Type$>* $Msg$::_internal_mutable_$name$() {
$TsanDetectConcurrentRead$;
return &$field_$;
}
)cc");
}
}
void RepeatedPrimitive::GenerateSerializeWithCachedSizesToArray(
@ -492,15 +548,29 @@ void RepeatedPrimitive::GenerateSerializeWithCachedSizesToArray(
return;
}
p->Emit(R"cc(
{
int byte_size = $_field_cached_byte_size_$.Get();
if (byte_size > 0) {
target = stream->Write$DeclaredType$Packed($number$, _internal_$name$(),
byte_size, target);
}
}
)cc");
p->Emit(
{
{"byte_size",
[&] {
if (HasCachedSize()) {
p->Emit(R"cc($_field_cached_byte_size_$.Get();)cc");
} else {
p->Emit(R"cc(
::_pbi::WireFormatLite::$DeclaredType$Size(
this->_internal_$name$());
)cc");
}
}},
},
R"cc(
{
int byte_size = $byte_size$;
if (byte_size > 0) {
target = stream->Write$DeclaredType$Packed(
$number$, _internal_$name$(), byte_size, target);
}
}
)cc");
}
void RepeatedPrimitive::GenerateByteSize(io::Printer* p) const {

@ -709,9 +709,15 @@ class RepeatedString : public FieldGeneratorBase {
std::vector<Sub> MakeVars() const override { return Vars(field_, *opts_); }
void GeneratePrivateMembers(io::Printer* p) const override {
p->Emit(R"cc(
$pb$::RepeatedPtrField<std::string> $name$_;
)cc");
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
$pbi$::RawPtr<$pb$::RepeatedPtrField<std::string>> $name$_;
)cc");
} else {
p->Emit(R"cc(
$pb$::RepeatedPtrField<std::string> $name$_;
)cc");
}
}
void GenerateClearingCode(io::Printer* p) const override {
@ -721,9 +727,22 @@ class RepeatedString : public FieldGeneratorBase {
}
void GenerateMergingCode(io::Printer* p) const override {
p->Emit(R"cc(
_this->_internal_mutable_$name$()->MergeFrom(from._internal_$name$());
)cc");
// TODO(b/239716377): experiment with simplifying this to be
// `if (!from.empty()) { body(); }` for both split and non-split cases.
auto body = [&] {
p->Emit(R"cc(
_this->_internal_mutable_$name$()->MergeFrom(from._internal_$name$());
)cc");
};
if (!ShouldSplit(descriptor_, options_)) {
body();
} else {
p->Emit({{"body", body}}, R"cc(
if (!from.$field_$.IsDefault()) {
$body$;
}
)cc");
}
}
void GenerateSwappingCode(io::Printer* p) const override {
@ -734,15 +753,27 @@ class RepeatedString : public FieldGeneratorBase {
}
void GenerateDestructorCode(io::Printer* p) const override {
p->Emit(R"cc(
_internal_mutable_$name$()->~RepeatedPtrField();
)cc");
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
$field_$.DeleteIfNotDefault();
)cc");
} else {
p->Emit(R"cc(
_internal_mutable_$name$()->~RepeatedPtrField();
)cc");
}
}
void GenerateConstructorCode(io::Printer* p) const override {}
void GenerateCopyConstructorCode(io::Printer* p) const override {
ABSL_CHECK(!ShouldSplit(field_, options_));
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
if (!from._internal_$name$().empty()) {
_internal_mutable_$name$()->MergeFrom(from._internal_$name$());
}
)cc");
}
}
void GenerateByteSize(io::Printer* p) const override {
@ -904,17 +935,39 @@ void RepeatedString::GenerateInlineAccessorDefinitions(io::Printer* p) const {
$TsanDetectConcurrentMutation$;
return _internal_mutable_$name$();
}
inline const ::$proto_ns$::RepeatedPtrField<std::string>&
$Msg$::_internal_$name$() const {
$TsanDetectConcurrentRead$;
return $field_$;
}
inline ::$proto_ns$::RepeatedPtrField<std::string>*
$Msg$::_internal_mutable_$name$() {
$TsanDetectConcurrentRead$;
return &$field_$;
}
)cc");
if (ShouldSplit(descriptor_, options_)) {
p->Emit(R"cc(
inline const $pb$::RepeatedPtrField<std::string>&
$Msg$::_internal_$name$() const {
$TsanDetectConcurrentRead$;
return *$field_$;
}
inline $pb$::RepeatedPtrField<std::string>* $Msg$::_internal_mutable_$name$() {
$TsanDetectConcurrentRead$;
$PrepareSplitMessageForWrite$;
if ($field_$.IsDefault()) {
$field_$.Set(
$pb$::Arena::CreateMessage<$pb$::RepeatedPtrField<std::string>>(
GetArenaForAllocation()));
}
return $field_$.Get();
}
)cc");
} else {
p->Emit(R"cc(
inline const ::$proto_ns$::RepeatedPtrField<std::string>&
$Msg$::_internal_$name$() const {
$TsanDetectConcurrentRead$;
return $field_$;
}
inline ::$proto_ns$::RepeatedPtrField<std::string>*
$Msg$::_internal_mutable_$name$() {
$TsanDetectConcurrentRead$;
return &$field_$;
}
)cc");
}
}
void RepeatedString::GenerateSerializeWithCachedSizesToArray(

@ -599,6 +599,40 @@ int EstimateAlignmentSize(const FieldDescriptor* field) {
return -1; // Make compiler happy.
}
// Returns a rough estimate, in bytes, of the in-message storage a field
// occupies. Per the declaration comment in helpers.h, this cannot be exact
// because the generated code may be compiled under different alignment rules;
// the estimates assume 64-bit pointers.
int EstimateSize(const FieldDescriptor* field) {
// No field, no storage.
if (field == nullptr) return 0;
if (field->is_repeated()) {
if (field->is_map()) {
// int32_t/int32_t is used only as a representative instantiation to take
// sizeof; presumably all Map instantiations share this layout -- TODO
// confirm.
return sizeof(google::protobuf::Map<int32_t, int32_t>);
}
// Element types ordered before CPPTYPE_STRING (the scalar types), plus
// Cord, are stored in a RepeatedField; other string-like and message
// elements use the common RepeatedPtrFieldBase layout. int32_t is a
// representative element type for sizeof purposes here.
return field->cpp_type() < FieldDescriptor::CPPTYPE_STRING || IsCord(field)
? sizeof(RepeatedField<int32_t>)
: sizeof(internal::RepeatedPtrFieldBase);
}
// Singular fields: size by C++ representation.
switch (field->cpp_type()) {
case FieldDescriptor::CPPTYPE_BOOL:
return 1;
// 4-byte scalars.
case FieldDescriptor::CPPTYPE_INT32:
case FieldDescriptor::CPPTYPE_UINT32:
case FieldDescriptor::CPPTYPE_ENUM:
case FieldDescriptor::CPPTYPE_FLOAT:
return 4;
// 8-byte scalars; messages are estimated at pointer size (64-bit assumed,
// per the note in helpers.h).
case FieldDescriptor::CPPTYPE_INT64:
case FieldDescriptor::CPPTYPE_UINT64:
case FieldDescriptor::CPPTYPE_DOUBLE:
case FieldDescriptor::CPPTYPE_MESSAGE:
return 8;
case FieldDescriptor::CPPTYPE_STRING:
// Cord fields are stored inline as absl::Cord; all other string
// representations are estimated as an ArenaStringPtr.
if (IsCord(field)) return sizeof(absl::Cord);
return sizeof(internal::ArenaStringPtr);
}
// The switch above covers every CppType; this is unreachable and exists to
// satisfy the compiler (along with the return below).
ABSL_LOG(FATAL) << "Can't get here.";
return -1; // Make compiler happy.
}
std::string FieldConstantName(const FieldDescriptor* field) {
std::string field_name = UnderscoresToCamelCase(field->name(), true);
std::string result = absl::StrCat("k", field_name, "FieldNumber");

@ -229,6 +229,12 @@ std::string FieldMemberName(const FieldDescriptor* field, bool split);
// 64-bit pointers.
int EstimateAlignmentSize(const FieldDescriptor* field);
// Returns an estimate of the size of the field. This
// can't guarantee to be correct because the generated code could be compiled on
// different systems with different alignment rules. The estimates below assume
// 64-bit pointers.
int EstimateSize(const FieldDescriptor* field);
// Get the unqualified name that should be used for a field's field
// number constant.
std::string FieldConstantName(const FieldDescriptor* field);
@ -345,7 +351,7 @@ inline bool IsWeak(const FieldDescriptor* field, const Options& options) {
return false;
}
inline bool IsCord(const FieldDescriptor* field, const Options& options) {
inline bool IsCord(const FieldDescriptor* field) {
return field->cpp_type() == FieldDescriptor::CPPTYPE_STRING &&
internal::cpp::EffectiveStringCType(field) == FieldOptions::CORD;
}
@ -355,8 +361,7 @@ inline bool IsString(const FieldDescriptor* field, const Options& options) {
internal::cpp::EffectiveStringCType(field) == FieldOptions::STRING;
}
inline bool IsStringPiece(const FieldDescriptor* field,
const Options& options) {
inline bool IsStringPiece(const FieldDescriptor* field) {
return field->cpp_type() == FieldDescriptor::CPPTYPE_STRING &&
internal::cpp::EffectiveStringCType(field) ==
FieldOptions::STRING_PIECE;

@ -909,8 +909,10 @@ void CodeGeneratorRequest::MergeImpl(::google::protobuf::Message& to_msg, const
(void) cached_has_bits;
_this->_internal_mutable_file_to_generate()->MergeFrom(from._internal_file_to_generate());
_this->_internal_mutable_proto_file()->MergeFrom(from._internal_proto_file());
_this->_internal_mutable_source_file_descriptors()->MergeFrom(from._internal_source_file_descriptors());
_this->_internal_mutable_proto_file()->MergeFrom(
from._internal_proto_file());
_this->_internal_mutable_source_file_descriptors()->MergeFrom(
from._internal_source_file_descriptors());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x00000003u) {
if (cached_has_bits & 0x00000001u) {
@ -1536,7 +1538,8 @@ void CodeGeneratorResponse::MergeImpl(::google::protobuf::Message& to_msg, const
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_file()->MergeFrom(from._internal_file());
_this->_internal_mutable_file()->MergeFrom(
from._internal_file());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x00000003u) {
if (cached_has_bits & 0x00000001u) {

@ -2480,7 +2480,8 @@ void FileDescriptorSet::MergeImpl(::google::protobuf::Message& to_msg, const ::g
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_file()->MergeFrom(from._internal_file());
_this->_internal_mutable_file()->MergeFrom(
from._internal_file());
_this->_internal_metadata_.MergeFrom<::google::protobuf::UnknownFieldSet>(from._internal_metadata_);
}
@ -3066,10 +3067,14 @@ void FileDescriptorProto::MergeImpl(::google::protobuf::Message& to_msg, const :
(void) cached_has_bits;
_this->_internal_mutable_dependency()->MergeFrom(from._internal_dependency());
_this->_internal_mutable_message_type()->MergeFrom(from._internal_message_type());
_this->_internal_mutable_enum_type()->MergeFrom(from._internal_enum_type());
_this->_internal_mutable_service()->MergeFrom(from._internal_service());
_this->_internal_mutable_extension()->MergeFrom(from._internal_extension());
_this->_internal_mutable_message_type()->MergeFrom(
from._internal_message_type());
_this->_internal_mutable_enum_type()->MergeFrom(
from._internal_enum_type());
_this->_internal_mutable_service()->MergeFrom(
from._internal_service());
_this->_internal_mutable_extension()->MergeFrom(
from._internal_extension());
_this->_internal_mutable_public_dependency()->MergeFrom(from._internal_public_dependency());
_this->_internal_mutable_weak_dependency()->MergeFrom(from._internal_weak_dependency());
cached_has_bits = from._impl_._has_bits_[0];
@ -4066,13 +4071,20 @@ void DescriptorProto::MergeImpl(::google::protobuf::Message& to_msg, const ::goo
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_field()->MergeFrom(from._internal_field());
_this->_internal_mutable_nested_type()->MergeFrom(from._internal_nested_type());
_this->_internal_mutable_enum_type()->MergeFrom(from._internal_enum_type());
_this->_internal_mutable_extension_range()->MergeFrom(from._internal_extension_range());
_this->_internal_mutable_extension()->MergeFrom(from._internal_extension());
_this->_internal_mutable_oneof_decl()->MergeFrom(from._internal_oneof_decl());
_this->_internal_mutable_reserved_range()->MergeFrom(from._internal_reserved_range());
_this->_internal_mutable_field()->MergeFrom(
from._internal_field());
_this->_internal_mutable_nested_type()->MergeFrom(
from._internal_nested_type());
_this->_internal_mutable_enum_type()->MergeFrom(
from._internal_enum_type());
_this->_internal_mutable_extension_range()->MergeFrom(
from._internal_extension_range());
_this->_internal_mutable_extension()->MergeFrom(
from._internal_extension());
_this->_internal_mutable_oneof_decl()->MergeFrom(
from._internal_oneof_decl());
_this->_internal_mutable_reserved_range()->MergeFrom(
from._internal_reserved_range());
_this->_internal_mutable_reserved_name()->MergeFrom(from._internal_reserved_name());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x00000003u) {
@ -4756,8 +4768,10 @@ void ExtensionRangeOptions::MergeImpl(::google::protobuf::Message& to_msg, const
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_declaration()->MergeFrom(from._internal_declaration());
_this->_internal_mutable_uninterpreted_option()->MergeFrom(from._internal_uninterpreted_option());
_this->_internal_mutable_declaration()->MergeFrom(
from._internal_declaration());
_this->_internal_mutable_uninterpreted_option()->MergeFrom(
from._internal_uninterpreted_option());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x00000003u) {
if (cached_has_bits & 0x00000001u) {
@ -6183,8 +6197,10 @@ void EnumDescriptorProto::MergeImpl(::google::protobuf::Message& to_msg, const :
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_value()->MergeFrom(from._internal_value());
_this->_internal_mutable_reserved_range()->MergeFrom(from._internal_reserved_range());
_this->_internal_mutable_value()->MergeFrom(
from._internal_value());
_this->_internal_mutable_reserved_range()->MergeFrom(
from._internal_reserved_range());
_this->_internal_mutable_reserved_name()->MergeFrom(from._internal_reserved_name());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x00000003u) {
@ -6767,7 +6783,8 @@ void ServiceDescriptorProto::MergeImpl(::google::protobuf::Message& to_msg, cons
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_method()->MergeFrom(from._internal_method());
_this->_internal_mutable_method()->MergeFrom(
from._internal_method());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x00000003u) {
if (cached_has_bits & 0x00000001u) {
@ -8106,7 +8123,8 @@ void FileOptions::MergeImpl(::google::protobuf::Message& to_msg, const ::google:
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_uninterpreted_option()->MergeFrom(from._internal_uninterpreted_option());
_this->_internal_mutable_uninterpreted_option()->MergeFrom(
from._internal_uninterpreted_option());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x000000ffu) {
if (cached_has_bits & 0x00000001u) {
@ -8571,7 +8589,8 @@ void MessageOptions::MergeImpl(::google::protobuf::Message& to_msg, const ::goog
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_uninterpreted_option()->MergeFrom(from._internal_uninterpreted_option());
_this->_internal_mutable_uninterpreted_option()->MergeFrom(
from._internal_uninterpreted_option());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x0000003fu) {
if (cached_has_bits & 0x00000001u) {
@ -9384,8 +9403,10 @@ void FieldOptions::MergeImpl(::google::protobuf::Message& to_msg, const ::google
(void) cached_has_bits;
_this->_internal_mutable_targets()->MergeFrom(from._internal_targets());
_this->_internal_mutable_edition_defaults()->MergeFrom(from._internal_edition_defaults());
_this->_internal_mutable_uninterpreted_option()->MergeFrom(from._internal_uninterpreted_option());
_this->_internal_mutable_edition_defaults()->MergeFrom(
from._internal_edition_defaults());
_this->_internal_mutable_uninterpreted_option()->MergeFrom(
from._internal_uninterpreted_option());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x000000ffu) {
if (cached_has_bits & 0x00000001u) {
@ -9675,7 +9696,8 @@ void OneofOptions::MergeImpl(::google::protobuf::Message& to_msg, const ::google
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_uninterpreted_option()->MergeFrom(from._internal_uninterpreted_option());
_this->_internal_mutable_uninterpreted_option()->MergeFrom(
from._internal_uninterpreted_option());
if ((from._impl_._has_bits_[0] & 0x00000001u) != 0) {
_this->_internal_mutable_features()->::google::protobuf::FeatureSet::MergeFrom(
from._internal_features());
@ -10000,7 +10022,8 @@ void EnumOptions::MergeImpl(::google::protobuf::Message& to_msg, const ::google:
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_uninterpreted_option()->MergeFrom(from._internal_uninterpreted_option());
_this->_internal_mutable_uninterpreted_option()->MergeFrom(
from._internal_uninterpreted_option());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x0000000fu) {
if (cached_has_bits & 0x00000001u) {
@ -10323,7 +10346,8 @@ void EnumValueOptions::MergeImpl(::google::protobuf::Message& to_msg, const ::go
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_uninterpreted_option()->MergeFrom(from._internal_uninterpreted_option());
_this->_internal_mutable_uninterpreted_option()->MergeFrom(
from._internal_uninterpreted_option());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x00000007u) {
if (cached_has_bits & 0x00000001u) {
@ -10614,7 +10638,8 @@ void ServiceOptions::MergeImpl(::google::protobuf::Message& to_msg, const ::goog
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_uninterpreted_option()->MergeFrom(from._internal_uninterpreted_option());
_this->_internal_mutable_uninterpreted_option()->MergeFrom(
from._internal_uninterpreted_option());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x00000003u) {
if (cached_has_bits & 0x00000001u) {
@ -10936,7 +10961,8 @@ void MethodOptions::MergeImpl(::google::protobuf::Message& to_msg, const ::googl
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_uninterpreted_option()->MergeFrom(from._internal_uninterpreted_option());
_this->_internal_mutable_uninterpreted_option()->MergeFrom(
from._internal_uninterpreted_option());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x00000007u) {
if (cached_has_bits & 0x00000001u) {
@ -11599,7 +11625,8 @@ void UninterpretedOption::MergeImpl(::google::protobuf::Message& to_msg, const :
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_name()->MergeFrom(from._internal_name());
_this->_internal_mutable_name()->MergeFrom(
from._internal_name());
cached_has_bits = from._impl_._has_bits_[0];
if (cached_has_bits & 0x0000003fu) {
if (cached_has_bits & 0x00000001u) {
@ -12263,8 +12290,8 @@ constexpr ::_pbi::TcParseTable<3, 5, 0, 106, 2> SourceCodeInfo_Location::_table_
{
int byte_size = _impl_._path_cached_byte_size_.Get();
if (byte_size > 0) {
target = stream->WriteInt32Packed(1, _internal_path(),
byte_size, target);
target = stream->WriteInt32Packed(
1, _internal_path(), byte_size, target);
}
}
@ -12272,8 +12299,8 @@ constexpr ::_pbi::TcParseTable<3, 5, 0, 106, 2> SourceCodeInfo_Location::_table_
{
int byte_size = _impl_._span_cached_byte_size_.Get();
if (byte_size > 0) {
target = stream->WriteInt32Packed(2, _internal_span(),
byte_size, target);
target = stream->WriteInt32Packed(
2, _internal_span(), byte_size, target);
}
}
@ -12575,7 +12602,8 @@ void SourceCodeInfo::MergeImpl(::google::protobuf::Message& to_msg, const ::goog
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_location()->MergeFrom(from._internal_location());
_this->_internal_mutable_location()->MergeFrom(
from._internal_location());
_this->_internal_metadata_.MergeFrom<::google::protobuf::UnknownFieldSet>(from._internal_metadata_);
}
@ -12783,8 +12811,8 @@ constexpr ::_pbi::TcParseTable<3, 5, 1, 64, 2> GeneratedCodeInfo_Annotation::_ta
{
int byte_size = _impl_._path_cached_byte_size_.Get();
if (byte_size > 0) {
target = stream->WriteInt32Packed(1, _internal_path(),
byte_size, target);
target = stream->WriteInt32Packed(
1, _internal_path(), byte_size, target);
}
}
@ -13091,7 +13119,8 @@ void GeneratedCodeInfo::MergeImpl(::google::protobuf::Message& to_msg, const ::g
::uint32_t cached_has_bits = 0;
(void) cached_has_bits;
_this->_internal_mutable_annotation()->MergeFrom(from._internal_annotation());
_this->_internal_mutable_annotation()->MergeFrom(
from._internal_annotation());
_this->_internal_metadata_.MergeFrom<::google::protobuf::UnknownFieldSet>(from._internal_metadata_);
}

@ -10087,8 +10087,8 @@ inline ::google::protobuf::RepeatedField<::int32_t>* FileDescriptorProto::mutabl
PROTOBUF_TSAN_WRITE(&_impl_._tsan_detect_race);
return _internal_mutable_public_dependency();
}
inline const ::google::protobuf::RepeatedField<::int32_t>& FileDescriptorProto::_internal_public_dependency() const {
inline const ::google::protobuf::RepeatedField<::int32_t>& FileDescriptorProto::_internal_public_dependency()
const {
PROTOBUF_TSAN_READ(&_impl_._tsan_detect_race);
return _impl_.public_dependency_;
}
@ -10129,8 +10129,8 @@ inline ::google::protobuf::RepeatedField<::int32_t>* FileDescriptorProto::mutabl
PROTOBUF_TSAN_WRITE(&_impl_._tsan_detect_race);
return _internal_mutable_weak_dependency();
}
inline const ::google::protobuf::RepeatedField<::int32_t>& FileDescriptorProto::_internal_weak_dependency() const {
inline const ::google::protobuf::RepeatedField<::int32_t>& FileDescriptorProto::_internal_weak_dependency()
const {
PROTOBUF_TSAN_READ(&_impl_._tsan_detect_race);
return _impl_.weak_dependency_;
}
@ -17533,8 +17533,8 @@ inline ::google::protobuf::RepeatedField<::int32_t>* SourceCodeInfo_Location::mu
PROTOBUF_TSAN_WRITE(&_impl_._tsan_detect_race);
return _internal_mutable_path();
}
inline const ::google::protobuf::RepeatedField<::int32_t>& SourceCodeInfo_Location::_internal_path() const {
inline const ::google::protobuf::RepeatedField<::int32_t>& SourceCodeInfo_Location::_internal_path()
const {
PROTOBUF_TSAN_READ(&_impl_._tsan_detect_race);
return _impl_.path_;
}
@ -17575,8 +17575,8 @@ inline ::google::protobuf::RepeatedField<::int32_t>* SourceCodeInfo_Location::mu
PROTOBUF_TSAN_WRITE(&_impl_._tsan_detect_race);
return _internal_mutable_span();
}
inline const ::google::protobuf::RepeatedField<::int32_t>& SourceCodeInfo_Location::_internal_span() const {
inline const ::google::protobuf::RepeatedField<::int32_t>& SourceCodeInfo_Location::_internal_span()
const {
PROTOBUF_TSAN_READ(&_impl_._tsan_detect_race);
return _impl_.span_;
}
@ -17905,8 +17905,8 @@ inline ::google::protobuf::RepeatedField<::int32_t>* GeneratedCodeInfo_Annotatio
PROTOBUF_TSAN_WRITE(&_impl_._tsan_detect_race);
return _internal_mutable_path();
}
inline const ::google::protobuf::RepeatedField<::int32_t>& GeneratedCodeInfo_Annotation::_internal_path() const {
inline const ::google::protobuf::RepeatedField<::int32_t>& GeneratedCodeInfo_Annotation::_internal_path()
const {
PROTOBUF_TSAN_READ(&_impl_._tsan_detect_race);
return _impl_.path_;
}

@ -2683,12 +2683,15 @@ bool Reflection::SupportsUnknownEnumValues() const {
template <class Type>
const Type& Reflection::GetRawNonOneof(const Message& message,
const FieldDescriptor* field) const {
if (schema_.IsSplit(field)) {
return *GetConstPointerAtOffset<Type>(
GetSplitField(&message), schema_.GetFieldOffsetNonOneof(field));
const uint32_t field_offset = schema_.GetFieldOffsetNonOneof(field);
if (!schema_.IsSplit(field)) {
return GetConstRefAtOffset<Type>(message, field_offset);
}
return GetConstRefAtOffset<Type>(message,
schema_.GetFieldOffsetNonOneof(field));
const void* split = GetSplitField(&message);
if (SplitFieldHasExtraIndirection(field)) {
return **GetConstPointerAtOffset<Type*>(split, field_offset);
}
return *GetConstPointerAtOffset<Type>(split, field_offset);
}
void Reflection::PrepareSplitMessageForWrite(Message* message) const {
@ -2704,27 +2707,57 @@ void Reflection::PrepareSplitMessageForWrite(Message* message) const {
}
}
template <class Type>
static Type* AllocIfDefault(const FieldDescriptor* field, Type*& ptr,
Arena* arena) {
if (ptr == internal::DefaultRawPtr()) {
// Note: we can't rely on Type to distinguish between these cases (Type can
// be e.g. char).
if (field->cpp_type() < FieldDescriptor::CPPTYPE_STRING ||
(field->cpp_type() == FieldDescriptor::CPPTYPE_STRING &&
internal::cpp::EffectiveStringCType(field) == FieldOptions::CORD)) {
ptr = reinterpret_cast<Type*>(
Arena::CreateMessage<RepeatedField<int32_t>>(arena));
} else {
ptr = reinterpret_cast<Type*>(
Arena::CreateMessage<RepeatedPtrFieldBase>(arena));
}
}
return ptr;
}
template <class Type>
Type* Reflection::MutableRawNonOneof(Message* message,
const FieldDescriptor* field) const {
if (schema_.IsSplit(field)) {
PrepareSplitMessageForWrite(message);
return GetPointerAtOffset<Type>(*MutableSplitField(message),
schema_.GetFieldOffsetNonOneof(field));
const uint32_t field_offset = schema_.GetFieldOffsetNonOneof(field);
if (!schema_.IsSplit(field)) {
return GetPointerAtOffset<Type>(message, field_offset);
}
return GetPointerAtOffset<Type>(message,
schema_.GetFieldOffsetNonOneof(field));
PrepareSplitMessageForWrite(message);
void** split = MutableSplitField(message);
if (SplitFieldHasExtraIndirection(field)) {
return AllocIfDefault(field,
*GetPointerAtOffset<Type*>(*split, field_offset),
message->GetArenaForAllocation());
}
return GetPointerAtOffset<Type>(*split, field_offset);
}
template <typename Type>
Type* Reflection::MutableRaw(Message* message,
const FieldDescriptor* field) const {
if (schema_.IsSplit(field)) {
PrepareSplitMessageForWrite(message);
return GetPointerAtOffset<Type>(*MutableSplitField(message),
schema_.GetFieldOffset(field));
const uint32_t field_offset = schema_.GetFieldOffset(field);
if (!schema_.IsSplit(field)) {
return GetPointerAtOffset<Type>(message, field_offset);
}
PrepareSplitMessageForWrite(message);
void** split = MutableSplitField(message);
if (SplitFieldHasExtraIndirection(field)) {
return AllocIfDefault(field,
*GetPointerAtOffset<Type*>(*split, field_offset),
message->GetArenaForAllocation());
}
return GetPointerAtOffset<Type>(message, schema_.GetFieldOffset(field));
return GetPointerAtOffset<Type>(*split, field_offset);
}
const uint32_t* Reflection::GetHasBits(const Message& message) const {
@ -3785,6 +3818,10 @@ bool IsDescendant(Message& root, const Message& message) {
return false;
}
// Returns whether `field` is stored in the split struct behind a raw pointer
// (requiring one extra dereference on access). Currently this is true exactly
// for repeated fields; singular split fields are stored inline.
bool SplitFieldHasExtraIndirection(const FieldDescriptor* field) {
  return field->is_repeated();
}
} // namespace internal
} // namespace protobuf
} // namespace google

@ -388,6 +388,11 @@ const std::string& NameOfDenseEnum(int v) {
return NameOfDenseEnumSlow(v, &deci);
}
// Returns whether this type of field is stored in the split struct as a raw
// pointer.
PROTOBUF_EXPORT bool SplitFieldHasExtraIndirection(
const FieldDescriptor* field);
} // namespace internal
} // namespace protobuf
} // namespace google

@ -41,8 +41,12 @@
#include "google/protobuf/extension_set.h"
#include "google/protobuf/generated_message_tctable_decl.h"
#include "google/protobuf/map.h"
#include "google/protobuf/message_lite.h"
#include "google/protobuf/metadata_lite.h"
#include "google/protobuf/parse_context.h"
#include "google/protobuf/raw_ptr.h"
#include "google/protobuf/repeated_field.h"
#include "google/protobuf/repeated_ptr_field.h"
#include "google/protobuf/wire_format_lite.h"
// Must come last:
@ -607,6 +611,30 @@ class PROTOBUF_EXPORT TcParser final {
return *target;
}
// Returns a reference to the repeated field of type T at `offset`. For split
// messages the slot holds a pointer, and the field is lazily allocated on the
// message's arena the first time it is touched during parsing.
template <typename T, bool is_split>
static inline T& MaybeCreateRepeatedRefAt(void* x, size_t offset,
                                          MessageLite* msg) {
  if (is_split) {
    // Split storage: replace the shared default sentinel with a real
    // arena-allocated object on first write.
    void*& slot = RefAt<void*>(x, offset);
    if (slot == DefaultRawPtr()) {
      slot = Arena::CreateMessage<T>(msg->GetArenaForAllocation());
    }
    return *static_cast<T*>(slot);
  }
  // Non-split storage: the repeated field lives inline in the message.
  return RefAt<T>(x, offset);
}
// Convenience wrapper over MaybeCreateRepeatedRefAt for scalar repeated
// fields stored in RepeatedField<T>.
template <typename T, bool is_split>
static inline RepeatedField<T>& MaybeCreateRepeatedFieldRefAt(
    void* x, size_t offset, MessageLite* msg) {
  return MaybeCreateRepeatedRefAt<RepeatedField<T>, is_split>(x, offset, msg);
}
// Convenience wrapper over MaybeCreateRepeatedRefAt for pointer-based
// repeated fields stored in RepeatedPtrField<T> (strings, messages).
template <typename T, bool is_split>
static inline RepeatedPtrField<T>& MaybeCreateRepeatedPtrFieldRefAt(
    void* x, size_t offset, MessageLite* msg) {
  return MaybeCreateRepeatedRefAt<RepeatedPtrField<T>, is_split>(x, offset,
                                                                 msg);
}
template <typename T>
static inline T ReadAt(const void* x, size_t offset) {
T out;
@ -661,7 +689,7 @@ class PROTOBUF_EXPORT TcParser final {
static const char* FastVarintS1(PROTOBUF_TC_PARAM_DECL);
friend class GeneratedTcTableLiteTest;
static void* MaybeGetSplitBase(MessageLite* msg, const bool is_split,
static void* MaybeGetSplitBase(MessageLite* msg, bool is_split,
const TcParseTableBase* table);
// Test only access to verify that the right function is being called via
@ -864,21 +892,27 @@ class PROTOBUF_EXPORT TcParser final {
// Mini parsing:
template <bool is_split>
static const char* MpVarint(PROTOBUF_TC_PARAM_DECL);
template <bool is_split>
static const char* MpRepeatedVarint(PROTOBUF_TC_PARAM_DECL);
template <bool is_split>
static const char* MpPackedVarint(PROTOBUF_TC_PARAM_DECL);
template <bool is_split>
static const char* MpFixed(PROTOBUF_TC_PARAM_DECL);
template <bool is_split>
static const char* MpRepeatedFixed(PROTOBUF_TC_PARAM_DECL);
template <bool is_split>
static const char* MpPackedFixed(PROTOBUF_TC_PARAM_DECL);
template <bool is_split>
static const char* MpString(PROTOBUF_TC_PARAM_DECL);
template <bool is_split>
static const char* MpRepeatedString(PROTOBUF_TC_PARAM_DECL);
template <bool is_split>
static const char* MpMessage(PROTOBUF_TC_PARAM_DECL);
template <bool is_group>
template <bool is_split, bool is_group>
static const char* MpRepeatedMessageOrGroup(PROTOBUF_TC_PARAM_DECL);
static const char* MpLazyMessage(PROTOBUF_TC_PARAM_DECL);
static const char* MpFallback(PROTOBUF_TC_PARAM_DECL);
template <bool is_split>
static const char* MpMap(PROTOBUF_TC_PARAM_DECL);
};

@ -47,6 +47,7 @@
#include "google/protobuf/message_lite.h"
#include "google/protobuf/parse_context.h"
#include "google/protobuf/repeated_field.h"
#include "google/protobuf/repeated_ptr_field.h"
#include "google/protobuf/varint_shuffle.h"
#include "google/protobuf/wire_format_lite.h"
#include "utf8_validity.h"
@ -300,22 +301,22 @@ inline PROTOBUF_ALWAYS_INLINE const char* TcParser::MiniParse(
entry->type_card & (+field_layout::kSplitMask | FieldKind::kFkMask);
static constexpr TailCallParseFunc kMiniParseTable[] = {
&MpFallback, // FieldKind::kFkNone
&MpVarint<false>, // FieldKind::kFkVarint
&MpPackedVarint, // FieldKind::kFkPackedVarint
&MpFixed<false>, // FieldKind::kFkFixed
&MpPackedFixed, // FieldKind::kFkPackedFixed
&MpString<false>, // FieldKind::kFkString
&MpMessage<false>, // FieldKind::kFkMessage
&MpMap, // FieldKind::kFkMap
&Error, // kSplitMask | FieldKind::kFkNone
&MpVarint<true>, // kSplitMask | FieldKind::kFkVarint
&Error, // kSplitMask | FieldKind::kFkPackedVarint
&MpFixed<true>, // kSplitMask | FieldKind::kFkFixed
&Error, // kSplitMask | FieldKind::kFkPackedFixed
&MpString<true>, // kSplitMask | FieldKind::kFkString
&MpMessage<true>, // kSplitMask | FieldKind::kFkMessage
&Error, // kSplitMask | FieldKind::kFkMap
&MpFallback, // FieldKind::kFkNone
&MpVarint<false>, // FieldKind::kFkVarint
&MpPackedVarint<false>, // FieldKind::kFkPackedVarint
&MpFixed<false>, // FieldKind::kFkFixed
&MpPackedFixed<false>, // FieldKind::kFkPackedFixed
&MpString<false>, // FieldKind::kFkString
&MpMessage<false>, // FieldKind::kFkMessage
&MpMap<false>, // FieldKind::kFkMap
&Error, // kSplitMask | FieldKind::kFkNone
&MpVarint<true>, // kSplitMask | FieldKind::kFkVarint
&MpPackedVarint<true>, // kSplitMask | FieldKind::kFkPackedVarint
&MpFixed<true>, // kSplitMask | FieldKind::kFkFixed
&MpPackedFixed<true>, // kSplitMask | FieldKind::kFkPackedFixed
&MpString<true>, // kSplitMask | FieldKind::kFkString
&MpMessage<true>, // kSplitMask | FieldKind::kFkMessage
&MpMap<true>, // kSplitMask | FieldKind::kFkMap
};
// Just to be sure we got the order right, above.
static_assert(0 == FieldKind::kFkNone, "Invalid table order");
@ -1796,7 +1797,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpFixed(PROTOBUF_TC_PARAM_DECL) {
// Check for repeated parsing (wiretype fallback is handled there):
if (card == field_layout::kFcRepeated) {
PROTOBUF_MUSTTAIL return MpRepeatedFixed(PROTOBUF_TC_PARAM_PASS);
PROTOBUF_MUSTTAIL return MpRepeatedFixed<is_split>(PROTOBUF_TC_PARAM_PASS);
}
// Check for mismatched wiretype:
const uint16_t rep = type_card & field_layout::kRepMask;
@ -1829,6 +1830,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpFixed(PROTOBUF_TC_PARAM_DECL) {
PROTOBUF_MUSTTAIL return ToTagDispatch(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
template <bool is_split>
PROTOBUF_NOINLINE const char* TcParser::MpRepeatedFixed(
PROTOBUF_TC_PARAM_DECL) {
const auto& entry = RefAt<FieldEntry>(table, data.entry_offset());
@ -1837,16 +1839,18 @@ PROTOBUF_NOINLINE const char* TcParser::MpRepeatedFixed(
// Check for packed repeated fallback:
if (decoded_wiretype == WireFormatLite::WIRETYPE_LENGTH_DELIMITED) {
PROTOBUF_MUSTTAIL return MpPackedFixed(PROTOBUF_TC_PARAM_PASS);
PROTOBUF_MUSTTAIL return MpPackedFixed<is_split>(PROTOBUF_TC_PARAM_PASS);
}
void* const base = MaybeGetSplitBase(msg, is_split, table);
const uint16_t type_card = entry.type_card;
const uint16_t rep = type_card & field_layout::kRepMask;
if (rep == field_layout::kRep64Bits) {
if (decoded_wiretype != WireFormatLite::WIRETYPE_FIXED64) {
PROTOBUF_MUSTTAIL return table->fallback(PROTOBUF_TC_PARAM_PASS);
}
auto& field = RefAt<RepeatedField<uint64_t>>(msg, entry.offset);
auto& field = MaybeCreateRepeatedFieldRefAt<uint64_t, is_split>(
base, entry.offset, msg);
constexpr auto size = sizeof(uint64_t);
const char* ptr2 = ptr;
uint32_t next_tag;
@ -1863,7 +1867,8 @@ PROTOBUF_NOINLINE const char* TcParser::MpRepeatedFixed(
if (decoded_wiretype != WireFormatLite::WIRETYPE_FIXED32) {
PROTOBUF_MUSTTAIL return table->fallback(PROTOBUF_TC_PARAM_PASS);
}
auto& field = RefAt<RepeatedField<uint32_t>>(msg, entry.offset);
auto& field = MaybeCreateRepeatedFieldRefAt<uint32_t, is_split>(
base, entry.offset, msg);
constexpr auto size = sizeof(uint32_t);
const char* ptr2 = ptr;
uint32_t next_tag;
@ -1884,6 +1889,7 @@ error:
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
template <bool is_split>
PROTOBUF_NOINLINE const char* TcParser::MpPackedFixed(PROTOBUF_TC_PARAM_DECL) {
const auto& entry = RefAt<FieldEntry>(table, data.entry_offset());
const uint16_t type_card = entry.type_card;
@ -1891,17 +1897,20 @@ PROTOBUF_NOINLINE const char* TcParser::MpPackedFixed(PROTOBUF_TC_PARAM_DECL) {
// Check for non-packed repeated fallback:
if (decoded_wiretype != WireFormatLite::WIRETYPE_LENGTH_DELIMITED) {
PROTOBUF_MUSTTAIL return MpRepeatedFixed(PROTOBUF_TC_PARAM_PASS);
PROTOBUF_MUSTTAIL return MpRepeatedFixed<is_split>(PROTOBUF_TC_PARAM_PASS);
}
void* const base = MaybeGetSplitBase(msg, is_split, table);
int size = ReadSize(&ptr);
uint16_t rep = type_card & field_layout::kRepMask;
if (rep == field_layout::kRep64Bits) {
auto& field = RefAt<RepeatedField<uint64_t>>(msg, entry.offset);
auto& field = MaybeCreateRepeatedFieldRefAt<uint64_t, is_split>(
base, entry.offset, msg);
ptr = ctx->ReadPackedFixed(ptr, size, &field);
} else {
ABSL_DCHECK_EQ(rep, static_cast<uint16_t>(field_layout::kRep32Bits));
auto& field = RefAt<RepeatedField<uint32_t>>(msg, entry.offset);
auto& field = MaybeCreateRepeatedFieldRefAt<uint32_t, is_split>(
base, entry.offset, msg);
ptr = ctx->ReadPackedFixed(ptr, size, &field);
}
@ -1919,7 +1928,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpVarint(PROTOBUF_TC_PARAM_DECL) {
// Check for repeated parsing:
if (card == field_layout::kFcRepeated) {
PROTOBUF_MUSTTAIL return MpRepeatedVarint(PROTOBUF_TC_PARAM_PASS);
PROTOBUF_MUSTTAIL return MpRepeatedVarint<is_split>(PROTOBUF_TC_PARAM_PASS);
}
// Check for wire type mismatch:
if ((data.tag() & 7) != WireFormatLite::WIRETYPE_VARINT) {
@ -1975,6 +1984,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpVarint(PROTOBUF_TC_PARAM_DECL) {
PROTOBUF_MUSTTAIL return ToTagDispatch(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
template <bool is_split>
PROTOBUF_NOINLINE const char* TcParser::MpRepeatedVarint(
PROTOBUF_TC_PARAM_DECL) {
const auto& entry = RefAt<FieldEntry>(table, data.entry_offset());
@ -1984,7 +1994,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpRepeatedVarint(
// Check for packed repeated fallback:
if (decoded_wiretype == WireFormatLite::WIRETYPE_LENGTH_DELIMITED) {
PROTOBUF_MUSTTAIL return MpPackedVarint(PROTOBUF_TC_PARAM_PASS);
PROTOBUF_MUSTTAIL return MpPackedVarint<is_split>(PROTOBUF_TC_PARAM_PASS);
}
// Check for wire type mismatch:
if (decoded_wiretype != WireFormatLite::WIRETYPE_VARINT) {
@ -1994,9 +2004,11 @@ PROTOBUF_NOINLINE const char* TcParser::MpRepeatedVarint(
const bool is_zigzag = xform_val == field_layout::kTvZigZag;
const bool is_validated_enum = xform_val & field_layout::kTvEnum;
void* const base = MaybeGetSplitBase(msg, is_split, table);
uint16_t rep = type_card & field_layout::kRepMask;
if (rep == field_layout::kRep64Bits) {
auto& field = RefAt<RepeatedField<uint64_t>>(msg, entry.offset);
auto& field = MaybeCreateRepeatedFieldRefAt<uint64_t, is_split>(
base, entry.offset, msg);
const char* ptr2 = ptr;
uint32_t next_tag;
do {
@ -2009,7 +2021,8 @@ PROTOBUF_NOINLINE const char* TcParser::MpRepeatedVarint(
if (PROTOBUF_PREDICT_FALSE(ptr2 == nullptr)) goto error;
} while (next_tag == decoded_tag);
} else if (rep == field_layout::kRep32Bits) {
auto& field = RefAt<RepeatedField<uint32_t>>(msg, entry.offset);
auto& field = MaybeCreateRepeatedFieldRefAt<uint32_t, is_split>(
base, entry.offset, msg);
const char* ptr2 = ptr;
uint32_t next_tag;
do {
@ -2032,7 +2045,8 @@ PROTOBUF_NOINLINE const char* TcParser::MpRepeatedVarint(
} while (next_tag == decoded_tag);
} else {
ABSL_DCHECK_EQ(rep, static_cast<uint16_t>(field_layout::kRep8Bits));
auto& field = RefAt<RepeatedField<bool>>(msg, entry.offset);
auto& field =
MaybeCreateRepeatedFieldRefAt<bool, is_split>(base, entry.offset, msg);
const char* ptr2 = ptr;
uint32_t next_tag;
do {
@ -2053,6 +2067,7 @@ error:
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
template <bool is_split>
PROTOBUF_NOINLINE const char* TcParser::MpPackedVarint(PROTOBUF_TC_PARAM_DECL) {
const auto& entry = RefAt<FieldEntry>(table, data.entry_offset());
auto type_card = entry.type_card;
@ -2060,7 +2075,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpPackedVarint(PROTOBUF_TC_PARAM_DECL) {
// Check for non-packed repeated fallback:
if (decoded_wiretype != WireFormatLite::WIRETYPE_LENGTH_DELIMITED) {
PROTOBUF_MUSTTAIL return MpRepeatedVarint(PROTOBUF_TC_PARAM_PASS);
PROTOBUF_MUSTTAIL return MpRepeatedVarint<is_split>(PROTOBUF_TC_PARAM_PASS);
}
const uint16_t xform_val = (type_card & field_layout::kTvMask);
const bool is_zigzag = xform_val == field_layout::kTvZigZag;
@ -2070,14 +2085,17 @@ PROTOBUF_NOINLINE const char* TcParser::MpPackedVarint(PROTOBUF_TC_PARAM_DECL) {
// pending hasbits now:
SyncHasbits(msg, hasbits, table);
void* const base = MaybeGetSplitBase(msg, is_split, table);
uint16_t rep = type_card & field_layout::kRepMask;
if (rep == field_layout::kRep64Bits) {
auto* field = &RefAt<RepeatedField<uint64_t>>(msg, entry.offset);
auto* field = &MaybeCreateRepeatedFieldRefAt<uint64_t, is_split>(
base, entry.offset, msg);
return ctx->ReadPackedVarint(ptr, [field, is_zigzag](uint64_t value) {
field->Add(is_zigzag ? WireFormatLite::ZigZagDecode64(value) : value);
});
} else if (rep == field_layout::kRep32Bits) {
auto* field = &RefAt<RepeatedField<uint32_t>>(msg, entry.offset);
auto* field = &MaybeCreateRepeatedFieldRefAt<uint32_t, is_split>(
base, entry.offset, msg);
if (is_validated_enum) {
const TcParseTableBase::FieldAux aux = *table->field_aux(entry.aux_idx);
return ctx->ReadPackedVarint(ptr, [=](int32_t value) {
@ -2096,7 +2114,8 @@ PROTOBUF_NOINLINE const char* TcParser::MpPackedVarint(PROTOBUF_TC_PARAM_DECL) {
}
} else {
ABSL_DCHECK_EQ(rep, static_cast<uint16_t>(field_layout::kRep8Bits));
auto* field = &RefAt<RepeatedField<bool>>(msg, entry.offset);
auto* field =
&MaybeCreateRepeatedFieldRefAt<bool, is_split>(base, entry.offset, msg);
return ctx->ReadPackedVarint(
ptr, [field](uint64_t value) { field->Add(static_cast<bool>(value)); });
}
@ -2146,7 +2165,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpString(PROTOBUF_TC_PARAM_DECL) {
PROTOBUF_MUSTTAIL return table->fallback(PROTOBUF_TC_PARAM_PASS);
}
if (card == field_layout::kFcRepeated) {
PROTOBUF_MUSTTAIL return MpRepeatedString(PROTOBUF_TC_PARAM_PASS);
PROTOBUF_MUSTTAIL return MpRepeatedString<is_split>(PROTOBUF_TC_PARAM_PASS);
}
const uint16_t xform_val = type_card & field_layout::kTvMask;
const uint16_t rep = type_card & field_layout::kRepMask;
@ -2222,6 +2241,7 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::ParseRepeatedStringOnce(
return ptr;
}
template <bool is_split>
PROTOBUF_NOINLINE const char* TcParser::MpRepeatedString(
PROTOBUF_TC_PARAM_DECL) {
const auto& entry = RefAt<FieldEntry>(table, data.entry_offset());
@ -2235,9 +2255,11 @@ PROTOBUF_NOINLINE const char* TcParser::MpRepeatedString(
const uint16_t rep = type_card & field_layout::kRepMask;
const uint16_t xform_val = type_card & field_layout::kTvMask;
void* const base = MaybeGetSplitBase(msg, is_split, table);
switch (rep) {
case field_layout::kRepSString: {
auto& field = RefAt<RepeatedPtrField<std::string>>(msg, entry.offset);
auto& field = MaybeCreateRepeatedPtrFieldRefAt<std::string, is_split>(
base, entry.offset, msg);
const char* ptr2 = ptr;
uint32_t next_tag;
@ -2300,10 +2322,10 @@ PROTOBUF_NOINLINE const char* TcParser::MpMessage(PROTOBUF_TC_PARAM_DECL) {
const uint16_t rep = type_card & field_layout::kRepMask;
switch (rep) {
case field_layout::kRepMessage:
PROTOBUF_MUSTTAIL return MpRepeatedMessageOrGroup<false>(
PROTOBUF_MUSTTAIL return MpRepeatedMessageOrGroup<is_split, false>(
PROTOBUF_TC_PARAM_PASS);
case field_layout::kRepGroup:
PROTOBUF_MUSTTAIL return MpRepeatedMessageOrGroup<true>(
PROTOBUF_MUSTTAIL return MpRepeatedMessageOrGroup<is_split, true>(
PROTOBUF_TC_PARAM_PASS);
default:
PROTOBUF_MUSTTAIL return table->fallback(PROTOBUF_TC_PARAM_PASS);
@ -2372,7 +2394,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpMessage(PROTOBUF_TC_PARAM_DECL) {
}
}
template <bool is_group>
template <bool is_split, bool is_group>
const char* TcParser::MpRepeatedMessageOrGroup(PROTOBUF_TC_PARAM_DECL) {
const auto& entry = RefAt<FieldEntry>(table, data.entry_offset());
const uint16_t type_card = entry.type_card;
@ -2396,7 +2418,10 @@ const char* TcParser::MpRepeatedMessageOrGroup(PROTOBUF_TC_PARAM_DECL) {
}
}
auto& field = RefAt<RepeatedPtrFieldBase>(msg, entry.offset);
void* const base = MaybeGetSplitBase(msg, is_split, table);
RepeatedPtrFieldBase& field =
MaybeCreateRepeatedRefAt<RepeatedPtrFieldBase, is_split>(
base, entry.offset, msg);
const auto aux = *table->field_aux(&entry);
if ((type_card & field_layout::kTvMask) == field_layout::kTvTable) {
auto* inner_table = aux.table;
@ -2405,7 +2430,7 @@ const char* TcParser::MpRepeatedMessageOrGroup(PROTOBUF_TC_PARAM_DECL) {
uint32_t next_tag;
do {
MessageLite* value =
field.Add<GenericTypeHandler<MessageLite>>(default_instance);
field.template Add<GenericTypeHandler<MessageLite>>(default_instance);
ptr = is_group ? ctx->ParseGroup<TcParser>(value, ptr2, decoded_tag,
inner_table)
: ctx->ParseMessage<TcParser>(value, ptr2, inner_table);
@ -2427,7 +2452,7 @@ const char* TcParser::MpRepeatedMessageOrGroup(PROTOBUF_TC_PARAM_DECL) {
uint32_t next_tag;
do {
MessageLite* value =
field.Add<GenericTypeHandler<MessageLite>>(default_instance);
field.template Add<GenericTypeHandler<MessageLite>>(default_instance);
ptr = is_group ? ctx->ParseGroup(value, ptr2, decoded_tag)
: ctx->ParseMessage(value, ptr2);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) goto error;
@ -2683,6 +2708,7 @@ const char* TcParser::ParseOneMapEntry(
return ptr;
}
template <bool is_split>
PROTOBUF_NOINLINE const char* TcParser::MpMap(PROTOBUF_TC_PARAM_DECL) {
const auto& entry = RefAt<FieldEntry>(table, data.entry_offset());
// `aux[0]` points into a MapAuxInfo.
@ -2702,10 +2728,11 @@ PROTOBUF_NOINLINE const char* TcParser::MpMap(PROTOBUF_TC_PARAM_DECL) {
// Otherwise, it points into a MapField and we must synchronize with
// reflection. It is done by calling the MutableMap() virtual function on the
// field's base class.
void* const base = MaybeGetSplitBase(msg, is_split, table);
UntypedMapBase& map =
map_info.use_lite
? RefAt<UntypedMapBase>(msg, entry.offset)
: *RefAt<MapFieldBaseForParse>(msg, entry.offset).MutableMap();
? RefAt<UntypedMapBase>(base, entry.offset)
: *RefAt<MapFieldBaseForParse>(base, entry.offset).MutableMap();
const uint32_t saved_tag = data.tag();

@ -1557,17 +1557,32 @@ void** Reflection::MutableSplitField(Message* message) const {
return internal::GetPointerAtOffset<void*>(message, schema_.SplitOffset());
}
namespace internal {
// Compile-time variant of SplitFieldHasExtraIndirection: derives the answer
// from the C++ type T (repeated-field types are the ones stored behind an
// extra pointer in the split struct) and DCHECKs that it agrees with the
// descriptor-based runtime check.
template <typename T>
bool SplitFieldHasExtraIndirectionStatic(const FieldDescriptor* field) {
  const bool has_extra_indirection =
      std::is_base_of<RepeatedFieldBase, T>::value ||
      std::is_base_of<RepeatedPtrFieldBase, T>::value;
  // The static and dynamic notions of "split field with indirection" must
  // never diverge.
  ABSL_DCHECK_EQ(SplitFieldHasExtraIndirection(field), has_extra_indirection);
  return has_extra_indirection;
}
} // namespace internal
template <typename Type>
const Type& Reflection::GetRaw(const Message& message,
const FieldDescriptor* field) const {
ABSL_DCHECK(!schema_.InRealOneof(field) || HasOneofField(message, field))
<< "Field = " << field->full_name();
if (schema_.IsSplit(field)) {
return *internal::GetConstPointerAtOffset<Type>(
GetSplitField(&message), schema_.GetFieldOffset(field));
const uint32_t field_offset = schema_.GetFieldOffset(field);
if (!schema_.IsSplit(field)) {
return internal::GetConstRefAtOffset<Type>(message, field_offset);
}
const void* split = GetSplitField(&message);
if (internal::SplitFieldHasExtraIndirectionStatic<Type>(field)) {
return **internal::GetConstPointerAtOffset<Type*>(split, field_offset);
}
return internal::GetConstRefAtOffset<Type>(message,
schema_.GetFieldOffset(field));
return *internal::GetConstPointerAtOffset<Type>(split, field_offset);
}
template <typename T>

@ -58,6 +58,9 @@ class RawPtr {
explicit constexpr RawPtr(const void* p) : p_(const_cast<void*>(p)) {}
bool IsDefault() const { return p_ == kZeroBuffer; }
// Deletes the pointee unless this still refers to the shared default buffer,
// which is not heap-allocated and must never be deleted.
void DeleteIfNotDefault() {
  if (!IsDefault()) delete Get();
}
void Set(const void* p) { p_ = const_cast<void*>(p); }
T* Get() const { return reinterpret_cast<T*>(p_); }

@ -82,6 +82,20 @@ TEST(RawPtr, Constexpr) {
EXPECT_FALSE(raw2.IsDefault());
}
TEST(RawPtr, DeleteIfNotDefault) {
  RawPtr<Obj> ptr;
  EXPECT_TRUE(ptr.IsDefault());

  // Calling while still pointing at the default must be a no-op; deleting the
  // shared default buffer would trip the allocator.
  ptr.DeleteIfNotDefault();

  ptr.Set(new Obj());
  EXPECT_FALSE(ptr.IsDefault());

  // Now owning a heap object: the call must free it (leak checkers verify).
  ptr.DeleteIfNotDefault();
}
} // namespace
} // namespace internal
} // namespace protobuf

@ -142,13 +142,16 @@ void memswap(char* a, char* b) {
template <typename Element>
class RepeatedIterator;
// Sentinel base class. Carries no state; it exists so that code can detect
// "is some RepeatedField instantiation" via std::is_base_of without knowing
// the element type.
struct RepeatedFieldBase {};
// We can't skip the destructor for, e.g., arena allocated RepeatedField<Cord>.
template <typename Element,
bool Trivial = Arena::is_destructor_skippable<Element>::value>
struct RepeatedFieldDestructorSkippableBase {};
struct RepeatedFieldDestructorSkippableBase : RepeatedFieldBase {};
template <typename Element>
struct RepeatedFieldDestructorSkippableBase<Element, true> {
struct RepeatedFieldDestructorSkippableBase<Element, true> : RepeatedFieldBase {
using DestructorSkippable_ = void;
};

@ -1275,7 +1275,9 @@ constexpr RepeatedPtrField<Element>::RepeatedPtrField()
template <typename Element>
inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena)
: RepeatedPtrFieldBase(arena) {
StaticValidityCheck();
// We can't have StaticValidityCheck here because that requires Element to be
// a complete type, and in split repeated fields cases, we call
// CreateMaybeMessage<RepeatedPtrField<T>> for incomplete Ts.
}
template <typename Element>

Loading…
Cancel
Save