Optimize the generation of TDP tables by replacing the provider virtual function call with precalculated values.

We have to query every field anyway, so it is better to do it once at the start and keep the results.
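The change follows a standard devirtualization pattern: rather than calling a virtual GetForField() every time an option is needed (often repeatedly for the same field), all per-field options are computed once up front and stored in a flat vector that later passes simply read. A minimal sketch of the pattern, using simplified stand-in types (Field, OptionProvider, FieldOptions here are illustrative, not the real protobuf interfaces shown in the diff below):

    #include <vector>

    struct Field {
      int number;
    };

    // Before: every query pays a virtual call, and the table generator may
    // ask about the same field several times. (Hypothetical interface.)
    class OptionProvider {
     public:
      virtual float GetPresenceProbabilityForField(const Field& f) const = 0;

     protected:
      ~OptionProvider() = default;
    };

    // After: options are precalculated per field and stored by value, so the
    // generator iterates over plain data with no virtual dispatch.
    struct FieldOptions {
      const Field* field;
      float presence_probability;
    };

    std::vector<FieldOptions> PrecalculateOptions(
        const std::vector<Field>& fields, const OptionProvider& provider) {
      std::vector<FieldOptions> out;
      out.reserve(fields.size());
      for (const Field& f : fields) {
        out.push_back({&f, provider.GetPresenceProbabilityForField(f)});
      }
      return out;  // later passes read out[i] directly
    }

The diff below applies this same shape: PerFieldOptions and the OptionProvider interface are merged into a single FieldOptions struct (which also absorbs the has_bit_indices and inlined_string_indices side tables), and both the code generator and reflection build the vector once before constructing TailCallTableInfo.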

PiperOrigin-RevId: 621650969

Branch: pull/16400/head
Author: Protobuf Team Bot (8 months ago), committed by Copybara-Service
Parent: 314383430e
Commit: 00caffaf00
  1. src/google/protobuf/compiler/cpp/helpers.cc (13 changed lines)
  2. src/google/protobuf/compiler/cpp/helpers.h (4 changed lines)
  3. src/google/protobuf/compiler/cpp/parse_function_generator.cc (52 changed lines)
  4. src/google/protobuf/generated_message_reflection.cc (92 changed lines)
  5. src/google/protobuf/generated_message_tctable_gen.cc (100 changed lines)
  6. src/google/protobuf/generated_message_tctable_gen.h (20 changed lines)
  7. src/google/protobuf/message.h (7 changed lines)

@@ -40,6 +40,7 @@
#include "google/protobuf/descriptor.h"
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/dynamic_message.h"
#include "google/protobuf/generated_message_tctable_impl.h"
#include "google/protobuf/io/printer.h"
#include "google/protobuf/io/strtod.h"
#include "google/protobuf/wire_format.h"
@@ -200,6 +201,18 @@ bool IsLazilyVerifiedLazy(const FieldDescriptor* field,
return false;
}
internal::field_layout::TransformValidation GetLazyStyle(
const FieldDescriptor* field, const Options& options,
MessageSCCAnalyzer* scc_analyzer) {
if (IsEagerlyVerifiedLazy(field, options, scc_analyzer)) {
return internal::field_layout::kTvEager;
}
if (IsLazilyVerifiedLazy(field, options)) {
return internal::field_layout::kTvLazy;
}
return {};
}
absl::flat_hash_map<absl::string_view, std::string> MessageVars(
const Descriptor* desc) {
absl::string_view prefix = IsMapEntryMessage(desc) ? "" : "_impl_.";

@@ -394,6 +394,10 @@ inline bool IsExplicitLazy(const FieldDescriptor* field) {
return field->options().lazy() || field->options().unverified_lazy();
}
internal::field_layout::TransformValidation GetLazyStyle(
const FieldDescriptor* field, const Options& options,
MessageSCCAnalyzer* scc_analyzer);
bool IsEagerlyVerifiedLazy(const FieldDescriptor* field, const Options& options,
MessageSCCAnalyzer* scc_analyzer);

@@ -8,6 +8,7 @@
#include "google/protobuf/compiler/cpp/parse_function_generator.h"
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <string>
#include <utility>
@@ -54,33 +55,6 @@ std::vector<const FieldDescriptor*> GetOrderedFields(
} // namespace
class ParseFunctionGenerator::GeneratedOptionProvider final
: public internal::TailCallTableInfo::OptionProvider {
public:
explicit GeneratedOptionProvider(ParseFunctionGenerator* gen) : gen_(gen) {}
TailCallTableInfo::PerFieldOptions GetForField(
const FieldDescriptor* field) const final {
const auto verify_flag = [&] {
if (IsEagerlyVerifiedLazy(field, gen_->options_, gen_->scc_analyzer_))
return internal::field_layout::kTvEager;
if (IsLazilyVerifiedLazy(field, gen_->options_))
return internal::field_layout::kTvLazy;
return internal::field_layout::TransformValidation{};
};
return {
GetPresenceProbability(field, gen_->options_),
verify_flag(),
IsStringInlined(field, gen_->options_),
IsImplicitWeakField(field, gen_->options_, gen_->scc_analyzer_),
/* use_direct_tcparser_table */ true,
ShouldSplit(field, gen_->options_),
};
}
private:
ParseFunctionGenerator* gen_;
};
ParseFunctionGenerator::ParseFunctionGenerator(
const Descriptor* descriptor, int max_has_bit_index,
const std::vector<int>& has_bit_indices,
@@ -96,12 +70,32 @@ ParseFunctionGenerator::ParseFunctionGenerator(
ordered_fields_(GetOrderedFields(descriptor_, options_)),
num_hasbits_(max_has_bit_index),
index_in_file_messages_(index_in_file_messages) {
std::vector<TailCallTableInfo::FieldOptions> fields;
fields.reserve(ordered_fields_.size());
for (size_t i = 0; i < ordered_fields_.size(); ++i) {
auto* field = ordered_fields_[i];
fields.push_back({
field,
field->index() < has_bit_indices.size()
? has_bit_indices[field->index()]
: -1,
GetPresenceProbability(field, options_),
GetLazyStyle(field, options_, scc_analyzer_),
IsStringInlined(field, options_),
IsImplicitWeakField(field, options_, scc_analyzer_),
/* use_direct_tcparser_table */ true,
ShouldSplit(field, options_),
field->index() < inlined_string_indices.size()
? inlined_string_indices[field->index()]
: -1,
});
}
tc_table_info_.reset(new TailCallTableInfo(
descriptor_, ordered_fields_,
descriptor_,
{/* is_lite */ GetOptimizeFor(descriptor->file(), options_) ==
FileOptions::LITE_RUNTIME,
/* uses_codegen */ true, options_.profile_driven_cluster_aux_subtable},
GeneratedOptionProvider(this), has_bit_indices, inlined_string_indices));
fields));
SetCommonMessageDataVariables(descriptor_, &variables_);
SetUnknownFieldsVariable(descriptor_, options_, &variables_);
variables_["classname"] = ClassName(descriptor, false);

@@ -357,6 +357,17 @@ bool Reflection::IsEagerlyVerifiedLazyField(
schema_.IsEagerlyVerifiedLazyField(field));
}
internal::field_layout::TransformValidation Reflection::GetLazyStyle(
const FieldDescriptor* field) const {
if (IsEagerlyVerifiedLazyField(field)) {
return internal::field_layout::kTvEager;
}
if (IsLazilyVerifiedLazyField(field)) {
return internal::field_layout::kTvLazy;
}
return {};
}
size_t Reflection::SpaceUsedLong(const Message& message) const {
// object_size_ already includes the in-memory representation of each field
// in the message, so we only need to account for additional memory used by
@@ -3292,6 +3303,8 @@ void Reflection::PopulateTcParseEntries(
for (const auto& entry : table_info.field_entries) {
const FieldDescriptor* field = entry.field;
if (field->type() == field->TYPE_ENUM &&
(entry.type_card & internal::field_layout::kTvMask) ==
internal::field_layout::kTvEnum &&
table_info.aux_entries[entry.aux_idx].type ==
internal::TailCallTableInfo::kEnumValidator) {
// Mini parse can't handle it. Fallback to reflection.
@@ -3370,67 +3383,40 @@ void Reflection::PopulateTcParseFieldAux(
const internal::TcParseTableBase* Reflection::CreateTcParseTable() const {
using TcParseTableBase = internal::TcParseTableBase;
std::vector<const FieldDescriptor*> fields;
constexpr int kNoHasbit = -1;
std::vector<int> has_bit_indices(
static_cast<size_t>(descriptor_->field_count()), kNoHasbit);
std::vector<int> inlined_string_indices = has_bit_indices;
std::vector<internal::TailCallTableInfo::FieldOptions> fields;
fields.reserve(descriptor_->field_count());
for (int i = 0; i < descriptor_->field_count(); ++i) {
auto* field = descriptor_->field(i);
fields.push_back(field);
has_bit_indices[static_cast<size_t>(field->index())] =
static_cast<int>(schema_.HasBitIndex(field));
if (IsInlined(field)) {
inlined_string_indices[static_cast<size_t>(field->index())] =
schema_.InlinedStringIndex(field);
}
}
std::sort(fields.begin(), fields.end(),
[](const FieldDescriptor* a, const FieldDescriptor* b) {
return a->number() < b->number();
});
class ReflectionOptionProvider final
: public internal::TailCallTableInfo::OptionProvider {
public:
explicit ReflectionOptionProvider(const Reflection& ref) : ref_(ref) {}
internal::TailCallTableInfo::PerFieldOptions GetForField(
const FieldDescriptor* field) const final {
const auto verify_flag = [&] {
if (ref_.IsEagerlyVerifiedLazyField(field))
return internal::field_layout::kTvEager;
if (ref_.IsLazilyVerifiedLazyField(field))
return internal::field_layout::kTvLazy;
return internal::field_layout::TransformValidation{};
};
return {
1.f, // All fields are assumed present.
verify_flag(), //
ref_.IsInlined(field), //
// Only LITE can be implicitly weak.
/* is_implicitly_weak */ false,
// We could change this to use direct table.
// Might be easier to do when all messages support TDP.
/* use_direct_tcparser_table */ false,
ref_.schema_.IsSplit(field), //
};
}
private:
const Reflection& ref_;
};
const bool is_inlined = IsInlined(field);
fields.push_back({
field, //
static_cast<int>(schema_.HasBitIndex(field)),
1.f, // All fields are assumed present.
GetLazyStyle(field),
is_inlined,
// Only LITE can be implicitly weak.
/* is_implicitly_weak */ false,
// We could change this to use direct table.
// Might be easier to do when all messages support TDP.
/* use_direct_tcparser_table */ false,
schema_.IsSplit(field),
is_inlined ? static_cast<int>(schema_.InlinedStringIndex(field))
: kNoHasbit,
});
}
std::sort(fields.begin(), fields.end(), [](const auto& a, const auto& b) {
return a.field->number() < b.field->number();
});
internal::TailCallTableInfo table_info(
descriptor_, fields,
descriptor_,
{
/* is_lite */ false,
/* uses_codegen */ false,
/* should_profile_driven_cluster_aux_table */ false,
},
ReflectionOptionProvider(*this), has_bit_indices, inlined_string_indices);
fields);
const size_t fast_entries_count = table_info.fast_path_fields.size();
ABSL_CHECK_EQ(static_cast<int>(fast_entries_count),
@@ -3456,7 +3442,7 @@ const internal::TcParseTableBase* Reflection::CreateTcParseTable() const {
schema_.HasExtensionSet()
? static_cast<uint16_t>(schema_.GetExtensionSetOffset())
: uint16_t{0},
static_cast<uint32_t>(fields.empty() ? 0 : fields.back()->number()),
static_cast<uint32_t>(fields.empty() ? 0 : fields.back().field->number()),
static_cast<uint8_t>((fast_entries_count - 1) << 3),
lookup_table_offset,
table_info.num_to_entry_table.skipmap32,

@@ -17,6 +17,7 @@
#include "absl/log/absl_check.h"
#include "absl/strings/str_cat.h"
#include "absl/types/span.h"
#include "google/protobuf/descriptor.h"
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/generated_message_tctable_decl.h"
@@ -98,15 +99,15 @@ EnumRangeInfo GetEnumRangeInfo(const FieldDescriptor* field,
// make sure we only use lazy rep for singular TYPE_MESSAGE fields.
// We can't trust the `lazy=true` annotation.
bool HasLazyRep(const FieldDescriptor* field,
const TailCallTableInfo::PerFieldOptions options) {
const TailCallTableInfo::FieldOptions& options) {
return field->type() == field->TYPE_MESSAGE && !field->is_repeated() &&
options.lazy_opt != 0;
}
TailCallTableInfo::FastFieldInfo::Field MakeFastFieldEntry(
const TailCallTableInfo::FieldEntryInfo& entry,
const TailCallTableInfo::MessageOptions& message_options,
const TailCallTableInfo::PerFieldOptions& options) {
const TailCallTableInfo::FieldOptions& options,
const TailCallTableInfo::MessageOptions& message_options) {
TailCallTableInfo::FastFieldInfo::Field info{};
#define PROTOBUF_PICK_FUNCTION(fn) \
(field->number() < 16 ? TcParseFunction::fn##1 : TcParseFunction::fn##2)
@@ -219,6 +220,7 @@ TailCallTableInfo::FastFieldInfo::Field MakeFastFieldEntry(
ABSL_CHECK(picked != TcParseFunction::kNone);
info.func = picked;
info.presence_probability = options.presence_probability;
return info;
#undef PROTOBUF_PICK_FUNCTION
@@ -230,10 +232,9 @@ TailCallTableInfo::FastFieldInfo::Field MakeFastFieldEntry(
bool IsFieldEligibleForFastParsing(
const TailCallTableInfo::FieldEntryInfo& entry,
const TailCallTableInfo::MessageOptions& message_options,
const TailCallTableInfo::OptionProvider& option_provider) {
const TailCallTableInfo::FieldOptions& options,
const TailCallTableInfo::MessageOptions& message_options) {
const auto* field = entry.field;
const auto options = option_provider.GetForField(field);
// Map, oneof, weak, and split fields are not handled on the fast path.
if (field->is_map() || field->real_containing_oneof() ||
field->options().weak() || options.is_implicitly_weak ||
@@ -345,7 +346,7 @@ std::vector<TailCallTableInfo::FastFieldInfo> SplitFastFieldsForSize(
const std::vector<TailCallTableInfo::FieldEntryInfo>& field_entries,
int table_size_log2,
const TailCallTableInfo::MessageOptions& message_options,
const TailCallTableInfo::OptionProvider& option_provider) {
absl::Span<const TailCallTableInfo::FieldOptions> fields) {
std::vector<TailCallTableInfo::FastFieldInfo> result(1 << table_size_log2);
const uint32_t idx_mask = static_cast<uint32_t>(result.size() - 1);
const auto tag_to_idx = [&](uint32_t tag) {
@@ -376,14 +377,14 @@ std::vector<TailCallTableInfo::FastFieldInfo> SplitFastFieldsForSize(
};
}
for (const auto& entry : field_entries) {
if (!IsFieldEligibleForFastParsing(entry, message_options,
option_provider)) {
for (int i = 0; i < field_entries.size(); ++i) {
const auto& entry = field_entries[i];
const auto& options = fields[i];
if (!IsFieldEligibleForFastParsing(entry, options, message_options)) {
continue;
}
const auto* field = entry.field;
const auto options = option_provider.GetForField(field);
const uint32_t tag = RecodeTagForFastParsing(WireFormat::MakeTag(field));
const uint32_t fast_idx = tag_to_idx(tag);
@@ -395,8 +396,7 @@ std::vector<TailCallTableInfo::FastFieldInfo> SplitFastFieldsForSize(
if (auto* as_field = info.AsField()) {
// This field entry is already filled. Skip if previous entry is more
// likely present.
const auto prev_options = option_provider.GetForField(as_field->field);
if (prev_options.presence_probability >= options.presence_probability) {
if (as_field->presence_probability >= options.presence_probability) {
continue;
}
}
@@ -405,7 +405,7 @@ std::vector<TailCallTableInfo::FastFieldInfo> SplitFastFieldsForSize(
// Fill in this field's entry:
auto& fast_field =
info.data.emplace<TailCallTableInfo::FastFieldInfo::Field>(
MakeFastFieldEntry(entry, message_options, options));
MakeFastFieldEntry(entry, options, message_options));
fast_field.field = field;
fast_field.coded_tag = tag;
// If this field does not have presence, then it can set an out-of-bounds
@@ -434,7 +434,7 @@ std::vector<uint8_t> GenerateFieldNames(
const Descriptor* descriptor,
const std::vector<TailCallTableInfo::FieldEntryInfo>& entries,
const TailCallTableInfo::MessageOptions& message_options,
const TailCallTableInfo::OptionProvider& option_provider) {
absl::Span<const TailCallTableInfo::FieldOptions> fields) {
static constexpr int kMaxNameLength = 255;
std::vector<uint8_t> out;
@@ -481,7 +481,7 @@ std::vector<uint8_t> GenerateFieldNames(
}
TailCallTableInfo::NumToEntryTable MakeNumToEntryTable(
const std::vector<const FieldDescriptor*>& field_descriptors) {
absl::Span<const TailCallTableInfo::FieldOptions> ordered_fields) {
TailCallTableInfo::NumToEntryTable num_to_entry_table;
num_to_entry_table.skipmap32 = static_cast<uint32_t>(-1);
@@ -489,11 +489,11 @@ TailCallTableInfo::NumToEntryTable MakeNumToEntryTable(
// appending to. cur_block_first_fnum is the number of the first
// field represented by the block.
uint16_t field_entry_index = 0;
uint16_t N = field_descriptors.size();
uint16_t N = ordered_fields.size();
// First, handle field numbers 1-32, which affect only the initial
// skipmap32 and don't generate additional skip-entry blocks.
for (; field_entry_index != N; ++field_entry_index) {
auto* field_descriptor = field_descriptors[field_entry_index];
auto* field_descriptor = ordered_fields[field_entry_index].field;
if (field_descriptor->number() > 32) break;
auto skipmap32_index = field_descriptor->number() - 1;
num_to_entry_table.skipmap32 -= 1 << skipmap32_index;
@@ -508,7 +508,7 @@ TailCallTableInfo::NumToEntryTable MakeNumToEntryTable(
// the start of the most recent skip entry.
uint32_t last_skip_entry_start = 0;
for (; field_entry_index != N; ++field_entry_index) {
auto* field_descriptor = field_descriptors[field_entry_index];
auto* field_descriptor = ordered_fields[field_entry_index].field;
uint32_t fnum = static_cast<uint32_t>(field_descriptor->number());
ABSL_CHECK_GT(fnum, last_skip_entry_start);
if (start_new_block == false) {
@@ -545,7 +545,7 @@ TailCallTableInfo::NumToEntryTable MakeNumToEntryTable(
uint16_t MakeTypeCardForField(
const FieldDescriptor* field, bool has_hasbit,
const TailCallTableInfo::MessageOptions& message_options,
const TailCallTableInfo::PerFieldOptions& options) {
const TailCallTableInfo::FieldOptions& options) {
uint16_t type_card;
namespace fl = internal::field_layout;
if (has_hasbit) {
@@ -736,12 +736,8 @@ bool HasWeakFields(const Descriptor* descriptor) {
} // namespace
TailCallTableInfo::TailCallTableInfo(
const Descriptor* descriptor,
const std::vector<const FieldDescriptor*>& ordered_fields,
const MessageOptions& message_options,
const OptionProvider& option_provider,
const std::vector<int>& has_bit_indices,
const std::vector<int>& inlined_string_indices) {
const Descriptor* descriptor, const MessageOptions& message_options,
absl::Span<const FieldOptions> ordered_fields) {
fallback_function =
// Map entries discard unknown data
descriptor->options().map_entry()
@@ -755,7 +751,6 @@ TailCallTableInfo::TailCallTableInfo(
if (descriptor->options().message_set_wire_format()) {
ABSL_DCHECK(ordered_fields.empty());
ABSL_DCHECK(inlined_string_indices.empty());
if (message_options.uses_codegen) {
fast_path_fields = {{TailCallTableInfo::FastFieldInfo::NonField{
message_options.is_lite
@@ -776,18 +771,19 @@ TailCallTableInfo::TailCallTableInfo(
table_size_log2 = 0;
num_to_entry_table = MakeNumToEntryTable(ordered_fields);
field_name_data = GenerateFieldNames(descriptor, field_entries,
message_options, option_provider);
message_options, ordered_fields);
return;
}
ABSL_DCHECK(std::is_sorted(ordered_fields.begin(), ordered_fields.end(),
[](const auto* lhs, const auto* rhs) {
return lhs->number() < rhs->number();
[](const auto& lhs, const auto& rhs) {
return lhs.field->number() < rhs.field->number();
}));
// If this message has any inlined string fields, store the donation state
// offset in the first auxiliary entry, which is kInlinedStringAuxIdx.
if (!inlined_string_indices.empty()) {
if (std::any_of(ordered_fields.begin(), ordered_fields.end(),
[](auto& f) { return f.is_string_inlined; })) {
aux_entries.resize(kInlinedStringAuxIdx + 1); // Allocate our slot
aux_entries[kInlinedStringAuxIdx] = {kInlinedStringDonatedOffset};
}
@@ -795,17 +791,15 @@ TailCallTableInfo::TailCallTableInfo(
// If this message is split, store the split pointer offset in the second
// and third auxiliary entries, which are kSplitOffsetAuxIdx and
// kSplitSizeAuxIdx.
for (auto* field : ordered_fields) {
if (option_provider.GetForField(field).should_split) {
static_assert(kSplitOffsetAuxIdx + 1 == kSplitSizeAuxIdx, "");
aux_entries.resize(kSplitSizeAuxIdx + 1); // Allocate our 2 slots
aux_entries[kSplitOffsetAuxIdx] = {kSplitOffset};
aux_entries[kSplitSizeAuxIdx] = {kSplitSizeof};
break;
}
if (std::any_of(ordered_fields.begin(), ordered_fields.end(),
[](auto& f) { return f.should_split; })) {
static_assert(kSplitOffsetAuxIdx + 1 == kSplitSizeAuxIdx, "");
aux_entries.resize(kSplitSizeAuxIdx + 1); // Allocate our 2 slots
aux_entries[kSplitOffsetAuxIdx] = {kSplitOffset};
aux_entries[kSplitSizeAuxIdx] = {kSplitSizeof};
}
auto is_non_cold = [](PerFieldOptions options) {
const auto is_non_cold = [](const FieldOptions& options) {
return options.presence_probability >= 0.005;
};
size_t num_non_cold_subtables = 0;
@@ -813,8 +807,8 @@ TailCallTableInfo::TailCallTableInfo(
// We found that clustering non-cold subtables to the top of aux_entries
// achieves better load test results than other strategies (e.g.,
// clustering all non-cold entries).
auto is_non_cold_subtable = [&](const FieldDescriptor* field) {
auto options = option_provider.GetForField(field);
const auto is_non_cold_subtable = [&](const FieldOptions& options) {
auto* field = options.field;
// In the following code where we assign kSubTable to aux entries, only
// the following typed fields are supported.
return (field->type() == FieldDescriptor::TYPE_MESSAGE ||
@@ -823,8 +817,8 @@ TailCallTableInfo::TailCallTableInfo(
!HasLazyRep(field, options) && !options.is_implicitly_weak &&
options.use_direct_tcparser_table && is_non_cold(options);
};
for (const FieldDescriptor* field : ordered_fields) {
if (is_non_cold_subtable(field)) {
for (const FieldOptions& options : ordered_fields) {
if (is_non_cold_subtable(options)) {
num_non_cold_subtables++;
}
}
@@ -835,12 +829,9 @@ TailCallTableInfo::TailCallTableInfo(
aux_entries.resize(aux_entries.size() + num_non_cold_subtables);
// Fill in mini table entries.
for (const FieldDescriptor* field : ordered_fields) {
auto options = option_provider.GetForField(field);
field_entries.push_back(
{field, static_cast<size_t>(field->index()) < has_bit_indices.size()
? has_bit_indices[static_cast<size_t>(field->index())]
: -1});
for (const auto& options : ordered_fields) {
auto* field = options.field;
field_entries.push_back({field, options.has_bit_index});
auto& entry = field_entries.back();
entry.type_card = MakeTypeCardForField(field, entry.hasbit_idx >= 0,
message_options, options);
@@ -921,7 +912,7 @@ TailCallTableInfo::TailCallTableInfo(
options.is_string_inlined) {
ABSL_CHECK(!field->is_repeated());
// Inlined strings have an extra marker to represent their donation state.
int idx = inlined_string_indices[static_cast<size_t>(field->index())];
int idx = options.inlined_string_index;
// For mini parsing, the donation state index is stored as an `offset`
// auxiliary entry.
entry.aux_idx = aux_entries.size();
@@ -942,7 +933,7 @@ TailCallTableInfo::TailCallTableInfo(
size_t try_size = 1 << try_size_log2;
auto split_fields =
SplitFastFieldsForSize(end_group_tag, field_entries, try_size_log2,
message_options, option_provider);
message_options, ordered_fields);
ABSL_CHECK_EQ(split_fields.size(), try_size);
int try_num_fast_fields = 0;
for (const auto& info : split_fields) {
@@ -954,11 +945,10 @@ TailCallTableInfo::TailCallTableInfo(
}
auto* as_field = info.AsField();
const auto option = option_provider.GetForField(as_field->field);
// 0.05 was selected based on load tests where 0.1 and 0.01 were also
// evaluated and worse.
constexpr float kMinPresence = 0.05f;
if (option.presence_probability >= kMinPresence) {
if (as_field->presence_probability >= kMinPresence) {
++try_num_fast_fields;
}
}
@@ -991,7 +981,7 @@ TailCallTableInfo::TailCallTableInfo(
num_to_entry_table = MakeNumToEntryTable(ordered_fields);
ABSL_CHECK_EQ(field_entries.size(), ordered_fields.size());
field_name_data = GenerateFieldNames(descriptor, field_entries,
message_options, option_provider);
message_options, ordered_fields);
}
} // namespace internal

@@ -41,7 +41,9 @@ struct PROTOBUF_EXPORT TailCallTableInfo {
// TODO: remove this after A/B test is done.
bool should_profile_driven_cluster_aux_subtable;
};
struct PerFieldOptions {
struct FieldOptions {
const FieldDescriptor* field;
int has_bit_index;
// For presence awareness (e.g. PDProto).
float presence_probability;
// kTvEager, kTvLazy, or 0
@@ -50,21 +52,12 @@ struct PROTOBUF_EXPORT TailCallTableInfo {
bool is_implicitly_weak;
bool use_direct_tcparser_table;
bool should_split;
};
class OptionProvider {
public:
virtual PerFieldOptions GetForField(const FieldDescriptor*) const = 0;
protected:
~OptionProvider() = default;
int inlined_string_index;
};
TailCallTableInfo(const Descriptor* descriptor,
const std::vector<const FieldDescriptor*>& ordered_fields,
const MessageOptions& message_options,
const OptionProvider& option_provider,
const std::vector<int>& has_bit_indices,
const std::vector<int>& inlined_string_indices);
absl::Span<const FieldOptions> ordered_fields);
TcParseFunction fallback_function;
@@ -73,10 +66,11 @@ struct PROTOBUF_EXPORT TailCallTableInfo {
struct Empty {};
struct Field {
TcParseFunction func;
uint16_t coded_tag;
const FieldDescriptor* field;
uint16_t coded_tag;
uint8_t hasbit_idx;
uint8_t aux_idx;
float presence_probability;
};
struct NonField {
TcParseFunction func;

@@ -149,6 +149,11 @@ class ReflectionVisit;
class SwapFieldHelper;
class CachedSize;
struct TailCallTableInfo;
namespace field_layout {
enum TransformValidation : uint16_t;
} // namespace field_layout
} // namespace internal
class UnknownFieldSet; // unknown_field_set.h
namespace io {
@@ -1018,6 +1023,8 @@ class PROTOBUF_EXPORT Reflection final {
bool IsLazilyVerifiedLazyField(const FieldDescriptor* field) const;
bool IsEagerlyVerifiedLazyField(const FieldDescriptor* field) const;
internal::field_layout::TransformValidation GetLazyStyle(
const FieldDescriptor* field) const;
bool IsSplit(const FieldDescriptor* field) const {
return schema_.IsSplit(field);
