Use ABSL_PREDICT_TRUE|FALSE instead of PROTOBUF_PREDICT_TRUE|FALSE.

Considering that protobuf depends on absl already, we don't need protobuf's
version of PREDICT_TRUE|FALSE. This CL shrinks port_def.inc.

PiperOrigin-RevId: 694015588
pull/19148/head
Protobuf Team Bot 4 months ago committed by Copybara-Service
parent b66792e054
commit fd477301f8
  1. 17
      csharp/src/Google.Protobuf/Reflection/FeatureSetDescriptor.g.cs
  2. 8
      python/google/protobuf/pyext/message.cc
  3. 5
      src/google/protobuf/BUILD.bazel
  4. 7
      src/google/protobuf/arena.cc
  5. 10
      src/google/protobuf/arena.h
  6. 3
      src/google/protobuf/arena_cleanup.h
  7. 1
      src/google/protobuf/arenastring.cc
  8. 2
      src/google/protobuf/arenastring.h
  9. 4
      src/google/protobuf/arenaz_sampler.cc
  10. 8
      src/google/protobuf/arenaz_sampler.h
  11. 21
      src/google/protobuf/compiler/cpp/message.cc
  12. 2
      src/google/protobuf/compiler/java/java_features.pb.cc
  13. 8
      src/google/protobuf/compiler/plugin.pb.cc
  14. 2
      src/google/protobuf/cpp_features.pb.cc
  15. 66
      src/google/protobuf/descriptor.pb.cc
  16. 19
      src/google/protobuf/extension_set.cc
  17. 10
      src/google/protobuf/extension_set.h
  18. 8
      src/google/protobuf/feature_resolver.cc
  19. 4
      src/google/protobuf/generated_enum_util.h
  20. 2
      src/google/protobuf/generated_message_bases.cc
  21. 5
      src/google/protobuf/generated_message_reflection.cc
  22. 5
      src/google/protobuf/generated_message_reflection.h
  23. 3
      src/google/protobuf/generated_message_tctable_full.cc
  24. 10
      src/google/protobuf/generated_message_tctable_impl.h
  25. 219
      src/google/protobuf/generated_message_tctable_lite.cc
  26. 3
      src/google/protobuf/generated_message_util.h
  27. 17
      src/google/protobuf/io/coded_stream.cc
  28. 43
      src/google/protobuf/io/coded_stream.h
  29. 3
      src/google/protobuf/lazy_repeated_field.cc
  30. 15
      src/google/protobuf/lazy_repeated_field.h
  31. 3
      src/google/protobuf/map.cc
  32. 13
      src/google/protobuf/map.h
  33. 2
      src/google/protobuf/message.cc
  34. 5
      src/google/protobuf/message.h
  35. 15
      src/google/protobuf/message_lite.cc
  36. 7
      src/google/protobuf/metadata_lite.h
  37. 23
      src/google/protobuf/parse_context.cc
  38. 36
      src/google/protobuf/parse_context.h
  39. 11
      src/google/protobuf/port_def.inc
  40. 2
      src/google/protobuf/port_undef.inc
  41. 3
      src/google/protobuf/reflection_ops.cc
  42. 7
      src/google/protobuf/reflection_visit_fields.h
  43. 2
      src/google/protobuf/repeated_field.h
  44. 5
      src/google/protobuf/repeated_ptr_field.cc
  45. 7
      src/google/protobuf/repeated_ptr_field.h
  46. 24
      src/google/protobuf/serial_arena.h
  47. 4
      src/google/protobuf/stubs/status_macros.h
  48. 9
      src/google/protobuf/thread_safe_arena.h
  49. 31
      src/google/protobuf/varint_shuffle.h
  50. 9
      src/google/protobuf/wire_format.cc
  51. 1
      src/google/protobuf/wire_format_lite.h

@ -1,17 +0,0 @@
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#endregion
namespace Google.Protobuf.Reflection;
// Holds the canonical, pre-serialized FeatureSet edition defaults used by
// reflection. The constant below is machine-generated (by
// embed_edition_defaults) — do not edit it by hand; regenerate instead.
internal sealed partial class FeatureSetDescriptor
{
// Canonical serialized form of the edition defaults, generated by embed_edition_defaults.
private const string DefaultsBase64 =
"ChMYhAciACoMCAEQAhgCIAMoATACChMY5wciACoMCAIQARgBIAIoATABChMY6AciDAgBEAEYASACKAEwASoAIOYHKOgH";
}

@ -491,7 +491,7 @@ void OutOfRangeError(PyObject* arg) {
template <class RangeType, class ValueType>
bool VerifyIntegerCastAndRange(PyObject* arg, ValueType value) {
if (PROTOBUF_PREDICT_FALSE(value == -1 && PyErr_Occurred())) {
if (ABSL_PREDICT_FALSE(value == -1 && PyErr_Occurred())) {
if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
// Replace it with the same ValueError as pure python protos instead of
// the default one.
@ -500,7 +500,7 @@ bool VerifyIntegerCastAndRange(PyObject* arg, ValueType value) {
} // Otherwise propagate existing error.
return false;
}
if (PROTOBUF_PREDICT_FALSE(!IsValidNumericCast<RangeType>(value))) {
if (ABSL_PREDICT_FALSE(!IsValidNumericCast<RangeType>(value))) {
OutOfRangeError(arg);
return false;
}
@ -514,7 +514,7 @@ bool CheckAndGetInteger(PyObject* arg, T* value) {
// This definition includes everything with a valid __index__() implementation
// and shouldn't cast the net too wide.
if (!strcmp(Py_TYPE(arg)->tp_name, "numpy.ndarray") ||
PROTOBUF_PREDICT_FALSE(!PyIndex_Check(arg))) {
ABSL_PREDICT_FALSE(!PyIndex_Check(arg))) {
FormatTypeError(arg, "int");
return false;
}
@ -558,7 +558,7 @@ template bool CheckAndGetInteger<uint64_t>(PyObject*, uint64_t*);
bool CheckAndGetDouble(PyObject* arg, double* value) {
*value = PyFloat_AsDouble(arg);
if (!strcmp(Py_TYPE(arg)->tp_name, "numpy.ndarray") ||
PROTOBUF_PREDICT_FALSE(*value == -1 && PyErr_Occurred())) {
ABSL_PREDICT_FALSE(*value == -1 && PyErr_Occurred())) {
FormatTypeError(arg, "int, float");
return false;
}

@ -296,7 +296,10 @@ cc_library(
"//:__subpackages__",
"//src/google/protobuf:__subpackages__",
],
deps = [":port"],
deps = [
":port",
"@com_google_absl//absl/base:core_headers",
],
)
cc_test(

@ -17,6 +17,7 @@
#include "absl/base/attributes.h"
#include "absl/base/dynamic_annotations.h"
#include "absl/base/optimization.h"
#include "absl/base/prefetch.h"
#include "absl/container/internal/layout.h"
#include "absl/log/absl_check.h"
@ -643,7 +644,7 @@ uint64_t ThreadSafeArena::GetNextLifeCycleId() {
ThreadCache& tc = thread_cache();
uint64_t id = tc.next_lifecycle_id;
constexpr uint64_t kInc = ThreadCache::kPerThreadIds;
if (PROTOBUF_PREDICT_FALSE((id & (kInc - 1)) == 0)) {
if (ABSL_PREDICT_FALSE((id & (kInc - 1)) == 0)) {
// On platforms that don't support uint64_t atomics we can certainly not
// afford to increment by large intervals and expect uniqueness due to
// wrapping, hence we only add by 1.
@ -815,7 +816,7 @@ uint64_t ThreadSafeArena::Reset() {
void* ThreadSafeArena::AllocateAlignedWithCleanup(size_t n, size_t align,
void (*destructor)(void*)) {
SerialArena* arena;
if (PROTOBUF_PREDICT_TRUE(GetSerialArenaFast(&arena))) {
if (ABSL_PREDICT_TRUE(GetSerialArenaFast(&arena))) {
return arena->AllocateAlignedWithCleanup(n, align, destructor);
} else {
return AllocateAlignedWithCleanupFallback(n, align, destructor);
@ -828,7 +829,7 @@ void ThreadSafeArena::AddCleanup(void* elem, void (*cleanup)(void*)) {
SerialArena* ThreadSafeArena::GetSerialArena() {
SerialArena* arena;
if (PROTOBUF_PREDICT_FALSE(!GetSerialArenaFast(&arena))) {
if (ABSL_PREDICT_FALSE(!GetSerialArenaFast(&arena))) {
arena = GetSerialArenaFallback(kMaxCleanupNodeSize);
}
return arena;

@ -229,7 +229,7 @@ class PROTOBUF_EXPORT PROTOBUF_ALIGNAS(8) Arena final {
},
// Non arena-constructable
[arena](auto&&... args) {
if (PROTOBUF_PREDICT_FALSE(arena == nullptr)) {
if (ABSL_PREDICT_FALSE(arena == nullptr)) {
return new T(std::forward<Args>(args)...);
}
return new (arena->AllocateInternal<T>())
@ -277,7 +277,7 @@ class PROTOBUF_EXPORT PROTOBUF_ALIGNAS(8) Arena final {
"CreateArray requires a trivially destructible type");
ABSL_CHECK_LE(num_elements, std::numeric_limits<size_t>::max() / sizeof(T))
<< "Requested size is too large to fit into size_t.";
if (PROTOBUF_PREDICT_FALSE(arena == nullptr)) {
if (ABSL_PREDICT_FALSE(arena == nullptr)) {
return new T[num_elements];
} else {
// We count on compiler to realize that if sizeof(T) is a multiple of
@ -518,7 +518,7 @@ class PROTOBUF_EXPORT PROTOBUF_ALIGNAS(8) Arena final {
Args&&... args) {
static_assert(is_arena_constructable<T>::value,
"Can only construct types that are ArenaConstructable");
if (PROTOBUF_PREDICT_FALSE(arena == nullptr)) {
if (ABSL_PREDICT_FALSE(arena == nullptr)) {
return new T(nullptr, static_cast<Args&&>(args)...);
} else {
return arena->DoCreateMessage<T>(static_cast<Args&&>(args)...);
@ -532,7 +532,7 @@ class PROTOBUF_EXPORT PROTOBUF_ALIGNAS(8) Arena final {
PROTOBUF_NDEBUG_INLINE static T* CreateArenaCompatible(Arena* arena) {
static_assert(is_arena_constructable<T>::value,
"Can only construct types that are ArenaConstructable");
if (PROTOBUF_PREDICT_FALSE(arena == nullptr)) {
if (ABSL_PREDICT_FALSE(arena == nullptr)) {
// Generated arena constructor T(Arena*) is protected. Call via
// InternalHelper.
return InternalHelper<T>::New();
@ -583,7 +583,7 @@ class PROTOBUF_EXPORT PROTOBUF_ALIGNAS(8) Arena final {
static void CreateInArenaStorage(T* ptr, Arena* arena, Args&&... args) {
CreateInArenaStorageInternal(ptr, arena, is_arena_constructable<T>(),
std::forward<Args>(args)...);
if (PROTOBUF_PREDICT_TRUE(arena != nullptr)) {
if (ABSL_PREDICT_TRUE(arena != nullptr)) {
RegisterDestructorInternal(ptr, arena, is_destructor_skippable<T>());
}
}

@ -13,6 +13,7 @@
#include <vector>
#include "absl/base/attributes.h"
#include "absl/base/optimization.h"
#include "absl/base/prefetch.h"
// Must be included last.
@ -60,7 +61,7 @@ class ChunkList {
public:
PROTOBUF_ALWAYS_INLINE void Add(void* elem, void (*destructor)(void*),
SerialArena& arena) {
if (PROTOBUF_PREDICT_TRUE(next_ < limit_)) {
if (ABSL_PREDICT_TRUE(next_ < limit_)) {
AddFromExisting(elem, destructor);
return;
}

@ -9,6 +9,7 @@
#include <cstddef>
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/strings/string_view.h"
#include "absl/synchronization/mutex.h"

@ -58,7 +58,7 @@ class PROTOBUF_EXPORT LazyString {
const std::string& get() const {
// This check generates less code than a call-once invocation.
auto* res = inited_.load(std::memory_order_acquire);
if (PROTOBUF_PREDICT_FALSE(res == nullptr)) return Init();
if (ABSL_PREDICT_FALSE(res == nullptr)) return Init();
return *res;
}

@ -12,6 +12,8 @@
#include <limits>
#include <utility>
#include "absl/base/optimization.h"
// Must be included last.
#include "google/protobuf/port_def.inc"
@ -129,7 +131,7 @@ ThreadSafeArenaStats* SampleSlow(SamplingState& sampling_state) {
// We will only be negative on our first count, so we should just retry in
// that case.
if (first) {
if (PROTOBUF_PREDICT_TRUE(--sampling_state.next_sample > 0)) return nullptr;
if (ABSL_PREDICT_TRUE(--sampling_state.next_sample > 0)) return nullptr;
return SampleSlow(sampling_state);
}

@ -84,7 +84,7 @@ struct ThreadSafeArenaStats
void* stack[kMaxStackDepth];
static void RecordAllocateStats(ThreadSafeArenaStats* info, size_t used,
size_t allocated, size_t wasted) {
if (PROTOBUF_PREDICT_TRUE(info == nullptr)) return;
if (ABSL_PREDICT_TRUE(info == nullptr)) return;
RecordAllocateSlow(info, used, allocated, wasted);
}
@ -117,7 +117,7 @@ class ThreadSafeArenaStatsHandle {
: info_(info) {}
~ThreadSafeArenaStatsHandle() {
if (PROTOBUF_PREDICT_TRUE(info_ == nullptr)) return;
if (ABSL_PREDICT_TRUE(info_ == nullptr)) return;
UnsampleSlow(info_);
}
@ -126,7 +126,7 @@ class ThreadSafeArenaStatsHandle {
ThreadSafeArenaStatsHandle& operator=(
ThreadSafeArenaStatsHandle&& other) noexcept {
if (PROTOBUF_PREDICT_FALSE(info_ != nullptr)) {
if (ABSL_PREDICT_FALSE(info_ != nullptr)) {
UnsampleSlow(info_);
}
info_ = std::exchange(other.info_, nullptr);
@ -154,7 +154,7 @@ extern PROTOBUF_THREAD_LOCAL SamplingState global_sampling_state;
// Returns an RAII sampling handle that manages registration and unregistration
// with the global sampler.
inline ThreadSafeArenaStatsHandle Sample() {
if (PROTOBUF_PREDICT_TRUE(--global_sampling_state.next_sample > 0)) {
if (ABSL_PREDICT_TRUE(--global_sampling_state.next_sample > 0)) {
return ThreadSafeArenaStatsHandle(nullptr);
}
return ThreadSafeArenaStatsHandle(SampleSlow(global_sampling_state));

@ -571,7 +571,7 @@ bool MaybeEmitHaswordsCheck(ChunkIterator it, ChunkIterator end,
}
}}},
R"cc(
if (PROTOBUF_PREDICT_FALSE($cond$)) {
if (ABSL_PREDICT_FALSE($cond$)) {
)cc");
p->Indent();
return true;
@ -1238,7 +1238,7 @@ void MessageGenerator::GenerateFieldClear(const FieldDescriptor* field,
// TODO: figure out if early return breaks tracking
if (ShouldSplit(field, options_)) {
p->Emit(R"cc(
if (PROTOBUF_PREDICT_TRUE(IsSplitMessageDefault()))
if (ABSL_PREDICT_TRUE(IsSplitMessageDefault()))
return;
)cc");
}
@ -2518,7 +2518,7 @@ void MessageGenerator::GenerateClassMethods(io::Printer* p) {
DefaultInstanceName(descriptor_, options_, /*split=*/false)}},
R"cc(
void $classname$::PrepareSplitMessageForWrite() {
if (PROTOBUF_PREDICT_TRUE(IsSplitMessageDefault())) {
if (ABSL_PREDICT_TRUE(IsSplitMessageDefault())) {
void* chunk = $pbi$::CreateSplitMessageGeneric(
GetArena(), &$split_default$, sizeof(Impl_::Split), this,
&$default$);
@ -2964,7 +2964,7 @@ void MessageGenerator::GenerateSharedDestructorCode(io::Printer* p) {
[&] { emit_field_dtors(/* split_fields= */ true); }},
},
R"cc(
if (PROTOBUF_PREDICT_FALSE(!this_.IsSplitMessageDefault())) {
if (ABSL_PREDICT_FALSE(!this_.IsSplitMessageDefault())) {
auto* $cached_split_ptr$ = this_.$split$;
$split_field_dtors_impl$;
delete $cached_split_ptr$;
@ -3056,8 +3056,7 @@ void MessageGenerator::GenerateArenaDestructorCode(io::Printer* p) {
[&] { emit_field_dtors(/* split_fields= */ true); }},
},
R"cc(
if (PROTOBUF_PREDICT_FALSE(
!_this->IsSplitMessageDefault())) {
if (ABSL_PREDICT_FALSE(!_this->IsSplitMessageDefault())) {
$split_field_dtors_impl$;
}
)cc");
@ -3311,7 +3310,7 @@ void MessageGenerator::GenerateCopyInitFields(io::Printer* p) const {
if (ShouldSplit(descriptor_, options_)) {
p->Emit({{"copy_split_fields", generate_copy_split_fields}},
R"cc(
if (PROTOBUF_PREDICT_FALSE(!from.IsSplitMessageDefault())) {
if (ABSL_PREDICT_FALSE(!from.IsSplitMessageDefault())) {
PrepareSplitMessageForWrite();
$copy_split_fields$;
}
@ -4252,7 +4251,7 @@ void MessageGenerator::GenerateClassSpecificMergeImpl(io::Printer* p) {
if (ShouldSplit(descriptor_, options_)) {
format(
"if (PROTOBUF_PREDICT_FALSE(!from.IsSplitMessageDefault())) {\n"
"if (ABSL_PREDICT_FALSE(!from.IsSplitMessageDefault())) {\n"
" _this->PrepareSplitMessageForWrite();\n"
"}\n");
}
@ -4913,7 +4912,7 @@ void MessageGenerator::GenerateSerializeWithCachedSizesBody(io::Printer* p) {
(void)cached_has_bits;
$handle_lazy_fields$;
if (PROTOBUF_PREDICT_FALSE(this_.$have_unknown_fields$)) {
if (ABSL_PREDICT_FALSE(this_.$have_unknown_fields$)) {
$handle_unknown_fields$;
}
)cc");
@ -5008,7 +5007,7 @@ void MessageGenerator::GenerateSerializeWithCachedSizesBodyShuffled(
}
}
}
if (PROTOBUF_PREDICT_FALSE(this_.$have_unknown_fields$)) {
if (ABSL_PREDICT_FALSE(this_.$have_unknown_fields$)) {
$handle_unknown_fields$;
}
)cc");
@ -5252,7 +5251,7 @@ void MessageGenerator::GenerateByteSize(io::Printer* p) {
// even relaxed memory order might have perf impact to replace it
// with ordinary loads and stores.
p->Emit(R"cc(
if (PROTOBUF_PREDICT_FALSE(this_.$have_unknown_fields$)) {
if (ABSL_PREDICT_FALSE(this_.$have_unknown_fields$)) {
total_size += this_.$unknown_fields$.size();
}
this_.$cached_size$.Set(::_pbi::ToCachedSize(total_size));

@ -336,7 +336,7 @@ PROTOBUF_NOINLINE void JavaFeatures::Clear() {
4, this_._internal_use_old_outer_classname_default(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);

@ -542,7 +542,7 @@ PROTOBUF_NOINLINE void Version::Clear() {
target = stream->WriteStringMaybeAliased(4, _s, target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -940,7 +940,7 @@ PROTOBUF_NOINLINE void CodeGeneratorRequest::Clear() {
target, stream);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -1324,7 +1324,7 @@ PROTOBUF_NOINLINE void CodeGeneratorResponse_File::Clear() {
stream);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -1704,7 +1704,7 @@ PROTOBUF_NOINLINE void CodeGeneratorResponse::Clear() {
target, stream);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);

@ -330,7 +330,7 @@ PROTOBUF_NOINLINE void CppFeatures::Clear() {
3, this_._internal_enum_name_uses_string_view(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);

@ -2576,7 +2576,7 @@ PROTOBUF_NOINLINE void FileDescriptorSet::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -3119,7 +3119,7 @@ PROTOBUF_NOINLINE void FileDescriptorProto::Clear() {
14, this_._internal_edition(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -3569,7 +3569,7 @@ PROTOBUF_NOINLINE void DescriptorProto_ExtensionRange::Clear() {
stream);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -3859,7 +3859,7 @@ PROTOBUF_NOINLINE void DescriptorProto_ReservedRange::Clear() {
stream, this_._internal_end(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -4359,7 +4359,7 @@ PROTOBUF_NOINLINE void DescriptorProto::Clear() {
target = stream->WriteString(10, s, target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -4814,7 +4814,7 @@ PROTOBUF_NOINLINE void ExtensionRangeOptions_Declaration::Clear() {
6, this_._internal_repeated(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -5196,7 +5196,7 @@ PROTOBUF_NOINLINE void ExtensionRangeOptions::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -5709,7 +5709,7 @@ PROTOBUF_NOINLINE void FieldDescriptorProto::Clear() {
17, this_._internal_proto3_optional(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -6095,7 +6095,7 @@ PROTOBUF_NOINLINE void OneofDescriptorProto::Clear() {
stream);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -6375,7 +6375,7 @@ PROTOBUF_NOINLINE void EnumDescriptorProto_EnumReservedRange::Clear() {
stream, this_._internal_end(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -6747,7 +6747,7 @@ PROTOBUF_NOINLINE void EnumDescriptorProto::Clear() {
target = stream->WriteString(5, s, target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -7105,7 +7105,7 @@ PROTOBUF_NOINLINE void EnumValueDescriptorProto::Clear() {
stream);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -7454,7 +7454,7 @@ PROTOBUF_NOINLINE void ServiceDescriptorProto::Clear() {
stream);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -7855,7 +7855,7 @@ PROTOBUF_NOINLINE void MethodDescriptorProto::Clear() {
6, this_._internal_server_streaming(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -8578,7 +8578,7 @@ PROTOBUF_NOINLINE void FileOptions::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -9158,7 +9158,7 @@ PROTOBUF_NOINLINE void MessageOptions::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -9501,7 +9501,7 @@ PROTOBUF_NOINLINE void FieldOptions_EditionDefault::Clear() {
3, this_._internal_edition(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -9822,7 +9822,7 @@ PROTOBUF_NOINLINE void FieldOptions_FeatureSupport::Clear() {
4, this_._internal_edition_removed(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -10357,7 +10357,7 @@ PROTOBUF_NOINLINE void FieldOptions::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -10800,7 +10800,7 @@ PROTOBUF_NOINLINE void OneofOptions::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -11185,7 +11185,7 @@ PROTOBUF_NOINLINE void EnumOptions::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -11611,7 +11611,7 @@ PROTOBUF_NOINLINE void EnumValueOptions::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -11998,7 +11998,7 @@ PROTOBUF_NOINLINE void ServiceOptions::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -12391,7 +12391,7 @@ PROTOBUF_NOINLINE void MethodOptions::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -12717,7 +12717,7 @@ PROTOBUF_NOINLINE void UninterpretedOption_NamePart::Clear() {
2, this_._internal_is_extension(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -13117,7 +13117,7 @@ PROTOBUF_NOINLINE void UninterpretedOption::Clear() {
target = stream->WriteStringMaybeAliased(8, _s, target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -13534,7 +13534,7 @@ PROTOBUF_NOINLINE void FeatureSet::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -13888,7 +13888,7 @@ PROTOBUF_NOINLINE void FeatureSetDefaults_FeatureSetEditionDefault::Clear() {
stream);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -14236,7 +14236,7 @@ PROTOBUF_NOINLINE void FeatureSetDefaults::Clear() {
5, this_._internal_maximum_edition(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -14623,7 +14623,7 @@ PROTOBUF_NOINLINE void SourceCodeInfo_Location::Clear() {
target = stream->WriteString(6, s, target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -14927,7 +14927,7 @@ PROTOBUF_NOINLINE void SourceCodeInfo::Clear() {
// All extensions.
target = this_._impl_._extensions_._InternalSerializeAll(
internal_default_instance(), target, stream);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -15277,7 +15277,7 @@ PROTOBUF_NOINLINE void GeneratedCodeInfo_Annotation::Clear() {
5, this_._internal_semantic(), target);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);
@ -15572,7 +15572,7 @@ PROTOBUF_NOINLINE void GeneratedCodeInfo::Clear() {
target, stream);
}
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target =
::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<::google::protobuf::UnknownFieldSet>(::google::protobuf::UnknownFieldSet::default_instance), target, stream);

@ -21,6 +21,7 @@
#include <type_traits>
#include <utility>
#include "absl/base/optimization.h"
#include "absl/container/flat_hash_set.h"
#include "absl/hash/hash.h"
#include "absl/log/absl_check.h"
@ -175,7 +176,7 @@ ExtensionSet::~ExtensionSet() {
if (arena_ == nullptr) {
ForEach([](int /* number */, Extension& ext) { ext.Free(); },
PrefetchNta{});
if (PROTOBUF_PREDICT_FALSE(is_large())) {
if (ABSL_PREDICT_FALSE(is_large())) {
delete map_.large;
} else {
DeleteFlatMap(map_.flat, flat_capacity_);
@ -984,8 +985,8 @@ size_t SizeOfUnion(ItX it_dest, ItX end_dest, ItY it_source, ItY end_source) {
void ExtensionSet::MergeFrom(const MessageLite* extendee,
const ExtensionSet& other) {
Prefetch5LinesFrom1Line(&other);
if (PROTOBUF_PREDICT_TRUE(!is_large())) {
if (PROTOBUF_PREDICT_TRUE(!other.is_large())) {
if (ABSL_PREDICT_TRUE(!is_large())) {
if (ABSL_PREDICT_TRUE(!other.is_large())) {
GrowCapacity(SizeOfUnion(flat_begin(), flat_end(), other.flat_begin(),
other.flat_end()));
} else {
@ -1215,7 +1216,7 @@ bool ExtensionSet::IsInitialized(const MessageLite* extendee) const {
// Extensions are never required. However, we need to check that all
// embedded messages are initialized.
Arena* const arena = arena_;
if (PROTOBUF_PREDICT_FALSE(is_large())) {
if (ABSL_PREDICT_FALSE(is_large())) {
for (const auto& kv : *map_.large) {
if (!kv.second.IsInitialized(this, extendee, kv.first, arena)) {
return false;
@ -1265,7 +1266,7 @@ bool ExtensionSet::FieldTypeIsPointer(FieldType type) {
uint8_t* ExtensionSet::_InternalSerializeImpl(
const MessageLite* extendee, int start_field_number, int end_field_number,
uint8_t* target, io::EpsCopyOutputStream* stream) const {
if (PROTOBUF_PREDICT_FALSE(is_large())) {
if (ABSL_PREDICT_FALSE(is_large())) {
return _InternalSerializeImplLarge(extendee, start_field_number,
end_field_number, target, stream);
}
@ -1626,7 +1627,7 @@ void ExtensionSet::LazyMessageExtension::UnusedKeyMethod() {}
const ExtensionSet::Extension* ExtensionSet::FindOrNull(int key) const {
if (flat_size_ == 0) {
return nullptr;
} else if (PROTOBUF_PREDICT_TRUE(!is_large())) {
} else if (ABSL_PREDICT_TRUE(!is_large())) {
for (auto it = flat_begin(), end = flat_end();
it != end && it->first <= key; ++it) {
if (it->first == key) return &it->second;
@ -1659,7 +1660,7 @@ ExtensionSet::Extension* ExtensionSet::FindOrNullInLargeMap(int key) {
}
std::pair<ExtensionSet::Extension*, bool> ExtensionSet::Insert(int key) {
if (PROTOBUF_PREDICT_FALSE(is_large())) {
if (ABSL_PREDICT_FALSE(is_large())) {
auto maybe = map_.large->insert({key, Extension()});
return {&maybe.first->second, maybe.second};
}
@ -1684,7 +1685,7 @@ constexpr bool IsPowerOfTwo(size_t n) { return (n & (n - 1)) == 0; }
} // namespace
void ExtensionSet::GrowCapacity(size_t minimum_new_capacity) {
if (PROTOBUF_PREDICT_FALSE(is_large())) {
if (ABSL_PREDICT_FALSE(is_large())) {
return; // LargeMap does not have a "reserve" method.
}
if (flat_capacity_ >= minimum_new_capacity) {
@ -1738,7 +1739,7 @@ constexpr uint16_t ExtensionSet::kMaximumFlatCapacity;
// && _MSC_VER < 1912))
void ExtensionSet::Erase(int key) {
if (PROTOBUF_PREDICT_FALSE(is_large())) {
if (ABSL_PREDICT_FALSE(is_large())) {
map_.large->erase(key);
return;
}

@ -827,7 +827,7 @@ class PROTOBUF_EXPORT ExtensionSet {
void Erase(int key);
size_t Size() const {
return PROTOBUF_PREDICT_FALSE(is_large()) ? map_.large->size() : flat_size_;
return ABSL_PREDICT_FALSE(is_large()) ? map_.large->size() : flat_size_;
}
// For use as `PrefetchFunctor`s in `ForEach`.
@ -870,7 +870,7 @@ class PROTOBUF_EXPORT ExtensionSet {
// prefetches ahead.
template <typename KeyValueFunctor, typename PrefetchFunctor>
void ForEach(KeyValueFunctor func, PrefetchFunctor prefetch_func) {
if (PROTOBUF_PREDICT_FALSE(is_large())) {
if (ABSL_PREDICT_FALSE(is_large())) {
ForEachPrefetchImpl(map_.large->begin(), map_.large->end(),
std::move(func), std::move(prefetch_func));
return;
@ -881,7 +881,7 @@ class PROTOBUF_EXPORT ExtensionSet {
// As above, but const.
template <typename KeyValueFunctor, typename PrefetchFunctor>
void ForEach(KeyValueFunctor func, PrefetchFunctor prefetch_func) const {
if (PROTOBUF_PREDICT_FALSE(is_large())) {
if (ABSL_PREDICT_FALSE(is_large())) {
ForEachPrefetchImpl(map_.large->begin(), map_.large->end(),
std::move(func), std::move(prefetch_func));
return;
@ -901,7 +901,7 @@ class PROTOBUF_EXPORT ExtensionSet {
// Applies a functor to the <int, Extension&> pairs in sorted order.
template <typename KeyValueFunctor>
void ForEachNoPrefetch(KeyValueFunctor func) {
if (PROTOBUF_PREDICT_FALSE(is_large())) {
if (ABSL_PREDICT_FALSE(is_large())) {
ForEachNoPrefetch(map_.large->begin(), map_.large->end(),
std::move(func));
return;
@ -912,7 +912,7 @@ class PROTOBUF_EXPORT ExtensionSet {
// As above, but const.
template <typename KeyValueFunctor>
void ForEachNoPrefetch(KeyValueFunctor func) const {
if (PROTOBUF_PREDICT_FALSE(is_large())) {
if (ABSL_PREDICT_FALSE(is_large())) {
ForEachNoPrefetch(map_.large->begin(), map_.large->end(),
std::move(func));
return;

@ -38,10 +38,10 @@
// Must be included last.
#include "google/protobuf/port_def.inc"
#define RETURN_IF_ERROR(expr) \
do { \
const absl::Status _status = (expr); \
if (PROTOBUF_PREDICT_FALSE(!_status.ok())) return _status; \
#define RETURN_IF_ERROR(expr) \
do { \
const absl::Status _status = (expr); \
if (ABSL_PREDICT_FALSE(!_status.ok())) return _status; \
} while (0)
namespace google {

@ -82,14 +82,14 @@ PROTOBUF_ALWAYS_INLINE bool ValidateEnumInlined(int value,
uint64_t adjusted =
static_cast<uint64_t>(static_cast<int64_t>(value)) - min_seq;
// Check if the value is within the sequential part.
if (PROTOBUF_PREDICT_TRUE(adjusted < length_seq)) {
if (ABSL_PREDICT_TRUE(adjusted < length_seq)) {
return true;
}
const uint16_t length_bitmap = static_cast<uint16_t>(data[1] & 0xFFFF);
adjusted -= length_seq;
// Check if the value is within the bitmap.
if (PROTOBUF_PREDICT_TRUE(adjusted < length_bitmap)) {
if (ABSL_PREDICT_TRUE(adjusted < length_bitmap)) {
return ((data[2 + (adjusted / 32)] >> (adjusted % 32)) & 1) == 1;
}

@ -51,7 +51,7 @@ size_t ZeroFieldsBase::ByteSizeLong(const MessageLite& base) {
::uint8_t* target,
io::EpsCopyOutputStream* stream) {
auto& this_ = static_cast<const ZeroFieldsBase&>(msg);
if (PROTOBUF_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(this_._internal_metadata_.have_unknown_fields())) {
target = internal::WireFormat::InternalSerializeUnknownFieldsToArray(
this_._internal_metadata_.unknown_fields<UnknownFieldSet>(
UnknownFieldSet::default_instance),

@ -25,6 +25,7 @@
#include "absl/base/attributes.h"
#include "absl/base/call_once.h"
#include "absl/base/const_init.h"
#include "absl/base/optimization.h"
#include "absl/container/flat_hash_set.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
@ -2870,7 +2871,7 @@ void* Reflection::MutableRawSplitImpl(Message* message,
void* Reflection::MutableRawNonOneofImpl(Message* message,
const FieldDescriptor* field) const {
if (PROTOBUF_PREDICT_FALSE(schema_.IsSplit(field))) {
if (ABSL_PREDICT_FALSE(schema_.IsSplit(field))) {
return MutableRawSplitImpl(message, field);
}
@ -2880,7 +2881,7 @@ void* Reflection::MutableRawNonOneofImpl(Message* message,
void* Reflection::MutableRawImpl(Message* message,
const FieldDescriptor* field) const {
if (PROTOBUF_PREDICT_TRUE(!schema_.InRealOneof(field))) {
if (ABSL_PREDICT_TRUE(!schema_.InRealOneof(field))) {
return MutableRawNonOneofImpl(message, field);
}

@ -21,6 +21,7 @@
#include <string>
#include "absl/base/call_once.h"
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "google/protobuf/descriptor.h"
#include "google/protobuf/generated_enum_reflection.h"
@ -368,8 +369,8 @@ const std::string& NameOfDenseEnum(int v) {
static DenseEnumCacheInfo deci = {/* atomic ptr */ {}, min_val, max_val,
descriptor_fn};
const std::string** cache = deci.cache.load(std::memory_order_acquire );
if (PROTOBUF_PREDICT_TRUE(cache != nullptr)) {
if (PROTOBUF_PREDICT_TRUE(v >= min_val && v <= max_val)) {
if (ABSL_PREDICT_TRUE(cache != nullptr)) {
if (ABSL_PREDICT_TRUE(v >= min_val && v <= max_val)) {
return *cache[v - min_val];
}
}

@ -30,6 +30,7 @@
#include <cstdint>
#include "absl/base/optimization.h"
#include "google/protobuf/extension_set.h"
#include "google/protobuf/generated_message_tctable_impl.h"
#include "google/protobuf/message.h"
@ -53,7 +54,7 @@ const char* TcParser::GenericFallback(PROTOBUF_TC_PARAM_DECL) {
const char* TcParser::ReflectionFallback(PROTOBUF_TC_PARAM_DECL) {
bool must_fallback_to_generic = (ptr == nullptr);
if (PROTOBUF_PREDICT_FALSE(must_fallback_to_generic)) {
if (ABSL_PREDICT_FALSE(must_fallback_to_generic)) {
PROTOBUF_MUSTTAIL return GenericFallback(PROTOBUF_TC_PARAM_PASS);
}

@ -716,8 +716,8 @@ class PROTOBUF_EXPORT TcParser final {
#if !defined(NDEBUG) && !(defined(_MSC_VER) && defined(_M_IX86))
// Check the alignment in debug mode, except in 32-bit msvc because it does
// not respect the alignment as expressed by `alignof(T)`
if (PROTOBUF_PREDICT_FALSE(
reinterpret_cast<uintptr_t>(target) % alignof(T) != 0)) {
if (ABSL_PREDICT_FALSE(reinterpret_cast<uintptr_t>(target) % alignof(T) !=
0)) {
AlignFail(std::integral_constant<size_t, alignof(T)>(),
reinterpret_cast<uintptr_t>(target));
// Explicit abort to let compilers know this code-path does not return
@ -734,8 +734,8 @@ class PROTOBUF_EXPORT TcParser final {
#if !defined(NDEBUG) && !(defined(_MSC_VER) && defined(_M_IX86))
// Check the alignment in debug mode, except in 32-bit msvc because it does
// not respect the alignment as expressed by `alignof(T)`
if (PROTOBUF_PREDICT_FALSE(
reinterpret_cast<uintptr_t>(target) % alignof(T) != 0)) {
if (ABSL_PREDICT_FALSE(reinterpret_cast<uintptr_t>(target) % alignof(T) !=
0)) {
AlignFail(std::integral_constant<size_t, alignof(T)>(),
reinterpret_cast<uintptr_t>(target));
// Explicit abort to let compilers know this code-path does not return
@ -907,7 +907,7 @@ class PROTOBUF_EXPORT TcParser final {
template <class MessageBaseT, class UnknownFieldsT>
PROTOBUF_CC static const char* GenericFallbackImpl(PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
// This is the ABI used by GetUnknownFieldOps(). Return the vtable.
static constexpr UnknownFieldOps kOps = {
WriteVarintToUnknown<UnknownFieldsT>,

@ -257,10 +257,10 @@ const TcParseTableBase::FieldEntry* TcParser::FindFieldEntry(
uint32_t fstart = 1;
uint32_t adj_fnum = field_num - fstart;
if (PROTOBUF_PREDICT_TRUE(adj_fnum < 32)) {
if (ABSL_PREDICT_TRUE(adj_fnum < 32)) {
uint32_t skipmap = table->skipmap32;
uint32_t skipbit = 1 << adj_fnum;
if (PROTOBUF_PREDICT_FALSE(skipmap & skipbit)) return nullptr;
if (ABSL_PREDICT_FALSE(skipmap & skipbit)) return nullptr;
skipmap &= skipbit - 1;
adj_fnum -= absl::popcount(skipmap);
auto* entry = field_entries + adj_fnum;
@ -279,7 +279,7 @@ const TcParseTableBase::FieldEntry* TcParser::FindFieldEntry(
if (field_num < fstart) return nullptr;
adj_fnum = field_num - fstart;
uint32_t skip_num = adj_fnum / 16;
if (PROTOBUF_PREDICT_TRUE(skip_num < num_skip_entries)) {
if (ABSL_PREDICT_TRUE(skip_num < num_skip_entries)) {
// for each group of 16 fields we have:
// a bitmap of 16 bits
// a 16-bit field-entry offset for the first of them.
@ -289,7 +289,7 @@ const TcParseTableBase::FieldEntry* TcParser::FindFieldEntry(
adj_fnum &= 15;
uint32_t skipmap = se.skipmap;
uint16_t skipbit = 1 << adj_fnum;
if (PROTOBUF_PREDICT_FALSE(skipmap & skipbit)) return nullptr;
if (ABSL_PREDICT_FALSE(skipmap & skipbit)) return nullptr;
skipmap &= skipbit - 1;
adj_fnum += se.field_entry_offset;
adj_fnum -= absl::popcount(skipmap);
@ -399,7 +399,7 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::MiniParse(PROTOBUF_TC_PARAM_DECL) {
uint32_t tag;
ptr = ReadTagInlined(ptr, &tag);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
if (export_called_function) *test_out = {Error};
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
@ -491,7 +491,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpFallback(PROTOBUF_TC_PARAM_DECL) {
template <typename TagType>
const char* TcParser::FastEndGroupImpl(PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
ctx->SetLastTag(data.decoded_tag());
@ -526,7 +526,7 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::SingularParseMessageAuxImpl(
PROTOBUF_TC_PARAM_DECL) {
PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 192);
PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 256);
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
auto saved_tag = UnalignedLoad<TagType>(ptr);
@ -606,7 +606,7 @@ PROTOBUF_NOINLINE const char* TcParser::FastMlS2(PROTOBUF_TC_PARAM_DECL) {
template <typename TagType, bool group_coding, bool aux_is_table>
PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedParseMessageAuxImpl(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
PROTOBUF_PREFETCH_WITH_OFFSET(ptr, 256);
@ -624,10 +624,10 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedParseMessageAuxImpl(
ptr = group_coding ? ctx->ParseGroupInlined(
ptr, FastDecodeTag(expected_tag), inner_loop)
: ctx->ParseLengthDelimitedInlined(ptr, inner_loop);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
PROTOBUF_MUSTTAIL return ToParseLoop(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
} while (UnalignedLoad<TagType>(ptr) == expected_tag);
@ -682,7 +682,7 @@ PROTOBUF_NOINLINE const char* TcParser::FastGtR2(PROTOBUF_TC_PARAM_DECL) {
template <typename LayoutType, typename TagType>
PROTOBUF_ALWAYS_INLINE const char* TcParser::SingularFixed(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
ptr += sizeof(TagType); // Consume tag
@ -712,7 +712,7 @@ PROTOBUF_NOINLINE const char* TcParser::FastF64S2(PROTOBUF_TC_PARAM_DECL) {
template <typename LayoutType, typename TagType>
PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedFixed(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
auto& field = RefAt<RepeatedField<LayoutType>>(msg, data.offset());
@ -720,7 +720,7 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedFixed(
do {
field.Add(UnalignedLoad<LayoutType>(ptr + sizeof(TagType)));
ptr += sizeof(TagType) + sizeof(LayoutType);
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
PROTOBUF_MUSTTAIL return ToParseLoop(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
} while (UnalignedLoad<TagType>(ptr) == tag);
@ -747,7 +747,7 @@ PROTOBUF_NOINLINE const char* TcParser::FastF64R2(PROTOBUF_TC_PARAM_DECL) {
template <typename LayoutType, typename TagType>
PROTOBUF_ALWAYS_INLINE const char* TcParser::PackedFixed(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
ptr += sizeof(TagType);
@ -807,7 +807,7 @@ PROTOBUF_ALWAYS_INLINE const char* ParseVarint(const char* p, Type* value) {
// different requirements and performance opportunities than ints.
PROTOBUF_ALWAYS_INLINE const char* ParseVarint(const char* p, bool* value) {
unsigned char byte = static_cast<unsigned char>(*p++);
if (PROTOBUF_PREDICT_TRUE(byte == 0 || byte == 1)) {
if (ABSL_PREDICT_TRUE(byte == 0 || byte == 1)) {
// This is the code path almost always taken,
// so we take care to make it very efficient.
if (sizeof(byte) == sizeof(*value)) {
@ -821,27 +821,27 @@ PROTOBUF_ALWAYS_INLINE const char* ParseVarint(const char* p, bool* value) {
// This part, we just care about code size.
// Although it's almost never used, we have to support it because we guarantee
// compatibility for users who change a field from an int32 or int64 to a bool
if (PROTOBUF_PREDICT_FALSE(byte & 0x80)) {
if (ABSL_PREDICT_FALSE(byte & 0x80)) {
byte = (byte - 0x80) | *p++;
if (PROTOBUF_PREDICT_FALSE(byte & 0x80)) {
if (ABSL_PREDICT_FALSE(byte & 0x80)) {
byte = (byte - 0x80) | *p++;
if (PROTOBUF_PREDICT_FALSE(byte & 0x80)) {
if (ABSL_PREDICT_FALSE(byte & 0x80)) {
byte = (byte - 0x80) | *p++;
if (PROTOBUF_PREDICT_FALSE(byte & 0x80)) {
if (ABSL_PREDICT_FALSE(byte & 0x80)) {
byte = (byte - 0x80) | *p++;
if (PROTOBUF_PREDICT_FALSE(byte & 0x80)) {
if (ABSL_PREDICT_FALSE(byte & 0x80)) {
byte = (byte - 0x80) | *p++;
if (PROTOBUF_PREDICT_FALSE(byte & 0x80)) {
if (ABSL_PREDICT_FALSE(byte & 0x80)) {
byte = (byte - 0x80) | *p++;
if (PROTOBUF_PREDICT_FALSE(byte & 0x80)) {
if (ABSL_PREDICT_FALSE(byte & 0x80)) {
byte = (byte - 0x80) | *p++;
if (PROTOBUF_PREDICT_FALSE(byte & 0x80)) {
if (ABSL_PREDICT_FALSE(byte & 0x80)) {
byte = (byte - 0x80) | *p++;
if (PROTOBUF_PREDICT_FALSE(byte & 0x80)) {
if (ABSL_PREDICT_FALSE(byte & 0x80)) {
// We only care about the continuation bit and the first bit
// of the 10th byte.
byte = (byte - 0x80) | (*p++ & 0x81);
if (PROTOBUF_PREDICT_FALSE(byte & 0x80)) {
if (ABSL_PREDICT_FALSE(byte & 0x80)) {
return nullptr;
}
}
@ -904,7 +904,7 @@ PROTOBUF_ALWAYS_INLINE bool EnumIsValidAux(int32_t val, uint16_t xform_val,
template <typename FieldType, typename TagType, bool zigzag>
PROTOBUF_ALWAYS_INLINE const char* TcParser::SingularVarint(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
ptr += sizeof(TagType); // Consume tag
@ -913,7 +913,7 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::SingularVarint(
// clang isn't smart enough to be able to only conditionally save
// registers to the stack, so we turn the integer-greater-than-128
// case into a separate routine.
if (PROTOBUF_PREDICT_FALSE(static_cast<int8_t>(*ptr) < 0)) {
if (ABSL_PREDICT_FALSE(static_cast<int8_t>(*ptr) < 0)) {
PROTOBUF_MUSTTAIL return SingularVarBigint<FieldType, TagType, zigzag>(
PROTOBUF_TC_PARAM_PASS);
}
@ -954,7 +954,7 @@ PROTOBUF_NOINLINE const char* TcParser::SingularVarBigint(
table = spill.table;
hasbits = spill.hasbits;
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
RefAt<FieldType>(msg, data.offset()) =
@ -966,12 +966,12 @@ template <typename FieldType>
PROTOBUF_ALWAYS_INLINE const char* TcParser::FastVarintS1(
PROTOBUF_TC_PARAM_DECL) {
using TagType = uint8_t;
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
int64_t res;
ptr = ShiftMixParseVarint<FieldType>(ptr + sizeof(TagType), res);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
hasbits |= (uint64_t{1} << data.hasbit_idx());
@ -986,7 +986,7 @@ PROTOBUF_NOINLINE const char* TcParser::FastV8S1(PROTOBUF_TC_PARAM_DECL) {
// The coded_tag() field will actually contain the value too and we can check
// both at the same time.
auto coded_tag = data.coded_tag<uint16_t>();
if (PROTOBUF_PREDICT_TRUE(coded_tag == 0x0000 || coded_tag == 0x0100)) {
if (ABSL_PREDICT_TRUE(coded_tag == 0x0000 || coded_tag == 0x0100)) {
auto& field = RefAt<bool>(msg, data.offset());
// Note: we use `data.data` because Clang generates suboptimal code when
// using coded_tag.
@ -1048,7 +1048,7 @@ PROTOBUF_NOINLINE const char* TcParser::FastZ64S2(PROTOBUF_TC_PARAM_DECL) {
template <typename FieldType, typename TagType, bool zigzag>
PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedVarint(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
auto& field = RefAt<RepeatedField<FieldType>>(msg, data.offset());
@ -1057,11 +1057,11 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedVarint(
ptr += sizeof(TagType);
FieldType tmp;
ptr = ParseVarint(ptr, &tmp);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
field.Add(ZigZagDecodeHelper<FieldType, zigzag>(tmp));
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
PROTOBUF_MUSTTAIL return ToParseLoop(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
} while (UnalignedLoad<TagType>(ptr) == expected_tag);
@ -1113,7 +1113,7 @@ PROTOBUF_NOINLINE const char* TcParser::FastZ64R2(PROTOBUF_TC_PARAM_DECL) {
template <typename FieldType, typename TagType, bool zigzag>
PROTOBUF_ALWAYS_INLINE const char* TcParser::PackedVarint(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
ptr += sizeof(TagType);
@ -1187,12 +1187,12 @@ PROTOBUF_NOINLINE const char* TcParser::FastUnknownEnumFallback(
// enum values.
uint32_t tag;
ptr = ReadTag(ptr, &tag);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
uint64_t tmp;
ptr = ParseVarint(ptr, &tmp);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
AddUnknownEnum(msg, table, tag, static_cast<int32_t>(tmp));
@ -1205,7 +1205,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpUnknownEnumFallback(
uint32_t tag = data.tag();
uint64_t tmp;
ptr = ParseVarint(ptr, &tmp);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
AddUnknownEnum(msg, table, tag, static_cast<int32_t>(tmp));
@ -1215,7 +1215,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpUnknownEnumFallback(
template <typename TagType, uint16_t xform_val>
PROTOBUF_ALWAYS_INLINE const char* TcParser::SingularEnum(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
const TcParseTableBase::FieldAux aux = *table->field_aux(data.aux_idx());
@ -1224,10 +1224,10 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::SingularEnum(
ptr += sizeof(TagType); // Consume tag
uint64_t tmp;
ptr = ParseVarint(ptr, &tmp);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
if (PROTOBUF_PREDICT_FALSE(
if (ABSL_PREDICT_FALSE(
!EnumIsValidAux(static_cast<int32_t>(tmp), xform_val, aux))) {
ptr = ptr2;
PROTOBUF_MUSTTAIL return FastUnknownEnumFallback(PROTOBUF_TC_PARAM_PASS);
@ -1257,7 +1257,7 @@ PROTOBUF_NOINLINE const char* TcParser::FastEvS2(PROTOBUF_TC_PARAM_DECL) {
template <typename TagType, uint16_t xform_val>
PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedEnum(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
auto& field = RefAt<RepeatedField<int32_t>>(msg, data.offset());
@ -1269,10 +1269,10 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedEnum(
ptr += sizeof(TagType);
uint64_t tmp;
ptr = ParseVarint(ptr, &tmp);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
if (PROTOBUF_PREDICT_FALSE(
if (ABSL_PREDICT_FALSE(
!EnumIsValidAux(static_cast<int32_t>(tmp), xform_val, aux))) {
// We can avoid duplicate work in MiniParse by directly calling
// table->fallback.
@ -1280,7 +1280,7 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedEnum(
PROTOBUF_MUSTTAIL return FastUnknownEnumFallback(PROTOBUF_TC_PARAM_PASS);
}
field.Add(static_cast<int32_t>(tmp));
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
PROTOBUF_MUSTTAIL return ToParseLoop(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
} while (UnalignedLoad<TagType>(ptr) == expected_tag);
@ -1309,7 +1309,7 @@ PROTOBUF_NOINLINE void TcParser::AddUnknownEnum(MessageLite* msg,
template <typename TagType, uint16_t xform_val>
PROTOBUF_ALWAYS_INLINE const char* TcParser::PackedEnum(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
const auto saved_tag = UnalignedLoad<TagType>(ptr);
@ -1366,12 +1366,12 @@ PROTOBUF_NOINLINE const char* TcParser::FastEvP2(PROTOBUF_TC_PARAM_DECL) {
template <typename TagType, uint8_t min>
PROTOBUF_ALWAYS_INLINE const char* TcParser::SingularEnumSmallRange(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
uint8_t v = ptr[sizeof(TagType)];
if (PROTOBUF_PREDICT_FALSE(min > v || v > data.aux_idx())) {
if (ABSL_PREDICT_FALSE(min > v || v > data.aux_idx())) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
@ -1404,7 +1404,7 @@ PROTOBUF_NOINLINE const char* TcParser::FastEr1S2(PROTOBUF_TC_PARAM_DECL) {
template <typename TagType, uint8_t min>
PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedEnumSmallRange(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
auto& field = RefAt<RepeatedField<int32_t>>(msg, data.offset());
@ -1412,12 +1412,12 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedEnumSmallRange(
const uint8_t max = data.aux_idx();
do {
uint8_t v = ptr[sizeof(TagType)];
if (PROTOBUF_PREDICT_FALSE(min > v || v > max)) {
if (ABSL_PREDICT_FALSE(min > v || v > max)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
field.Add(static_cast<int32_t>(v));
ptr += sizeof(TagType) + 1;
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
PROTOBUF_MUSTTAIL return ToParseLoop(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
} while (UnalignedLoad<TagType>(ptr) == expected_tag);
@ -1446,7 +1446,7 @@ PROTOBUF_NOINLINE const char* TcParser::FastEr1R2(PROTOBUF_TC_PARAM_DECL) {
template <typename TagType, uint8_t min>
PROTOBUF_ALWAYS_INLINE const char* TcParser::PackedEnumSmallRange(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
@ -1462,7 +1462,7 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::PackedEnumSmallRange(
return ctx->ReadPackedVarint(
ptr,
[=](int32_t v) {
if (PROTOBUF_PREDICT_FALSE(min > v || v > max)) {
if (ABSL_PREDICT_FALSE(min > v || v > max)) {
AddUnknownEnum(msg, table, FastDecodeTag(saved_tag), v);
} else {
field->Add(v);
@ -1574,7 +1574,7 @@ void EnsureArenaStringIsNotDefault(const MessageLite* msg,
template <typename TagType, typename FieldType, TcParser::Utf8Type utf8>
PROTOBUF_ALWAYS_INLINE const char* TcParser::SingularString(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
auto saved_tag = UnalignedLoad<TagType>(ptr);
@ -1588,7 +1588,7 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::SingularString(
} else {
ptr = ReadStringNoArena(msg, ptr, ctx, data.aux_idx(), table, field);
}
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
EnsureArenaStringIsNotDefault(msg, &field);
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
@ -1599,7 +1599,7 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::SingularString(
#endif
break;
default:
if (PROTOBUF_PREDICT_TRUE(IsValidUTF8(field))) {
if (ABSL_PREDICT_TRUE(IsValidUTF8(field))) {
break;
}
ReportFastUtf8Error(FastDecodeTag(saved_tag), table);
@ -1683,7 +1683,7 @@ const char* TcParser::FastUcS2(PROTOBUF_TC_PARAM_DECL) {
template <typename TagType, typename FieldType, TcParser::Utf8Type utf8>
PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedString(
PROTOBUF_TC_PARAM_DECL) {
if (PROTOBUF_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
if (ABSL_PREDICT_FALSE(data.coded_tag<TagType>() != 0)) {
PROTOBUF_MUSTTAIL return MiniParse(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
const auto expected_tag = UnalignedLoad<TagType>(ptr);
@ -1697,7 +1697,7 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedString(
#endif
return true;
default:
if (PROTOBUF_PREDICT_TRUE(
if (ABSL_PREDICT_TRUE(
utf8_range::IsStructurallyValid(field[field.size() - 1]))) {
return true;
}
@ -1709,27 +1709,27 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::RepeatedString(
auto* arena = field.GetArena();
SerialArena* serial_arena;
if (PROTOBUF_PREDICT_TRUE(arena != nullptr &&
arena->impl_.GetSerialArenaFast(&serial_arena) &&
field.PrepareForParse())) {
if (ABSL_PREDICT_TRUE(arena != nullptr &&
arena->impl_.GetSerialArenaFast(&serial_arena) &&
field.PrepareForParse())) {
do {
ptr += sizeof(TagType);
ptr = ParseRepeatedStringOnce(ptr, serial_arena, ctx, field);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr || !validate_last_string())) {
if (ABSL_PREDICT_FALSE(ptr == nullptr || !validate_last_string())) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
} while (UnalignedLoad<TagType>(ptr) == expected_tag);
} else {
do {
ptr += sizeof(TagType);
std::string* str = field.Add();
ptr = InlineGreedyStringParser(str, ptr, ctx);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr || !validate_last_string())) {
if (ABSL_PREDICT_FALSE(ptr == nullptr || !validate_last_string())) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
} while (UnalignedLoad<TagType>(ptr) == expected_tag);
}
PROTOBUF_MUSTTAIL return ToTagDispatch(PROTOBUF_TC_PARAM_NO_DATA_PASS);
@ -1946,9 +1946,9 @@ PROTOBUF_NOINLINE const char* TcParser::MpRepeatedFixed(
ptr = ptr2;
*field.Add() = UnalignedLoad<uint64_t>(ptr);
ptr += size;
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
ptr2 = ReadTag(ptr, &next_tag);
if (PROTOBUF_PREDICT_FALSE(ptr2 == nullptr)) goto error;
if (ABSL_PREDICT_FALSE(ptr2 == nullptr)) goto error;
} while (next_tag == decoded_tag);
} else {
ABSL_DCHECK_EQ(rep, static_cast<uint16_t>(field_layout::kRep32Bits));
@ -1964,9 +1964,9 @@ PROTOBUF_NOINLINE const char* TcParser::MpRepeatedFixed(
ptr = ptr2;
*field.Add() = UnalignedLoad<uint32_t>(ptr);
ptr += size;
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
ptr2 = ReadTag(ptr, &next_tag);
if (PROTOBUF_PREDICT_FALSE(ptr2 == nullptr)) goto error;
if (ABSL_PREDICT_FALSE(ptr2 == nullptr)) goto error;
} while (next_tag == decoded_tag);
}
@ -2002,7 +2002,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpPackedFixed(PROTOBUF_TC_PARAM_DECL) {
ptr = ctx->ReadPackedFixed(ptr, size, &field);
}
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
PROTOBUF_MUSTTAIL return ToTagDispatch(PROTOBUF_TC_PARAM_NO_DATA_PASS);
@ -2098,7 +2098,7 @@ const char* TcParser::MpRepeatedVarintT(PROTOBUF_TC_PARAM_DECL) {
do {
uint64_t tmp;
ptr = ParseVarint(ptr2, &tmp);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) goto error;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) goto error;
if (is_validated_enum) {
if (!EnumIsValidAux(static_cast<int32_t>(tmp), xform_val, aux)) {
ptr = ptr2;
@ -2109,9 +2109,9 @@ const char* TcParser::MpRepeatedVarintT(PROTOBUF_TC_PARAM_DECL) {
: WireFormatLite::ZigZagDecode32(tmp);
}
field.Add(static_cast<FieldType>(tmp));
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
ptr2 = ReadTag(ptr, &next_tag);
if (PROTOBUF_PREDICT_FALSE(ptr2 == nullptr)) goto error;
if (ABSL_PREDICT_FALSE(ptr2 == nullptr)) goto error;
} while (next_tag == decoded_tag);
parse_loop:
@ -2378,7 +2378,7 @@ PROTOBUF_NOINLINE const char* TcParser::MpString(PROTOBUF_TC_PARAM_DECL) {
Unreachable();
}
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr || !is_valid)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr || !is_valid)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
PROTOBUF_MUSTTAIL return ToTagDispatch(PROTOBUF_TC_PARAM_NO_DATA_PASS);
@ -2388,11 +2388,11 @@ PROTOBUF_ALWAYS_INLINE const char* TcParser::ParseRepeatedStringOnce(
const char* ptr, SerialArena* serial_arena, ParseContext* ctx,
RepeatedPtrField<std::string>& field) {
int size = ReadSize(&ptr);
if (PROTOBUF_PREDICT_FALSE(!ptr)) return {};
if (ABSL_PREDICT_FALSE(!ptr)) return {};
auto* str = new (serial_arena->AllocateFromStringBlock()) std::string();
field.AddAllocatedForParse(str);
ptr = ctx->ReadString(ptr, size, str);
if (PROTOBUF_PREDICT_FALSE(!ptr)) return {};
if (ABSL_PREDICT_FALSE(!ptr)) return {};
PROTOBUF_ASSUME(ptr != nullptr);
return ptr;
}
@ -2421,19 +2421,18 @@ PROTOBUF_NOINLINE const char* TcParser::MpRepeatedString(
auto* arena = field.GetArena();
SerialArena* serial_arena;
if (PROTOBUF_PREDICT_TRUE(
arena != nullptr &&
arena->impl_.GetSerialArenaFast(&serial_arena) &&
field.PrepareForParse())) {
if (ABSL_PREDICT_TRUE(arena != nullptr &&
arena->impl_.GetSerialArenaFast(&serial_arena) &&
field.PrepareForParse())) {
do {
ptr = ptr2;
ptr = ParseRepeatedStringOnce(ptr, serial_arena, ctx, field);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr ||
!MpVerifyUtf8(field[field.size() - 1],
table, entry, xform_val))) {
if (ABSL_PREDICT_FALSE(ptr == nullptr ||
!MpVerifyUtf8(field[field.size() - 1], table,
entry, xform_val))) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
ptr2 = ReadTag(ptr, &next_tag);
} while (next_tag == decoded_tag);
} else {
@ -2441,12 +2440,12 @@ PROTOBUF_NOINLINE const char* TcParser::MpRepeatedString(
ptr = ptr2;
std::string* str = field.Add();
ptr = InlineGreedyStringParser(str, ptr, ctx);
if (PROTOBUF_PREDICT_FALSE(
if (ABSL_PREDICT_FALSE(
ptr == nullptr ||
!MpVerifyUtf8(*str, table, entry, xform_val))) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
ptr2 = ReadTag(ptr, &next_tag);
} while (next_tag == decoded_tag);
}
@ -2588,10 +2587,10 @@ const char* TcParser::MpRepeatedMessageOrGroup(PROTOBUF_TC_PARAM_DECL) {
};
ptr = is_group ? ctx->ParseGroupInlined(ptr2, decoded_tag, inner_loop)
: ctx->ParseLengthDelimitedInlined(ptr2, inner_loop);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) goto error;
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) goto error;
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) goto parse_loop;
ptr2 = ReadTag(ptr, &next_tag);
if (PROTOBUF_PREDICT_FALSE(ptr2 == nullptr)) goto error;
if (ABSL_PREDICT_FALSE(ptr2 == nullptr)) goto error;
} while (next_tag == decoded_tag);
PROTOBUF_MUSTTAIL return ToTagDispatch(PROTOBUF_TC_PARAM_NO_DATA_PASS);
parse_loop:
@ -2746,14 +2745,12 @@ const char* TcParser::ParseOneMapEntry(
while (!ctx->Done(&ptr)) {
uint32_t inner_tag = ptr[0];
if (PROTOBUF_PREDICT_FALSE(inner_tag != key_tag &&
inner_tag != value_tag)) {
if (ABSL_PREDICT_FALSE(inner_tag != key_tag && inner_tag != value_tag)) {
// Do a full parse and check again in case the tag has non-canonical
// encoding.
ptr = ReadTag(ptr, &inner_tag);
if (PROTOBUF_PREDICT_FALSE(inner_tag != key_tag &&
inner_tag != value_tag)) {
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(inner_tag != key_tag && inner_tag != value_tag)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (inner_tag == 0 || (inner_tag & 7) == WFL::WIRETYPE_END_GROUP) {
ctx->SetLastTag(inner_tag);
@ -2761,7 +2758,7 @@ const char* TcParser::ParseOneMapEntry(
}
ptr = UnknownFieldParse(inner_tag, nullptr, ptr, ctx);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
continue;
}
} else {
@ -2782,7 +2779,7 @@ const char* TcParser::ParseOneMapEntry(
case WFL::WIRETYPE_VARINT:
uint64_t tmp;
ptr = ParseVarint(ptr, &tmp);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
switch (type_card.cpp_type()) {
case MapTypeCard::kBool:
*reinterpret_cast<bool*>(obj) = static_cast<bool>(tmp);
@ -2809,10 +2806,10 @@ const char* TcParser::ParseOneMapEntry(
case WFL::WIRETYPE_LENGTH_DELIMITED:
if (type_card.cpp_type() == MapTypeCard::kString) {
const int size = ReadSize(&ptr);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
std::string* str = reinterpret_cast<std::string*>(obj);
ptr = ctx->ReadString(ptr, size, str);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
bool do_utf8_check = map_info.fail_on_utf8_failure;
#ifndef NDEBUG
do_utf8_check |= map_info.log_debug_utf8_failure;
@ -2830,7 +2827,7 @@ const char* TcParser::ParseOneMapEntry(
ABSL_DCHECK_EQ(+type_card.cpp_type(), +MapTypeCard::kMessage);
ABSL_DCHECK_EQ(inner_tag, value_tag);
ptr = ctx->ParseMessage(reinterpret_cast<MessageLite*>(obj), ptr);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
continue;
}
default:
@ -2850,9 +2847,9 @@ PROTOBUF_NOINLINE const char* TcParser::MpMap(PROTOBUF_TC_PARAM_DECL) {
const auto* aux = table->field_aux(&entry);
const auto map_info = aux[0].map_info;
if (PROTOBUF_PREDICT_FALSE(!map_info.is_supported ||
(data.tag() & 7) !=
WireFormatLite::WIRETYPE_LENGTH_DELIMITED)) {
if (ABSL_PREDICT_FALSE(!map_info.is_supported ||
(data.tag() & 7) !=
WireFormatLite::WIRETYPE_LENGTH_DELIMITED)) {
PROTOBUF_MUSTTAIL return MpFallback(PROTOBUF_TC_PARAM_PASS);
}
@ -2880,12 +2877,12 @@ PROTOBUF_NOINLINE const char* TcParser::MpMap(PROTOBUF_TC_PARAM_DECL) {
return ParseOneMapEntry(node, ptr, ctx, aux, table, entry, map.arena());
});
if (PROTOBUF_PREDICT_TRUE(ptr != nullptr)) {
if (PROTOBUF_PREDICT_FALSE(map_info.value_is_validated_enum &&
!internal::ValidateEnumInlined(
*static_cast<int32_t*>(node->GetVoidValue(
map_info.node_size_info)),
aux[1].enum_data))) {
if (ABSL_PREDICT_TRUE(ptr != nullptr)) {
if (ABSL_PREDICT_FALSE(map_info.value_is_validated_enum &&
!internal::ValidateEnumInlined(
*static_cast<int32_t*>(node->GetVoidValue(
map_info.node_size_info)),
aux[1].enum_data))) {
WriteMapEntryAsUnknown(msg, table, saved_tag, node, map_info);
} else {
// Done parsing the node, try to insert it.
@ -2917,15 +2914,15 @@ PROTOBUF_NOINLINE const char* TcParser::MpMap(PROTOBUF_TC_PARAM_DECL) {
// Destroy the node if we have it.
// It could be because we failed to parse, or because insertion returned
// an overwritten node.
if (PROTOBUF_PREDICT_FALSE(node != nullptr && map.arena() == nullptr)) {
if (ABSL_PREDICT_FALSE(node != nullptr && map.arena() == nullptr)) {
DestroyMapNode(node, map_info, map);
}
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) {
PROTOBUF_MUSTTAIL return Error(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
PROTOBUF_MUSTTAIL return ToParseLoop(PROTOBUF_TC_PARAM_NO_DATA_PASS);
}

@ -31,6 +31,7 @@
#include "google/protobuf/stubs/common.h"
#include "absl/base/call_once.h"
#include "absl/base/casts.h"
#include "absl/base/optimization.h"
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "google/protobuf/any.h"
@ -69,7 +70,7 @@ namespace internal {
PROTOBUF_EXPORT extern std::atomic<bool> init_protobuf_defaults_state;
PROTOBUF_EXPORT void InitProtobufDefaultsSlow();
PROTOBUF_EXPORT inline void InitProtobufDefaults() {
if (PROTOBUF_PREDICT_FALSE(
if (ABSL_PREDICT_FALSE(
!init_protobuf_defaults_state.load(std::memory_order_acquire))) {
InitProtobufDefaultsSlow();
}

@ -29,6 +29,7 @@
#include <string>
#include <utility>
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
#include "absl/strings/cord.h"
@ -121,9 +122,9 @@ CodedInputStream::Limit CodedInputStream::PushLimit(int byte_limit) {
// security: byte_limit is possibly evil, so check for negative values
// and overflow. Also check that the new requested limit is before the
// previous limit; otherwise we continue to enforce the previous limit.
if (PROTOBUF_PREDICT_TRUE(byte_limit >= 0 &&
byte_limit <= INT_MAX - current_position &&
byte_limit < current_limit_ - current_position)) {
if (ABSL_PREDICT_TRUE(byte_limit >= 0 &&
byte_limit <= INT_MAX - current_position &&
byte_limit < current_limit_ - current_position)) {
current_limit_ = current_position + byte_limit;
RecomputeBufferLimits();
}
@ -334,7 +335,7 @@ bool CodedInputStream::ReadCord(absl::Cord* output, int size) {
// Make sure to not cross a limit set by PushLimit() or SetTotalBytesLimit().
const int closest_limit = std::min(current_limit_, total_bytes_limit_);
const int available = closest_limit - total_bytes_read_;
if (PROTOBUF_PREDICT_FALSE(size > available)) {
if (ABSL_PREDICT_FALSE(size > available)) {
total_bytes_read_ = closest_limit;
input_->ReadCord(output, available);
return false;
@ -842,7 +843,7 @@ uint8_t* EpsCopyOutputStream::GetDirectBufferForNBytesAndAdvance(int size,
uint8_t* EpsCopyOutputStream::Next() {
ABSL_DCHECK(!had_error_); // NOLINT
if (PROTOBUF_PREDICT_FALSE(stream_ == nullptr)) return Error();
if (ABSL_PREDICT_FALSE(stream_ == nullptr)) return Error();
if (buffer_end_) {
// We're in the patch buffer and need to fill up the previous buffer.
std::memcpy(buffer_end_, buffer_, end_ - buffer_);
@ -850,14 +851,14 @@ uint8_t* EpsCopyOutputStream::Next() {
int size;
do {
void* data;
if (PROTOBUF_PREDICT_FALSE(!stream_->Next(&data, &size))) {
if (ABSL_PREDICT_FALSE(!stream_->Next(&data, &size))) {
// Stream has an error, we use the patch buffer to continue to be
// able to write.
return Error();
}
ptr = static_cast<uint8_t*>(data);
} while (size == 0);
if (PROTOBUF_PREDICT_TRUE(size > kSlopBytes)) {
if (ABSL_PREDICT_TRUE(size > kSlopBytes)) {
std::memcpy(ptr, end_, kSlopBytes);
end_ = ptr + size - kSlopBytes;
buffer_end_ = nullptr;
@ -880,7 +881,7 @@ uint8_t* EpsCopyOutputStream::Next() {
uint8_t* EpsCopyOutputStream::EnsureSpaceFallback(uint8_t* ptr) {
do {
if (PROTOBUF_PREDICT_FALSE(had_error_)) return buffer_;
if (ABSL_PREDICT_FALSE(had_error_)) return buffer_;
int overrun = ptr - end_;
ABSL_DCHECK(overrun >= 0); // NOLINT
ABSL_DCHECK(overrun <= kSlopBytes); // NOLINT

@ -660,14 +660,14 @@ class PROTOBUF_EXPORT EpsCopyOutputStream {
// will never fail! The underlying stream can produce an error. Use HadError
// to check for errors.
[[nodiscard]] uint8_t* EnsureSpace(uint8_t* ptr) {
if (PROTOBUF_PREDICT_FALSE(ptr >= end_)) {
if (ABSL_PREDICT_FALSE(ptr >= end_)) {
return EnsureSpaceFallback(ptr);
}
return ptr;
}
uint8_t* WriteRaw(const void* data, int size, uint8_t* ptr) {
if (PROTOBUF_PREDICT_FALSE(end_ - ptr < size)) {
if (ABSL_PREDICT_FALSE(end_ - ptr < size)) {
return WriteRawFallback(data, size, ptr);
}
std::memcpy(ptr, data, static_cast<unsigned int>(size));
@ -696,8 +696,8 @@ class PROTOBUF_EXPORT EpsCopyOutputStream {
uint8_t* WriteStringMaybeAliased(uint32_t num, const std::string& s,
uint8_t* ptr) {
std::ptrdiff_t size = s.size();
if (PROTOBUF_PREDICT_FALSE(
size >= 128 || end_ - ptr + 16 - TagSize(num << 3) - 1 < size)) {
if (ABSL_PREDICT_FALSE(size >= 128 ||
end_ - ptr + 16 - TagSize(num << 3) - 1 < size)) {
return WriteStringMaybeAliasedOutline(num, s, ptr);
}
ptr = UnsafeVarint((num << 3) | 2, ptr);
@ -714,8 +714,8 @@ class PROTOBUF_EXPORT EpsCopyOutputStream {
PROTOBUF_ALWAYS_INLINE uint8_t* WriteString(uint32_t num, const T& s,
uint8_t* ptr) {
std::ptrdiff_t size = s.size();
if (PROTOBUF_PREDICT_FALSE(
size >= 128 || end_ - ptr + 16 - TagSize(num << 3) - 1 < size)) {
if (ABSL_PREDICT_FALSE(size >= 128 ||
end_ - ptr + 16 - TagSize(num << 3) - 1 < size)) {
return WriteStringOutline(num, s, ptr);
}
ptr = UnsafeVarint((num << 3) | 2, ptr);
@ -898,7 +898,7 @@ class PROTOBUF_EXPORT EpsCopyOutputStream {
PROTOBUF_ALWAYS_INLINE static uint8_t* UnsafeVarint(T value, uint8_t* ptr) {
static_assert(std::is_unsigned<T>::value,
"Varint serialization must be unsigned");
while (PROTOBUF_PREDICT_FALSE(value >= 0x80)) {
while (ABSL_PREDICT_FALSE(value >= 0x80)) {
*ptr = static_cast<uint8_t>(value | 0x80);
value >>= 7;
++ptr;
@ -909,7 +909,7 @@ class PROTOBUF_EXPORT EpsCopyOutputStream {
PROTOBUF_ALWAYS_INLINE static uint8_t* UnsafeWriteSize(uint32_t value,
uint8_t* ptr) {
while (PROTOBUF_PREDICT_FALSE(value >= 0x80)) {
while (ABSL_PREDICT_FALSE(value >= 0x80)) {
*ptr = static_cast<uint8_t>(value | 0x80);
value >>= 7;
++ptr;
@ -1295,7 +1295,7 @@ class PROTOBUF_EXPORT CodedOutputStream {
inline bool CodedInputStream::ReadVarint32(uint32_t* value) {
uint32_t v = 0;
if (PROTOBUF_PREDICT_TRUE(buffer_ < buffer_end_)) {
if (ABSL_PREDICT_TRUE(buffer_ < buffer_end_)) {
v = *buffer_;
if (v < 0x80) {
*value = v;
@ -1309,7 +1309,7 @@ inline bool CodedInputStream::ReadVarint32(uint32_t* value) {
}
inline bool CodedInputStream::ReadVarint64(uint64_t* value) {
if (PROTOBUF_PREDICT_TRUE(buffer_ < buffer_end_) && *buffer_ < 0x80) {
if (ABSL_PREDICT_TRUE(buffer_ < buffer_end_) && *buffer_ < 0x80) {
*value = *buffer_;
Advance(1);
return true;
@ -1320,7 +1320,7 @@ inline bool CodedInputStream::ReadVarint64(uint64_t* value) {
}
inline bool CodedInputStream::ReadVarintSizeAsInt(int* value) {
if (PROTOBUF_PREDICT_TRUE(buffer_ < buffer_end_)) {
if (ABSL_PREDICT_TRUE(buffer_ < buffer_end_)) {
int v = *buffer_;
if (v < 0x80) {
*value = v;
@ -1355,7 +1355,7 @@ inline const uint8_t* CodedInputStream::ReadLittleEndian64FromArray(
}
inline bool CodedInputStream::ReadLittleEndian16(uint16_t* value) {
if (PROTOBUF_PREDICT_TRUE(BufferSize() >= static_cast<int>(sizeof(*value)))) {
if (ABSL_PREDICT_TRUE(BufferSize() >= static_cast<int>(sizeof(*value)))) {
buffer_ = ReadLittleEndian16FromArray(buffer_, value);
return true;
} else {
@ -1366,7 +1366,7 @@ inline bool CodedInputStream::ReadLittleEndian16(uint16_t* value) {
inline bool CodedInputStream::ReadLittleEndian32(uint32_t* value) {
#if defined(ABSL_IS_LITTLE_ENDIAN) && \
!defined(PROTOBUF_DISABLE_LITTLE_ENDIAN_OPT_FOR_TEST)
if (PROTOBUF_PREDICT_TRUE(BufferSize() >= static_cast<int>(sizeof(*value)))) {
if (ABSL_PREDICT_TRUE(BufferSize() >= static_cast<int>(sizeof(*value)))) {
buffer_ = ReadLittleEndian32FromArray(buffer_, value);
return true;
} else {
@ -1380,7 +1380,7 @@ inline bool CodedInputStream::ReadLittleEndian32(uint32_t* value) {
inline bool CodedInputStream::ReadLittleEndian64(uint64_t* value) {
#if defined(ABSL_IS_LITTLE_ENDIAN) && \
!defined(PROTOBUF_DISABLE_LITTLE_ENDIAN_OPT_FOR_TEST)
if (PROTOBUF_PREDICT_TRUE(BufferSize() >= static_cast<int>(sizeof(*value)))) {
if (ABSL_PREDICT_TRUE(BufferSize() >= static_cast<int>(sizeof(*value)))) {
buffer_ = ReadLittleEndian64FromArray(buffer_, value);
return true;
} else {
@ -1393,7 +1393,7 @@ inline bool CodedInputStream::ReadLittleEndian64(uint64_t* value) {
inline uint32_t CodedInputStream::ReadTagNoLastTag() {
uint32_t v = 0;
if (PROTOBUF_PREDICT_TRUE(buffer_ < buffer_end_)) {
if (ABSL_PREDICT_TRUE(buffer_ < buffer_end_)) {
v = *buffer_;
if (v < 0x80) {
Advance(1);
@ -1410,7 +1410,7 @@ inline std::pair<uint32_t, bool> CodedInputStream::ReadTagWithCutoffNoLastTag(
// constant, and things like "cutoff >= kMax1ByteVarint" to be evaluated at
// compile time.
uint32_t first_byte_or_zero = 0;
if (PROTOBUF_PREDICT_TRUE(buffer_ < buffer_end_)) {
if (ABSL_PREDICT_TRUE(buffer_ < buffer_end_)) {
// Hot case: buffer_ non_empty, buffer_[0] in [1, 128).
// TODO: Is it worth rearranging this? E.g., if the number of fields
// is large enough then is it better to check for the two-byte case first?
@ -1424,8 +1424,8 @@ inline std::pair<uint32_t, bool> CodedInputStream::ReadTagWithCutoffNoLastTag(
// Other hot case: cutoff >= 0x80, buffer_ has at least two bytes available,
// and tag is two bytes. The latter is tested by bitwise-and-not of the
// first byte and the second byte.
if (cutoff >= 0x80 && PROTOBUF_PREDICT_TRUE(buffer_ + 1 < buffer_end_) &&
PROTOBUF_PREDICT_TRUE((buffer_[0] & ~buffer_[1]) >= 0x80)) {
if (cutoff >= 0x80 && ABSL_PREDICT_TRUE(buffer_ + 1 < buffer_end_) &&
ABSL_PREDICT_TRUE((buffer_[0] & ~buffer_[1]) >= 0x80)) {
const uint32_t kMax2ByteVarint = (0x7f << 7) + 0x7f;
uint32_t tag = (1u << 7) * buffer_[1] + (buffer_[0] - 0x80);
Advance(2);
@ -1454,15 +1454,14 @@ inline bool CodedInputStream::ConsumedEntireMessage() {
inline bool CodedInputStream::ExpectTag(uint32_t expected) {
if (expected < (1 << 7)) {
if (PROTOBUF_PREDICT_TRUE(buffer_ < buffer_end_) &&
buffer_[0] == expected) {
if (ABSL_PREDICT_TRUE(buffer_ < buffer_end_) && buffer_[0] == expected) {
Advance(1);
return true;
} else {
return false;
}
} else if (expected < (1 << 14)) {
if (PROTOBUF_PREDICT_TRUE(BufferSize() >= 2) &&
if (ABSL_PREDICT_TRUE(BufferSize() >= 2) &&
buffer_[0] == static_cast<uint8_t>(expected | 0x80) &&
buffer_[1] == static_cast<uint8_t>(expected >> 7)) {
Advance(2);
@ -1632,7 +1631,7 @@ template <class Stream>
inline void CodedOutputStream::InitEagerly(Stream* stream) {
void* data;
int size;
if (PROTOBUF_PREDICT_TRUE(stream->Next(&data, &size) && size > 0)) {
if (ABSL_PREDICT_TRUE(stream->Next(&data, &size) && size > 0)) {
cur_ = impl_.SetInitialBuffer(data, size);
}
}

@ -14,6 +14,7 @@
#include <string>
#include <utility>
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
#include "absl/strings/cord.h"
@ -47,7 +48,7 @@ inline const char* InternalParseRepeated(const char* ptr,
const MessageLite* prototype) {
uint32_t expected_tag;
ptr = ReadTag(ptr, &expected_tag);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
// TODO: Try to optimize this. The tags and lengths are read again
// which is a bit wasteful.
return LazyRepeatedPtrField::ParseToRepeatedMessage<uint32_t>(

@ -17,6 +17,7 @@
#include <utility>
#include "absl/base/attributes.h"
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/strings/cord.h"
#include "absl/strings/str_cat.h"
@ -331,14 +332,14 @@ class LazyRepeatedPtrField {
MessageLite* submsg = value->AddMessage(prototype);
// ptr2 points to the start of the element's encoded length.
ptr = ctx->ParseMessage(submsg, ptr2);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
if (ctx->Done(&ptr)) {
break;
}
}
ptr2 = ReadTagInternal(ptr, &next_tag);
if (PROTOBUF_PREDICT_FALSE(ptr2 == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr2 == nullptr)) return nullptr;
} while (next_tag == expected_tag);
return ptr;
}
@ -373,7 +374,7 @@ class LazyRepeatedPtrField {
taglen_size = tmp.size();
return ctx->AppendString(p, &tmp);
});
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
const auto tmp_size = tmp.size();
ABSL_DCHECK_GE(tmp_size, taglen_size);
if (unparsed_.IsCord()) {
@ -392,9 +393,9 @@ class LazyRepeatedPtrField {
prototype, arena, ptr, ctx, expected_tag,
absl::string_view(tmp.data() + taglen_size,
tmp_size - taglen_size));
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
}
if (PROTOBUF_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
if (ABSL_PREDICT_FALSE(!ctx->DataAvailable(ptr))) {
// `Done` advances the stream to the next buffer chunk.
if (ctx->Done(&ptr)) {
break;
@ -406,7 +407,7 @@ class LazyRepeatedPtrField {
// TODO: Try to remove the following condition for 8 and 16 bits
// TagType.
if (PROTOBUF_PREDICT_FALSE(ptr2 == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr2 == nullptr)) return nullptr;
} while (next_tag == expected_tag);
if (unparsed_.IsArray()) {
unparsed_.ZeroOutTailingBytes();

@ -13,6 +13,7 @@
#include <iterator>
#include <string>
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "google/protobuf/arena.h"
#include "google/protobuf/message_lite.h"
@ -122,7 +123,7 @@ void UntypedMapBase::ClearTable(const ClearInput input) {
for (map_index_t b = index_of_first_non_null_, end = num_buckets_;
b < end; ++b) {
NodeBase* node =
PROTOBUF_PREDICT_FALSE(internal::TableEntryIsTree(table[b]))
ABSL_PREDICT_FALSE(internal::TableEntryIsTree(table[b]))
? DestroyTree(TableEntryToTree(table[b]))
: TableEntryToNode(table[b]);

@ -27,6 +27,7 @@
#include <type_traits>
#include <utility>
#include "absl/base/optimization.h"
#include "absl/memory/memory.h"
#include "google/protobuf/message_lite.h"
@ -828,7 +829,7 @@ inline UntypedMapIterator UntypedMapBase::begin() const {
} else {
bucket_index = index_of_first_non_null_;
TableEntryPtr entry = table_[bucket_index];
node = PROTOBUF_PREDICT_TRUE(internal::TableEntryIsList(entry))
node = ABSL_PREDICT_TRUE(internal::TableEntryIsList(entry))
? TableEntryToNode(entry)
: TableEntryToTree(entry)->begin()->second;
PROTOBUF_ASSUME(node != nullptr);
@ -843,7 +844,7 @@ inline void UntypedMapIterator::SearchFrom(map_index_t start_bucket) {
TableEntryPtr entry = m_->table_[i];
if (entry == TableEntryPtr{}) continue;
bucket_index_ = i;
if (PROTOBUF_PREDICT_TRUE(TableEntryIsList(entry))) {
if (ABSL_PREDICT_TRUE(TableEntryIsList(entry))) {
node_ = TableEntryToNode(entry);
} else {
TreeForMap* tree = TableEntryToTree(entry);
@ -966,7 +967,7 @@ class KeyMapBase : public UntypedMapBase {
EraseFromTree(b, tree_it);
}
--num_elements_;
if (PROTOBUF_PREDICT_FALSE(b == index_of_first_non_null_)) {
if (ABSL_PREDICT_FALSE(b == index_of_first_non_null_)) {
while (index_of_first_non_null_ < num_buckets_ &&
TableEntryIsEmpty(index_of_first_non_null_)) {
++index_of_first_non_null_;
@ -1064,13 +1065,13 @@ class KeyMapBase : public UntypedMapBase {
// We don't care how many elements are in trees. If a lot are,
// we may resize even though there are many empty buckets. In
// practice, this seems fine.
if (PROTOBUF_PREDICT_FALSE(new_size > hi_cutoff)) {
if (ABSL_PREDICT_FALSE(new_size > hi_cutoff)) {
if (num_buckets_ <= max_size() / 2) {
Resize(num_buckets_ * 2);
return true;
}
} else if (PROTOBUF_PREDICT_FALSE(new_size <= lo_cutoff &&
num_buckets_ > kMinTableSize)) {
} else if (ABSL_PREDICT_FALSE(new_size <= lo_cutoff &&
num_buckets_ > kMinTableSize)) {
size_type lg2_of_size_reduction_factor = 1;
// It's possible we want to shrink a lot here... size() could even be 0.
// So, estimate how much to shrink by making sure we don't shrink so

@ -195,7 +195,7 @@ size_t Message::ComputeUnknownFieldsSize(
size_t Message::MaybeComputeUnknownFieldsSize(
size_t total_size, const internal::CachedSize* cached_size) const {
if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
return ComputeUnknownFieldsSize(total_size, cached_size);
}
cached_size->Set(internal::ToCachedSize(total_size));

@ -97,6 +97,7 @@
#include "absl/base/attributes.h"
#include "absl/base/call_once.h"
#include "absl/base/macros.h"
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/memory/memory.h"
#include "absl/strings/cord.h"
@ -1629,7 +1630,7 @@ const Type& Reflection::GetRawSplit(const Message& message,
template <class Type>
const Type& Reflection::GetRawNonOneof(const Message& message,
const FieldDescriptor* field) const {
if (PROTOBUF_PREDICT_FALSE(schema_.IsSplit(field))) {
if (ABSL_PREDICT_FALSE(schema_.IsSplit(field))) {
return GetRawSplit<Type>(message, field);
}
const uint32_t field_offset = schema_.GetFieldOffsetNonOneof(field);
@ -1642,7 +1643,7 @@ const Type& Reflection::GetRaw(const Message& message,
ABSL_DCHECK(!schema_.InRealOneof(field) || HasOneofField(message, field))
<< "Field = " << field->full_name();
if (PROTOBUF_PREDICT_TRUE(!schema_.InRealOneof(field))) {
if (ABSL_PREDICT_TRUE(!schema_.InRealOneof(field))) {
return GetRawNonOneof<Type>(message, field);
}

@ -23,6 +23,7 @@
#include <utility>
#include "absl/base/config.h"
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
#include "absl/strings/cord.h"
@ -185,7 +186,7 @@ inline bool CheckFieldPresence(const internal::ParseContext& ctx,
const MessageLite& msg,
MessageLite::ParseFlags parse_flags) {
(void)ctx; // Parameter is used by Google-internal code.
if (PROTOBUF_PREDICT_FALSE((parse_flags & MessageLite::kMergePartial) != 0)) {
if (ABSL_PREDICT_FALSE((parse_flags & MessageLite::kMergePartial) != 0)) {
return true;
}
return msg.IsInitializedWithErrors();
@ -219,7 +220,7 @@ bool MergeFromImpl(absl::string_view input, MessageLite* msg,
aliasing, &ptr, input);
ptr = internal::TcParser::ParseLoop(msg, ptr, &ctx, tc_table);
// ctx has an explicit limit set (length of string_view).
if (PROTOBUF_PREDICT_TRUE(ptr && ctx.EndedAtLimit())) {
if (ABSL_PREDICT_TRUE(ptr && ctx.EndedAtLimit())) {
return CheckFieldPresence(ctx, *msg, parse_flags);
}
return false;
@ -234,7 +235,7 @@ bool MergeFromImpl(io::ZeroCopyInputStream* input, MessageLite* msg,
aliasing, &ptr, input);
ptr = internal::TcParser::ParseLoop(msg, ptr, &ctx, tc_table);
// ctx has no explicit limit (hence we end on end of stream)
if (PROTOBUF_PREDICT_TRUE(ptr && ctx.EndedAtEndOfStream())) {
if (ABSL_PREDICT_TRUE(ptr && ctx.EndedAtEndOfStream())) {
return CheckFieldPresence(ctx, *msg, parse_flags);
}
return false;
@ -248,9 +249,9 @@ bool MergeFromImpl(BoundedZCIS input, MessageLite* msg,
internal::ParseContext ctx(io::CodedInputStream::GetDefaultRecursionLimit(),
aliasing, &ptr, input.zcis, input.limit);
ptr = internal::TcParser::ParseLoop(msg, ptr, &ctx, tc_table);
if (PROTOBUF_PREDICT_FALSE(!ptr)) return false;
if (ABSL_PREDICT_FALSE(!ptr)) return false;
ctx.BackUp(ptr);
if (PROTOBUF_PREDICT_TRUE(ctx.EndedAtLimit())) {
if (ABSL_PREDICT_TRUE(ctx.EndedAtLimit())) {
return CheckFieldPresence(ctx, *msg, parse_flags);
}
return false;
@ -294,7 +295,7 @@ class ZeroCopyCodedInputStream : public io::ZeroCopyInputStream {
bool ReadCord(absl::Cord* cord, int count) final {
// Fast path: tail call into ReadCord reading new value.
if (PROTOBUF_PREDICT_TRUE(cord->empty())) {
if (ABSL_PREDICT_TRUE(cord->empty())) {
return cis_->ReadCord(cord, count);
}
absl::Cord tmp;
@ -320,7 +321,7 @@ bool MessageLite::MergeFromImpl(io::CodedInputStream* input,
ctx.data().pool = input->GetExtensionPool();
ctx.data().factory = input->GetExtensionFactory();
ptr = internal::TcParser::ParseLoop(this, ptr, &ctx, GetTcParseTable());
if (PROTOBUF_PREDICT_FALSE(!ptr)) return false;
if (ABSL_PREDICT_FALSE(!ptr)) return false;
ctx.BackUp(ptr);
if (!ctx.EndedAtEndOfStream()) {
ABSL_DCHECK_NE(ctx.LastTag(), 1u); // We can't end on a pushed limit.

@ -10,6 +10,7 @@
#include <string>
#include "absl/base/optimization.h"
#include "google/protobuf/arena.h"
#include "google/protobuf/port.h"
@ -63,7 +64,7 @@ class PROTOBUF_EXPORT InternalMetadata {
}
PROTOBUF_NDEBUG_INLINE Arena* arena() const {
if (PROTOBUF_PREDICT_FALSE(have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(have_unknown_fields())) {
return PtrValue<ContainerBase>()->arena;
} else {
return PtrValue<Arena>();
@ -81,7 +82,7 @@ class PROTOBUF_EXPORT InternalMetadata {
template <typename T>
PROTOBUF_NDEBUG_INLINE const T& unknown_fields(
const T& (*default_instance)()) const {
if (PROTOBUF_PREDICT_FALSE(have_unknown_fields())) {
if (ABSL_PREDICT_FALSE(have_unknown_fields())) {
return PtrValue<Container<T>>()->unknown_fields;
} else {
return default_instance();
@ -90,7 +91,7 @@ class PROTOBUF_EXPORT InternalMetadata {
template <typename T>
PROTOBUF_NDEBUG_INLINE T* mutable_unknown_fields() {
if (PROTOBUF_PREDICT_TRUE(have_unknown_fields())) {
if (ABSL_PREDICT_TRUE(have_unknown_fields())) {
return &PtrValue<Container<T>>()->unknown_fields;
} else {
return mutable_unknown_fields_slow<T>();

@ -10,6 +10,7 @@
#include <algorithm>
#include <cstring>
#include "absl/base/optimization.h"
#include "absl/strings/cord.h"
#include "absl/strings/string_view.h"
#include "google/protobuf/message_lite.h"
@ -148,7 +149,7 @@ const char* EpsCopyInputStream::Next() {
std::pair<const char*, bool> EpsCopyInputStream::DoneFallback(int overrun,
int depth) {
// Did we exceeded the limit (parse error).
if (PROTOBUF_PREDICT_FALSE(overrun > limit_)) return {nullptr, true};
if (ABSL_PREDICT_FALSE(overrun > limit_)) return {nullptr, true};
ABSL_DCHECK(overrun != limit_); // Guaranteed by caller.
ABSL_DCHECK(overrun < limit_); // Follows from above
// TODO Instead of this dcheck we could just assign, and remove
@ -166,7 +167,7 @@ std::pair<const char*, bool> EpsCopyInputStream::DoneFallback(int overrun,
p = NextBuffer(overrun, depth);
if (p == nullptr) {
// We are at the end of the stream
if (PROTOBUF_PREDICT_FALSE(overrun != 0)) return {nullptr, true};
if (ABSL_PREDICT_FALSE(overrun != 0)) return {nullptr, true};
ABSL_DCHECK_GT(limit_, 0);
limit_end_ = buffer_end_;
// Distinguish ending on a pushed limit or ending on end-of-stream.
@ -188,7 +189,7 @@ const char* EpsCopyInputStream::SkipFallback(const char* ptr, int size) {
const char* EpsCopyInputStream::ReadStringFallback(const char* ptr, int size,
std::string* str) {
str->clear();
if (PROTOBUF_PREDICT_TRUE(size <= buffer_end_ - ptr + limit_)) {
if (ABSL_PREDICT_TRUE(size <= buffer_end_ - ptr + limit_)) {
// Reserve the string up to a static safe size. If strings are bigger than
// this we proceed by growing the string as needed. This protects against
// malicious payloads making protobuf hold on to a lot of memory.
@ -200,7 +201,7 @@ const char* EpsCopyInputStream::ReadStringFallback(const char* ptr, int size,
const char* EpsCopyInputStream::AppendStringFallback(const char* ptr, int size,
std::string* str) {
if (PROTOBUF_PREDICT_TRUE(size <= buffer_end_ - ptr + limit_)) {
if (ABSL_PREDICT_TRUE(size <= buffer_end_ - ptr + limit_)) {
// Reserve the string up to a static safe size. If strings are bigger than
// this we proceed by growing the string as needed. This protects against
// malicious payloads making protobuf hold on to a lot of memory.
@ -337,14 +338,14 @@ std::pair<const char*, uint32_t> VarintParseSlow32(const char* p,
for (std::uint32_t i = 1; i < 5; i++) {
uint32_t byte = static_cast<uint8_t>(p[i]);
res += (byte - 1) << (7 * i);
if (PROTOBUF_PREDICT_TRUE(byte < 128)) {
if (ABSL_PREDICT_TRUE(byte < 128)) {
return {p + i + 1, res};
}
}
// Accept >5 bytes
for (std::uint32_t i = 5; i < 10; i++) {
uint32_t byte = static_cast<uint8_t>(p[i]);
if (PROTOBUF_PREDICT_TRUE(byte < 128)) {
if (ABSL_PREDICT_TRUE(byte < 128)) {
return {p + i + 1, res};
}
}
@ -357,7 +358,7 @@ std::pair<const char*, uint64_t> VarintParseSlow64(const char* p,
for (std::uint32_t i = 1; i < 10; i++) {
uint64_t byte = static_cast<uint8_t>(p[i]);
res += (byte - 1) << (7 * i);
if (PROTOBUF_PREDICT_TRUE(byte < 128)) {
if (ABSL_PREDICT_TRUE(byte < 128)) {
return {p + i + 1, res};
}
}
@ -368,7 +369,7 @@ std::pair<const char*, uint32_t> ReadTagFallback(const char* p, uint32_t res) {
for (std::uint32_t i = 2; i < 5; i++) {
uint32_t byte = static_cast<uint8_t>(p[i]);
res += (byte - 1) << (7 * i);
if (PROTOBUF_PREDICT_TRUE(byte < 128)) {
if (ABSL_PREDICT_TRUE(byte < 128)) {
return {p + i + 1, res};
}
}
@ -379,17 +380,17 @@ std::pair<const char*, int32_t> ReadSizeFallback(const char* p, uint32_t res) {
for (std::uint32_t i = 1; i < 4; i++) {
uint32_t byte = static_cast<uint8_t>(p[i]);
res += (byte - 1) << (7 * i);
if (PROTOBUF_PREDICT_TRUE(byte < 128)) {
if (ABSL_PREDICT_TRUE(byte < 128)) {
return {p + i + 1, res};
}
}
std::uint32_t byte = static_cast<uint8_t>(p[4]);
if (PROTOBUF_PREDICT_FALSE(byte >= 8)) return {nullptr, 0}; // size >= 2gb
if (ABSL_PREDICT_FALSE(byte >= 8)) return {nullptr, 0}; // size >= 2gb
res += (byte - 1) << 28;
// Protect against sign integer overflow in PushLimit. Limits are relative
// to buffer ends and ptr could potential be kSlopBytes beyond a buffer end.
// To protect against overflow we reject limits absurdly close to INT_MAX.
if (PROTOBUF_PREDICT_FALSE(res > INT_MAX - ParseContext::kSlopBytes)) {
if (ABSL_PREDICT_FALSE(res > INT_MAX - ParseContext::kSlopBytes)) {
return {nullptr, 0};
}
return {p + 5, res};

@ -169,7 +169,7 @@ class PROTOBUF_EXPORT EpsCopyInputStream {
// We must update the limit first before the early return. Otherwise, we can
// end up with an invalid limit and it can lead to integer overflows.
limit_ = limit_ + std::move(delta).token();
if (PROTOBUF_PREDICT_FALSE(!EndedAtLimit())) return false;
if (ABSL_PREDICT_FALSE(!EndedAtLimit())) return false;
// TODO We could remove this line and hoist the code to
// DoneFallback. Study the perf/bin-size effects.
limit_end_ = buffer_end_ + (std::min)(0, limit_);
@ -265,7 +265,7 @@ class PROTOBUF_EXPORT EpsCopyInputStream {
// If limit is exceeded, it returns true and ptr is set to null.
bool DoneWithCheck(const char** ptr, int d) {
ABSL_DCHECK(*ptr);
if (PROTOBUF_PREDICT_TRUE(*ptr < limit_end_)) return false;
if (ABSL_PREDICT_TRUE(*ptr < limit_end_)) return false;
int overrun = static_cast<int>(*ptr - buffer_end_);
ABSL_DCHECK_LE(overrun, kSlopBytes); // Guaranteed by parse loop.
if (overrun ==
@ -549,7 +549,7 @@ class PROTOBUF_EXPORT ParseContext : public EpsCopyInputStream {
}
group_depth_--;
depth_++;
if (PROTOBUF_PREDICT_FALSE(!ConsumeEndGroup(tag))) return nullptr;
if (ABSL_PREDICT_FALSE(!ConsumeEndGroup(tag))) return nullptr;
return ptr;
}
@ -809,12 +809,12 @@ PROTOBUF_ALWAYS_INLINE std::pair<const char*, uint64_t> VarintParseSlowArm64(
// This immediate ends in 14 zeroes since valid_chunk_bits is too low by 14.
uint64_t result_mask = kResultMaskUnshifted << info.valid_chunk_bits;
// iff the Varint i invalid.
if (PROTOBUF_PREDICT_FALSE(info.masked_cont_bits == 0)) {
if (ABSL_PREDICT_FALSE(info.masked_cont_bits == 0)) {
return {nullptr, 0};
}
// Test for early exit if Varint does not exceed 6 chunks. Branching on one
// bit is faster on ARM than via a compare and branch.
if (PROTOBUF_PREDICT_FALSE((info.valid_bits & 0x20) != 0)) {
if (ABSL_PREDICT_FALSE((info.valid_bits & 0x20) != 0)) {
// Extract data bits from high four chunks.
uint64_t merged_67 = ExtractAndMergeTwoChunks(first8, /*first_chunk=*/6);
// Last two chunks come from last two bytes of info.last8.
@ -852,7 +852,7 @@ PROTOBUF_ALWAYS_INLINE std::pair<const char*, uint32_t> VarintParseSlowArm32(
// condition isn't on the critical path. Here we make sure that we don't do so
// until result has been computed.
info.masked_cont_bits = ValueBarrier(info.masked_cont_bits, result);
if (PROTOBUF_PREDICT_FALSE(info.masked_cont_bits == 0)) {
if (ABSL_PREDICT_FALSE(info.masked_cont_bits == 0)) {
return {nullptr, 0};
}
return {info.p, result};
@ -881,11 +881,11 @@ template <typename T>
// This optimization is not supported in big endian mode
uint64_t first8;
std::memcpy(&first8, p, sizeof(first8));
if (PROTOBUF_PREDICT_TRUE((first8 & 0x80) == 0)) {
if (ABSL_PREDICT_TRUE((first8 & 0x80) == 0)) {
*out = static_cast<uint8_t>(first8);
return p + 1;
}
if (PROTOBUF_PREDICT_TRUE((first8 & 0x8000) == 0)) {
if (ABSL_PREDICT_TRUE((first8 & 0x8000) == 0)) {
uint64_t chunk1;
uint64_t chunk2;
// Extracting the two chunks this way gives a speedup for this path.
@ -969,18 +969,18 @@ RotRight7AndReplaceLowByte(uint64_t res, const char& byte) {
PROTOBUF_ALWAYS_INLINE const char* ReadTagInlined(const char* ptr,
uint32_t* out) {
uint64_t res = 0xFF & ptr[0];
if (PROTOBUF_PREDICT_FALSE(res >= 128)) {
if (ABSL_PREDICT_FALSE(res >= 128)) {
res = RotRight7AndReplaceLowByte(res, ptr[1]);
if (PROTOBUF_PREDICT_FALSE(res & 0x80)) {
if (ABSL_PREDICT_FALSE(res & 0x80)) {
res = RotRight7AndReplaceLowByte(res, ptr[2]);
if (PROTOBUF_PREDICT_FALSE(res & 0x80)) {
if (ABSL_PREDICT_FALSE(res & 0x80)) {
res = RotRight7AndReplaceLowByte(res, ptr[3]);
if (PROTOBUF_PREDICT_FALSE(res & 0x80)) {
if (ABSL_PREDICT_FALSE(res & 0x80)) {
// Note: this wouldn't work if res were 32-bit,
// because then replacing the low byte would overwrite
// the bottom 4 bits of the result.
res = RotRight7AndReplaceLowByte(res, ptr[4]);
if (PROTOBUF_PREDICT_FALSE(res & 0x80)) {
if (ABSL_PREDICT_FALSE(res & 0x80)) {
// The proto format does not permit longer than 5-byte encodings for
// tags.
*out = 0;
@ -1037,7 +1037,7 @@ inline const char* ParseBigVarint(const char* p, uint64_t* out) {
auto pnew = p;
auto tmp = DecodeTwoBytes(&pnew);
uint64_t res = tmp >> 1;
if (PROTOBUF_PREDICT_TRUE(static_cast<std::int16_t>(tmp) >= 0)) {
if (ABSL_PREDICT_TRUE(static_cast<std::int16_t>(tmp) >= 0)) {
*out = res;
return pnew;
}
@ -1045,7 +1045,7 @@ inline const char* ParseBigVarint(const char* p, uint64_t* out) {
pnew = p + 2 * i;
tmp = DecodeTwoBytes(&pnew);
res += (static_cast<std::uint64_t>(tmp) - 2) << (14 * i - 1);
if (PROTOBUF_PREDICT_TRUE(static_cast<std::int16_t>(tmp) >= 0)) {
if (ABSL_PREDICT_TRUE(static_cast<std::int16_t>(tmp) >= 0)) {
*out = res;
return pnew;
}
@ -1130,14 +1130,14 @@ ParseContext::ParseGroupInlined(const char* ptr, uint32_t start_tag,
}
group_depth_--;
depth_++;
if (PROTOBUF_PREDICT_FALSE(!ConsumeEndGroup(start_tag))) return nullptr;
if (ABSL_PREDICT_FALSE(!ConsumeEndGroup(start_tag))) return nullptr;
return ptr;
}
inline const char* ParseContext::ReadSizeAndPushLimitAndDepthInlined(
const char* ptr, LimitToken* old_limit) {
int size = ReadSize(&ptr);
if (PROTOBUF_PREDICT_FALSE(!ptr) || depth_ <= 0) {
if (ABSL_PREDICT_FALSE(!ptr) || depth_ <= 0) {
return nullptr;
}
*old_limit = PushLimit(ptr, size);
@ -1152,7 +1152,7 @@ const char* EpsCopyInputStream::ReadRepeatedFixed(const char* ptr,
do {
out->Add(UnalignedLoad<T>(ptr));
ptr += sizeof(T);
if (PROTOBUF_PREDICT_FALSE(ptr >= limit_end_)) return ptr;
if (ABSL_PREDICT_FALSE(ptr >= limit_end_)) return ptr;
} while (UnalignedLoad<Tag>(ptr) == expected_tag && (ptr += sizeof(Tag)));
return ptr;
}

@ -354,17 +354,6 @@ static_assert(PROTOBUF_ABSL_MIN(20230125, 3),
# define PROTOC_EXPORT
#endif
#if defined(PROTOBUF_PREDICT_TRUE) || defined(PROTOBUF_PREDICT_FALSE)
#error PROTOBUF_PREDICT_(TRUE|FALSE) was previously defined
#endif
#if defined(__GNUC__)
# define PROTOBUF_PREDICT_TRUE(x) (__builtin_expect(false || (x), true))
# define PROTOBUF_PREDICT_FALSE(x) (__builtin_expect(false || (x), false))
#else
# define PROTOBUF_PREDICT_TRUE(x) (x)
# define PROTOBUF_PREDICT_FALSE(x) (x)
#endif
#ifdef PROTOBUF_RESTRICT
#error PROTOBUF_RESTRICT was previously defined
#endif

@ -33,8 +33,6 @@
#undef PROTOBUF_IGNORE_DEPRECATION_STOP
#undef PROTOBUF_RTTI
#undef PROTOBUF_FIELD_OFFSET
#undef PROTOBUF_PREDICT_TRUE
#undef PROTOBUF_PREDICT_FALSE
#undef PROTOBUF_EXPORT
#undef PROTOC_EXPORT
#undef PROTOBUF_RESTRICT

@ -13,6 +13,7 @@
#include <string>
#include <vector>
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
#include "absl/strings/str_cat.h"
@ -191,7 +192,7 @@ bool ReflectionOps::IsInitialized(const Message& message, bool check_fields,
for (const FieldDescriptor* field = begin; field != end; ++field) {
if (field->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
const Descriptor* message_type = field->message_type();
if (PROTOBUF_PREDICT_FALSE(message_type->options().map_entry())) {
if (ABSL_PREDICT_FALSE(message_type->options().map_entry())) {
if (message_type->field(1)->cpp_type() ==
FieldDescriptor::CPPTYPE_MESSAGE) {
const MapFieldBase* map_field =

@ -6,6 +6,7 @@
#include <utility>
#include "absl/base/attributes.h"
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/strings/cord.h"
#include "google/protobuf/descriptor.h"
@ -85,7 +86,7 @@ class ReflectionVisit final {
};
inline bool ShouldVisit(FieldMask mask, FieldDescriptor::CppType cpptype) {
if (PROTOBUF_PREDICT_TRUE(mask == FieldMask::kAll)) return true;
if (ABSL_PREDICT_TRUE(mask == FieldMask::kAll)) return true;
return (static_cast<uint32_t>(mask) & (1 << cpptype)) != 0;
}
@ -140,7 +141,7 @@ void ReflectionVisit::VisitFields(MessageT& message, CallbackFn&& func,
#define PROTOBUF_HANDLE_REPEATED_PTR_CASE(TYPE, CPPTYPE, NAME) \
case FieldDescriptor::TYPE_##TYPE: { \
if (PROTOBUF_PREDICT_TRUE(!field->is_map())) { \
if (ABSL_PREDICT_TRUE(!field->is_map())) { \
/* Handle repeated fields. */ \
const auto& rep = reflection->GetRawNonOneof<RepeatedPtrField<CPPTYPE>>( \
message, field); \
@ -249,7 +250,7 @@ void ReflectionVisit::VisitFields(MessageT& message, CallbackFn&& func,
} else {
auto index = has_bits_indices[i];
bool check_hasbits = has_bits && index != static_cast<uint32_t>(-1);
if (PROTOBUF_PREDICT_TRUE(check_hasbits)) {
if (ABSL_PREDICT_TRUE(check_hasbits)) {
if ((has_bits[index / 32] & (1u << (index % 32))) == 0) continue;
} else {
// Skip if it has default values.

@ -1148,7 +1148,7 @@ inline int CalculateReserveSize(int capacity, int new_size) {
}
constexpr int kMaxSizeBeforeClamp =
(std::numeric_limits<int>::max() - kHeapRepHeaderSize) / 2;
if (PROTOBUF_PREDICT_FALSE(capacity > kMaxSizeBeforeClamp)) {
if (ABSL_PREDICT_FALSE(capacity > kMaxSizeBeforeClamp)) {
return std::numeric_limits<int>::max();
}
constexpr int kSooCapacityElements = SooCapacityElements(sizeof(T));

@ -19,6 +19,7 @@
#include <new>
#include <string>
#include "absl/base/optimization.h"
#include "absl/base/prefetch.h"
#include "absl/log/absl_check.h"
#include "google/protobuf/arena.h"
@ -193,7 +194,7 @@ void RepeatedPtrFieldBase::MergeFromConcreteMessage(
const void* const* src = from.elements();
auto end = src + from.current_size_;
constexpr ptrdiff_t kPrefetchstride = 1;
if (PROTOBUF_PREDICT_FALSE(ClearedCount() > 0)) {
if (ABSL_PREDICT_FALSE(ClearedCount() > 0)) {
int recycled = MergeIntoClearedMessages(from);
dst += recycled;
src += recycled;
@ -228,7 +229,7 @@ void RepeatedPtrFieldBase::MergeFrom<MessageLite>(
auto end = src + from.current_size_;
const MessageLite* prototype = src[0];
ABSL_DCHECK(prototype != nullptr);
if (PROTOBUF_PREDICT_FALSE(ClearedCount() > 0)) {
if (ABSL_PREDICT_FALSE(ClearedCount() > 0)) {
int recycled = MergeIntoClearedMessages(from);
dst += recycled;
src += recycled;

@ -32,6 +32,7 @@
#include <utility>
#include "absl/base/attributes.h"
#include "absl/base/optimization.h"
#include "absl/base/prefetch.h"
#include "absl/log/absl_check.h"
#include "absl/meta/type_traits.h"
@ -218,7 +219,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
// TODO: arena check is redundant once all `RepeatedPtrField`s
// with non-null arena are owned by the arena.
if (PROTOBUF_PREDICT_FALSE(arena_ != nullptr)) return;
if (ABSL_PREDICT_FALSE(arena_ != nullptr)) return;
using H = CommonHandler<TypeHandler>;
int n = allocated_size();
@ -296,7 +297,7 @@ class PROTOBUF_EXPORT RepeatedPtrFieldBase {
// Pre-condition: PrepareForParse() is true.
void AddAllocatedForParse(void* value) {
ABSL_DCHECK(PrepareForParse());
if (PROTOBUF_PREDICT_FALSE(SizeAtCapacity())) {
if (ABSL_PREDICT_FALSE(SizeAtCapacity())) {
*InternalExtend(1) = value;
++rep()->allocated_size;
} else {
@ -791,7 +792,7 @@ void* RepeatedPtrFieldBase::AddInternal(Factory factory) {
return result;
}
Rep* r = rep();
if (PROTOBUF_PREDICT_FALSE(SizeAtCapacity())) {
if (ABSL_PREDICT_FALSE(SizeAtCapacity())) {
InternalExtend(1);
r = rep();
} else {

@ -90,12 +90,12 @@ class PROTOBUF_EXPORT SerialArena {
// See comments on `cached_blocks_` member for details.
PROTOBUF_ALWAYS_INLINE void* TryAllocateFromCachedBlock(size_t size) {
if (PROTOBUF_PREDICT_FALSE(size < 16)) return nullptr;
if (ABSL_PREDICT_FALSE(size < 16)) return nullptr;
// We round up to the next larger block in case the memory doesn't match
// the pattern we are looking for.
const size_t index = absl::bit_width(size - 1) - 4;
if (PROTOBUF_PREDICT_FALSE(index >= cached_block_length_)) return nullptr;
if (ABSL_PREDICT_FALSE(index >= cached_block_length_)) return nullptr;
auto& cached_head = cached_blocks_[index];
if (cached_head == nullptr) return nullptr;
@ -124,7 +124,7 @@ class PROTOBUF_EXPORT SerialArena {
}
void* ptr;
if (PROTOBUF_PREDICT_TRUE(MaybeAllocateAligned(n, &ptr))) {
if (ABSL_PREDICT_TRUE(MaybeAllocateAligned(n, &ptr))) {
return ptr;
}
return AllocateAlignedFallback(n);
@ -152,7 +152,7 @@ class PROTOBUF_EXPORT SerialArena {
// In 64-bit platforms the minimum allocation size from Repeated*Field will
// be 16 guaranteed.
if (sizeof(void*) < 8) {
if (PROTOBUF_PREDICT_FALSE(size < 16)) return;
if (ABSL_PREDICT_FALSE(size < 16)) return;
} else {
PROTOBUF_ASSUME(size >= 16);
}
@ -162,7 +162,7 @@ class PROTOBUF_EXPORT SerialArena {
// on the repeated field.
const size_t index = absl::bit_width(size) - 5;
if (PROTOBUF_PREDICT_FALSE(index >= cached_block_length_)) {
if (ABSL_PREDICT_FALSE(index >= cached_block_length_)) {
// We can't put this object on the freelist so make this object the
// freelist. It is guaranteed it is larger than the one we have, and
// large enough to hold another allocation of `size`.
@ -205,8 +205,8 @@ class PROTOBUF_EXPORT SerialArena {
// ret + n may point out of the block bounds, or ret may be nullptr.
// Both computations have undefined behavior when done on pointers,
// so do them on uintptr_t instead.
if (PROTOBUF_PREDICT_FALSE(reinterpret_cast<uintptr_t>(ret) + n >
reinterpret_cast<uintptr_t>(limit_))) {
if (ABSL_PREDICT_FALSE(reinterpret_cast<uintptr_t>(ret) + n >
reinterpret_cast<uintptr_t>(limit_))) {
return false;
}
PROTOBUF_UNPOISON_MEMORY_REGION(ret, n);
@ -231,8 +231,8 @@ class PROTOBUF_EXPORT SerialArena {
n = ArenaAlignDefault::Ceil(n);
char* ret = ArenaAlignAs(align).CeilDefaultAligned(ptr());
// See the comment in MaybeAllocateAligned re uintptr_t.
if (PROTOBUF_PREDICT_FALSE(reinterpret_cast<uintptr_t>(ret) + n >
reinterpret_cast<uintptr_t>(limit_))) {
if (ABSL_PREDICT_FALSE(reinterpret_cast<uintptr_t>(ret) + n >
reinterpret_cast<uintptr_t>(limit_))) {
return AllocateAlignedWithCleanupFallback(n, align, destructor);
}
PROTOBUF_UNPOISON_MEMORY_REGION(ret, n);
@ -283,9 +283,9 @@ class PROTOBUF_EXPORT SerialArena {
static const char* MaybePrefetchImpl(const ptrdiff_t prefetch_degree,
const char* next, const char* limit,
const char* prefetch_ptr) {
if (PROTOBUF_PREDICT_TRUE(prefetch_ptr - next > prefetch_degree))
if (ABSL_PREDICT_TRUE(prefetch_ptr - next > prefetch_degree))
return prefetch_ptr;
if (PROTOBUF_PREDICT_TRUE(prefetch_ptr < limit)) {
if (ABSL_PREDICT_TRUE(prefetch_ptr < limit)) {
prefetch_ptr = std::max(next, prefetch_ptr);
ABSL_DCHECK(prefetch_ptr != nullptr);
const char* end = std::min(limit, prefetch_ptr + prefetch_degree);
@ -411,7 +411,7 @@ class PROTOBUF_EXPORT SerialArena {
PROTOBUF_ALWAYS_INLINE bool SerialArena::MaybeAllocateString(void*& p) {
// Check how many unused instances are in the current block.
size_t unused_bytes = string_block_unused_.load(std::memory_order_relaxed);
if (PROTOBUF_PREDICT_TRUE(unused_bytes != 0)) {
if (ABSL_PREDICT_TRUE(unused_bytes != 0)) {
unused_bytes -= sizeof(std::string);
string_block_unused_.store(unused_bytes, std::memory_order_relaxed);
p = string_block_.load(std::memory_order_relaxed)->AtOffset(unused_bytes);

@ -30,7 +30,7 @@ namespace util {
do { \
/* Using _status below to avoid capture problems if expr is "status". */ \
const absl::Status _status = (expr); \
if (PROTOBUF_PREDICT_FALSE(!_status.ok())) return _status; \
if (ABSL_PREDICT_FALSE(!_status.ok())) return _status; \
} while (0)
// Internal helper for concatenating macro values.
@ -47,7 +47,7 @@ absl::Status DoAssignOrReturn(T& lhs, absl::StatusOr<T> result) {
#define ASSIGN_OR_RETURN_IMPL(status, lhs, rexpr) \
absl::Status status = DoAssignOrReturn(lhs, (rexpr)); \
if (PROTOBUF_PREDICT_FALSE(!status.ok())) return status;
if (ABSL_PREDICT_FALSE(!status.ok())) return status;
// Executes an expression that returns a util::StatusOr, extracting its value
// into the variable defined by lhs (or returning on error).

@ -17,6 +17,7 @@
#include <vector>
#include "absl/base/attributes.h"
#include "absl/base/optimization.h"
#include "absl/synchronization/mutex.h"
#include "google/protobuf/arena_align.h"
#include "google/protobuf/arena_allocation_policy.h"
@ -68,7 +69,7 @@ class PROTOBUF_EXPORT ThreadSafeArena {
template <AllocationClient alloc_client = AllocationClient::kDefault>
void* AllocateAligned(size_t n) {
SerialArena* arena;
if (PROTOBUF_PREDICT_TRUE(GetSerialArenaFast(&arena))) {
if (ABSL_PREDICT_TRUE(GetSerialArenaFast(&arena))) {
return arena->AllocateAligned<alloc_client>(n);
} else {
return AllocateAlignedFallback<alloc_client>(n);
@ -77,7 +78,7 @@ class PROTOBUF_EXPORT ThreadSafeArena {
void ReturnArrayMemory(void* p, size_t size) {
SerialArena* arena = nullptr;
if (PROTOBUF_PREDICT_TRUE(GetSerialArenaFast(&arena))) {
if (ABSL_PREDICT_TRUE(GetSerialArenaFast(&arena))) {
arena->ReturnArrayMemory(p, size);
}
}
@ -89,7 +90,7 @@ class PROTOBUF_EXPORT ThreadSafeArena {
// code for the happy path.
PROTOBUF_NDEBUG_INLINE bool MaybeAllocateAligned(size_t n, void** out) {
SerialArena* arena = nullptr;
if (PROTOBUF_PREDICT_TRUE(GetSerialArenaFast(&arena))) {
if (ABSL_PREDICT_TRUE(GetSerialArenaFast(&arena))) {
return arena->MaybeAllocateAligned(n, out);
}
return false;
@ -175,7 +176,7 @@ class PROTOBUF_EXPORT ThreadSafeArena {
// This fast path optimizes the case where multiple threads allocate from
// the same arena.
ThreadCache* tc = &thread_cache();
if (PROTOBUF_PREDICT_TRUE(tc->last_lifecycle_id_seen == tag_and_id_)) {
if (ABSL_PREDICT_TRUE(tc->last_lifecycle_id_seen == tag_and_id_)) {
*arena = tc->last_serial_arena;
return true;
}

@ -15,6 +15,7 @@
#include <utility>
// Must be included last.
#include "absl/base/optimization.h"
#include "google/protobuf/port_def.inc"
namespace google {
@ -78,47 +79,47 @@ PROTOBUF_ALWAYS_INLINE const char* ShiftMixParseVarint(const char* p,
int64_t res2, res3; // accumulated result chunks
res1 = next();
if (PROTOBUF_PREDICT_TRUE(res1 >= 0)) return p;
if (ABSL_PREDICT_TRUE(res1 >= 0)) return p;
if (limit <= 1) goto limit0;
// Densify all ops with explicit FALSE predictions from here on, except that
// we predict length = 5 as a common length for fields like timestamp.
if (PROTOBUF_PREDICT_FALSE(VarintShl<1>(next(), res1, res2))) goto done1;
if (ABSL_PREDICT_FALSE(VarintShl<1>(next(), res1, res2))) goto done1;
if (limit <= 2) goto limit1;
if (PROTOBUF_PREDICT_FALSE(VarintShl<2>(next(), res1, res3))) goto done2;
if (ABSL_PREDICT_FALSE(VarintShl<2>(next(), res1, res3))) goto done2;
if (limit <= 3) goto limit2;
if (PROTOBUF_PREDICT_FALSE(VarintShlAnd<3>(next(), res1, res2))) goto done2;
if (ABSL_PREDICT_FALSE(VarintShlAnd<3>(next(), res1, res2))) goto done2;
if (limit <= 4) goto limit2;
if (PROTOBUF_PREDICT_TRUE(VarintShlAnd<4>(next(), res1, res3))) goto done2;
if (ABSL_PREDICT_TRUE(VarintShlAnd<4>(next(), res1, res3))) goto done2;
if (limit <= 5) goto limit2;
if (kIs64BitVarint) {
if (PROTOBUF_PREDICT_FALSE(VarintShlAnd<5>(next(), res1, res2))) goto done2;
if (ABSL_PREDICT_FALSE(VarintShlAnd<5>(next(), res1, res2))) goto done2;
if (limit <= 6) goto limit2;
if (PROTOBUF_PREDICT_FALSE(VarintShlAnd<6>(next(), res1, res3))) goto done2;
if (ABSL_PREDICT_FALSE(VarintShlAnd<6>(next(), res1, res3))) goto done2;
if (limit <= 7) goto limit2;
if (PROTOBUF_PREDICT_FALSE(VarintShlAnd<7>(next(), res1, res2))) goto done2;
if (ABSL_PREDICT_FALSE(VarintShlAnd<7>(next(), res1, res2))) goto done2;
if (limit <= 8) goto limit2;
if (PROTOBUF_PREDICT_FALSE(VarintShlAnd<8>(next(), res1, res3))) goto done2;
if (ABSL_PREDICT_FALSE(VarintShlAnd<8>(next(), res1, res3))) goto done2;
if (limit <= 9) goto limit2;
} else {
// An overlong int32 is expected to span the full 10 bytes
if (PROTOBUF_PREDICT_FALSE(!(next() & 0x80))) goto done2;
if (ABSL_PREDICT_FALSE(!(next() & 0x80))) goto done2;
if (limit <= 6) goto limit2;
if (PROTOBUF_PREDICT_FALSE(!(next() & 0x80))) goto done2;
if (ABSL_PREDICT_FALSE(!(next() & 0x80))) goto done2;
if (limit <= 7) goto limit2;
if (PROTOBUF_PREDICT_FALSE(!(next() & 0x80))) goto done2;
if (ABSL_PREDICT_FALSE(!(next() & 0x80))) goto done2;
if (limit <= 8) goto limit2;
if (PROTOBUF_PREDICT_FALSE(!(next() & 0x80))) goto done2;
if (ABSL_PREDICT_FALSE(!(next() & 0x80))) goto done2;
if (limit <= 9) goto limit2;
}
// For valid 64bit varints, the 10th byte/ptr[9] should be exactly 1. In this
// case, the continuation bit of ptr[8] already set the top bit of res3
// correctly, so all we have to do is check that the expected case is true.
if (PROTOBUF_PREDICT_TRUE(next() == 1)) goto done2;
if (ABSL_PREDICT_TRUE(next() == 1)) goto done2;
if (PROTOBUF_PREDICT_FALSE(last() & 0x80)) {
if (ABSL_PREDICT_FALSE(last() & 0x80)) {
// If the continue bit is set, it is an unterminated varint.
return nullptr;
}

@ -19,6 +19,7 @@
#include <vector>
#include "absl/base/attributes.h"
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
#include "absl/strings/cord.h"
@ -734,7 +735,7 @@ struct WireFormat::MessageSetParser {
while (!ctx->Done(&ptr)) {
uint32_t tag;
ptr = ReadTag(ptr, &tag);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (tag == 0 || (tag & 7) == WireFormatLite::WIRETYPE_END_GROUP) {
ctx->SetLastTag(tag);
break;
@ -758,7 +759,7 @@ struct WireFormat::MessageSetParser {
ptr = WireFormat::_InternalParseAndMergeField(msg, ptr, ctx, tag,
reflection, field);
}
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
}
return ptr;
}
@ -819,7 +820,7 @@ const char* WireFormat::_InternalParse(Message* msg, const char* ptr,
while (!ctx->Done(&ptr)) {
uint32_t tag;
ptr = ReadTag(ptr, &tag);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (tag == 0 || (tag & 7) == WireFormatLite::WIRETYPE_END_GROUP) {
ctx->SetLastTag(tag);
break;
@ -840,7 +841,7 @@ const char* WireFormat::_InternalParse(Message* msg, const char* ptr,
}
ptr = _InternalParseAndMergeField(msg, ptr, ctx, tag, reflection, field);
if (PROTOBUF_PREDICT_FALSE(ptr == nullptr)) return nullptr;
if (ABSL_PREDICT_FALSE(ptr == nullptr)) return nullptr;
}
return ptr;
}

@ -25,6 +25,7 @@
#include <utility>
#include "absl/base/casts.h"
#include "absl/base/optimization.h"
#include "absl/log/absl_check.h"
#include "absl/strings/string_view.h"
#include "google/protobuf/arenastring.h"

Loading…
Cancel
Save