Merging with main and addressing latest round of comments

ghaPassFail
Deanna Garcia 4 months ago
parent 8aa4407664
commit 373af8ac98
  1. 1
      .bcr/presubmit.yml
  2. 3
      .github/workflows/janitor.yml
  3. 18
      .github/workflows/staleness_check.yml
  4. 24
      .github/workflows/test_bazel.yml
  5. 133
      .github/workflows/test_cpp.yml
  6. 49
      .github/workflows/test_java.yml
  7. 45
      .github/workflows/test_objectivec.yml
  8. 35
      .github/workflows/test_php.yml
  9. 17
      .github/workflows/test_php_ext.yml
  10. 21
      .github/workflows/test_python.yml
  11. 55
      .github/workflows/test_ruby.yml
  12. 39
      .github/workflows/test_runner.yml
  13. 91
      .github/workflows/test_upb.yml
  14. 2
      CMakeLists.txt
  15. 35
      MODULE.bazel
  16. 2
      Protobuf-C++.podspec
  17. 7
      Protobuf.podspec
  18. 19
      WORKSPACE
  19. 40
      WORKSPACE.bzlmod
  20. 6
      bazel/common/BUILD.bazel
  21. 351
      bazel/common/proto_common.bzl
  22. 4
      bazel/common/proto_lang_toolchain_info.bzl
  23. 36
      bazel/private/BUILD.bazel
  24. 35
      bazel/private/native_bool_flag.bzl
  25. 50
      bazel/private/proto_bazel_features.bzl
  26. 154
      bazel/private/proto_lang_toolchain_rule.bzl
  27. 357
      bazel/private/proto_library_rule.bzl
  28. 49
      bazel/private/toolchain_helpers.bzl
  29. 1
      bazel/private/upb_proto_library_internal/aspect.bzl
  30. 21
      bazel/proto_library.bzl
  31. 3
      bazel/tests/BUILD
  32. 361
      bazel/tests/proto_common_compile_tests.bzl
  33. 130
      bazel/tests/testdata/BUILD
  34. 50
      bazel/tests/testdata/compile_rule.bzl
  35. 10
      bazel/toolchains/proto_lang_toolchain.bzl
  36. 1
      bazel/upb_c_proto_library.bzl
  37. 1
      bazel/upb_minitable_proto_library.bzl
  38. 2
      bazel/upb_proto_reflection_library.bzl
  39. 19
      cmake/install.cmake
  40. 10
      cmake/upb.pc.cmake
  41. 2
      conformance/binary_json_conformance_suite.cc
  42. 2
      csharp/Google.Protobuf.Tools.nuspec
  43. 3
      csharp/src/Google.Protobuf/Google.Protobuf.csproj
  44. 1
      editions/BUILD
  45. 2
      editions/codegen_tests/BUILD
  46. 27
      editions/codegen_tests/rust_bazel_crate_mapping.txt
  47. 3
      editions/defaults.bzl
  48. 28
      editions/golden/compare_cpp_codegen_failure.txt
  49. 4
      editions/golden/compare_cpp_codegen_failure.xml
  50. 47
      hpb/BUILD
  51. 4
      hpb/bazel/upb_cc_proto_library.bzl
  52. 6
      hpb/extension_lock.cc
  53. 4
      hpb/extension_lock.h
  54. 20
      hpb/extension_lock_test.cc
  55. 8
      hpb/hpb.cc
  56. 85
      hpb/hpb.h
  57. 4
      hpb/internal.h
  58. 21
      hpb/internal_test.cc
  59. 6
      hpb/repeated_field.h
  60. 2
      hpb/repeated_field_iterator.h
  61. 27
      hpb/repeated_field_iterator_test.cc
  62. 4
      hpb/requires.h
  63. 4
      hpb/traits.h
  64. 5
      hpb_generator/BUILD
  65. 6
      hpb_generator/README.md
  66. 17
      hpb_generator/gen_accessors.cc
  67. 17
      hpb_generator/gen_accessors.h
  68. 13
      hpb_generator/gen_enums.cc
  69. 15
      hpb_generator/gen_enums.h
  70. 13
      hpb_generator/gen_extensions.cc
  71. 15
      hpb_generator/gen_extensions.h
  72. 24
      hpb_generator/gen_messages.cc
  73. 15
      hpb_generator/gen_messages.h
  74. 22
      hpb_generator/gen_repeated_fields.cc
  75. 16
      hpb_generator/gen_repeated_fields.h
  76. 12
      hpb_generator/gen_utils.cc
  77. 16
      hpb_generator/gen_utils.h
  78. 15
      hpb_generator/names.cc
  79. 17
      hpb_generator/names.h
  80. 9
      hpb_generator/output.cc
  81. 11
      hpb_generator/output.h
  82. 23
      hpb_generator/protoc-gen-upb-protos.cc
  83. 13
      hpb_generator/tests/BUILD
  84. 4
      hpb_generator/tests/child_model.proto
  85. 2
      hpb_generator/tests/legacy-name.proto
  86. 2
      hpb_generator/tests/naming_conflict.proto
  87. 4
      hpb_generator/tests/no_package_enum_user.proto
  88. 2
      hpb_generator/tests/test_enum.proto
  89. 4
      hpb_generator/tests/test_extension.proto
  90. 116
      hpb_generator/tests/test_generated.cc
  91. 4
      hpb_generator/tests/test_model.proto
  92. 2
      java/bom/pom.xml
  93. 2
      java/core/pom.xml
  94. 4
      java/core/src/main/java/com/google/protobuf/AbstractMessage.java
  95. 3
      java/core/src/main/java/com/google/protobuf/ArrayDecoders.java
  96. 5
      java/core/src/main/java/com/google/protobuf/ByteString.java
  97. 4
      java/core/src/main/java/com/google/protobuf/DebugFormat.java
  98. 2
      java/core/src/main/java/com/google/protobuf/Descriptors.java
  99. 3
      java/core/src/main/java/com/google/protobuf/ExtensionRegistry.java
  100. 101
      java/core/src/main/java/com/google/protobuf/FieldSet.java
  101. Some files were not shown because too many files have changed in this diff Show More

@ -14,7 +14,6 @@ tasks:
- '@protobuf//:protobuf'
- '@protobuf//:protobuf_lite'
- '@protobuf//:protobuf_python'
- '@protobuf//:protobuf_rust'
- '@protobuf//:protoc'
- '@protobuf//:test_messages_proto2_cc_proto'
- '@protobuf//:test_messages_proto3_cc_proto'

@ -45,7 +45,8 @@ jobs:
This issue is labeled `inactive` because the last activity was over
90 days ago.
90 days ago. This issue will be closed and archived after 14
additional days without activity.
close-issue-message: >
We triage inactive PRs and issues in order to make it easier to find
active work. If this issue should remain active or becomes active

@ -6,6 +6,11 @@ on:
- cron: 0 10 * * *
workflow_call:
inputs:
continuous-run:
required: true
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: false
description: "The SHA key for the commit we want to run over"
@ -18,17 +23,22 @@ jobs:
strategy:
fail-fast: false
matrix:
branch: [main, 25.x, 27.x]
branch: [main, 25.x, 27.x, 28.x]
os: [{ name: Linux, value: ubuntu-latest}]
exclude:
# If we are in a presubmit run, only test main
- branch: ${{ !inputs.continuous-run && '25.x' }}
- branch: ${{ !inputs.continuous-run && '27.x' }}
- branch: ${{ !inputs.continuous-run && '28.x' }}
name: Test staleness ${{ matrix.os.name }} ${{ github.head_ref && 'PR' || matrix.branch }}
name: Test staleness ${{ matrix.os.name }} ${{ matrix.branch }}
runs-on: ${{ matrix.os.value }}
if: ${{ github.event.repository.full_name == 'protocolbuffers/protobuf' }}
steps:
- name: Checkout ${{ github.head_ref && 'PR' || matrix.branch }}
- name: Checkout ${{ matrix.branch }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout || github.head_ref || matrix.branch }}
ref: ${{ inputs.safe-checkout || matrix.branch }}
- name: Mark runs associated with commits
if: ${{ github.event_name != 'schedule' && github.event_name != 'workflow_dispatch' }}

@ -3,9 +3,10 @@ name: Bazel Tests
on:
workflow_call:
inputs:
test-type:
continuous-run:
required: true
description: "The type of test this is run from -- presubmit or continuous"
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
@ -27,39 +28,36 @@ jobs:
runner: [ ubuntu, windows, macos ]
bazelversion: [ '7.1.2' ]
bzlmod: [true, false ]
presubmit: [ true ]
continuous-only: [ false ]
include:
- runner: ubuntu
bazelversion: '6.4.0'
bzlmod: true
presubmit: false
- runner: ubuntu
bazelversion: '6.4.0'
# Not running Bazel 6 with bzlmod, because it doesn't support use_repo_rule in rules_jvm_external
bzlmod: false
presubmit: false
continuous-only: true
runs-on: ${{ matrix.runner }}-latest
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} Examples ${{ matrix.runner }} ${{ matrix.bazelversion }}${{ matrix.bzlmod && ' (bzlmod)' || '' }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Examples ${{ matrix.runner }} ${{ matrix.bazelversion }}${{ matrix.bzlmod && ' (bzlmod)' || '' }}
steps:
- name: Checkout pending changes
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Windows startup flags
if: ${{ runner.os == 'Windows' && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run) }}
working-directory: examples
shell: bash
run: echo "startup --output_user_root=C:/ --windows_enable_symlinks" >> .bazelrc
- name: Configure Bazel version
if: ${{ matrix.presubmit || inputs.test-type == 'continous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
working-directory: examples
shell: bash
run: echo "${{ matrix.bazelversion }}" > .bazelversion
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}

@ -3,9 +3,10 @@ name: C++ Tests
on:
workflow_call:
inputs:
test-type:
continuous-run:
required: true
description: "The type of test this is run from -- presubmit or continuous"
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
@ -26,44 +27,52 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
config:
- { name: Optimized, flags: --config=opt, presubmit: true }
- { name: Debug, flags: --config=dbg, presubmit: false }
- { name: ASAN, flags: --config=asan, runner: ubuntu-20-large, presubmit: true }
- { name: MSAN, flags: --config=docker-msan, runner: ubuntu-20-large, presubmit: false }
- { name: TSAN, flags: --config=tsan, runner: ubuntu-20-large, presubmit: false }
- { name: UBSAN, flags: --config=ubsan, presubmit: false }
- { name: No-RTTI, flags: --cxxopt=-fno-rtti, presubmit: false }
- { name: Optimized, flags: --config=opt }
- { name: Debug, flags: --config=dbg, continuous-only: true }
- { name: ASAN, flags: --config=asan, runner: ubuntu-22-4core }
- { name: MSAN, flags: --config=docker-msan, runner: ubuntu-22-4core, continuous-only: true }
- { name: TSAN, flags: --config=tsan, runner: ubuntu-22-4core, continuous-only: true }
- { name: UBSAN, flags: --config=ubsan, continuous-only: true }
- { name: No-RTTI, flags: --cxxopt=-fno-rtti, continuous-only: true }
include:
# Set defaults
- image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize@sha256:3d959f731dc5c54af4865c31ee2bd581ec40028adcdf4c038f3122581f595191
- targets: //pkg/... //src/... @com_google_protobuf_examples//... //third_party/utf8_range/...
# Override cases with custom images
- config: { name: "Bazel7" }
- config: { name: "Bazel7", flags: --noenable_bzlmod }
cache_key: Bazel7
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.2-cf84e92285ca133b9c8104ad7b14d70e953cbb8e"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "Bazel7 with Bzlmod", flags: --enable_bzlmod --enable_workspace }
cache_key: Bazel7bzlmod
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.2-cf84e92285ca133b9c8104ad7b14d70e953cbb8e"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "TCMalloc" }
cache_key: TcMalloc
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/tcmalloc@sha256:1c5133455481f4d1bb8afa477029604f41f1a3c46cebe4d9958cf1af95b5c87c"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "aarch64" }
cache_key: TcMalloc
targets: "//src/... //src/google/protobuf/compiler:protoc_aarch64_test //third_party/utf8_range/..."
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:6.3.0-aarch64-68e662b3a56b881804dc4e9d45f949791cbc4b94"
name: ${{ !matrix.config.presubmit && inputs.continuous-prefix || '' }} Linux ${{ matrix.config.name }}
name: ${{ matrix.config.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.config.name }}
runs-on: ${{ matrix.config.runner || 'ubuntu-latest' }}
steps:
- name: Checkout pending changes
if: ${{ matrix.config.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
if: ${{ matrix.config.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: cpp_linux/${{ matrix.config.name }}
bazel-cache: cpp_linux/${{ matrix.cache_key }}
bazel: test ${{ matrix.targets }} ${{ matrix.config.flags }}
exclude-targets: ${{ matrix.exclude-targets }}
linux-gcc:
strategy:
@ -91,7 +100,7 @@ jobs:
matrix:
arch: [x86_64, aarch64]
name: Linux Release ${{ matrix.arch}}
runs-on: ubuntu-20-large
runs-on: ubuntu-20-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@ -132,38 +141,38 @@ jobs:
matrix:
include:
- flags: -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=14
presubmit: true
continuous-only: false
- name: Ninja
flags: -G Ninja -DCMAKE_CXX_STANDARD=14
presubmit: false
continuous-only: true
- name: Shared
flags: -Dprotobuf_BUILD_SHARED_LIBS=ON -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=14
presubmit: false
continuous-only: true
- name: C++17
flags: -DCMAKE_CXX_STANDARD=17
presubmit: true
continuous-only: false
# TODO Re-enable this.
#- name: C++20
# flags: -DCMAKE_CXX_STANDARD=20
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} Linux CMake ${{ matrix.name}}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux CMake ${{ matrix.name}}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup sccache
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
@ -209,7 +218,7 @@ jobs:
linux-cmake-examples:
name: Linux CMake Examples
# Skip this test on presubmit
if: ${{ inputs.test-type == 'continuous' }}
if: ${{ inputs.continuous-run }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
@ -244,33 +253,33 @@ jobs:
include:
- name: C++14
flags: -DCMAKE_CXX_STANDARD=14
presubmit: true
continuous-only: false
- name: C++17
flags: -DCMAKE_CXX_STANDARD=17
presubmit: false
continuous-only: true
- name: C++20
flags: -DCMAKE_CXX_STANDARD=20
presubmit: true
continuous-only: false
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} Linux CMake GCC ${{ matrix.name }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux CMake GCC ${{ matrix.name }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup sccache
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake-gcc
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:12.2-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17
@ -350,41 +359,41 @@ jobs:
os: macos-12
cache_key: macos-12
bazel: test //src/... //third_party/utf8_range/...
presubmit: true
continuous-only: false
- name: MacOS Bazel 7
os: macos-12
cache_key: macos-12-bazel7
bazel: test //src/... //third_party/utf8_range/...
bazel_version: '7.1.2'
presubmit: false
continuous-only: true
- name: MacOS Apple Silicon (build only) Bazel
os: macos-12
cache_key: macos-12-arm
# Current github runners are all Intel based, so just build/compile
# for Apple Silicon to detect issues there.
bazel: build --cpu=darwin_arm64 //src/... //third_party/utf8_range/...
presubmit: true
continuous-only: false
- name: Windows Bazel
os: windows-2022
cache_key: windows-2022
bazel: test //src/... @com_google_protobuf_examples//... --test_tag_filters=-conformance --build_tag_filters=-conformance
presubmit: true
continuous-only: false
- name: Windows Bazel 7
os: windows-2022
cache_key: windows-2022-bazel7
bazel: test //src/... @com_google_protobuf_examples//... --test_tag_filters=-conformance --build_tag_filters=-conformance
bazel_version: '7.1.2'
presubmit: false
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} ${{ matrix.name }}
continuous-only: true
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} ${{ matrix.name }}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout pending changes
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
@ -410,7 +419,7 @@ jobs:
-Dprotobuf_BUILD_EXAMPLES=ON
vsversion: '2022'
cache-prefix: windows-2022-cmake
presubmit: false
continuous-only: true
- name: Windows CMake 2019
os: windows-2019
flags: >-
@ -421,7 +430,7 @@ jobs:
cache-prefix: windows-2019-cmake
# windows-2019 has python3.7 installed, which is incompatible with the latest gcloud
python-version: '3.9'
presubmit: false
continuous-only: true
- name: Windows CMake 32-bit
os: windows-2022
flags: >-
@ -429,7 +438,7 @@ jobs:
vsversion: '2022'
windows-arch: 'win32'
cache-prefix: windows-2022-win32-cmake
presubmit: false
continuous-only: true
- name: Windows CMake Shared
os: windows-2022
flags: >-
@ -437,7 +446,7 @@ jobs:
-Dprotobuf_BUILD_SHARED_LIBS=ON
vsversion: '2022'
cache-prefix: windows-2022-cmake
presubmit: true
continuous-only: false
- name: Windows CMake Install
os: windows-2022
install-flags: -G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF -Dprotobuf_BUILD_TESTS=OFF
@ -447,37 +456,43 @@ jobs:
-Dprotobuf_BUILD_PROTOBUF_BINARIES=OFF
vsversion: '2022'
cache-prefix: windows-2022-cmake
presubmit: false
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} ${{ matrix.name }}
continuous-only: true
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} ${{ matrix.name }}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout pending changes
if: ${{ runner.os == 'Windows' && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run) }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup MSVC
if: ${{ runner.os == 'Windows' && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run) }}
uses: ilammy/msvc-dev-cmd@cec98b9d092141f74527d0afa6feb2af698cfe89 # v1.12.1
with:
arch: ${{ matrix.windows-arch || 'x64' }}
vsversion: ${{ matrix.vsversion }}
# Workaround for Abseil incompatibility with CMake 3.30 (b/352354235).
- name: Downgrade CMake
if: ${{ runner.os == 'Windows' }}
run: choco install cmake --version 3.29.6 --force
shell: bash
# Workaround for incompatibility between gcloud and windows-2019 runners.
- name: Install Python
if: ${{ matrix.python-version && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ matrix.python-version && (!matrix.continuous-only || inputs.continuous-run) }}
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: ${{ matrix.python-version }}
- name: Use custom python for gcloud
if: ${{ matrix.python-version && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ matrix.python-version && (!matrix.continuous-only || inputs.continuous-run) }}
run: echo "CLOUDSDK_PYTHON=${Python3_ROOT_DIR}\\python3" >> $GITHUB_ENV
shell: bash
- name: Setup sccache
if: ${{ runner.os == 'Windows' && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run) }}
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: ${{ matrix.cache-prefix }}
@ -485,46 +500,46 @@ jobs:
# Install phase.
- name: Configure CMake for install
if: ${{ matrix.install-flags && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ matrix.install-flags && (!matrix.continuous-only || inputs.continuous-run) }}
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: cmake . ${{ matrix.install-flags }} ${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_ALLOW_CCACHE=ON
- name: Build for install
if: ${{ matrix.install-flags && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ matrix.install-flags && (!matrix.continuous-only || inputs.continuous-run) }}
shell: bash
run: VERBOSE=1 cmake --build . --parallel 20
- name: Install
if: ${{ matrix.install-flags && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ matrix.install-flags && (!matrix.continuous-only || inputs.continuous-run) }}
shell: bash
run: cmake --build . --target install
- name: Report and clear sccache stats
if: ${{ matrix.install-flags && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ matrix.install-flags && (!matrix.continuous-only || inputs.continuous-run) }}
shell: bash
run: sccache -s && sccache -z
- name: Clear CMake cache
if: ${{ matrix.install-flags && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ matrix.install-flags && (!matrix.continuous-only || inputs.continuous-run) }}
shell: bash
run: cmake --build . --target clean && rm CMakeCache.txt
- name: Configure CMake
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: cmake . ${{ matrix.flags }} ${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_ALLOW_CCACHE=ON
- name: Build
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
shell: bash
run: VERBOSE=1 cmake --build . --parallel 20
- name: Test
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
shell: bash
run: ctest --verbose --parallel 20 -C Debug
- name: Report sccache stats
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
shell: bash
run: sccache -s

@ -3,9 +3,10 @@ name: Java Tests
on:
workflow_call:
inputs:
test-type:
continuous-run:
required: true
description: "The type of test this is run from -- presubmit or continuous"
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
@ -27,44 +28,54 @@ jobs:
matrix:
include:
- name: OpenJDK 8
version: '8'
cache_key: '8'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:8-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
# TODO: b/318555165 - enable the layering check. Currently it does
# not work correctly with the toolchain in this Docker image.
targets: //java/... //java/internal:java_version --features=-layering_check
presubmit: false
targets: //java/... //java/internal:java_version //compatibility/... --features=-layering_check
continuous-only: true
- name: OpenJDK 11
version: '11'
cache_key: '11'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:11-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
targets: //java/... //java/internal:java_version
presubmit: false
targets: //java/... //java/internal:java_version //compatibility/...
continuous-only: true
- name: OpenJDK 17
version: '17'
cache_key: '17'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:17-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
targets: //java/... //java/internal:java_version
presubmit: true
targets: //java/... //java/internal:java_version //compatibility/...
continuous-only: false
- name: Bazel7
cache_key: 'bazel7nobzlmod'
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.2-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
targets: //java/... //java/internal:java_version //compatibility/...
flags: --noenable_bzlmod
- name: Bazel7 with Bzlmod
cache_key: 'bazel7bzlmod'
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.2-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
targets: //java/... //java/internal:java_version //compatibility/...
flags: --enable_bzlmod --enable_workspace
- name: aarch64
version: 'aarch64'
cache_key: 'aarch64'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-63dd26c0c7a808d92673a3e52e848189d4ab0f17
targets: //java/... //src/google/protobuf/compiler:protoc_aarch64_test
presubmit: true
targets: //java/... //compatibility/... //src/google/protobuf/compiler:protoc_aarch64_test
continuous-only: false
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} Linux ${{ matrix.name }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.name }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
image: ${{ matrix.image }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: java_linux/${{ matrix.version }}
bazel: test ${{ matrix.targets }} --test_env=KOKORO_JAVA_VERSION
bazel-cache: java_linux/${{ matrix.cache_key }}
bazel: test ${{ matrix.targets }} ${{ matrix.flags }} --test_env=KOKORO_JAVA_VERSION
# TODO restore this test (or a better one) when gRPC has rebuilt with 26.x
# linkage-monitor:

@ -3,9 +3,10 @@ name: Objective-C Tests
on:
workflow_call:
inputs:
test-type:
continuous-run:
required: true
description: "The type of test this is run from -- presubmit or continuous"
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
@ -38,30 +39,30 @@ jobs:
xc_project: "ProtocolBuffers_iOS.xcodeproj"
# We run presubmits on all "Debug" entries, but not on "Release" entries
- xc_config: "Debug"
presubmit: true
continuous-only: false
- xc_config: "Release"
presubmit: false
continuous-only: true
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} Xcode ${{ matrix.platform}} ${{ matrix.xc_config }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Xcode ${{ matrix.platform}} ${{ matrix.xc_config }}
runs-on: macos-12
env:
DEVELOPER_DIR: /Applications/Xcode_14.1.app/Contents/Developer
steps:
- name: Checkout pending changes
if: ${{ !matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup ccache
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/ccache@v3
with:
cache-prefix: objectivec_${{ matrix.platform }}_${{ matrix.xc_config }}
support-modules: true
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bash@v3
env:
CC: ${{ github.workspace }}/ci/clang_wrapper
@ -78,7 +79,7 @@ jobs:
| xcpretty
- name: Report ccache stats
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
shell: bash
run: ccache -s -v
@ -96,22 +97,22 @@ jobs:
XCODE: "15.2"
# We run presubmits on all "Debug" entries, but not on "Release" entries
- CONFIGURATION: "Debug"
presubmit: true
continuous-only: false
- CONFIGURATION: "Release"
presubmit: false
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} CocoaPods ${{ matrix.PLATFORM }} ${{ matrix.CONFIGURATION }}
continuous-only: true
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} CocoaPods ${{ matrix.PLATFORM }} ${{ matrix.CONFIGURATION }}
runs-on: ${{ matrix.OS }}
steps:
- name: Checkout pending changes
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Xcode version
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: sudo xcode-select -switch /Applications/Xcode_${{ matrix.XCODE }}.app
- name: Pod lib lint
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
@ -131,36 +132,36 @@ jobs:
- name: Optimized
flags: --config=opt
bazel_action: test
presubmit: false
continuous-only: true
- name: Debug
flags: --config=dbg
bazel_action: test
presubmit: true
continuous-only: false
# Current github runners are all Intel based, so just build/compile
# for Apple Silicon to detect issues there.
- name: Apple_Silicon_Optimized
flags: --config=opt --cpu=darwin_arm64
bazel_action: build
presubmit: false
continuous-only: true
- name: Apple_Silicon_Debug
flags: --config=dbg --cpu=darwin_arm64
bazel_action: build
presubmit: true
continuous-only: false
# TODO: Could add iOS to at least build the objc_library targets for that.
platform: ["macOS"]
include:
- platform: "macOS"
bazel_targets: //objectivec/...
name: ${{ !matrix.config.presubmit && inputs.continuous-prefix || '' }} Bazel ${{ matrix.platform }} ${{ matrix.config.name }}
name: ${{ matrix.config.continuous-only && inputs.continuous-prefix || '' }} Bazel ${{ matrix.platform }} ${{ matrix.config.name }}
runs-on: macos-12
steps:
- name: Checkout pending changes
if: ${{ matrix.config.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: bazel ${{ matrix.config.bazel_action }}
if: ${{ matrix.config.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}

@ -4,9 +4,10 @@ name: PHP Tests
on:
workflow_call:
inputs:
test-type:
continuous-run:
required: true
description: "The type of test this is run from -- presubmit or continuous"
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
@ -58,18 +59,18 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup composer
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/composer-setup@v3
with:
cache-prefix: php-${{ matrix.version-short }}
directory: php
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php:${{ matrix.version }}-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
@ -104,13 +105,13 @@ jobs:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:836f2cedcfe351d9a30055076630408e61994fc7d783e8333a99570968990eeb
steps:
- name: Checkout pending changes
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Cross compile protoc for i386
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
@ -119,14 +120,14 @@ jobs:
architecture: linux-i386
- name: Setup composer
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/composer-setup@v3
with:
cache-prefix: php-${{ matrix.version }}
directory: php
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: ${{ env.image }}
@ -191,38 +192,38 @@ jobs:
runs-on: macos-12
steps:
- name: Checkout pending changes
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Uninstall problematic libgd
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: brew uninstall --ignore-dependencies gd
- name: Install dependencies
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: brew install coreutils gd
- name: Pin PHP version
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: shivammathur/setup-php@8872c784b04a1420e81191df5d64fbd59d3d3033 # 2.30.2
with:
php-version: ${{ matrix.version }}
- name: Check PHP version
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: php --version | grep ${{ matrix.version }} || (echo "Invalid PHP version - $(php --version)" && exit 1)
- name: Setup composer
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/composer-setup@v3
with:
cache-prefix: php-${{ matrix.version }}
directory: php
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
@ -235,7 +236,7 @@ jobs:
popd
- name: Run conformance tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
      if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}

@ -3,9 +3,10 @@ name: PHP Extension Tests
on:
workflow_call:
inputs:
test-type:
continuous-run:
required: true
description: "The type of test this is run from -- presubmit or continuous"
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
@ -52,21 +53,21 @@ jobs:
matrix:
include:
- version: "8.1"
presubmit: false
continuous-only: true
- version: "8.2"
presubmit: false
continuous-only: true
- version: "8.3"
presubmit: true
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} Build ${{ matrix.version }}
continuous-only: false
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Build ${{ matrix.version }}
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
name: protobuf-php-release
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php-extension:${{ matrix.version }}-a48f26c08d9a803dd0177dda63563f6ea6f7b2d4

@ -3,9 +3,10 @@ name: Python Tests
on:
workflow_call:
inputs:
test-type:
continuous-run:
required: true
description: "The type of test this is run from -- presubmit or continuous"
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
@ -41,26 +42,26 @@ jobs:
# TODO Enable this once conformance tests are fixed.
flags: --define=use_fast_cpp_protos=true --test_tag_filters=-conformance
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-63dd26c0c7a808d92673a3e52e848189d4ab0f17
presubmit: true
continuous-only: false
- version: "3.8"
presubmit: true
continuous-only: false
- version: "3.9"
presubmit: false
continuous-only: true
- version: "3.10"
presubmit: false
continuous-only: true
- version: "3.11"
presubmit: true
continuous-only: false
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} Linux ${{ matrix.type }} ${{ matrix.version }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.type }} ${{ matrix.version }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/python:{0}-63dd26c0c7a808d92673a3e52e848189d4ab0f17', matrix.version) }}

@ -3,9 +3,10 @@ name: Ruby Tests
on:
workflow_call:
inputs:
test-type:
continuous-run:
required: true
description: "The type of test this is run from -- presubmit or continuous"
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
@ -28,25 +29,25 @@ jobs:
include:
# Test both FFI and Native implementations on the highest and lowest
# Ruby versions for CRuby and JRuby, but only on Bazel 5.x.
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: NATIVE, presubmit: true }
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: FFI, presubmit: false }
- { name: Ruby 3.1, ruby: ruby-3.1.0, presubmit: false }
- { name: Ruby 3.2, ruby: ruby-3.2.0, presubmit: false }
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: NATIVE, presubmit: true }
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: FFI, presubmit: false }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: NATIVE, presubmit: true }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: FFI, presubmit: false }
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} Linux ${{ matrix.name }} ${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: NATIVE }
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: FFI, continuous-only: true }
- { name: Ruby 3.1, ruby: ruby-3.1.0, continuous-only: true }
- { name: Ruby 3.2, ruby: ruby-3.2.0, continuous-only: true }
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: NATIVE }
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: FFI, continuous-only: true }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: NATIVE }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: FFI, continuous-only: true }
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.name }} ${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:{0}-6.3.0-9848710ff1370795ee7517570a20b81e140112ec', matrix.ruby) }}
@ -132,34 +133,34 @@ jobs:
# Test both FFI and Native implementations on the highest and lowest
# Ruby versions for CRuby, but only on Bazel 5.x.
# Quote versions numbers otherwise 3.0 will render as 3
- { version: "3.0", ffi: NATIVE, presubmit: true }
- { version: "3.0", ffi: FFI, presubmit: false }
- { version: "3.1", presubmit: false }
- { version: "3.2", presubmit: false }
- { version: "3.3", ffi: NATIVE, presubmit: true }
- { version: "3.3", ffi: FFI, presubmit: false }
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} MacOS Ruby ${{ matrix.version }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
- { version: "3.0", ffi: NATIVE }
- { version: "3.0", ffi: FFI, continuous-only: true }
- { version: "3.1", continuous-only: true }
- { version: "3.2", continuous-only: true }
- { version: "3.3", ffi: NATIVE }
- { version: "3.3", ffi: FFI, continuous-only: true }
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} MacOS Ruby ${{ matrix.version }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
runs-on: macos-12
steps:
- name: Checkout pending changes
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Pin Ruby version
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: ruby/setup-ruby@961f85197f92e4842e3cb92a4f97bd8e010cdbaf # v1.165.0
with:
ruby-version: ${{ matrix.version }}
- name: Validate version
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: ruby --version | grep ${{ matrix.version }} || (echo "Invalid Ruby version - $(ruby --version)" && exit 1)
- name: Run tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
@ -183,7 +184,7 @@ jobs:
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: FFI }
name: Install ${{ matrix.name }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }} (Continuous)
# None of these ruby gem tests should be run on presubmit
if: ${{ inputs.test-type == 'continuous' }}
if: ${{ inputs.continuous-run }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes

@ -48,7 +48,7 @@ on:
# manual
workflow_dispatch:
permissions:
contents: read
@ -77,9 +77,11 @@ jobs:
# Store the sha for checkout so we can easily use it later. For safe
# events, this will be blank and use the defaults.
checkout-sha: ${{ steps.safe-checkout.outputs.sha }}
# Stores a string denoting whether this is a presubmit or continuous
# run. This helps us determine which tests to block on.
test-type: ${{ steps.set-test-type-vars.outputs.test-type }}
# Stores a string to be used as a boolean denoting whether this is a
# continuous run. An empty string denotes that the run is on presubmit,
# otherwise we are in a continuous run. This helps us determine which
# tests to block on.
continuous-run: ${{ steps.set-test-type-vars.outputs.continuous-run }}
# Stores a string that will serve as the prefix for all continuous tests.
# Either way we prepend "(Continuous)" but in the case that we are in
# a presubmit run, we should also mark them "[SKIPPED]"
@ -104,10 +106,10 @@ jobs:
id: set-test-type-vars
run: |
if [ "${{ github.event_name }}" == 'pull_request' ] || [ "${{ github.event_name }}" == 'pull_request_target' ]; then
echo "test-type=presubmit" >> "$GITHUB_OUTPUT"
echo "continuous-prefix=[SKIPPED](Continuous)" >> "$GITHUB_OUTPUT"
echo "continuous-run=" >> "$GITHUB_OUTPUT"
echo "continuous-prefix=[SKIPPED] (Continuous)" >> "$GITHUB_OUTPUT"
else
echo "test-type=continuous" >> "$GITHUB_OUTPUT"
echo "continuous-run=continuous" >> "$GITHUB_OUTPUT"
echo "continuous-prefix=(Continuous)" >> "$GITHUB_OUTPUT"
fi
@ -132,7 +134,7 @@ jobs:
needs: [set-vars]
uses: ./.github/workflows/test_bazel.yml
with:
test-type: ${{ needs.set-vars.outputs.test-type }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
@ -142,7 +144,7 @@ jobs:
needs: [set-vars]
uses: ./.github/workflows/test_cpp.yml
with:
test-type: ${{ needs.set-vars.outputs.test-type }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
@ -152,7 +154,7 @@ jobs:
needs: [set-vars]
uses: ./.github/workflows/test_java.yml
with:
test-type: ${{ needs.set-vars.outputs.test-type }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
@ -162,7 +164,7 @@ jobs:
needs: [set-vars]
uses: ./.github/workflows/test_python.yml
with:
test-type: ${{ needs.set-vars.outputs.test-type }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
@ -172,7 +174,7 @@ jobs:
needs: [set-vars]
uses: ./.github/workflows/test_ruby.yml
with:
test-type: ${{ needs.set-vars.outputs.test-type }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
@ -182,7 +184,7 @@ jobs:
needs: [set-vars]
uses: ./.github/workflows/test_php.yml
with:
test-type: ${{ needs.set-vars.outputs.test-type }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
@ -192,7 +194,7 @@ jobs:
needs: [set-vars]
uses: ./.github/workflows/test_php_ext.yml
with:
test-type: ${{ needs.set-vars.outputs.test-type }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
@ -210,7 +212,7 @@ jobs:
needs: [set-vars]
uses: ./.github/workflows/test_objectivec.yml
with:
test-type: ${{ needs.set-vars.outputs.test-type }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
@ -228,7 +230,7 @@ jobs:
needs: [set-vars]
uses: ./.github/workflows/test_upb.yml
with:
test-type: ${{ needs.set-vars.outputs.test-type }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
@ -240,6 +242,7 @@ jobs:
# Staleness tests have scheduled runs during off-hours to avoid race conditions.
if: ${{ github.event_name != 'schedule' }}
with:
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
secrets: inherit
@ -250,7 +253,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check test results
run: "${{ !contains(join(needs.*.result, ' '), 'failure') && !contains(join(needs.*.result, ' '), 'cancelled') && !contains(join(needs.*.result, ' '), 'skipped') }}"
run: "${{ !contains(join(needs.*.result, ' '), 'failure') && !contains(join(needs.*.result, ' '), 'cancelled') }}"
# This workflow must run even if one or more of the dependent workflows
# failed.
if: always()
    if: ${{ always() && needs.check-tag.result != 'skipped' }}

@ -3,9 +3,10 @@ name: μpb Tests
on:
workflow_call:
inputs:
test-type:
continuous-run:
required: true
description: "The type of test this is run from -- presubmit or continuous"
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
@ -26,32 +27,32 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
config:
- { name: "Bazel 7", bazel_version: "7.1.1", presubmit: false }
- { name: "Fastbuild", presubmit: true }
- { name: "Optimized", flags: "-c opt", presubmit: false }
- { name: "ASAN", flags: "--config=asan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/...", runner: ubuntu-20-large, presubmit: true }
- { name: "UBSAN", flags: "--config=ubsan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/... -//lua/...", presubmit: false }
- { name: "32-bit", flags: "--copt=-m32 --linkopt=-m32", exclude-targets: "-//benchmarks:benchmark -//python/...", presubmit: true }
- { name: "Bazel 7", bazel_version: "7.1.1", continuous-only: true }
- { name: "Fastbuild" }
- { name: "Optimized", flags: "-c opt", continuous-only: true }
- { name: "ASAN", flags: "--config=asan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/...", runner: ubuntu-22-4core }
- { name: "UBSAN", flags: "--config=ubsan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/... -//lua/...", continuous-only: true }
- { name: "32-bit", flags: "--copt=-m32 --linkopt=-m32", exclude-targets: "-//benchmarks:benchmark -//python/..." }
# TODO: Add 32-bit ASAN test
# TODO: Restore the FastTable tests
name: ${{ !matrix.config.presubmit && inputs.continuous-prefix || '' }} ${{ matrix.config.name }}
name: ${{ matrix.config.continuous-only && inputs.continuous-prefix || '' }} ${{ matrix.config.name }}
runs-on: ${{ matrix.config.runner || 'ubuntu-latest' }}
steps:
- name: Checkout pending changes
if: ${{ matrix.config.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
if: ${{ matrix.config.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize:${{ matrix.config.bazel_version || '6.3.0' }}-75f2a85ece6526cc3d54087018c0f1097d78d42b
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: upb-bazel
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //bazel/... //benchmarks/... //lua/... //hpb_generator/... //python/... //upb/... //upb_generator/... ${{ matrix.config.flags }}
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //bazel/... //benchmarks/... //lua/... //python/... //upb/... //upb_generator/... ${{ matrix.config.flags }}
exclude-targets: ${{ matrix.config.exclude-targets }}
linux-gcc:
@ -70,7 +71,7 @@ jobs:
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:12.2-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17"
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-gcc"
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 -c opt //bazel/... //benchmarks/... //lua/... //hpb_generator/... //python/... //upb/... //upb_generator/...
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 -c opt //bazel/... //benchmarks/... //lua/... //python/... //upb/... //upb_generator/...
windows:
strategy:
@ -91,7 +92,7 @@ jobs:
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-windows"
bazel: test --cxxopt=/std:c++17 --host_cxxopt=/std:c++17 //upb/... //upb_generator/... //python/... //hpb_generator/...
bazel: test --cxxopt=/std:c++17 --host_cxxopt=/std:c++17 //upb/... //upb_generator/... //python/...
version: 6.3.0
exclude-targets: -//python:conformance_test -//upb/reflection:def_builder_test
@ -118,7 +119,7 @@ jobs:
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-macos"
bazel: ${{ matrix.config.bazel-command }} --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 ${{ matrix.config.flags }} //bazel/... //benchmarks/... //lua/... //hpb_generator/... //python/... //upb/... //upb_generator/...
bazel: ${{ matrix.config.bazel-command }} --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 ${{ matrix.config.flags }} //bazel/... //benchmarks/... //lua/... //python/... //upb/... //upb_generator/...
version: 6.3.0
no-python:
@ -180,27 +181,27 @@ jobs:
# a single wheel. As a result we can just test the oldest and newest
# supported Python versions and assume this gives us sufficient test
# coverage.
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'binary', presubmit: true }
- { os: macos-12, python-version: "3.8", architecture: x64, type: 'binary', presubmit: true }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'binary', presubmit: true }
- { os: macos-13, python-version: "3.12", architecture: x64, type: 'binary', presubmit: true }
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'source' }
- { os: macos-12, python-version: "3.8", architecture: x64, type: 'source' }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'source' }
- { os: macos-13, python-version: "3.12", architecture: x64, type: 'source' }
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: macos-12, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'binary' }
- { os: macos-13, python-version: "3.12", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'source', continuous-only: true }
- { os: macos-12, python-version: "3.8", architecture: x64, type: 'source', continuous-only: true }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'source', continuous-only: true }
- { os: macos-13, python-version: "3.12", architecture: x64, type: 'source', continuous-only: true }
# Windows uses the full API up until Python 3.10.
- { os: windows-2019, python-version: "3.8", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.9", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.10", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.11", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.12", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.8", architecture: x64, type: 'binary', presubmit: true }
- { os: windows-2019, python-version: "3.9", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.10", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.11", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.12", architecture: x64, type: 'binary', presubmit: true }
name: ${{ !matrix.presubmit && inputs.continuous-prefix || '' }} Test Wheels Python ${{ matrix.python-version }} ${{ matrix.os }} ${{ matrix.architecture }} ${{ matrix.type }}
- { os: windows-2019, python-version: "3.8", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.9", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.10", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.11", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.12", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.9", architecture: x64, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.10", architecture: x64, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.11", architecture: x64, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.12", architecture: x64, type: 'binary' }
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Test Wheels Python ${{ matrix.python-version }} ${{ matrix.os }} ${{ matrix.architecture }} ${{ matrix.type }}
needs: build_wheels
runs-on: ${{ matrix.os }}
if: ${{ github.event_name != 'pull_request_target' }}
@ -209,24 +210,24 @@ jobs:
shell: bash
steps:
- name: Download Wheels
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: actions/download-artifact@v3
with:
name: python-wheels
path: wheels
- name: Download Requirements
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: actions/download-artifact@v3
with:
name: requirements
path: requirements
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
python-version: ${{ matrix.python-version }}
architecture: ${{ matrix.architecture }}
- name: Setup Python venv
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: |
python -m pip install --upgrade pip
python -m venv env
@ -236,28 +237,28 @@ jobs:
- name: Install tzdata
run: pip install tzdata
# Only needed on Windows, Linux ships with tzdata.
if: ${{ contains(matrix.os, 'windows') && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ contains(matrix.os, 'windows') && (!matrix.continuous-only || inputs.continuous-run) }}
- name: Install requirements
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: pip install -r requirements/requirements.txt
- name: Install Protobuf Binary Wheel
if: ${{ matrix.type == 'binary' && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ matrix.type == 'binary' && (!matrix.continuous-only || inputs.continuous-run) }}
run: pip install -vvv --no-index --find-links wheels protobuf
- name: Install Protobuf Source Wheel
if: ${{ matrix.type == 'source' && (matrix.presubmit || inputs.test-type == 'continuous') }}
if: ${{ matrix.type == 'source' && (!matrix.continuous-only || inputs.continuous-run) }}
run: |
cd wheels
tar -xzvf *.tar.gz
cd protobuf-*/
pip install .
- name: Test that module is importable
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: python -v -c 'from google._upb import _message; assert "google._upb._message.MessageMeta" in str(_message.MessageMeta)'
- name: Install Protobuf Test Wheel
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: pip install -vvv --no-index --find-links wheels protobuftests
- name: Run the unit tests
if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: |
TESTS=$(pip show -f protobuftests | grep pb_unit_tests.*py$ | sed 's,/,.,g' | sed 's,\\,.,g' | sed -E 's,.py$,,g')
for test in $TESTS; do

@ -84,7 +84,7 @@ if (protobuf_BUILD_SHARED_LIBS)
endif ()
# Version metadata
set(protobuf_VERSION_STRING "5.28.0")
set(protobuf_VERSION_STRING "5.29.0")
set(protobuf_DESCRIPTION "Protocol Buffers")
set(protobuf_CONTACT "protobuf@googlegroups.com")

@ -3,7 +3,7 @@
module(
name = "protobuf",
version = "28.0-dev", # Automatically updated on release
version = "29.0-dev", # Automatically updated on release
compatibility_level = 1,
repo_name = "com_google_protobuf",
)
@ -13,17 +13,19 @@ module(
# https://bazel.build/versions/6.0.0/build/bzlmod#version-resolution
# Thus the highest version in their module graph is resolved.
bazel_dep(name = "abseil-cpp", version = "20230802.0.bcr.1", repo_name = "com_google_absl")
bazel_dep(name = "bazel_skylib", version = "1.4.1")
bazel_dep(name = "bazel_skylib", version = "1.7.0")
bazel_dep(name = "jsoncpp", version = "1.9.5")
bazel_dep(name = "rules_cc", version = "0.0.9")
bazel_dep(name = "rules_fuzzing", version = "0.5.2")
bazel_dep(name = "rules_java", version = "5.3.5")
bazel_dep(name = "rules_jvm_external", version = "5.1")
bazel_dep(name = "rules_jvm_external", version = "6.0")
bazel_dep(name = "rules_kotlin", version = "1.9.0")
bazel_dep(name = "rules_pkg", version = "0.7.0")
bazel_dep(name = "rules_python", version = "0.28.0")
bazel_dep(name = "rules_rust", version = "0.45.1")
bazel_dep(name = "platforms", version = "0.0.8")
bazel_dep(name = "zlib", version = "1.3.1")
bazel_dep(name = "bazel_features", version = "1.13.0", repo_name = "proto_bazel_features")
# TODO: remove after toolchain types are moved to protobuf
bazel_dep(name = "rules_proto", version = "4.0.0")
@ -70,3 +72,30 @@ crate.spec(
)
crate.from_specs()
use_repo(crate, crate_index = "crates")
maven = use_extension("@rules_jvm_external//:extensions.bzl", "maven")
maven.install(
artifacts = [
"com.google.caliper:caliper:1.0-beta-3",
"com.google.code.findbugs:jsr305:3.0.2",
"com.google.code.gson:gson:2.8.9",
"com.google.errorprone:error_prone_annotations:2.5.1",
"com.google.j2objc:j2objc-annotations:2.8",
"com.google.guava:guava:32.0.1-jre",
"com.google.guava:guava-testlib:32.0.1-jre",
"com.google.truth:truth:1.1.2",
"junit:junit:4.13.2",
"org.mockito:mockito-core:4.3.1",
"biz.aQute.bnd:biz.aQute.bndlib:6.4.0",
"info.picocli:picocli:4.6.3",
],
repositories = [
"https://repo1.maven.org/maven2",
"https://repo.maven.apache.org/maven2",
],
)
use_repo(maven, "maven")
# Development dependencies
bazel_dep(name = "googletest", version = "1.14.0", repo_name = "com_google_googletest", dev_dependency = True)
bazel_dep(name = "rules_testing", version = "0.6.0", dev_dependency = True)

@ -1,6 +1,6 @@
Pod::Spec.new do |s|
s.name = 'Protobuf-C++'
s.version = '5.28.0'
s.version = '5.29.0'
s.summary = 'Protocol Buffers v3 runtime library for C++.'
s.homepage = 'https://github.com/google/protobuf'
s.license = 'BSD-3-Clause'

@ -5,7 +5,7 @@
# dependent projects use the :git notation to refer to the library.
Pod::Spec.new do |s|
s.name = 'Protobuf'
s.version = '3.28.0'
s.version = '3.29.0'
s.summary = 'Protocol Buffers v.3 runtime library for Objective-C.'
s.homepage = 'https://github.com/protocolbuffers/protobuf'
s.license = 'BSD-3-Clause'
@ -18,7 +18,7 @@ Pod::Spec.new do |s|
s.source = { :git => 'https://github.com/protocolbuffers/protobuf.git',
:tag => "v#{s.version}" }
s.source_files = 'objectivec/*.{h,m}',
s.source_files = 'objectivec/*.{h,m,swift}',
'objectivec/google/protobuf/Any.pbobjc.h',
'objectivec/google/protobuf/Api.pbobjc.h',
'objectivec/google/protobuf/Duration.pbobjc.h',
@ -33,6 +33,9 @@ Pod::Spec.new do |s|
# left out, as it's an umbrella implementation file.
s.exclude_files = 'objectivec/GPBProtocolBuffers.m'
# Now that there is a Swift source file, set a version.
s.swift_version = '5.0'
s.resource_bundle = {
"Protobuf_Privacy" => "PrivacyInfo.xcprivacy"
}

@ -41,10 +41,10 @@ http_archive(
http_archive(
name = "com_google_googletest",
sha256 = "730215d76eace9dd49bf74ce044e8daa065d175f1ac891cc1d6bb184ef94e565",
strip_prefix = "googletest-f53219cdcb7b084ef57414efea92ee5b71989558",
sha256 = "7315acb6bf10e99f332c8a43f00d5fbb1ee6ca48c52f6b936991b216c586aaad",
strip_prefix = "googletest-1.15.0",
urls = [
"https://github.com/google/googletest/archive/f53219cdcb7b084ef57414efea92ee5b71989558.tar.gz" # 2023-03-16
"https://github.com/google/googletest/releases/download/v1.15.0/googletest-1.15.0.tar.gz" # 2024-07-15
],
)
@ -99,11 +99,11 @@ load("@rules_cc//cc:repositories.bzl", "rules_cc_dependencies")
rules_cc_dependencies()
# For `kt_jvm_library`
load("@io_bazel_rules_kotlin//kotlin:repositories.bzl", "kotlin_repositories")
load("@rules_kotlin//kotlin:repositories.bzl", "kotlin_repositories")
kotlin_repositories()
load("@io_bazel_rules_kotlin//kotlin:core.bzl", "kt_register_toolchains")
load("@rules_kotlin//kotlin:core.bzl", "kt_register_toolchains")
kt_register_toolchains()
@ -245,3 +245,12 @@ http_archive(
strip_prefix = "utf8_range-d863bc33e15cba6d873c878dcca9e6fe52b2f8cb",
url = "https://github.com/protocolbuffers/utf8_range/archive/d863bc33e15cba6d873c878dcca9e6fe52b2f8cb.zip",
)
# Needed for testing only
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "rules_testing",
sha256 = "02c62574631876a4e3b02a1820cb51167bb9cdcdea2381b2fa9d9b8b11c407c4",
strip_prefix = "rules_testing-0.6.0",
url = "https://github.com/bazelbuild/rules_testing/releases/download/v0.6.0/rules_testing-v0.6.0.tar.gz",
)

@ -0,0 +1,40 @@
# This is a WORKSPACE file used by bzlmod in combination with MODULE.bazel.
# It's used for a gradual migration and it should be empty.
# Don't remove this file. If the file doesn't exist, bzlmod falls back to WORKSPACE file.
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
# TODO: either replace rules_ruby with a maintained version on BCR
# or use bzlmod extensions to depend on this specific repo
http_archive(
name = "rules_ruby",
urls = [
"https://github.com/protocolbuffers/rules_ruby/archive/b7f3e9756f3c45527be27bc38840d5a1ba690436.zip"
],
strip_prefix = "rules_ruby-b7f3e9756f3c45527be27bc38840d5a1ba690436",
sha256 = "347927fd8de6132099fcdc58e8f7eab7bde4eb2fd424546b9cd4f1c6f8f8bad8",
)
load("@rules_ruby//ruby:defs.bzl", "ruby_runtime")
ruby_runtime("system_ruby")
register_toolchains("@system_ruby//:toolchain")
# Following are just needed to run conformance tests, not really needed to support them via MODULE.bazel
# For testing runtime against old gencode from a previous major version.
http_archive(
name = "com_google_protobuf_v25.0",
strip_prefix = "protobuf-25.0",
url = "https://github.com/protocolbuffers/protobuf/releases/download/v25.0/protobuf-25.0.tar.gz",
)
# Needed as a dependency of @com_google_protobuf_v25.x, which was before
# utf8_range was merged in.
http_archive(
name = "utf8_range",
strip_prefix = "utf8_range-d863bc33e15cba6d873c878dcca9e6fe52b2f8cb",
url = "https://github.com/protocolbuffers/utf8_range/archive/d863bc33e15cba6d873c878dcca9e6fe52b2f8cb.zip",
)

@ -7,7 +7,9 @@ bzl_library(
],
visibility = ["//visibility:public"],
deps = [
"//bazel/private:native_bzl",
":proto_lang_toolchain_info_bzl",
"//bazel/private:toolchain_helpers_bzl",
"@proto_bazel_features//:features",
],
)
@ -29,6 +31,6 @@ bzl_library(
],
visibility = ["//visibility:public"],
deps = [
":proto_common.bzl",
"//bazel/private:native_bzl",
],
)

@ -1,5 +1,350 @@
"""proto_common"""
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Definition of proto_common module, together with bazel providers for proto rules."""
load("//bazel/private:native.bzl", "native_proto_common")
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
proto_common = native_proto_common
def _import_virtual_proto_path(path):
    """Returns an `-I` flag for a virtual-imports proto path, or None otherwise.

    Virtual import roots look like:
    'bazel-out/k8-fastbuild/bin/external/foo/e/_virtual_imports/e' or
    'bazel-out/foo/k8-fastbuild/bin/e/_virtual_imports/e',
    i.e. they contain more than 4 path separators."""
    if path.count("/") <= 4:
        return None
    return "-I%s" % path
def _import_repo_proto_path(path):
    """Returns an `-I` flag for generated files in external repositories, or None.

    Matching paths are of the form:
    'bazel-out/k8-fastbuild/bin/external/foo' or
    'bazel-out/foo/k8-fastbuild/bin',
    i.e. they contain 3 or 4 path separators."""
    separators = path.count("/")
    if separators > 2 and separators <= 4:
        return "-I%s" % path
    return None
def _import_main_output_proto_path(path):
    """Returns an `-I` flag for generated or external-repo sources near the root, or None.

    Matching paths are of the form:
    'bazel-out/k8-fastbuild/bin'
    'external/foo'
    '../foo',
    i.e. at most 2 path separators and not the current directory.
    """
    if path != "." and path.count("/") <= 2:
        return "-I%s" % path
    return None
def _remove_repo(file):
    """Removes the `../repo/` prefix from a path, e.g. `../repo/package/path -> package/path`"""
    short_path = file.short_path
    root = file.owner.workspace_root
    if not root:
        return short_path

    # A root spelled as `external/<repo>` is normalized to the `../<repo>` form
    # used by short_path before the prefix is stripped.
    if root.startswith("external/"):
        root = "../" + root.removeprefix("external/")
    return short_path.removeprefix(root + "/")
def _get_import_path(proto_file):
    """Returns the import path of a .proto file.

    This is the path usable in an `import` statement in another .proto file.

    Args:
      proto_file: (File) The .proto file

    Returns:
      (str) import path
    """
    path = _remove_repo(proto_file)

    # Inside a virtual-imports root, the import path is everything after
    # `_virtual_imports/<target-name>/`.
    marker = "_virtual_imports/"
    start = path.find(marker)
    if start < 0:
        return path
    slash = path.find("/", start + len(marker))
    return path[slash + 1:]
def _output_directory(proto_info, root):
    """Computes the protoc output directory for the given output root."""
    source_root = proto_info.proto_source_root

    # TODO: remove this branch when bin_dir is removed from proto_source_root
    if source_root.startswith(root.path):
        source_root = source_root.removeprefix(root.path).removeprefix("/")

    if source_root in ("", "."):
        return root.path
    return root.path + "/" + source_root
def _check_collocated(label, proto_info, proto_lang_toolchain_info):
    """Checks if lang_proto_library is collocated with proto_library.

    Exceptions are allowed by an allowlist defined on `proto_lang_toolchain` and
    on an allowlist defined on `proto_library`'s `allow_exports` attribute.

    If checks are not successful the function fails.

    Args:
      label: (Label) The label of lang_proto_library
      proto_info: (ProtoInfo) The ProtoInfo from the proto_library dependency.
      proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
        Obtained from a `proto_lang_toolchain` target.
    """
    _PackageSpecificationInfo = bazel_features.globals.PackageSpecificationInfo
    if not _PackageSpecificationInfo:
        # Older Bazel versions lack PackageSpecificationInfo, so allowlists cannot
        # be evaluated; fail only if an allowlist is actually in use.
        if proto_lang_toolchain_info.allowlist_different_package or getattr(proto_info, "allow_exports", None):
            fail("Allowlist checks not supported before Bazel 6.4.0")
        return

    # Toolchain-level allowlist: cross-package usage is permitted only for
    # packages the toolchain explicitly lists.
    if (proto_info.direct_descriptor_set.owner.package != label.package and
        proto_lang_toolchain_info.allowlist_different_package):
        if not proto_lang_toolchain_info.allowlist_different_package[_PackageSpecificationInfo].contains(label):
            fail(("lang_proto_library '%s' may only be created in the same package " +
                  "as proto_library '%s'") % (label, proto_info.direct_descriptor_set.owner))

    # proto_library-level allowlist via its `allow_exports` attribute.
    if (proto_info.direct_descriptor_set.owner.package != label.package and
        hasattr(proto_info, "allow_exports")):
        if not proto_info.allow_exports[_PackageSpecificationInfo].contains(label):
            fail(("lang_proto_library '%s' may only be created in the same package " +
                  "as proto_library '%s'") % (label, proto_info.direct_descriptor_set.owner))
def _compile(
        actions,
        proto_info,
        proto_lang_toolchain_info,
        generated_files,
        plugin_output = None,
        additional_args = None,
        additional_tools = [],
        additional_inputs = depset(),
        resource_set = None,
        experimental_exec_group = None,
        experimental_progress_message = None,
        experimental_output_files = "legacy"):
    """Creates proto compile action for compiling *.proto files to language specific sources.

    Args:
      actions: (ActionFactory) Obtained by ctx.actions, used to register the actions.
      proto_info: (ProtoInfo) The ProtoInfo from proto_library to generate the sources for.
      proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
        Obtained from a `proto_lang_toolchain` target or constructed ad-hoc.
      generated_files: (list[File]) The output files generated by the proto compiler.
        Callee needs to declare files using `ctx.actions.declare_file`.
        See also: `proto_common.declare_generated_files`.
      plugin_output: (File|str) Deprecated: Set `proto_lang_toolchain.output_files`
        and remove the parameter.
        For backwards compatibility, when the proto_lang_toolchain isn't updated
        the value is used.
      additional_args: (Args) Additional arguments to add to the action.
        Accepts a ctx.actions.args() object that is added at the beginning
        of the command line.
      additional_tools: (list[File]) Additional tools to add to the action.
      additional_inputs: (Depset[File]) Additional input files to add to the action.
      resource_set: (func) A callback function that is passed to the created action.
        See `ctx.actions.run`, `resource_set` parameter for full definition of
        the callback.
      experimental_exec_group: (str) Sets `exec_group` on proto compile action.
        Avoid using this parameter.
      experimental_progress_message: Overrides progress_message from the toolchain.
        Don't use this parameter. It's only intended for the transition.
      experimental_output_files: (str) Overwrites output_files from the toolchain.
        Don't use this parameter. It's only intended for the transition.
    """
    if type(generated_files) != type([]):
        fail("generated_files is expected to be a list of Files")
    if not generated_files:
        return  # nothing to do
    if experimental_output_files not in ["single", "multiple", "legacy"]:
        fail('experimental_output_files expected to be one of ["single", "multiple", "legacy"]')

    args = actions.args()
    args.use_param_file(param_file_arg = "@%s")
    args.set_param_file_format("multiline")
    tools = list(additional_tools)

    # The transition override takes precedence over the toolchain's output_files.
    if experimental_output_files != "legacy":
        output_files = experimental_output_files
    else:
        output_files = getattr(proto_lang_toolchain_info, "output_files", "legacy")
    if output_files != "legacy":
        if proto_lang_toolchain_info.out_replacement_format_flag:
            # Non-legacy modes compute plugin_output themselves, overriding the
            # deprecated parameter: a single declared file, or the output directory.
            if output_files == "single":
                if len(generated_files) > 1:
                    fail("generated_files only expected a single file")
                plugin_output = generated_files[0]
            else:
                plugin_output = _output_directory(proto_info, generated_files[0].root)

    if plugin_output:
        args.add(plugin_output, format = proto_lang_toolchain_info.out_replacement_format_flag)
    if proto_lang_toolchain_info.plugin:
        tools.append(proto_lang_toolchain_info.plugin)
        args.add(proto_lang_toolchain_info.plugin.executable, format = proto_lang_toolchain_info.plugin_format_flag)

    # Protoc searches for .protos -I paths in order they are given and then
    # uses the path within the directory as the package.
    # This requires ordering the paths from most specific (longest) to least
    # specific ones, so that no path in the list is a prefix of any of the
    # following paths in the list.
    # For example: 'bazel-out/k8-fastbuild/bin/external/foo' needs to be listed
    # before 'bazel-out/k8-fastbuild/bin'. If not, protoc will discover file under
    # the shorter path and use 'external/foo/...' as its package path.
    args.add_all(proto_info.transitive_proto_path, map_each = _import_virtual_proto_path)
    args.add_all(proto_info.transitive_proto_path, map_each = _import_repo_proto_path)
    args.add_all(proto_info.transitive_proto_path, map_each = _import_main_output_proto_path)
    args.add("-I.")  # Needs to come last

    args.add_all(proto_lang_toolchain_info.protoc_opts)

    args.add_all(proto_info.direct_sources)

    # Caller-provided Args must use the same param-file format to be mergeable.
    if additional_args:
        additional_args.use_param_file(param_file_arg = "@%s")
        additional_args.set_param_file_format("multiline")

    actions.run(
        mnemonic = proto_lang_toolchain_info.mnemonic,
        progress_message = experimental_progress_message if experimental_progress_message else proto_lang_toolchain_info.progress_message,
        executable = proto_lang_toolchain_info.proto_compiler,
        arguments = [additional_args, args] if additional_args else [args],
        inputs = depset(transitive = [proto_info.transitive_sources, additional_inputs]),
        outputs = generated_files,
        tools = tools,
        use_default_shell_env = True,
        resource_set = resource_set,
        exec_group = experimental_exec_group,
        toolchain = _toolchain_type(proto_lang_toolchain_info),
    )
_BAZEL_TOOLS_PREFIX = "external/bazel_tools/"

def _experimental_filter_sources(proto_info, proto_lang_toolchain_info):
    """Splits direct sources into (included, excluded) by toolchain-provided protos."""
    if not proto_info.direct_sources:
        return [], []

    # Collect the set of proto paths the toolchain already provides.
    provided_paths = {}
    for src in proto_lang_toolchain_info.provided_proto_sources:
        # For listed protos bundled with the Bazel tools repository, their exec paths start
        # with external/bazel_tools/. This prefix needs to be removed first, because the protos in
        # user repositories will not have that prefix.
        provided_paths[src.path.removeprefix(_BAZEL_TOOLS_PREFIX)] = None

    # Partition the direct sources against that set.
    included = []
    excluded = []
    for proto_file in proto_info._direct_proto_sources:
        if proto_file.path in provided_paths:
            excluded.append(proto_file)
        else:
            included.append(proto_file)
    return included, excluded
def _experimental_should_generate_code(
        proto_info,
        proto_lang_toolchain_info,
        rule_name,
        target_label):
    """Decides whether code generation is needed for a proto_library.

    Generation is unnecessary only when the toolchain already provides the code
    to the language through its runtime dependency. Fails when the proto_library
    mixes sources that should and should not generate code.

    Args:
      proto_info: (ProtoInfo) The ProtoInfo from proto_library to check the generation for.
      proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
        Obtained from a `proto_lang_toolchain` target or constructed ad-hoc.
      rule_name: (str) Name of the rule used in the failure message.
      target_label: (Label) The label of the target used in the failure message.

    Returns:
      (bool) True when the code should be generated.
    """
    included, excluded = _experimental_filter_sources(proto_info, proto_lang_toolchain_info)
    if included and excluded:
        excluded_paths = ", ".join([f.short_path for f in excluded])
        included_paths = ", ".join([f.short_path for f in included])
        fail(("The 'srcs' attribute of '%s' contains protos for which '%s' " +
              "shouldn't generate code (%s), in addition to protos for which it should (%s).\n" +
              "Separate '%s' into 2 proto_library rules.") % (
            target_label,
            rule_name,
            excluded_paths,
            included_paths,
            target_label,
        ))
    return bool(included)
def _declare_generated_files(
        actions,
        proto_info,
        extension,
        name_mapper = None):
    """Declares generated files with a specific extension.

    Use this in lang_proto_library-es when protocol compiler generates files
    that correspond to .proto file names.

    The function removes ".proto" extension with given one (e.g. ".pb.cc") and
    declares new output files.

    Args:
      actions: (ActionFactory) Obtained by ctx.actions, used to declare the files.
      proto_info: (ProtoInfo) The ProtoInfo to declare the files for.
      extension: (str) The extension to use for generated files.
      name_mapper: (str->str) A function mapped over the base filename without
        the extension. Used it to replace characters in the name that
        cause problems in a specific programming language.

    Returns:
      (list[File]) The list of declared files.
    """
    outputs = []
    for src in proto_info.direct_sources:
        # Drop the ".<extension>" suffix from the basename.
        base = src.basename[:-(len(src.extension) + 1)]
        if name_mapper:
            base = name_mapper(base)

        # Note that two proto_library rules can have the same source file, so this is actually a
        # shared action. NB: This can probably result in action conflicts if the proto_library rules
        # are not the same.
        outputs.append(actions.declare_file(base + extension, sibling = src))
    return outputs
def _toolchain_type(proto_lang_toolchain_info):
    # The toolchain type is only forwarded to the action when proto toolchain
    # resolution is enabled; otherwise actions run without one.
    if not toolchains.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        return None
    return getattr(proto_lang_toolchain_info, "toolchain_type", None)
# Public API of the proto_common module, mirroring the native implementation.
proto_common = struct(
    compile = _compile,
    declare_generated_files = _declare_generated_files,
    check_collocated = _check_collocated,
    experimental_should_generate_code = _experimental_should_generate_code,
    experimental_filter_sources = _experimental_filter_sources,
    get_import_path = _get_import_path,
    ProtoLangToolchainInfo = ProtoLangToolchainInfo,
    # Mirrors the state of the --incompatible_enable_proto_toolchain_resolution flag.
    INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = toolchains.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION,
    INCOMPATIBLE_PASS_TOOLCHAIN_TYPE = True,
)

@ -1,5 +1,5 @@
"""ProtoLangToolchainInfo"""
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/private:native.bzl", "native_proto_common")
ProtoLangToolchainInfo = proto_common.ProtoLangToolchainInfo
ProtoLangToolchainInfo = native_proto_common.ProtoLangToolchainInfo

@ -6,6 +6,7 @@
# https://developers.google.com/open-source/licenses/bsd
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
load("//bazel/private:native_bool_flag.bzl", "native_bool_flag")
licenses(["notice"])
@ -40,3 +41,38 @@ bzl_library(
],
visibility = ["//bazel:__subpackages__"],
)
bzl_library(
name = "toolchain_helpers_bzl",
srcs = [
"toolchain_helpers.bzl",
],
visibility = ["//bazel:__subpackages__"],
deps = [
":native_bzl",
"//bazel/common:proto_lang_toolchain_info_bzl",
],
)
native_bool_flag(
name = "experimental_proto_descriptor_sets_include_source_info",
flag = "experimental_proto_descriptor_sets_include_source_info",
match_value = "true",
visibility = ["//visibility:public"],
)
native_bool_flag(
name = "strict_proto_deps",
flag = "strict_proto_deps",
match_value = "off",
result = False,
visibility = ["//visibility:public"],
)
native_bool_flag(
name = "strict_public_imports",
flag = "strict_public_imports",
match_value = "off",
result = False,
visibility = ["//visibility:public"],
)

@ -0,0 +1,35 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""
A helper rule that reads a native boolean flag.
"""
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
def _impl(ctx):
    # Re-exports the select()-ed boolean as a BuildSettingInfo provider.
    return [BuildSettingInfo(value = ctx.attr.value)]

# Rule that simply surfaces its `value` attribute as a build setting.
_native_bool_flag_rule = rule(
    implementation = _impl,
    attrs = {"value": attr.bool()},
)
def native_bool_flag(*, name, flag, match_value = "true", result = True, **kwargs):
    """Exposes the value of a native boolean flag as a BuildSettingInfo target.

    Args:
      name: name of the resulting target.
      flag: the native flag to read.
      match_value: the flag value that selects `result`.
      result: the value reported when the flag equals `match_value`; the
        negation is reported otherwise.
      **kwargs: forwarded to the underlying rule.
    """
    setting_name = name + "_setting"
    _native_bool_flag_rule(
        name = name,
        value = select({
            setting_name: result,
            "//conditions:default": not result,
        }),
        **kwargs
    )

    # The config_setting does the actual flag matching.
    native.config_setting(
        name = setting_name,
        values = {flag: match_value},
        visibility = ["//visibility:private"],
    )

@ -0,0 +1,50 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Vendored version of bazel_features for protobuf, to keep a one-step setup"""
# Template for the generated features.bzl; placeholders are filled per Bazel version.
_PROTO_BAZEL_FEATURES = """bazel_features = struct(
    proto = struct(
        starlark_proto_info = {starlark_proto_info},
    ),
    globals = struct(
        PackageSpecificationInfo = {PackageSpecificationInfo},
    ),
)
"""

def _proto_bazel_features_impl(rctx):
    # An empty string is treated as a "dev version", which is greater than anything.
    bazel_version = native.bazel_version or "999999.999999.999999"
    version_parts = bazel_version.split("-")[0].split(".")
    if len(version_parts) != 3:
        fail("invalid Bazel version '{}': got {} dot-separated segments, want 3".format(bazel_version, len(version_parts)))
    major_version_int = int(version_parts[0])
    minor_version_int = int(version_parts[1])

    # Starlark ProtoInfo is assumed available on Bazel 7+ (per this version check).
    starlark_proto_info = major_version_int >= 7

    # PackageSpecificationInfo is assumed available on Bazel 6.4+ (per this version check).
    PackageSpecificationInfo = major_version_int > 6 or (major_version_int == 6 and minor_version_int >= 4)

    rctx.file("BUILD.bazel", """
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
bzl_library(
    name = "features",
    srcs = ["features.bzl"],
    visibility = ["//visibility:public"],
)
exports_files(["features.bzl"])
""")
    rctx.file("features.bzl", _PROTO_BAZEL_FEATURES.format(
        starlark_proto_info = repr(starlark_proto_info),
        PackageSpecificationInfo = "PackageSpecificationInfo" if PackageSpecificationInfo else "None",
    ))

proto_bazel_features = repository_rule(
    implementation = _proto_bazel_features_impl,
    # Force reruns on server restarts to keep native.bazel_version up-to-date.
    local = True,
)

@ -0,0 +1,154 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Implementation of the proto_lang_toolchain rule."""
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
def _rule_impl(ctx):
    # Sources from blacklisted_protos are already provided by the runtime; no
    # code will be generated for them.
    provided_proto_sources = depset(transitive = [bp[ProtoInfo]._transitive_proto_sources for bp in ctx.attr.blacklisted_protos]).to_list()

    flag = ctx.attr.command_line
    if flag.find("$(PLUGIN_OUT)") > -1:
        fail("in attribute 'command_line': Placeholder '$(PLUGIN_OUT)' is not supported.")

    # $(OUT) becomes the %s placeholder that proto_common.compile fills in.
    flag = flag.replace("$(OUT)", "%s")

    plugin = None
    if ctx.attr.plugin != None:
        plugin = ctx.attr.plugin[DefaultInfo].files_to_run

    if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        # protoc and its default options come from the resolved proto toolchain.
        proto_compiler = ctx.toolchains[toolchains.PROTO_TOOLCHAIN].proto.proto_compiler
        protoc_opts = ctx.toolchains[toolchains.PROTO_TOOLCHAIN].proto.protoc_opts
    else:
        # Legacy path: protoc comes from the implicit attribute and the proto fragment.
        proto_compiler = ctx.attr._proto_compiler.files_to_run
        protoc_opts = ctx.fragments.proto.experimental_protoc_opts

    if ctx.attr.protoc_minimal_do_not_use:
        proto_compiler = ctx.attr.protoc_minimal_do_not_use.files_to_run

    proto_lang_toolchain_info = ProtoLangToolchainInfo(
        out_replacement_format_flag = flag,
        output_files = ctx.attr.output_files,
        plugin_format_flag = ctx.attr.plugin_format_flag,
        plugin = plugin,
        runtime = ctx.attr.runtime,
        provided_proto_sources = provided_proto_sources,
        proto_compiler = proto_compiler,
        protoc_opts = protoc_opts,
        progress_message = ctx.attr.progress_message,
        mnemonic = ctx.attr.mnemonic,
        allowlist_different_package = ctx.attr.allowlist_different_package,
        toolchain_type = ctx.attr.toolchain_type.label if ctx.attr.toolchain_type else None,
    )
    return [
        DefaultInfo(files = depset(), runfiles = ctx.runfiles()),
        platform_common.ToolchainInfo(proto = proto_lang_toolchain_info),
        # TODO: remove when --incompatible_enable_proto_toolchains is flipped and removed
        proto_lang_toolchain_info,
    ]
# Declares how a LANG_proto_library invokes the proto compiler; see `doc` below.
proto_lang_toolchain = rule(
    _rule_impl,
    doc = """
<p>If using Bazel, please load the rule from <a href="https://github.com/bazelbuild/rules_proto">
https://github.com/bazelbuild/rules_proto</a>.

<p>Specifies how a LANG_proto_library rule (e.g., <code>java_proto_library</code>) should invoke the
proto-compiler.
Some LANG_proto_library rules allow specifying which toolchain to use using command-line flags;
consult their documentation.

<p>Normally you should not write those kind of rules unless you want to
tune your Java compiler.

<p>There's no compiler. The proto-compiler is taken from the proto_library rule we attach to. It is
passed as a command-line flag to Blaze.
Several features require a proto-compiler to be invoked on the proto_library rule itself.
It's beneficial to enforce the compiler that LANG_proto_library uses is the same as the one
<code>proto_library</code> does.

<h4>Examples</h4>

<p>A simple example would be:
<pre><code class="lang-starlark">
proto_lang_toolchain(
    name = "javalite_toolchain",
    command_line = "--javalite_out=shared,immutable:$(OUT)",
    plugin = ":javalite_plugin",
    runtime = ":protobuf_lite",
)
</code></pre>
""",
    attrs = {
        "progress_message": attr.string(default = "Generating proto_library %{label}", doc = """
This value will be set as the progress message on protoc action."""),
        "mnemonic": attr.string(default = "GenProto", doc = """
This value will be set as the mnemonic on protoc action."""),
        "command_line": attr.string(mandatory = True, doc = """
This value will be passed to proto-compiler to generate the code. Only include the parts
specific to this code-generator/plugin (e.g., do not include -I parameters)
<ul>
<li><code>$(OUT)</code> is LANG_proto_library-specific. The rules are expected to define
how they interpret this variable. For Java, for example, $(OUT) will be replaced with
the src-jar filename to create.</li>
</ul>"""),
        "output_files": attr.string(values = ["single", "multiple", "legacy"], default = "legacy", doc = """
Controls how <code>$(OUT)</code> in <code>command_line</code> is formatted, either by
a path to a single file or output directory in case of multiple files.
Possible values are: "single", "multiple"."""),
        "plugin_format_flag": attr.string(doc = """
If provided, this value will be passed to proto-compiler to use the plugin.
The value must contain a single %s which is replaced with plugin executable.
<code>--plugin=protoc-gen-PLUGIN=&lt;executable&gt;.</code>"""),
        "plugin": attr.label(
            executable = True,
            cfg = "exec",
            doc = """
If provided, will be made available to the action that calls the proto-compiler, and will be
passed to the proto-compiler:
<code>--plugin=protoc-gen-PLUGIN=&lt;executable&gt;.</code>""",
        ),
        "runtime": attr.label(doc = """
A language-specific library that the generated code is compiled against.
The exact behavior is LANG_proto_library-specific.
Java, for example, should compile against the runtime."""),
        "blacklisted_protos": attr.label_list(
            providers = [ProtoInfo],
            doc = """
No code will be generated for files in the <code>srcs</code> attribute of
<code>blacklisted_protos</code>.
This is used for .proto files that are already linked into proto runtimes, such as
<code>any.proto</code>.""",
        ),
        # TODO: add doc
        "allowlist_different_package": attr.label(
            cfg = "exec",
            # The provider only exists on Bazel 6.4+; omit the requirement otherwise.
            providers = [bazel_features.globals.PackageSpecificationInfo] if bazel_features.globals.PackageSpecificationInfo else [],
        ),
        # TODO: add doc
        "toolchain_type": attr.label(),
        # DO NOT USE. For Protobuf incremental changes only: b/305068148.
        "protoc_minimal_do_not_use": attr.label(
            cfg = "exec",
            executable = True,
        ),
        # The implicit protoc attribute is only needed on the legacy (non-toolchain) path.
    } | ({} if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION else {
        "_proto_compiler": attr.label(
            cfg = "exec",
            executable = True,
            allow_files = True,
            default = configuration_field("proto", "proto_compiler"),
        ),
    }),
    provides = [ProtoLangToolchainInfo],
    fragments = ["proto"],
    toolchains = toolchains.use_toolchain(toolchains.PROTO_TOOLCHAIN),  # Used to obtain protoc
)

@ -0,0 +1,357 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""
Implementation of proto_library rule.
"""
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
# Template for the protoc flag that reports strict-deps violations. The outer
# %s is filled with the label of the offending target; the escaped %%s is
# expanded later by protoc with the imported file.
STRICT_DEPS_FLAG_TEMPLATE = (
    #
    "--direct_dependencies_violation_msg=" +
    "%%s is imported, but %s doesn't directly depend on a proto_library that 'srcs' it."
)
def _check_srcs_package(target_package, srcs):
    """Verifies that all .proto sources live in the rule's own package.

    This is done to avoid clashes with the generated sources."""

    # TODO: this does not work with filegroups that contain files that are not in the package
    for src in srcs:
        if src.label.package != target_package:
            fail("Proto source with label '%s' must be in same package as consuming rule." % src.label)
def _get_import_prefix(ctx):
    """Returns the validated `import_prefix` attribute.

    Fails unless the prefix is a normalized, relative path."""
    prefix = ctx.attr.import_prefix
    if not paths.is_normalized(prefix):
        fail("should be normalized (without uplevel references or '.' path segments)", attr = "import_prefix")
    if paths.is_absolute(prefix):
        fail("should be a relative path", attr = "import_prefix")
    return prefix
def _get_strip_import_prefix(ctx):
    """Returns the validated `strip_import_prefix`, resolved against the repo root."""
    prefix = ctx.attr.strip_import_prefix
    if not paths.is_normalized(prefix):
        fail("should be normalized (without uplevel references or '.' path segments)", attr = "strip_import_prefix")

    if paths.is_absolute(prefix):
        # An absolute prefix is already repo-relative; drop the leading '/'.
        prefix = prefix[1:]
    else:
        # A relative prefix is resolved against the current package.
        prefix = _join(ctx.label.package, prefix)
    return prefix.removesuffix("/")
def _proto_library_impl(ctx):
    # Verifies attributes.
    _check_srcs_package(ctx.label.package, ctx.attr.srcs)
    srcs = ctx.files.srcs
    deps = [dep[ProtoInfo] for dep in ctx.attr.deps]
    exports = [dep[ProtoInfo] for dep in ctx.attr.exports]
    import_prefix = _get_import_prefix(ctx)
    strip_import_prefix = _get_strip_import_prefix(ctx)

    # A proto_library without srcs forwards its deps, which counts as reexporting
    # them, so deps are subject to the allow_exports check too.
    check_for_reexport = deps + exports if not srcs else exports
    _PackageSpecificationInfo = bazel_features.globals.PackageSpecificationInfo
    for proto in check_for_reexport:
        if getattr(proto, "allow_exports", None):
            if not _PackageSpecificationInfo:
                fail("Allowlist checks not supported before Bazel 6.4.0")
            if not proto.allow_exports[_PackageSpecificationInfo].contains(ctx.label):
                fail("proto_library '%s' can't be reexported in package '//%s'" % (proto.direct_descriptor_set.owner, ctx.label.package))

    # Optionally reroots sources under _virtual_imports (for import/strip prefixes).
    proto_path, virtual_srcs = _process_srcs(ctx, srcs, import_prefix, strip_import_prefix)
    descriptor_set = ctx.actions.declare_file(ctx.label.name + "-descriptor-set.proto.bin")
    proto_info = ProtoInfo(
        srcs = virtual_srcs,
        deps = deps,
        descriptor_set = descriptor_set,
        proto_path = proto_path,
        workspace_root = ctx.label.workspace_root,
        bin_dir = ctx.bin_dir.path,
        allow_exports = ctx.attr.allow_exports,
    )

    _write_descriptor_set(ctx, proto_info, deps, exports, descriptor_set)

    # We assume that the proto sources will not have conflicting artifacts
    # with the same root relative path
    data_runfiles = ctx.runfiles(
        files = [proto_info.direct_descriptor_set],
        transitive_files = depset(transitive = [proto_info.transitive_sources]),
    )
    return [
        proto_info,
        DefaultInfo(
            files = depset([proto_info.direct_descriptor_set]),
            default_runfiles = ctx.runfiles(),  # empty
            data_runfiles = data_runfiles,
        ),
    ]
def _process_srcs(ctx, srcs, import_prefix, strip_import_prefix):
    """Returns proto_path and sources, optionally symlinking them to _virtual_imports.

    Returns:
      (str, [File]) A pair of proto_path and virtual_sources.
    """
    uses_virtual_root = import_prefix != "" or strip_import_prefix != ""
    if not uses_virtual_root:
        # No prefix manipulation requested: sources are used where they are.
        return "", srcs

    # A prefix is added or stripped, so stage the sources under _virtual_imports.
    return _symlink_to_virtual_imports(ctx, srcs, import_prefix, strip_import_prefix)
def _join(*path):
    """Joins the non-empty path segments with '/' separators."""
    segments = [segment for segment in path if segment != ""]
    return "/".join(segments)
def _symlink_to_virtual_imports(ctx, srcs, import_prefix, strip_import_prefix):
    """Symlinks srcs to _virtual_imports.

    Args:
      ctx: the rule context.
      srcs: list of File, the .proto sources to stage.
      import_prefix: str, the prefix to prepend to each import path.
      strip_import_prefix: str, normalized prefix to strip from each source path.

    Returns:
      A pair proto_path, directs_sources.
    """
    # All staged sources live under <package>/_virtual_imports/<target name>.
    virtual_imports = _join("_virtual_imports", ctx.label.name)
    proto_path = _join(ctx.label.package, virtual_imports)

    if ctx.label.workspace_name == "":
        full_strip_import_prefix = strip_import_prefix
    else:
        # short_path of files from external repositories starts with "../<repo>".
        full_strip_import_prefix = _join("..", ctx.label.workspace_name, strip_import_prefix)
    if full_strip_import_prefix:
        full_strip_import_prefix += "/"

    virtual_srcs = []
    for src in srcs:
        # Remove strip_import_prefix
        if not src.short_path.startswith(full_strip_import_prefix):
            fail(".proto file '%s' is not under the specified strip prefix '%s'" %
                 (src.short_path, full_strip_import_prefix))
        import_path = src.short_path[len(full_strip_import_prefix):]

        # Add import_prefix
        virtual_src = ctx.actions.declare_file(_join(virtual_imports, import_prefix, import_path))
        ctx.actions.symlink(
            output = virtual_src,
            target_file = src,
            progress_message = "Symlinking virtual .proto sources for %{label}",
        )
        virtual_srcs.append(virtual_src)
    return proto_path, virtual_srcs
def _write_descriptor_set(ctx, proto_info, deps, exports, descriptor_set):
    """Writes descriptor set.

    Args:
      ctx: the rule context.
      proto_info: ProtoInfo of the current target.
      deps: list of ProtoInfo of the direct dependencies.
      exports: list of ProtoInfo of the exported targets.
      descriptor_set: File, the declared output artifact.
    """
    if proto_info.direct_sources == []:
        # Alias library: emit an empty (still valid) descriptor set.
        ctx.actions.write(descriptor_set, "")
        return

    dependencies_descriptor_sets = depset(transitive = [dep.transitive_descriptor_sets for dep in deps])

    args = ctx.actions.args()

    if ctx.attr._experimental_proto_descriptor_sets_include_source_info[BuildSettingInfo].value:
        args.add("--include_source_info")
    if hasattr(ctx.attr, "_retain_options") and ctx.attr._retain_options:
        args.add("--retain_options")

    strict_deps = ctx.attr._strict_proto_deps[BuildSettingInfo].value
    if strict_deps:
        if proto_info.direct_sources:
            strict_importable_sources = depset(
                direct = proto_info._direct_proto_sources,
                transitive = [dep._exported_sources for dep in deps],
            )
        else:
            strict_importable_sources = None
        if strict_importable_sources:
            args.add_joined(
                "--direct_dependencies",
                strict_importable_sources,
                map_each = proto_common.get_import_path,
                join_with = ":",
            )
            # Example: `--direct_dependencies a.proto:b.proto`

        else:
            # The proto compiler requires an empty list to turn on strict deps checking
            args.add("--direct_dependencies=")

        # Set `--direct_dependencies_violation_msg=`
        args.add(ctx.label, format = STRICT_DEPS_FLAG_TEMPLATE)

    strict_imports = ctx.attr._strict_public_imports[BuildSettingInfo].value
    if strict_imports:
        public_import_protos = depset(transitive = [export._exported_sources for export in exports])
        if not public_import_protos:
            # This line is necessary to trigger the check.
            args.add("--allowed_public_imports=")
        else:
            args.add_joined(
                "--allowed_public_imports",
                public_import_protos,
                map_each = proto_common.get_import_path,
                join_with = ":",
            )

    if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        toolchain = ctx.toolchains[toolchains.PROTO_TOOLCHAIN]
        if not toolchain:
            fail("Protocol compiler toolchain could not be resolved.")
        proto_lang_toolchain_info = toolchain.proto
    else:
        # Legacy mode: build an ad-hoc toolchain info around the implicit protoc.
        proto_lang_toolchain_info = proto_common.ProtoLangToolchainInfo(
            out_replacement_format_flag = "--descriptor_set_out=%s",
            output_files = "single",
            mnemonic = "GenProtoDescriptorSet",
            progress_message = "Generating Descriptor Set proto_library %{label}",
            proto_compiler = ctx.executable._proto_compiler,
            protoc_opts = ctx.fragments.proto.experimental_protoc_opts,
            plugin = None,
        )

    proto_common.compile(
        ctx.actions,
        proto_info,
        proto_lang_toolchain_info,
        generated_files = [descriptor_set],
        additional_inputs = dependencies_descriptor_sets,
        additional_args = args,
    )
# Fixes three defects in the user-facing documentation strings:
# "the messages the rule srcs", "you have list the library", and
# "accessible at is the value of this attribute".
proto_library = rule(
    _proto_library_impl,
    # TODO: proto_common docs are missing
    # TODO: ProtoInfo link doesn't work and docs are missing
    doc = """
<p>If using Bazel, please load the rule from <a href="https://github.com/bazelbuild/rules_proto">
https://github.com/bazelbuild/rules_proto</a>.

<p>Use <code>proto_library</code> to define libraries of protocol buffers which
may be used from multiple languages. A <code>proto_library</code> may be listed
in the <code>deps</code> clause of supported rules, such as
<code>java_proto_library</code>.

<p>When compiled on the command-line, a <code>proto_library</code> creates a file
named <code>foo-descriptor-set.proto.bin</code>, which is the descriptor set for
the messages in the rule's srcs. The file is a serialized
<code>FileDescriptorSet</code>, which is described in
<a href="https://developers.google.com/protocol-buffers/docs/techniques#self-description">
https://developers.google.com/protocol-buffers/docs/techniques#self-description</a>.

<p>It only contains information about the <code>.proto</code> files directly
mentioned by a <code>proto_library</code> rule; the collection of transitive
descriptor sets is available through the
<code>[ProtoInfo].transitive_descriptor_sets</code> Starlark provider.
See documentation in <code>proto_info.bzl</code>.

<p>Recommended code organization:
<ul>
<li>One <code>proto_library</code> rule per <code>.proto</code> file.
<li>A file named <code>foo.proto</code> will be in a rule named <code>foo_proto</code>,
which is located in the same package.
<li>A <code>[language]_proto_library</code> that wraps a <code>proto_library</code>
named <code>foo_proto</code> should be called <code>foo_[language]_proto</code>,
and be located in the same package.
</ul>""",
    attrs = {
        "srcs": attr.label_list(
            allow_files = [".proto", ".protodevel"],
            flags = ["DIRECT_COMPILE_TIME_INPUT"],
            # TODO: Should .protodevel be advertised or deprecated?
            doc = """
The list of <code>.proto</code> and <code>.protodevel</code> files that are
processed to create the target. This is usually a non empty list. One usecase
where <code>srcs</code> can be empty is an <i>alias-library</i>. This is a
proto_library rule having one or more other proto_library in <code>deps</code>.
This pattern can be used to e.g. export a public api under a persistent name.""",
        ),
        "deps": attr.label_list(
            providers = [ProtoInfo],
            doc = """
The list of other <code>proto_library</code> rules that the target depends upon.
A <code>proto_library</code> may only depend on other <code>proto_library</code>
targets. It may not depend on language-specific libraries.""",
        ),
        "exports": attr.label_list(
            providers = [ProtoInfo],
            doc = """
List of proto_library targets that can be referenced via "import public" in the
proto source.
It's an error if you use "import public" but do not list the corresponding library
in the exports attribute.
Note that you have to list the library both in deps and exports since not all
lang_proto_library implementations have been changed yet.""",
        ),
        "strip_import_prefix": attr.string(
            default = "/",
            doc = """
The prefix to strip from the paths of the .proto files in this rule.

<p>When set, .proto source files in the <code>srcs</code> attribute of this rule are
accessible at their path with this prefix cut off.

<p>If it's a relative path (not starting with a slash), it's taken as a package-relative
one. If it's an absolute one, it's understood as a repository-relative path.

<p>The prefix in the <code>import_prefix</code> attribute is added after this prefix is
stripped.""",
        ),
        "import_prefix": attr.string(
            doc = """
The prefix to add to the paths of the .proto files in this rule.

<p>When set, the .proto source files in the <code>srcs</code> attribute of this rule are
accessible at the value of this attribute prepended to their repository-relative path.

<p>The prefix in the <code>strip_import_prefix</code> attribute is removed before this
prefix is added.""",
        ),
        "allow_exports": attr.label(
            cfg = "exec",
            providers = [bazel_features.globals.PackageSpecificationInfo] if bazel_features.globals.PackageSpecificationInfo else [],
            doc = """
An optional allowlist that prevents proto library to be reexported or used in
lang_proto_library that is not in one of the listed packages.""",
        ),
        "data": attr.label_list(
            allow_files = True,
            flags = ["SKIP_CONSTRAINTS_OVERRIDE"],
        ),
        # buildifier: disable=attr-license (calling attr.license())
        "licenses": attr.license() if hasattr(attr, "license") else attr.string_list(),
        "_experimental_proto_descriptor_sets_include_source_info": attr.label(
            default = "//bazel/private:experimental_proto_descriptor_sets_include_source_info",
        ),
        "_strict_proto_deps": attr.label(
            default =
                "//bazel/private:strict_proto_deps",
        ),
        "_strict_public_imports": attr.label(
            default = "//bazel/private:strict_public_imports",
        ),
    } | toolchains.if_legacy_toolchain({
        "_proto_compiler": attr.label(
            cfg = "exec",
            executable = True,
            allow_files = True,
            default = configuration_field("proto", "proto_compiler"),
        ),
    }),  # buildifier: disable=attr-licenses (attribute called licenses)
    fragments = ["proto"],
    provides = [ProtoInfo],
    toolchains = toolchains.use_toolchain(toolchains.PROTO_TOOLCHAIN),
)

@ -0,0 +1,49 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""
Toolchain helpers.
The helpers here should be used for a migration to toolchains in proto rules.
Anybody that needs them in another repository should copy them, because after
the migration is finished, the helpers can be removed.
"""
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:native.bzl", "native_proto_common")
_incompatible_toolchain_resolution = getattr(native_proto_common, "INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION", False)
def _find_toolchain(ctx, legacy_attr, toolchain_type):
    """Returns the ProtoLangToolchainInfo, via toolchain resolution or the legacy attribute."""
    if not _incompatible_toolchain_resolution:
        # Legacy mode: the toolchain target is wired in through an implicit attribute.
        return getattr(ctx.attr, legacy_attr)[ProtoLangToolchainInfo]

    resolved = ctx.toolchains[toolchain_type]
    if not resolved:
        fail("No toolchains registered for '%s'." % toolchain_type)
    return resolved.proto
def _use_toolchain(toolchain_type):
    """Returns the (possibly empty) toolchain requirement list for a rule definition."""
    if not _incompatible_toolchain_resolution:
        return []

    # Optional so that rules still analyze when no toolchain is registered.
    return [config_common.toolchain_type(toolchain_type, mandatory = False)]
def _if_legacy_toolchain(legacy_attr_dict):
    """Returns the given attrs when legacy toolchain lookup is active, else an empty dict."""
    return {} if _incompatible_toolchain_resolution else legacy_attr_dict
# Public façade for the migration helpers. PROTO_TOOLCHAIN is the well-known
# toolchain type label used for protoc resolution.
toolchains = struct(
    use_toolchain = _use_toolchain,
    find_toolchain = _find_toolchain,
    if_legacy_toolchain = _if_legacy_toolchain,
    INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = _incompatible_toolchain_resolution,
    PROTO_TOOLCHAIN = "@rules_proto//proto:toolchain_type",
)

@ -2,6 +2,7 @@
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load(":upb_proto_library_internal/cc_library_func.bzl", "cc_library_func")
load(":upb_proto_library_internal/copts.bzl", "UpbProtoLibraryCoptsInfo")

@ -1,3 +1,20 @@
"""proto_library rule"""
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""
Macro of proto_library rule.
"""
proto_library = native.proto_library
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/private:proto_library_rule.bzl", _proto_library = "proto_library")
def proto_library(**kwattrs):
    """Macro wrapper that dispatches to the Starlark or native proto_library rule."""
    # This condition causes Starlark rules to be used only on Bazel >=7.0.0
    if bazel_features.proto.starlark_proto_info:
        _proto_library(**kwattrs)
    else:
        # On older Bazel versions keep using native rules, so that mismatch in ProtoInfo doesn't happen
        native.proto_library(**kwattrs)

@ -0,0 +1,3 @@
load(":proto_common_compile_tests.bzl", "proto_common_compile_test_suite")
proto_common_compile_test_suite(name = "proto_common_compile_test_suite")

@ -0,0 +1,361 @@
"""Tests for `proto_common.compile` function."""
load("@rules_testing//lib:analysis_test.bzl", "analysis_test", "test_suite")
load("@rules_testing//lib:truth.bzl", "matching")
load("@rules_testing//lib:util.bzl", "util")
load("//bazel:proto_library.bzl", "proto_library")
load("//bazel/tests/testdata:compile_rule.bzl", "compile_rule")
protocol_compiler = "/protoc"
def proto_common_compile_test_suite(name):
    """Declares the shared proto_library fixture and aggregates all compile tests."""
    util.helper_target(
        proto_library,
        name = "simple_proto",
        srcs = ["A.proto"],
    )
    test_suite(
        name = name,
        tests = [
            _test_compile_basic,
            _test_compile_noplugin,
            _test_compile_with_plugin_output,
            _test_compile_with_directory_plugin_output,
            _test_compile_additional_args,
            _test_compile_additional_tools,
            _test_compile_additional_tools_no_plugin,
            _test_compile_additional_inputs,
            _test_compile_resource_set,
            _test_compile_protoc_opts,
            _test_compile_direct_generated_protos,
            _test_compile_indirect_generated_protos,
        ],
    )
# Verifies basic usage of `proto_common.compile`.
def _test_compile_basic(name):
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_basic_impl,
    )

def _test_compile_basic_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
    action.mnemonic().equals("MyMnemonic")
# Verifies usage of `proto_common.compile` with no plugin specified by toolchain.
def _test_compile_noplugin(name):
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        toolchain = "//bazel/tests/testdata:toolchain_noplugin",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_noplugin_impl,
    )

def _test_compile_noplugin_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
# Verifies usage of `proto_common.compile` with `plugin_output` parameter set to a file.
def _test_compile_with_plugin_output(name):
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        plugin_output = "single",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_with_plugin_output_impl,
    )

def _test_compile_with_plugin_output_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--java_out=param1,param2:b*-out/*/test_compile_with_plugin_output_compile"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
# Verifies usage of `proto_common.compile` with `plugin_output` parameter set to a directory.
def _test_compile_with_directory_plugin_output(name):
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        plugin_output = "multiple",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_with_directory_plugin_output_impl,
    )

def _test_compile_with_directory_plugin_output_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--java_out=param1,param2:b*-out/*/bin"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
# Verifies usage of `proto_common.compile` with `additional_args` parameter
def _test_compile_additional_args(name):
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_args = ["--a", "--b"],
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_args_impl,
    )

def _test_compile_additional_args_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.equals_wrapper("--a"),
            matching.equals_wrapper("--b"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
# Verifies usage of `proto_common.compile` with `additional_tools` parameter
def _test_compile_additional_tools(name):
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_tools = [
            "//bazel/tests/testdata:_tool1",
            "//bazel/tests/testdata:_tool2",
        ],
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_tools_impl,
    )

def _test_compile_additional_tools_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.inputs().contains_at_least_predicates(
        [
            matching.file_basename_equals("_tool1"),
            matching.file_basename_equals("_tool2"),
            matching.file_basename_equals("plugin"),
        ],
    )
# Verifies usage of `proto_common.compile` with `additional_tools` parameter and no plugin on the toolchain.
def _test_compile_additional_tools_no_plugin(name):
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_tools = [
            "//bazel/tests/testdata:_tool1",
            "//bazel/tests/testdata:_tool2",
        ],
        toolchain = "//bazel/tests/testdata:toolchain_noplugin",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_tools_no_plugin_impl,
    )

def _test_compile_additional_tools_no_plugin_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.inputs().contains_at_least_predicates(
        [
            matching.file_basename_equals("_tool1"),
            matching.file_basename_equals("_tool2"),
        ],
    )
    # The toolchain declares no plugin, so none may appear in the inputs.
    action.inputs().not_contains_predicate(matching.file_basename_equals("plugin"))
# Verifies usage of `proto_common.compile` with `additional_inputs` parameter.
def _test_compile_additional_inputs(name):
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_inputs = ["input1.txt", "input2.txt"],
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_inputs_impl,
    )

def _test_compile_additional_inputs_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.inputs().contains_at_least_predicates(
        [
            matching.file_basename_equals("input1.txt"),
            matching.file_basename_equals("input2.txt"),
        ],
    )
# Verifies usage of `proto_common.compile` with the `resource_set` parameter.
def _test_compile_resource_set(name):
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        use_resource_set = True,
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_resource_set_impl,
    )

def _test_compile_resource_set_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")  # @unused
    # We can't check the specification of the resource set, but we at least verify analysis passes
# Verifies `--protocopt` options are passed to the command line.
def _test_compile_protoc_opts(name):
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        config_settings = {"//command_line_option:protocopt": ["--foo", "--bar"]},
        impl = _test_compile_protoc_opts_impl,
    )

def _test_compile_protoc_opts_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.equals_wrapper("--foo"),
            matching.equals_wrapper("--bar"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
# Verifies `proto_common.compile` correctly handles direct generated `.proto` files.
def _test_compile_direct_generated_protos(name):
    util.helper_target(native.genrule, name = name + "_generate_G", cmd = "", outs = ["G.proto"])
    util.helper_target(
        proto_library,
        name = name + "_directly_generated_proto",
        srcs = ["A.proto", "G.proto"],
    )
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = name + "_directly_generated_proto",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_direct_generated_protos_impl,
    )

def _test_compile_direct_generated_protos_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            # Generated sources add a bin-dir include path.
            matching.str_matches("-Ib*-out/*/*"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
            matching.str_matches("*-out/*/*/*/G.proto"),
        ],
    )
# Verifies `proto_common.compile` correctly handles indirectly generated `.proto` files.
def _test_compile_indirect_generated_protos(name):
    util.helper_target(native.genrule, name = "_generate_h", srcs = ["A.txt"], cmd = "", outs = ["H.proto"])
    util.helper_target(proto_library, name = "_generated_proto", srcs = ["H.proto"])
    util.helper_target(
        proto_library,
        name = name + "_indirectly_generated_proto",
        srcs = ["A.proto"],
        deps = [":_generated_proto"],
    )
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = name + "_indirectly_generated_proto",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_indirect_generated_protos_impl,
    )

def _test_compile_indirect_generated_protos_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            # The generated transitive dep adds a bin-dir include path.
            matching.str_matches("-Ib*-out/*/*"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )

@ -0,0 +1,130 @@
package(default_visibility = ["//visibility:public"])
proto_lang_toolchain(
name = "toolchain",
blacklisted_protos = [":denied"],
command_line = "--java_out=param1,param2:$(OUT)",
mnemonic = "MyMnemonic",
plugin = ":plugin",
plugin_format_flag = "--plugin=%s",
progress_message = "Progress Message %{label}",
runtime = ":runtime",
tags = [
"manual",
"nobuilder",
"notap",
],
)
proto_lang_toolchain(
name = "toolchain_noplugin",
blacklisted_protos = [":denied"],
command_line = "--java_out=param1,param2:$(OUT)",
mnemonic = "MyMnemonic",
progress_message = "Progress Message %{label}",
runtime = ":runtime",
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_binary(
name = "plugin",
srcs = ["plugin.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_library(
name = "runtime",
srcs = ["runtime.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
filegroup(
name = "descriptors",
srcs = [
"descriptor.proto",
"metadata.proto",
],
tags = [
"manual",
"nobuilder",
"notap",
],
)
filegroup(
name = "any",
srcs = ["any.proto"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
filegroup(
name = "something",
srcs = ["something.proto"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
proto_library(
name = "mixed",
srcs = [
":descriptors",
":something",
],
tags = [
"manual",
"nobuilder",
"notap",
],
)
proto_library(
name = "denied",
srcs = [
":any",
":descriptors",
],
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_binary(
name = "_tool1",
srcs = ["tool1.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_binary(
name = "_tool2",
srcs = ["tool2.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)

@ -0,0 +1,50 @@
"""Testing function for proto_common module"""
load("//bazel/common:proto_common.bzl", "proto_common")
def _resource_set_callback(_os, inputs_size):
    """Estimates action resources, scaling memory with the number of inputs."""
    estimated_memory = 25 + 0.15 * inputs_size
    return {"cpu": 1, "memory": estimated_memory}
def _impl(ctx):
    """Invokes proto_common.compile, forwarding the optional attrs as kwargs."""
    outfile = ctx.actions.declare_file(ctx.attr.name)
    kwargs = {}
    if ctx.attr.plugin_output == "single":
        kwargs["plugin_output"] = outfile.path
    elif ctx.attr.plugin_output == "multiple":
        kwargs["plugin_output"] = ctx.bin_dir.path
    elif ctx.attr.plugin_output == "wrong":
        # Deliberately malformed output path, for negative tests.
        kwargs["plugin_output"] = ctx.bin_dir.path + "///"
    if ctx.attr.additional_args:
        additional_args = ctx.actions.args()
        additional_args.add_all(ctx.attr.additional_args)
        kwargs["additional_args"] = additional_args
    if ctx.files.additional_tools:
        kwargs["additional_tools"] = ctx.files.additional_tools
    if ctx.files.additional_inputs:
        kwargs["additional_inputs"] = depset(ctx.files.additional_inputs)
    if ctx.attr.use_resource_set:
        kwargs["resource_set"] = _resource_set_callback
    if ctx.attr.progress_message:
        kwargs["experimental_progress_message"] = ctx.attr.progress_message
    proto_common.compile(
        ctx.actions,
        ctx.attr.proto_dep[ProtoInfo],
        ctx.attr.toolchain[proto_common.ProtoLangToolchainInfo],
        [outfile],
        **kwargs
    )
    return [DefaultInfo(files = depset([outfile]))]
# Test-only rule that exercises proto_common.compile with various parameters.
compile_rule = rule(
    _impl,
    attrs = {
        "proto_dep": attr.label(),
        "plugin_output": attr.string(),  # "single", "multiple", or "wrong"
        "toolchain": attr.label(default = ":toolchain"),
        "additional_args": attr.string_list(),
        "additional_tools": attr.label_list(cfg = "exec"),
        "additional_inputs": attr.label_list(allow_files = True),
        "use_resource_set": attr.bool(),
        "progress_message": attr.string(),
    },
)

@ -1,6 +1,8 @@
"""proto_lang_toolchain rule"""
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/private:proto_lang_toolchain_rule.bzl", _proto_lang_toolchain_rule = "proto_lang_toolchain")
def proto_lang_toolchain(*, name, toolchain_type = None, exec_compatible_with = [], target_compatible_with = [], **attrs):
"""Creates a proto_lang_toolchain and corresponding toolchain target.
@ -21,8 +23,12 @@ def proto_lang_toolchain(*, name, toolchain_type = None, exec_compatible_with =
if getattr(proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False):
attrs["toolchain_type"] = toolchain_type
# buildifier: disable=native-proto
native.proto_lang_toolchain(name = name, **attrs)
# This condition causes Starlark rules to be used only on Bazel >=7.0.0
if bazel_features.proto.starlark_proto_info:
_proto_lang_toolchain_rule(name = name, **attrs)
else:
# On older Bazel versions keep using native rules, so that mismatch in ProtoInfo doesn't happen
native.proto_lang_toolchain(name = name, **attrs)
if toolchain_type:
native.toolchain(

@ -1,6 +1,7 @@
"""upb_c_proto_library() exposes upb's generated C API for protobuf (foo.upb.h)"""
load("//bazel:upb_minitable_proto_library.bzl", "UpbMinitableCcInfo", "upb_minitable_proto_library_aspect")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//bazel/private:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//bazel/private:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")

@ -1,5 +1,6 @@
"""upb_minitable_proto_library() exposes upb's generated minitables (foo.upb_minitable.h)"""
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//bazel/private:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//bazel/private:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")

@ -1,6 +1,8 @@
"""upb_c_proto_reflection_library() exposes upb reflection for protobuf (foo.upbdefs.h)"""
load("//bazel:upb_minitable_proto_library.bzl", "UpbMinitableCcInfo", "upb_minitable_proto_library_aspect")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//bazel/private:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//bazel/private:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")

@ -24,6 +24,10 @@ configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/protobuf.pc.cmake
${CMAKE_CURRENT_BINARY_DIR}/protobuf.pc @ONLY)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/protobuf-lite.pc.cmake
${CMAKE_CURRENT_BINARY_DIR}/protobuf-lite.pc @ONLY)
if (protobuf_BUILD_LIBUPB)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/upb.pc.cmake
${CMAKE_CURRENT_BINARY_DIR}/upb.pc @ONLY)
endif ()
set(_protobuf_libraries libprotobuf-lite libprotobuf)
if (protobuf_BUILD_LIBPROTOC)
@ -72,6 +76,9 @@ if (protobuf_BUILD_PROTOC_BINARIES)
endif (protobuf_BUILD_PROTOC_BINARIES)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/protobuf.pc ${CMAKE_CURRENT_BINARY_DIR}/protobuf-lite.pc DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
if (protobuf_BUILD_LIBUPB)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/upb.pc DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
endif ()
include(${protobuf_SOURCE_DIR}/src/file_lists.cmake)
set(protobuf_HEADERS
@ -122,15 +129,9 @@ set(_install_cmakedir_desc "Directory relative to CMAKE_INSTALL to install the c
set(_build_cmakedir_desc "Directory relative to CMAKE_CURRENT_BINARY_DIR for cmake configuration files")
set(_exampledir_desc "Directory relative to CMAKE_INSTALL_DATA to install examples")
set(_protobuf_subdir_desc "Subdirectory in which to install cmake configuration files")
if(NOT MSVC)
set(protobuf_CMAKE_SUBDIR "cmake/protobuf" CACHE STRING "${_protobuf_subdir_desc}")
set(CMAKE_INSTALL_CMAKEDIR "${CMAKE_INSTALL_LIBDIR}/${protobuf_CMAKE_SUBDIR}" CACHE STRING "${_install_cmakedir_desc}")
set(CMAKE_INSTALL_EXAMPLEDIR "${CMAKE_INSTALL_DATADIR}/protobuf/examples" CACHE STRING "${_exampledir_desc}")
else()
set(protobuf_CMAKE_SUBDIR "cmake" CACHE STRING "${_protobuf_subdir_desc}")
set(CMAKE_INSTALL_CMAKEDIR "cmake" CACHE STRING "${_cmakedir_desc}")
set(CMAKE_INSTALL_EXAMPLEDIR "examples" CACHE STRING "${_exampledir_desc}")
endif()
set(protobuf_CMAKE_SUBDIR "cmake/protobuf" CACHE STRING "${_protobuf_subdir_desc}")
set(CMAKE_INSTALL_CMAKEDIR "${CMAKE_INSTALL_LIBDIR}/${protobuf_CMAKE_SUBDIR}" CACHE STRING "${_install_cmakedir_desc}")
set(CMAKE_INSTALL_EXAMPLEDIR "${CMAKE_INSTALL_DATADIR}/protobuf/examples" CACHE STRING "${_exampledir_desc}")
set(CMAKE_BUILD_CMAKEDIR "${CMAKE_CURRENT_BINARY_DIR}/${protobuf_CMAKE_SUBDIR}" CACHE STRING "${_build_cmakedir_desc}")
mark_as_advanced(protobuf_CMAKE_SUBDIR)
mark_as_advanced(CMAKE_BUILD_CMAKEDIR)

@ -0,0 +1,10 @@
prefix=@CMAKE_INSTALL_PREFIX@
exec_prefix=@CMAKE_INSTALL_PREFIX@
libdir=@CMAKE_INSTALL_FULL_LIBDIR@
includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@
Name: Protocol Buffers
Description: Google's Data Interchange Format
Version: @protobuf_VERSION@
Libs: -L${libdir} -lupb @CMAKE_THREAD_LIBS_INIT@
Cflags: -I${includedir}

@ -1810,7 +1810,7 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::
const std::string type_name =
UpperCase(absl::StrCat(".", FieldDescriptor::TypeName(type)));
const FieldDescriptor* field = GetFieldForType(type, true, Packed::kFalse);
std::string field_name = field->name();
const absl::string_view field_name = field->name();
std::string message_field =
absl::StrCat("\"", field_name, "\": [", field_value, "]");

@ -5,7 +5,7 @@
<title>Google Protocol Buffers tools</title>
<summary>Tools for Protocol Buffers - Google's data interchange format.</summary>
<description>See project site for more info.</description>
<version>3.28.0</version>
<version>3.29.0</version>
<authors>Google Inc.</authors>
<owners>protobuf-packages</owners>
<licenseUrl>https://github.com/protocolbuffers/protobuf/blob/main/LICENSE</licenseUrl>

@ -5,7 +5,7 @@
<Description>C# runtime library for Protocol Buffers - Google's data interchange format.</Description>
<Copyright>Copyright 2015, Google Inc.</Copyright>
<AssemblyTitle>Google Protocol Buffers</AssemblyTitle>
<VersionPrefix>3.28.0</VersionPrefix>
<VersionPrefix>3.29.0</VersionPrefix>
<LangVersion>10.0</LangVersion>
<Authors>Google Inc.</Authors>
<TargetFrameworks>netstandard1.1;netstandard2.0;net45;net50</TargetFrameworks>
@ -43,6 +43,7 @@
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net45' OR '$(TargetFramework)' == 'netstandard1.1' ">
<!-- TODO -->
<PackageReference Include="System.Memory" Version="4.5.3"/>
</ItemGroup>

@ -8,6 +8,7 @@ bzl_library(
name = "defaults",
srcs = ["defaults.bzl"],
visibility = ["//visibility:public"],
deps = ["//bazel/common:proto_info_bzl"],
)
# Aggregate all the features owned by the Protobuf repo.

@ -1,7 +1,7 @@
load("//bazel:cc_proto_library.bzl", "cc_proto_library")
exports_files(
glob(["*.proto"]) + ["rust_bazel_crate_mapping.txt"],
glob(["*.proto"]),
visibility = [
"//editions:__pkg__",
],

@ -1,27 +0,0 @@
proto3_implicit_proto
1
third_party/protobuf/editions/codegen_tests/proto3_implicit.proto
proto2_optional_proto
1
third_party/protobuf/editions/codegen_tests/proto2_optional.proto
proto3_enum_proto
1
third_party/protobuf/editions/codegen_tests/proto3_enum.proto
struct
1
google/protobuf/struct.proto
wrappers
1
google/protobuf/wrappers.proto
duration
1
google/protobuf/duration.proto
timestamp
1
google/protobuf/timestamp.proto
field_mask
1
google/protobuf/field_mask.proto
any
1
google/protobuf/any.proto

@ -4,13 +4,14 @@
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""
Provide a rule for generating the intermediate feature set defaults used for feature resolution.
See go/life-of-a-featureset for more information.
"""
load("//bazel/common:proto_info.bzl", "ProtoInfo")
def _compile_edition_defaults_impl(ctx):
out_file = ctx.actions.declare_file(ctx.outputs.output.basename)
sources = []

@ -16,23 +16,23 @@
(0 | ::_fl::kFcOptional | ::_fl::kInt32)},
}},
@@ @@
(void)cached_has_bits;
(void)cached_has_bits;
cached_has_bits = _impl_._has_bits_[0];
- // optional int32 int32_field = 1;
+ // int32 int32_field = 1;
if (cached_has_bits & 0x00000001u) {
target = ::proto2::internal::WireFormatLite::
WriteInt32ToArrayWithField<1>(
cached_has_bits = this_._impl_._has_bits_[0];
- // optional int32 int32_field = 1;
+ // int32 int32_field = 1;
if (cached_has_bits & 0x00000001u) {
target = ::proto2::internal::WireFormatLite::
WriteInt32ToArrayWithField<1>(
@@ @@
(void)cached_has_bits;
(void)cached_has_bits;
{
- // optional int32 int32_field = 1;
+ // int32 int32_field = 1;
cached_has_bits = _impl_._has_bits_[0];
if (cached_has_bits & 0x00000001u) {
total_size += ::_pbi::WireFormatLite::Int32SizePlusOne(
{
- // optional int32 int32_field = 1;
+ // int32 int32_field = 1;
cached_has_bits =
this_._impl_._has_bits_[0];
if (cached_has_bits & 0x00000001u) {
[ FAILED ] third_party/protobuf/editions/golden/simple_proto3.pb.cc
[ RUN ] third_party/protobuf/editions/golden/simple_proto3.pb.h
@@ @@

@ -2,10 +2,10 @@
<testsuites tests="1" name="AllTests">
<testsuite name="EditionsCodegenTests">
<testcase name="third_party/protobuf/editions/golden/simple_proto3.pb.cc" status="run" result="completed" classname="DiffTest">
<failure message="Value of: third_party/protobuf/editions/golden/simple_proto3.pb.cc&#x0A;Expected: &#x0A;// Generated by the protocol buffer compiler. DO NOT EDIT!&#x0A;// NO CHECKED-IN PROTOBUF GENCODE&#x0A;// source: third_party/protobuf/editions/golden/simple_proto3.proto&#x0A;// Protobuf C++ Version: 0.20240628.0&#x0A;&#x0A;#include &quot;third_party/protobuf/editions/golden/simple_proto3.pb.h&quot;&#x0A;&#x0A;#include &lt;algorithm&gt;&#x0A;#include &lt;type_traits&gt;&#x0A;#include &quot;third_party/protobuf/io/coded_stream.h&quot;&#x0A;#include &quot;third_party/protobuf/generated_message_tctable_impl.h&quot;&#x0A;#include &quot;third_party/protobuf/extension_set.h&quot;&#x0A;#include &quot;third_party/protobuf/wire_format_lite.h&quot;&#x0A;#include &quot;third_party/protobuf/io/zero_copy_stream_impl_lite.h&quot;&#x0A;// @@protoc_insertion_point(includes)&#x0A;&#x0A;// Must be included last.&#x0A;, with the difference:&#x0A;@@ @@&#x0A; ::_pbi::TcParser::GetTable&lt;::protobuf_editions_test::golden::SimpleProto3&gt;(), // to_prefetch&#x0A; #endif // PROTOBUF_PREFETCH_PARSE_TABLE&#x0A; }, {{&#x0A;- // optional int32 int32_field = 1;&#x0A;+ // int32 int32_field = 1;&#x0A; {::_pbi::TcParser::FastV32S1,&#x0A; {8, 0, 0, PROTOBUF_FIELD_OFFSET(SimpleProto3, _impl_.int32_field_)}},&#x0A; }}, {{&#x0A; 65535, 65535&#x0A; }}, {{&#x0A;- // optional int32 int32_field = 1;&#x0A;+ // int32 int32_field = 1;&#x0A; {PROTOBUF_FIELD_OFFSET(SimpleProto3, _impl_.int32_field_), _Internal::kHasBitsOffset + 0, 0,&#x0A; (0 | ::_fl::kFcOptional | ::_fl::kInt32)},&#x0A; }},&#x0A;@@ @@&#x0A; (void)cached_has_bits;&#x0A; &#x0A; cached_has_bits = _impl_._has_bits_[0];&#x0A;- // optional int32 int32_field = 1;&#x0A;+ // int32 int32_field = 1;&#x0A; if (cached_has_bits &amp; 0x00000001u) {&#x0A; target = ::proto2::internal::WireFormatLite::&#x0A; WriteInt32ToArrayWithField&lt;1&gt;(&#x0A;@@ @@&#x0A; (void)cached_has_bits;&#x0A; &#x0A; {&#x0A;- // optional int32 int32_field = 1;&#x0A;+ // 
int32 int32_field = 1;&#x0A; cached_has_bits = _impl_._has_bits_[0];&#x0A; if (cached_has_bits &amp; 0x00000001u) {&#x0A; total_size += ::_pbi::WireFormatLite::Int32SizePlusOne(" type=""></failure>
<failure message="Value of: third_party/protobuf/editions/golden/simple_proto3.pb.cc&#x0A;Expected: &#x0A;// Generated by the protocol buffer compiler. DO NOT EDIT!&#x0A;// NO CHECKED-IN PROTOBUF GENCODE&#x0A;// source: third_party/protobuf/editions/golden/simple_proto3.proto&#x0A;// Protobuf C++ Version: 0.20240712.0&#x0A;&#x0A;#include &quot;third_party/protobuf/editions/golden/simple_proto3.pb.h&quot;&#x0A;&#x0A;#include &lt;algorithm&gt;&#x0A;#include &lt;type_traits&gt;&#x0A;#include &quot;third_party/protobuf/io/coded_stream.h&quot;&#x0A;#include &quot;third_party/protobuf/generated_message_tctable_impl.h&quot;&#x0A;#include &quot;third_party/protobuf/extension_set.h&quot;&#x0A;#include &quot;third_party/protobuf/wire_format_lite.h&quot;&#x0A;#include &quot;third_party/protobuf/io/zero_copy_stream_impl_lite.h&quot;&#x0A;// @@protoc_insertion_point(includes)&#x0A;&#x0A;// Must be included last.&#x0A;, with the difference:&#x0A;@@ @@&#x0A; ::_pbi::TcParser::GetTable&lt;::protobuf_editions_test::golden::SimpleProto3&gt;(), // to_prefetch&#x0A; #endif // PROTOBUF_PREFETCH_PARSE_TABLE&#x0A; }, {{&#x0A;- // optional int32 int32_field = 1;&#x0A;+ // int32 int32_field = 1;&#x0A; {::_pbi::TcParser::FastV32S1,&#x0A; {8, 0, 0, PROTOBUF_FIELD_OFFSET(SimpleProto3, _impl_.int32_field_)}},&#x0A; }}, {{&#x0A; 65535, 65535&#x0A; }}, {{&#x0A;- // optional int32 int32_field = 1;&#x0A;+ // int32 int32_field = 1;&#x0A; {PROTOBUF_FIELD_OFFSET(SimpleProto3, _impl_.int32_field_), _Internal::kHasBitsOffset + 0, 0,&#x0A; (0 | ::_fl::kFcOptional | ::_fl::kInt32)},&#x0A; }},&#x0A;@@ @@&#x0A; (void)cached_has_bits;&#x0A; &#x0A; cached_has_bits = this_._impl_._has_bits_[0];&#x0A;- // optional int32 int32_field = 1;&#x0A;+ // int32 int32_field = 1;&#x0A; if (cached_has_bits &amp; 0x00000001u) {&#x0A; target = ::proto2::internal::WireFormatLite::&#x0A; WriteInt32ToArrayWithField&lt;1&gt;(&#x0A;@@ @@&#x0A; (void)cached_has_bits;&#x0A; &#x0A; {&#x0A;- // optional int32 int32_field = 1;&#x0A;+ 
// int32 int32_field = 1;&#x0A; cached_has_bits =&#x0A; this_._impl_._has_bits_[0];&#x0A; if (cached_has_bits &amp; 0x00000001u) {" type=""></failure>
</testcase>
<testcase name="third_party/protobuf/editions/golden/simple_proto3.pb.h" status="run" result="completed" classname="DiffTest">
<failure message="Value of: third_party/protobuf/editions/golden/simple_proto3.pb.h&#x0A;Expected: &#x0A;// Generated by the protocol buffer compiler. DO NOT EDIT!&#x0A;// NO CHECKED-IN PROTOBUF GENCODE&#x0A;// source: third_party/protobuf/editions/golden/simple_proto3.proto&#x0A;// Protobuf C++ Version: 0.20240628.0&#x0A;&#x0A;#ifndef GOOGLE_PROTOBUF_INCLUDED_third_5fparty_2fprotobuf_2feditions_2fgolden_2fsimple_5fproto3_2eproto_2epb_2eh&#x0A;#define GOOGLE_PROTOBUF_INCLUDED_third_5fparty_2fprotobuf_2feditions_2fgolden_2fsimple_5fproto3_2eproto_2epb_2eh&#x0A;&#x0A;#include &lt;limits&gt;&#x0A;#include &lt;string&gt;&#x0A;#include &lt;type_traits&gt;&#x0A;#include &lt;utility&gt;&#x0A;&#x0A;#include &quot;third_party/protobuf/runtime_version.h&quot;&#x0A;#if PROTOBUF_VERSION != 20240628&#x0A;#error &quot;Protobuf C++ gencode is built with an incompatible version of&quot;&#x0A;#error &quot;Protobuf C++ headers/runtime. See&quot;&#x0A;#error &quot;https://protobuf.dev/support/cross-version-runtime-guarantee/#cpp&quot;&#x0A;#endif&#x0A;#include &quot;third_party/protobuf/io/coded_stream.h&quot;&#x0A;#include &quot;third_party/protobuf/arena.h&quot;&#x0A;#include &quot;third_party/protobuf/arenastring.h&quot;&#x0A;#include &quot;third_party/protobuf/generated_message_tctable_decl.h&quot;&#x0A;#include &quot;third_party/protobuf/generated_message_util.h&quot;&#x0A;#include &quot;third_party/protobuf/metadata_lite.h&quot;&#x0A;#include &quot;third_party/protobuf/message_lite.h&quot;&#x0A;// @@protoc_insertion_point(includes)&#x0A;&#x0A;// Must be included last.&#x0A;&#x0A;#endif // GOOGLE_PROTOBUF_INCLUDED_third_5fparty_2fprotobuf_2feditions_2fgolden_2fsimple_5fproto3_2eproto_2epb_2eh&#x0A;, with the difference:&#x0A;@@ @@&#x0A; enum : int {&#x0A; kInt32FieldFieldNumber = 1,&#x0A; };&#x0A;- // optional int32 int32_field = 1;&#x0A;+ // int32 int32_field = 1;&#x0A; bool has_int32_field() const;&#x0A; void clear_int32_field() ;&#x0A; ::int32_t int32_field() const;&#x0A;@@ 
@@&#x0A; &#x0A; // SimpleProto3&#x0A; &#x0A;-// optional int32 int32_field = 1;&#x0A;+// int32 int32_field = 1;&#x0A; inline bool SimpleProto3::has_int32_field() const {&#x0A; bool value = (_impl_._has_bits_[0] &amp; 0x00000001u) != 0;&#x0A; return value;" type=""></failure>
<failure message="Value of: third_party/protobuf/editions/golden/simple_proto3.pb.h&#x0A;Expected: &#x0A;// Generated by the protocol buffer compiler. DO NOT EDIT!&#x0A;// NO CHECKED-IN PROTOBUF GENCODE&#x0A;// source: third_party/protobuf/editions/golden/simple_proto3.proto&#x0A;// Protobuf C++ Version: 0.20240712.0&#x0A;&#x0A;#ifndef GOOGLE_PROTOBUF_INCLUDED_third_5fparty_2fprotobuf_2feditions_2fgolden_2fsimple_5fproto3_2eproto_2epb_2eh&#x0A;#define GOOGLE_PROTOBUF_INCLUDED_third_5fparty_2fprotobuf_2feditions_2fgolden_2fsimple_5fproto3_2eproto_2epb_2eh&#x0A;&#x0A;#include &lt;limits&gt;&#x0A;#include &lt;string&gt;&#x0A;#include &lt;type_traits&gt;&#x0A;#include &lt;utility&gt;&#x0A;&#x0A;#include &quot;third_party/protobuf/runtime_version.h&quot;&#x0A;#if PROTOBUF_VERSION != 20240712&#x0A;#error &quot;Protobuf C++ gencode is built with an incompatible version of&quot;&#x0A;#error &quot;Protobuf C++ headers/runtime. See&quot;&#x0A;#error &quot;https://protobuf.dev/support/cross-version-runtime-guarantee/#cpp&quot;&#x0A;#endif&#x0A;#include &quot;third_party/protobuf/io/coded_stream.h&quot;&#x0A;#include &quot;third_party/protobuf/arena.h&quot;&#x0A;#include &quot;third_party/protobuf/arenastring.h&quot;&#x0A;#include &quot;third_party/protobuf/generated_message_tctable_decl.h&quot;&#x0A;#include &quot;third_party/protobuf/generated_message_util.h&quot;&#x0A;#include &quot;third_party/protobuf/metadata_lite.h&quot;&#x0A;#include &quot;third_party/protobuf/message_lite.h&quot;&#x0A;// @@protoc_insertion_point(includes)&#x0A;&#x0A;// Must be included last.&#x0A;&#x0A;#endif // GOOGLE_PROTOBUF_INCLUDED_third_5fparty_2fprotobuf_2feditions_2fgolden_2fsimple_5fproto3_2eproto_2epb_2eh&#x0A;, with the difference:&#x0A;@@ @@&#x0A; enum : int {&#x0A; kInt32FieldFieldNumber = 1,&#x0A; };&#x0A;- // optional int32 int32_field = 1;&#x0A;+ // int32 int32_field = 1;&#x0A; bool has_int32_field() const;&#x0A; void clear_int32_field() ;&#x0A; ::int32_t int32_field() const;&#x0A;@@ 
@@&#x0A; &#x0A; // SimpleProto3&#x0A; &#x0A;-// optional int32 int32_field = 1;&#x0A;+// int32 int32_field = 1;&#x0A; inline bool SimpleProto3::has_int32_field() const {&#x0A; bool value = (_impl_._has_bits_[0] &amp; 0x00000001u) != 0;&#x0A; return value;" type=""></failure>
</testcase>
<testcase name="third_party/protobuf/editions/golden/simple_proto3.proto.static_reflection.h" status="run" result="completed" classname="DiffTest">
</testcase>

@ -31,7 +31,7 @@ cc_library(
visibility = ["//visibility:public"],
deps = [
":hpb",
":protos_traits",
":traits",
"//upb:base",
"//upb:mem",
"//upb:message",
@ -44,16 +44,16 @@ cc_library(
cc_library(
name = "hpb",
srcs = [
"protos.cc",
"hpb.cc",
],
hdrs = [
"protos.h",
"hpb.h",
],
compatible_with = ["//buildenv/target:non_prod"],
copts = UPB_DEFAULT_CPPOPTS,
visibility = ["//visibility:public"],
deps = [
":protos_extension_lock",
":extension_lock",
"//upb:base",
"//upb:mem",
"//upb:message",
@ -71,9 +71,9 @@ cc_library(
# Internally used type traits.
cc_library(
name = "protos_traits",
name = "traits",
hdrs = [
"protos_traits.h",
"traits.h",
],
compatible_with = ["//buildenv/target:non_prod"],
copts = UPB_DEFAULT_CPPOPTS,
@ -82,8 +82,8 @@ cc_library(
)
cc_library(
name = "protos_internal",
hdrs = ["protos_internal.h"],
name = "internal",
hdrs = ["internal.h"],
compatible_with = ["//buildenv/target:non_prod"],
copts = UPB_DEFAULT_CPPOPTS,
visibility = ["//upb:friends"],
@ -99,10 +99,10 @@ cc_library(
)
cc_library(
name = "protos_extension_lock",
srcs = ["protos_extension_lock.cc"],
name = "extension_lock",
srcs = ["extension_lock.cc"],
hdrs = [
"protos_extension_lock.h",
"extension_lock.h",
],
compatible_with = ["//buildenv/target:non_prod"],
copts = UPB_DEFAULT_CPPOPTS,
@ -118,14 +118,14 @@ cc_library(
cc_library(
name = "generated_protos_support__only_for_generated_code_do_not_use__i_give_permission_to_break_me",
hdrs = [
"protos_internal.h",
"internal.h",
],
compatible_with = ["//buildenv/target:non_prod"],
copts = UPB_DEFAULT_CPPOPTS,
visibility = ["//visibility:public"],
deps = [
":hpb",
":protos_internal",
":internal",
":repeated_field",
"//upb:mem",
"//upb:message",
@ -133,13 +133,13 @@ cc_library(
)
cc_test(
name = "protos_internal_test",
srcs = ["protos_internal_test.cc"],
name = "internal_test",
srcs = ["internal_test.cc"],
copts = UPB_DEFAULT_CPPOPTS,
deps = [
":protos_internal",
"//hpb_generator/tests:test_model_upb_cc_proto",
"//hpb_generator/tests:test_model_upb_proto",
":internal",
"//src/google/protobuf/compiler/hpb/tests:test_model_upb_cc_proto",
"//src/google/protobuf/compiler/hpb/tests:test_model_upb_proto",
"//upb:mem",
"@com_google_googletest//:gtest",
"@com_google_googletest//:gtest_main",
@ -157,18 +157,21 @@ cc_test(
srcs = ["repeated_field_iterator_test.cc"],
deps = [
":repeated_field",
"//hpb",
"//upb:message",
"@com_google_absl//absl/strings:string_view",
"@com_google_googletest//:gtest",
"@com_google_googletest//:gtest_main",
],
)
cc_test(
name = "protos_extension_lock_test",
srcs = ["protos_extension_lock_test.cc"],
name = "extension_lock_test",
srcs = ["extension_lock_test.cc"],
deps = [
"//hpb",
"//hpb:protos_extension_lock",
"//hpb_generator/tests:test_model_upb_cc_proto",
"//hpb:extension_lock",
"//src/google/protobuf/compiler/hpb/tests:test_model_upb_cc_proto",
"//upb:mem",
"@com_google_absl//absl/hash",
"@com_google_absl//absl/log:absl_check",

@ -235,12 +235,12 @@ def _upb_cc_proto_library_aspect_impl(target, ctx):
_upb_cc_proto_library_aspect = aspect(
attrs = {
"_ccopts": attr.label(
default = "//protos:upb_cc_proto_library_copts__for_generated_code_only_do_not_use",
default = "//hpb:upb_cc_proto_library_copts__for_generated_code_only_do_not_use",
),
"_gen_upbprotos": attr.label(
executable = True,
cfg = "exec",
default = "//hpb_generator:protoc-gen-upb-protos",
default = "//src/google/protobuf/compiler/hpb:protoc-gen-upb-protos",
),
"_protoc": attr.label(
executable = True,

@ -5,12 +5,12 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "google/protobuf/hpb/protos_extension_lock.h"
#include "google/protobuf/hpb/extension_lock.h"
#include <atomic>
namespace protos::internal {
namespace hpb::internal {
std::atomic<UpbExtensionLocker> upb_extension_locker_global;
} // namespace protos::internal
} // namespace hpb::internal

@ -10,7 +10,7 @@
#include <atomic>
namespace protos::internal {
namespace hpb::internal {
// TODO: Temporary locking api for cross-language
// concurrency issue around extension api that uses lazy promotion
@ -26,6 +26,6 @@ using UpbExtensionLocker = UpbExtensionUnlocker (*)(const void*);
// TODO: Expose as function instead of global.
extern std::atomic<UpbExtensionLocker> upb_extension_locker_global;
} // namespace protos::internal
} // namespace hpb::internal
#endif // PROTOBUF_HPB_EXTENSION_LOCK_H_

@ -5,7 +5,7 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "google/protobuf/hpb/protos_extension_lock.h"
#include "google/protobuf/hpb/extension_lock.h"
#include <atomic>
#include <mutex>
@ -16,8 +16,8 @@
#include <gtest/gtest.h>
#include "absl/hash/hash.h"
#include "absl/log/absl_check.h"
#include "hpb_generator/tests/test_model.upb.proto.h"
#include "google/protobuf/hpb/protos.h"
#include "google/protobuf/compiler/hpb/tests/test_model.upb.proto.h"
#include "google/protobuf/hpb/hpb.h"
#include "upb/mem/arena.hpp"
#ifndef ASSERT_OK
@ -27,7 +27,7 @@
#define EXPECT_OK(x) EXPECT_TRUE(x.ok())
#endif // EXPECT_OK
namespace protos_generator::test::protos {
namespace hpb_unittest::protos {
namespace {
@ -49,14 +49,14 @@ std::string GenerateTestData() {
std::mutex m[8];
void unlock_func(const void* msg) { m[absl::HashOf(msg) & 0x7].unlock(); }
::protos::internal::UpbExtensionUnlocker lock_func(const void* msg) {
::hpb::internal::UpbExtensionUnlocker lock_func(const void* msg) {
m[absl::HashOf(msg) & 0x7].lock();
return &unlock_func;
}
void TestConcurrentExtensionAccess(::protos::ExtensionRegistry registry) {
::protos::internal::upb_extension_locker_global.store(
&lock_func, std::memory_order_release);
::hpb::internal::upb_extension_locker_global.store(&lock_func,
std::memory_order_release);
const std::string payload = GenerateTestData();
TestModel parsed_model =
::protos::Parse<TestModel>(payload, registry).value();
@ -102,8 +102,6 @@ void TestConcurrentExtensionAccess(::protos::ExtensionRegistry registry) {
test_theme();
test_theme_extension();
}
#ifndef _MSC_VER
// TODO Re-enable this once github runner issue is resolved.
TEST(CppGeneratedCode, ConcurrentAccessDoesNotRaceBothLazy) {
::upb::Arena arena;
@ -122,7 +120,5 @@ TEST(CppGeneratedCode, ConcurrentAccessDoesNotRaceBothEager) {
{{&theme, &ThemeExtension::theme_extension}, arena});
}
#endif // _MSC_VER
} // namespace
} // namespace protos_generator::test::protos
} // namespace hpb_unittest::protos

@ -5,7 +5,7 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "google/protobuf/hpb/protos.h"
#include "google/protobuf/hpb/hpb.h"
#include <atomic>
#include <cstddef>
@ -14,7 +14,7 @@
#include "absl/status/statusor.h"
#include "absl/strings/str_format.h"
#include "absl/strings/string_view.h"
#include "google/protobuf/hpb/protos_extension_lock.h"
#include "google/protobuf/hpb/extension_lock.h"
#include "upb/mem/arena.h"
#include "upb/message/accessors.h"
#include "upb/message/copy.h"
@ -83,6 +83,10 @@ absl::Status MessageDecodeError(upb_DecodeStatus status, SourceLocation loc
namespace internal {
using ::hpb::internal::upb_extension_locker_global;
using ::hpb::internal::UpbExtensionLocker;
using ::hpb::internal::UpbExtensionUnlocker;
upb_ExtensionRegistry* GetUpbExtensions(
const ExtensionRegistry& extension_registry) {
return extension_registry.registry_;

@ -8,11 +8,13 @@
#ifndef PROTOBUF_HPB_HPB_H_
#define PROTOBUF_HPB_HPB_H_
#include <cstdint>
#include <type_traits>
#include <vector>
#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "absl/strings/string_view.h"
#include "upb/base/status.hpp"
#include "upb/mem/arena.hpp"
#include "upb/message/copy.h"
@ -20,9 +22,12 @@
#include "upb/wire/decode.h"
#include "upb/wire/encode.h"
namespace protos {
namespace hpb {
using Arena = ::upb::Arena;
}
namespace protos {
using hpb::Arena;
class ExtensionRegistry;
template <typename T>
@ -78,6 +83,10 @@ class Ptr final {
Proxy<T> p_;
};
// Suppress -Wctad-maybe-unsupported with our manual deduction guide
template <typename T>
Ptr(T* m) -> Ptr<T>;
inline absl::string_view UpbStrToStringView(upb_StringView str) {
return absl::string_view(str.data, str.size);
}
@ -141,6 +150,10 @@ struct PrivateAccess {
static auto CreateMessage(upb_Arena* arena) {
return typename T::Proxy(upb_Message_New(T::minitable(), arena), arena);
}
template <typename ExtensionId>
static constexpr uint32_t GetExtensionNumber(const ExtensionId& id) {
return id.number();
}
};
template <typename T>
@ -193,6 +206,12 @@ class ExtensionIdentifier : public ExtensionMiniTableProvider {
constexpr explicit ExtensionIdentifier(
const upb_MiniTableExtension* mini_table_ext)
: ExtensionMiniTableProvider(mini_table_ext) {}
private:
constexpr uint32_t number() const {
return upb_MiniTableExtension_Number(mini_table_ext());
}
friend class PrivateAccess;
};
template <typename T>
@ -262,6 +281,14 @@ template <typename T, typename U = RemovePtrT<T>,
typename = std::enable_if_t<!std::is_const_v<U>>>
using PtrOrRaw = T;
template <typename T>
using EnableIfHpbClass = std::enable_if_t<
std::is_base_of<typename T::Access, T>::value &&
std::is_base_of<typename T::Access, typename T::ExtendableType>::value>;
template <typename T>
using EnableIfMutableProto = std::enable_if_t<!std::is_const<T>::value>;
} // namespace internal
template <typename T>
@ -331,16 +358,8 @@ class ExtensionRegistry {
upb_ExtensionRegistry* registry_;
};
template <typename T>
using EnableIfProtosClass = std::enable_if_t<
std::is_base_of<typename T::Access, T>::value &&
std::is_base_of<typename T::Access, typename T::ExtendableType>::value>;
template <typename T>
using EnableIfMutableProto = std::enable_if_t<!std::is_const<T>::value>;
template <typename T, typename Extendee, typename Extension,
typename = EnableIfProtosClass<T>>
typename = internal::EnableIfHpbClass<T>>
ABSL_MUST_USE_RESULT bool HasExtension(
Ptr<T> message,
const ::protos::internal::ExtensionIdentifier<Extendee, Extension>& id) {
@ -349,15 +368,16 @@ ABSL_MUST_USE_RESULT bool HasExtension(
}
template <typename T, typename Extendee, typename Extension,
typename = EnableIfProtosClass<T>>
typename = internal::EnableIfHpbClass<T>>
ABSL_MUST_USE_RESULT bool HasExtension(
const T* message,
const ::protos::internal::ExtensionIdentifier<Extendee, Extension>& id) {
return HasExtension(protos::Ptr(message), id);
}
template <typename T, typename Extension, typename = EnableIfProtosClass<T>,
typename = EnableIfMutableProto<T>>
template <typename T, typename Extension,
typename = internal::EnableIfHpbClass<T>,
typename = internal::EnableIfMutableProto<T>>
void ClearExtension(
Ptr<T> message,
const ::protos::internal::ExtensionIdentifier<T, Extension>& id) {
@ -366,15 +386,17 @@ void ClearExtension(
id.mini_table_ext());
}
template <typename T, typename Extension, typename = EnableIfProtosClass<T>>
template <typename T, typename Extension,
typename = internal::EnableIfHpbClass<T>>
void ClearExtension(
T* message,
const ::protos::internal::ExtensionIdentifier<T, Extension>& id) {
ClearExtension(::protos::Ptr(message), id);
}
template <typename T, typename Extension, typename = EnableIfProtosClass<T>,
typename = EnableIfMutableProto<T>>
template <typename T, typename Extension,
typename = internal::EnableIfHpbClass<T>,
typename = internal::EnableIfMutableProto<T>>
absl::Status SetExtension(
Ptr<T> message,
const ::protos::internal::ExtensionIdentifier<T, Extension>& id,
@ -386,8 +408,9 @@ absl::Status SetExtension(
internal::GetInternalMsg(&value));
}
template <typename T, typename Extension, typename = EnableIfProtosClass<T>,
typename = EnableIfMutableProto<T>>
template <typename T, typename Extension,
typename = internal::EnableIfHpbClass<T>,
typename = internal::EnableIfMutableProto<T>>
absl::Status SetExtension(
Ptr<T> message,
const ::protos::internal::ExtensionIdentifier<T, Extension>& id,
@ -399,8 +422,9 @@ absl::Status SetExtension(
internal::GetInternalMsg(value));
}
template <typename T, typename Extension, typename = EnableIfProtosClass<T>,
typename = EnableIfMutableProto<T>>
template <typename T, typename Extension,
typename = internal::EnableIfHpbClass<T>,
typename = internal::EnableIfMutableProto<T>>
absl::Status SetExtension(
Ptr<T> message,
const ::protos::internal::ExtensionIdentifier<T, Extension>& id,
@ -414,14 +438,16 @@ absl::Status SetExtension(
internal::GetInternalMsg(&ext), extension_arena);
}
template <typename T, typename Extension, typename = EnableIfProtosClass<T>>
template <typename T, typename Extension,
typename = internal::EnableIfHpbClass<T>>
absl::Status SetExtension(
T* message, const ::protos::internal::ExtensionIdentifier<T, Extension>& id,
const Extension& value) {
return ::protos::SetExtension(::protos::Ptr(message), id, value);
}
template <typename T, typename Extension, typename = EnableIfProtosClass<T>>
template <typename T, typename Extension,
typename = internal::EnableIfHpbClass<T>>
absl::Status SetExtension(
T* message, const ::protos::internal::ExtensionIdentifier<T, Extension>& id,
Extension&& value) {
@ -429,7 +455,8 @@ absl::Status SetExtension(
std::forward<Extension>(value));
}
template <typename T, typename Extension, typename = EnableIfProtosClass<T>>
template <typename T, typename Extension,
typename = internal::EnableIfHpbClass<T>>
absl::Status SetExtension(
T* message, const ::protos::internal::ExtensionIdentifier<T, Extension>& id,
Ptr<Extension> value) {
@ -437,7 +464,7 @@ absl::Status SetExtension(
}
template <typename T, typename Extendee, typename Extension,
typename = EnableIfProtosClass<T>>
typename = internal::EnableIfHpbClass<T>>
absl::StatusOr<Ptr<const Extension>> GetExtension(
Ptr<T> message,
const ::protos::internal::ExtensionIdentifier<Extendee, Extension>& id) {
@ -455,7 +482,7 @@ absl::StatusOr<Ptr<const Extension>> GetExtension(
}
template <typename T, typename Extendee, typename Extension,
typename = EnableIfProtosClass<T>>
typename = internal::EnableIfHpbClass<T>>
absl::StatusOr<Ptr<const Extension>> GetExtension(
const T* message,
const ::protos::internal::ExtensionIdentifier<Extendee, Extension>& id) {
@ -559,6 +586,12 @@ absl::StatusOr<absl::string_view> Serialize(Ptr<T> message, upb::Arena& arena,
::protos::internal::GetMiniTable(message), arena.ptr(), options);
}
template <typename T, typename Extension>
constexpr uint32_t ExtensionNumber(
internal::ExtensionIdentifier<T, Extension> id) {
return internal::PrivateAccess::GetExtensionNumber(id);
}
} // namespace protos
#endif // PROTOBUF_HPB_HPB_H_

@ -11,7 +11,7 @@
#include "upb/mem/arena.h"
#include "upb/message/message.h"
namespace protos::internal {
namespace hpb::internal {
// Moves ownership of a message created in a source arena.
//
@ -21,5 +21,5 @@ T MoveMessage(upb_Message* msg, upb_Arena* arena) {
return T(msg, arena);
}
} // namespace protos::internal
} // namespace hpb::internal
#endif

@ -5,33 +5,32 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "google/protobuf/hpb/protos_internal.h"
#include "google/protobuf/hpb/internal.h"
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "hpb_generator/tests/test_model.upb.h"
#include "hpb_generator/tests/test_model.upb.proto.h"
#include "google/protobuf/compiler/hpb/tests/test_model.upb.h"
#include "google/protobuf/compiler/hpb/tests/test_model.upb.proto.h"
#include "upb/mem/arena.h"
namespace protos::testing {
namespace hpb::testing {
namespace {
using ::protos_generator::test::protos::TestModel;
using ::hpb_unittest::protos::TestModel;
TEST(CppGeneratedCode, InternalMoveMessage) {
// Generate message (simulating message created in another VM/language)
upb_Arena* source_arena = upb_Arena_New();
protos_generator_test_TestModel* message =
protos_generator_test_TestModel_new(source_arena);
hpb_unittest_TestModel* message = hpb_unittest_TestModel_new(source_arena);
ASSERT_NE(message, nullptr);
protos_generator_test_TestModel_set_int_value_with_default(message, 123);
hpb_unittest_TestModel_set_int_value_with_default(message, 123);
// Move ownership.
TestModel model = protos::internal::MoveMessage<TestModel>(
(upb_Message*)message, source_arena);
TestModel model = hpb::internal::MoveMessage<TestModel>((upb_Message*)message,
source_arena);
// Now that we have moved ownership, free original arena.
upb_Arena_Free(source_arena);
EXPECT_EQ(model.int_value_with_default(), 123);
}
} // namespace
} // namespace protos::testing
} // namespace hpb::testing

@ -15,9 +15,9 @@
#include <type_traits>
#include "absl/strings/string_view.h"
#include "google/protobuf/hpb/protos.h"
#include "google/protobuf/hpb/protos_traits.h"
#include "google/protobuf/hpb/hpb.h"
#include "google/protobuf/hpb/repeated_field_iterator.h"
#include "google/protobuf/hpb/traits.h"
#include "upb/base/string_view.h"
#include "upb/mem/arena.h"
#include "upb/message/array.h"
@ -37,7 +37,7 @@ namespace internal {
// upb_Array* for the message when the RepeatedFieldProxy is constructed.
template <class T>
class RepeatedFieldProxyBase {
using Array = add_const_if_T_is_const<T, upb_Array>;
using Array = hpb::internal::add_const_if_T_is_const<T, upb_Array>;
public:
explicit RepeatedFieldProxyBase(Array* arr, upb_Arena* arena)

@ -14,7 +14,7 @@
#include <type_traits>
#include "absl/strings/string_view.h"
#include "google/protobuf/hpb/protos.h"
#include "google/protobuf/hpb/hpb.h"
#include "upb/base/string_view.h"
#include "upb/mem/arena.h"
#include "upb/message/array.h"

@ -17,6 +17,9 @@
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "absl/strings/string_view.h"
#include "google/protobuf/hpb/hpb.h"
#include "upb/message/array.h"
using ::testing::ElementsAre;
@ -45,13 +48,13 @@ struct IteratorTestPeer {
}
template <typename T>
static StringRef<T> MakeStringRefProxy(upb_Array* arr, protos::Arena& arena) {
static StringRef<T> MakeStringRefProxy(upb_Array* arr, hpb::Arena& arena) {
return StringRef<T>({arr, arena.ptr(), 0});
}
template <typename T>
static StringIterator<T> MakeStringIterator(upb_Array* arr,
protos::Arena& arena) {
hpb::Arena& arena) {
return StringIterator<T>({arr, arena.ptr()});
}
};
@ -225,12 +228,12 @@ TEST(ScalarIteratorTest, IteratorBasedAlgorithmsWork) {
EXPECT_THAT(v, ElementsAre(10, 12, 14, 16, 18, 11, 13, 15, 17, 19));
}
const char* CloneString(protos::Arena& arena, absl::string_view str) {
const char* CloneString(hpb::Arena& arena, absl::string_view str) {
char* data = (char*)upb_Arena_Malloc(arena.ptr(), str.size());
memcpy(data, str.data(), str.size());
return data;
}
upb_Array* MakeStringArray(protos::Arena& arena,
upb_Array* MakeStringArray(hpb::Arena& arena,
const std::vector<std::string>& input) {
upb_Array* arr = upb_Array_New(arena.ptr(), kUpb_CType_String);
for (absl::string_view str : input) {
@ -243,7 +246,7 @@ upb_Array* MakeStringArray(protos::Arena& arena,
}
TEST(StringReferenceTest, BasicOperationsWork) {
protos::Arena arena;
hpb::Arena arena;
upb_Array* arr = MakeStringArray(arena, {""});
auto read = [&] {
@ -289,7 +292,7 @@ TEST(StringReferenceTest, BasicOperationsWork) {
}
TEST(StringReferenceTest, AssignmentAndSwap) {
protos::Arena arena;
hpb::Arena arena;
upb_Array* arr1 = MakeStringArray(arena, {"ABC"});
upb_Array* arr2 = MakeStringArray(arena, {"DEF"});
@ -309,7 +312,7 @@ TEST(StringReferenceTest, AssignmentAndSwap) {
}
template <typename T>
void TestStringIterator(protos::Arena& arena, upb_Array* array) {
void TestStringIterator(hpb::Arena& arena, upb_Array* array) {
StringIterator<T> it = IteratorTestPeer::MakeStringIterator<T>(array, arena);
// Copy
auto it2 = it;
@ -349,7 +352,7 @@ void TestStringIterator(protos::Arena& arena, upb_Array* array) {
}
TEST(StringIteratorTest, BasicOperationsWork) {
protos::Arena arena;
hpb::Arena arena;
auto* array = MakeStringArray(
arena, {"10", "11", "12", "13", "14", "15", "16", "17", "18", "19"});
TestStringIterator<const absl::string_view>(arena, array);
@ -357,7 +360,7 @@ TEST(StringIteratorTest, BasicOperationsWork) {
}
TEST(StringIteratorTest, Convertibility) {
protos::Arena arena;
hpb::Arena arena;
auto* array = MakeStringArray(
arena, {"10", "11", "12", "13", "14", "15", "16", "17", "18", "19"});
StringIterator<absl::string_view> it =
@ -381,7 +384,7 @@ TEST(StringIteratorTest, Convertibility) {
}
TEST(StringIteratorTest, MutabilityOnlyWorksOnMutable) {
protos::Arena arena;
hpb::Arena arena;
auto* array = MakeStringArray(
arena, {"10", "11", "12", "13", "14", "15", "16", "17", "18", "19"});
StringIterator<absl::string_view> it =
@ -403,7 +406,7 @@ TEST(StringIteratorTest, MutabilityOnlyWorksOnMutable) {
}
TEST(StringIteratorTest, IteratorReferenceInteraction) {
protos::Arena arena;
hpb::Arena arena;
auto* array = MakeStringArray(
arena, {"10", "11", "12", "13", "14", "15", "16", "17", "18", "19"});
StringIterator<absl::string_view> it =
@ -415,7 +418,7 @@ TEST(StringIteratorTest, IteratorReferenceInteraction) {
}
TEST(StringIteratorTest, IteratorBasedAlgorithmsWork) {
protos::Arena arena;
hpb::Arena arena;
auto* array = MakeStringArray(
arena, {"10", "11", "12", "13", "14", "15", "16", "17", "18", "19"});
StringIterator<absl::string_view> it =

@ -2,7 +2,7 @@
#define PROTOBUF_HPB_REQUIRES_H_
#include <type_traits>
namespace protos::internal {
namespace hpb::internal {
// Ports C++20 `requires` to C++17.
// C++20 ideal:
// if constexpr (requires { t.foo(); }) { ... }
@ -12,6 +12,6 @@ template <typename... T, typename F>
constexpr bool Requires(F) {
return std::is_invocable_v<F, T...>;
}
} // namespace protos::internal
} // namespace hpb::internal
#endif // PROTOBUF_HPB_REQUIRES_H_

@ -10,12 +10,12 @@
#include <type_traits>
namespace protos::internal {
namespace hpb::internal {
template <typename T, typename T2>
using add_const_if_T_is_const =
std::conditional_t<std::is_const_v<T>, const T2, T2>;
} // namespace protos::internal
} // namespace hpb::internal
#endif // PROTOBUF_HPB_TRAITS_H_

@ -29,7 +29,9 @@ cc_binary(
":names",
":output",
"//:protobuf",
"//src/google/protobuf",
"//src/google/protobuf/compiler:code_generator",
"//src/google/protobuf/compiler:plugin",
"//upb_generator:file_layout",
],
)
@ -56,6 +58,7 @@ cc_library(
":names",
":output",
"//:protobuf",
"//src/google/protobuf",
"//upb_generator:common",
"//upb_generator:file_layout",
"//upb_generator:keywords",
@ -85,6 +88,7 @@ cc_library(
visibility = ["//visibility:public"],
deps = [
"//:protobuf",
"//src/google/protobuf",
"//src/google/protobuf/compiler:code_generator",
"@com_google_absl//absl/strings",
],
@ -99,5 +103,6 @@ cc_library(
":output",
"//src/google/protobuf",
"//upb_generator:keywords",
"@com_google_absl//absl/strings:string_view",
],
)

@ -1,8 +1,8 @@
`protos` Generator
hpb Generator
==================
This directory contains the generator for the [`protos`
API](https://github.com/protocolbuffers/protobuf/tree/main/protos), an
This directory contains the generator for the [`hpb`
API](https://github.com/protocolbuffers/protobuf/tree/main/hpb), an
experimental C++ protobuf implementation. Most users should use the standard
C++ implementation
[here](https://github.com/protocolbuffers/protobuf/tree/main/src).

@ -5,7 +5,7 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "hpb_generator/gen_accessors.h"
#include "google/protobuf/compiler/hpb/gen_accessors.h"
#include <string>
@ -14,17 +14,17 @@
#include "absl/strings/str_cat.h"
#include "absl/strings/string_view.h"
#include "google/protobuf/descriptor.h"
#include "hpb_generator/gen_repeated_fields.h"
#include "hpb_generator/gen_utils.h"
#include "hpb_generator/names.h"
#include "hpb_generator/output.h"
#include "google/protobuf/compiler/hpb/gen_repeated_fields.h"
#include "google/protobuf/compiler/hpb/gen_utils.h"
#include "google/protobuf/compiler/hpb/names.h"
#include "google/protobuf/compiler/hpb/output.h"
#include "upb_generator/common.h"
#include "upb_generator/keywords.h"
#include "upb_generator/names.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
using NameToFieldDescriptorMap =
absl::flat_hash_map<absl::string_view, const protobuf::FieldDescriptor*>;
@ -566,4 +566,5 @@ std::string ResolveFieldName(const protobuf::FieldDescriptor* field,
return upb::generator::ResolveKeywordConflict(std::string(field_name));
}
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator

@ -5,16 +5,16 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#ifndef UPB_PROTOS_GENERATOR_ACCESSORS_H_
#define UPB_PROTOS_GENERATOR_ACCESSORS_H_
#ifndef PROTOBUF_COMPILER_HBP_GEN_ACCESSORS_H_
#define PROTOBUF_COMPILER_HBP_GEN_ACCESSORS_H_
#include "google/protobuf/descriptor.h"
#include "hpb_generator/gen_utils.h"
#include "hpb_generator/output.h"
#include "google/protobuf/compiler/hpb/gen_utils.h"
#include "google/protobuf/compiler/hpb/output.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
void WriteFieldAccessorsInHeader(const protobuf::Descriptor* desc,
Output& output);
@ -23,6 +23,7 @@ void WriteUsingAccessorsInHeader(const protobuf::Descriptor* desc,
MessageClassType handle_type, Output& output);
void WriteOneofAccessorsInHeader(const protobuf::Descriptor* desc,
Output& output);
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator
#endif // UPB_PROTOS_GENERATOR_ACCESSORS_H_
#endif // PROTOBUF_COMPILER_HBP_GEN_ACCESSORS_H_

@ -5,7 +5,7 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "hpb_generator/gen_enums.h"
#include "google/protobuf/compiler/hpb/gen_enums.h"
#include <algorithm>
#include <limits>
@ -14,12 +14,12 @@
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/descriptor.h"
#include "hpb_generator/gen_utils.h"
#include "hpb_generator/names.h"
#include "google/protobuf/compiler/hpb/gen_utils.h"
#include "google/protobuf/compiler/hpb/names.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
// Convert enum value to C++ literal.
//
@ -118,4 +118,5 @@ void WriteHeaderEnumForwardDecls(
}
}
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator

@ -5,15 +5,15 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#ifndef UPB_PROTOS_GENERATOR_ENUMS_H_
#define UPB_PROTOS_GENERATOR_ENUMS_H_
#ifndef PROTOBUF_COMPILER_HBP_GEN_ENUMS_H_
#define PROTOBUF_COMPILER_HBP_GEN_ENUMS_H_
#include "google/protobuf/descriptor.h"
#include "hpb_generator/output.h"
#include "google/protobuf/compiler/hpb/output.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
std::string EnumTypeName(const protobuf::EnumDescriptor* enum_descriptor);
std::string EnumValueSymbolInNameSpace(
@ -24,6 +24,7 @@ void WriteHeaderEnumForwardDecls(
void WriteEnumDeclarations(
const std::vector<const protobuf::EnumDescriptor*>& enums, Output& output);
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator
#endif // UPB_PROTOS_GENERATOR_ENUMS_H_
#endif // PROTOBUF_COMPILER_HBP_GEN_ENUMS_H_

@ -5,15 +5,15 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "hpb_generator/gen_extensions.h"
#include "google/protobuf/compiler/hpb/gen_extensions.h"
#include "absl/strings/str_cat.h"
#include "hpb_generator/gen_utils.h"
#include "hpb_generator/names.h"
#include "google/protobuf/compiler/hpb/gen_utils.h"
#include "google/protobuf/compiler/hpb/names.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
std::string ExtensionIdentifierBase(const protobuf::FieldDescriptor* ext) {
assert(ext->is_extension());
@ -91,4 +91,5 @@ void WriteExtensionIdentifiers(
}
}
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator

@ -5,15 +5,15 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#ifndef UPB_PROTOS_GENERATOR_GEN_EXTENSIONS_H_
#define UPB_PROTOS_GENERATOR_GEN_EXTENSIONS_H_
#ifndef PROTOBUF_COMPILER_HBP_GEN_EXTENSIONS_H_
#define PROTOBUF_COMPILER_HBP_GEN_EXTENSIONS_H_
#include "google/protobuf/descriptor.h"
#include "hpb_generator/output.h"
#include "google/protobuf/compiler/hpb/output.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
void WriteExtensionIdentifiersHeader(
const std::vector<const protobuf::FieldDescriptor*>& extensions,
@ -26,6 +26,7 @@ void WriteExtensionIdentifiers(
void WriteExtensionIdentifier(const protobuf::FieldDescriptor* ext,
Output& output);
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator
#endif // UPB_PROTOS_GENERATOR_GEN_EXTENSIONS_H_
#endif // PROTOBUF_COMPILER_HBP_GEN_EXTENSIONS_H_

@ -5,7 +5,7 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "hpb_generator/gen_messages.h"
#include "google/protobuf/compiler/hpb/gen_messages.h"
#include <cstddef>
#include <string>
@ -16,18 +16,18 @@
#include "absl/strings/str_cat.h"
#include "absl/strings/string_view.h"
#include "google/protobuf/descriptor.h"
#include "hpb_generator/gen_accessors.h"
#include "hpb_generator/gen_enums.h"
#include "hpb_generator/gen_extensions.h"
#include "hpb_generator/gen_utils.h"
#include "hpb_generator/names.h"
#include "hpb_generator/output.h"
#include "google/protobuf/compiler/hpb/gen_accessors.h"
#include "google/protobuf/compiler/hpb/gen_enums.h"
#include "google/protobuf/compiler/hpb/gen_extensions.h"
#include "google/protobuf/compiler/hpb/gen_utils.h"
#include "google/protobuf/compiler/hpb/names.h"
#include "google/protobuf/compiler/hpb/output.h"
#include "upb_generator/common.h"
#include "upb_generator/file_layout.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
void WriteModelAccessDeclaration(const protobuf::Descriptor* descriptor,
Output& output);
@ -254,8 +254,7 @@ void WriteModelPublicDeclaration(
int options));
friend upb_Arena* ::protos::internal::GetArena<$0>($0* message);
friend upb_Arena* ::protos::internal::GetArena<$0>(::protos::Ptr<$0> message);
friend $0(::protos::internal::MoveMessage<$0>(upb_Message* msg,
upb_Arena* arena));
friend $0(::hpb::internal::MoveMessage<$0>(upb_Message* msg, upb_Arena* arena));
)cc",
ClassName(descriptor), MessageName(descriptor),
QualifiedClassName(descriptor));
@ -537,4 +536,5 @@ void WriteUsingEnumsInHeader(
}
}
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator

@ -5,14 +5,14 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#ifndef UPB_PROTOS_GENERATOR_GEN_MESSAGES_H_
#define UPB_PROTOS_GENERATOR_GEN_MESSAGES_H_
#ifndef PROTOBUF_COMPILER_HBP_GEN_MESSAGES_H_
#define PROTOBUF_COMPILER_HBP_GEN_MESSAGES_H_
#include "google/protobuf/descriptor.h"
#include "hpb_generator/output.h"
#include "google/protobuf/compiler/hpb/output.h"
namespace protos_generator {
namespace protobuf = ::google::protobuf;
namespace google::protobuf::hpb_generator {
namespace protobuf = ::proto2;
void WriteMessageClassDeclarations(
const protobuf::Descriptor* descriptor,
@ -23,6 +23,7 @@ void WriteMessageImplementation(
const protobuf::Descriptor* descriptor,
const std::vector<const protobuf::FieldDescriptor*>& file_exts,
Output& output);
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator
#endif // UPB_PROTOS_GENERATOR_GEN_MESSAGES_H_
#endif // PROTOBUF_COMPILER_HBP_GEN_MESSAGES_H_

@ -5,7 +5,7 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "hpb_generator/gen_repeated_fields.h"
#include "google/protobuf/compiler/hpb/gen_repeated_fields.h"
#include <string>
#include <vector>
@ -13,18 +13,19 @@
#include "google/protobuf/descriptor.pb.h"
#include "absl/strings/string_view.h"
#include "google/protobuf/descriptor.h"
#include "hpb_generator/gen_accessors.h"
#include "hpb_generator/gen_enums.h"
#include "hpb_generator/gen_extensions.h"
#include "hpb_generator/gen_utils.h"
#include "hpb_generator/names.h"
#include "hpb_generator/output.h"
#include "google/protobuf/compiler/hpb/gen_accessors.h"
#include "google/protobuf/compiler/hpb/gen_enums.h"
#include "google/protobuf/compiler/hpb/gen_extensions.h"
#include "google/protobuf/compiler/hpb/gen_utils.h"
#include "google/protobuf/compiler/hpb/names.h"
#include "google/protobuf/compiler/hpb/output.h"
#include "upb_generator/common.h"
#include "upb_generator/file_layout.h"
#include "upb_generator/names.h"
namespace protos_generator {
namespace protobuf = ::google::protobuf;
namespace google::protobuf::hpb_generator {
namespace protobuf = ::proto2;
// Adds using accessors to reuse base Access class members from a Proxy/CProxy.
void WriteRepeatedFieldUsingAccessors(const protobuf::FieldDescriptor* field,
@ -322,4 +323,5 @@ void WriteRepeatedScalarAccessor(const protobuf::Descriptor* message,
);
}
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator

@ -5,15 +5,16 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#ifndef THIRD_PARTY_UPB_PROTOS_GENERATOR_GEN_REPEATED_FIELDS_H_
#define THIRD_PARTY_UPB_PROTOS_GENERATOR_GEN_REPEATED_FIELDS_H_
#ifndef PROTOBUF_COMPILER_HBP_GEN_REPEATED_FIELDS_H_
#define PROTOBUF_COMPILER_HBP_GEN_REPEATED_FIELDS_H_
#include "absl/strings/string_view.h"
#include "google/protobuf/descriptor.h"
#include "hpb_generator/output.h"
#include "google/protobuf/compiler/hpb/output.h"
namespace protos_generator {
namespace protobuf = ::google::protobuf;
namespace google::protobuf::hpb_generator {
namespace protobuf = ::proto2;
void WriteRepeatedFieldUsingAccessors(const protobuf::FieldDescriptor* field,
absl::string_view class_name,
@ -41,6 +42,7 @@ void WriteRepeatedScalarAccessor(const protobuf::Descriptor* message,
absl::string_view resolved_field_name,
absl::string_view class_name, Output& output);
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator
#endif // THIRD_PARTY_UPB_PROTOS_GENERATOR_GEN_REPEATED_FIELDS_H_
#endif // PROTOBUF_COMPILER_HBP_GEN_REPEATED_FIELDS_H_

@ -5,17 +5,18 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "hpb_generator/gen_utils.h"
#include "google/protobuf/compiler/hpb/gen_utils.h"
#include <algorithm>
#include <string>
#include <vector>
#include "absl/strings/ascii.h"
#include "absl/strings/string_view.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
void AddEnums(const protobuf::Descriptor* message,
std::vector<const protobuf::EnumDescriptor*>* enums) {
@ -102,7 +103,7 @@ std::vector<const protobuf::FieldDescriptor*> FieldNumberOrder(
return fields;
}
std::string ToCamelCase(const std::string& input, bool lower_first) {
std::string ToCamelCase(const absl::string_view input, bool lower_first) {
bool capitalize_next = !lower_first;
std::string result;
result.reserve(input.size());
@ -126,4 +127,5 @@ std::string ToCamelCase(const std::string& input, bool lower_first) {
return result;
}
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator

@ -5,19 +5,20 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#ifndef UPB_PROTOS_GENERATOR_GEN_UTILS_H_
#define UPB_PROTOS_GENERATOR_GEN_UTILS_H_
#ifndef PROTOBUF_COMPILER_HBP_GEN_UTILS_H_
#define PROTOBUF_COMPILER_HBP_GEN_UTILS_H_
#include <string>
#include <vector>
#include "google/protobuf/descriptor.pb.h"
#include "absl/strings/string_view.h"
#include "google/protobuf/compiler/code_generator.h"
#include "google/protobuf/descriptor.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
enum class MessageClassType {
kMessage,
@ -38,8 +39,9 @@ std::vector<const protobuf::FieldDescriptor*> SortedExtensions(
std::vector<const protobuf::FieldDescriptor*> FieldNumberOrder(
const protobuf::Descriptor* message);
std::string ToCamelCase(const std::string& input, bool lower_first);
std::string ToCamelCase(absl::string_view input, bool lower_first);
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator
#endif // UPB_PROTOS_GENERATOR_GEN_UTILS_H_
#endif // PROTOBUF_COMPILER_HBP_GEN_UTILS_H_

@ -5,15 +5,15 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "hpb_generator/names.h"
#include "google/protobuf/compiler/hpb/names.h"
#include <string>
#include "absl/strings/string_view.h"
#include "upb_generator/keywords.h"
namespace protos_generator {
namespace protobuf = ::google::protobuf;
namespace google::protobuf::hpb_generator {
namespace protobuf = ::proto2;
namespace {
@ -22,11 +22,11 @@ std::string NamespaceFromPackageName(absl::string_view package_name) {
"::protos");
}
std::string DotsToColons(const std::string& name) {
std::string DotsToColons(const absl::string_view name) {
return absl::StrReplaceAll(name, {{".", "::"}});
}
std::string Namespace(const std::string& package) {
std::string Namespace(const absl::string_view package) {
if (package.empty()) return "";
return "::" + DotsToColons(package);
}
@ -175,4 +175,5 @@ std::string MessageProxyType(const protobuf::FieldDescriptor* field,
"Proxy";
}
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator

@ -5,19 +5,19 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#ifndef UPB_PROTOS_GENERATOR_NAMES_H_
#define UPB_PROTOS_GENERATOR_NAMES_H_
#ifndef PROTOBUF_COMPILER_HBP_GEN_NAMES_H_
#define PROTOBUF_COMPILER_HBP_GEN_NAMES_H_
#include <string>
#include "google/protobuf/descriptor.pb.h"
#include "hpb_generator/output.h"
#include "google/protobuf/compiler/hpb/output.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
inline constexpr absl::string_view kNoPackageNamePrefix = "protos_";
inline constexpr absl::string_view kNoPackageNamePrefix = "hpb_";
std::string ClassName(const protobuf::Descriptor* descriptor);
std::string QualifiedClassName(const protobuf::Descriptor* descriptor);
@ -45,6 +45,7 @@ std::string MessageCProxyType(const protobuf::FieldDescriptor* field,
std::string MessageProxyType(const protobuf::FieldDescriptor* field,
bool is_const);
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator
#endif // UPB_PROTOS_GENERATOR_NAMES_H_
#endif // PROTOBUF_COMPILER_HBP_GEN_NAMES_H_

@ -5,16 +5,16 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "hpb_generator/output.h"
#include "google/protobuf/compiler/hpb/output.h"
#include <string>
#include "absl/strings/str_replace.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
namespace {
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
} // namespace
@ -66,4 +66,5 @@ std::string CSourceFilename(const google::protobuf::FileDescriptor* file) {
return StripExtension(file->name()) + ".upb.c";
}
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator

@ -5,8 +5,8 @@
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#ifndef UPB_PROTOS_GENERATOR_OUTPUT_H
#define UPB_PROTOS_GENERATOR_OUTPUT_H
#ifndef PROTOBUF_COMPILER_HBP_OUTPUT_H_
#define PROTOBUF_COMPILER_HBP_OUTPUT_H_
#include <vector>
@ -16,7 +16,7 @@
#include "google/protobuf/descriptor.h"
#include "google/protobuf/io/zero_copy_stream.h"
namespace protos_generator {
namespace google::protobuf::hpb_generator {
class Output {
public:
@ -146,6 +146,7 @@ std::string FileLayoutName(const google::protobuf::FileDescriptor* file);
std::string CHeaderFilename(const google::protobuf::FileDescriptor* file);
std::string CSourceFilename(const google::protobuf::FileDescriptor* file);
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator
#endif // UPB_PROTOS_GENERATOR_OUTPUT_H
#endif // PROTOBUF_COMPILER_HBP_OUTPUT_H_

@ -15,18 +15,18 @@
#include "google/protobuf/compiler/code_generator.h"
#include "google/protobuf/compiler/plugin.h"
#include "google/protobuf/descriptor.h"
#include "hpb_generator/gen_enums.h"
#include "hpb_generator/gen_extensions.h"
#include "hpb_generator/gen_messages.h"
#include "hpb_generator/gen_utils.h"
#include "hpb_generator/names.h"
#include "hpb_generator/output.h"
namespace protos_generator {
#include "google/protobuf/compiler/hpb/gen_enums.h"
#include "google/protobuf/compiler/hpb/gen_extensions.h"
#include "google/protobuf/compiler/hpb/gen_messages.h"
#include "google/protobuf/compiler/hpb/gen_utils.h"
#include "google/protobuf/compiler/hpb/names.h"
#include "google/protobuf/compiler/hpb/output.h"
namespace google::protobuf::hpb_generator {
namespace {
namespace protoc = ::google::protobuf::compiler;
namespace protobuf = ::google::protobuf;
namespace protobuf = ::proto2;
using FileDescriptor = ::google::protobuf::FileDescriptor;
using google::protobuf::Edition;
@ -265,9 +265,10 @@ void WriteHeaderMessageForwardDecls(const protobuf::FileDescriptor* file,
}
} // namespace
} // namespace protos_generator
} // namespace protobuf
} // namespace google::hpb_generator
int main(int argc, char** argv) {
protos_generator::Generator generator_cc;
google::protobuf::hpb_generator::Generator generator_cc;
return google::protobuf::compiler::PluginMain(argc, argv, &generator_cc);
}

@ -12,12 +12,12 @@
# "cc_proto_library",
# )
# load(
# "//bazel:upb_proto_library.bzl",
# "upb_c_proto_library",
# "//hpb/bazel:upb_cc_proto_library.bzl",
# "upb_cc_proto_library",
# )
# load(
# "//protos/bazel:upb_cc_proto_library.bzl",
# "upb_cc_proto_library",
# "//bazel:upb_proto_library.bzl",
# "upb_c_proto_library",
# )
# load(
# "//upb/bazel:build_defs.bzl",
@ -154,10 +154,11 @@
# "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main",
# "@com_google_absl//absl/status:statusor",
# "@com_google_absl//absl/strings",
# "//hpb:requires",
# "//hpb",
# "//hpb:repeated_field",
# "//protos",
# "//protos:requires",
# "//upb:mem",
# "//protos:repeated_field",
# ],
# )
# end:google_only

@ -7,9 +7,9 @@
syntax = "proto3";
package protos_generator.test;
package hpb_unittest;
import public "hpb_generator/tests/test_enum.proto";
import public "google/protobuf/compiler/hpb/tests/test_enum.proto";
message ChildModel1 {
optional bool child_b1 = 44;

@ -7,7 +7,7 @@
syntax = "proto3";
package protos_generator.test;
package hpb_unittest;
// option java_multiple_files = true;

@ -7,7 +7,7 @@
syntax = "proto3";
package protos_generator.test;
package hpb_unittest;
message HasChildCount {
optional HasChildCount has_child_count = 1;

@ -7,9 +7,9 @@
syntax = "proto2";
package protos_generator.tests;
package hpb_unittest;
import "hpb_generator/tests/no_package.proto";
import "google/protobuf/compiler/hpb/tests/no_package.proto";
// option java_multiple_files = true;

@ -7,7 +7,7 @@
syntax = "proto3";
package protos_generator.test;
package hpb_unittest;
enum TestEnum {
DEVICE_UNKNOWN = 0;

@ -7,9 +7,9 @@
syntax = "proto2";
package protos_generator.test.someotherpackage;
package hpb_unittest.someotherpackage;
import "hpb_generator/tests/test_model.proto";
import "google/protobuf/compiler/hpb/tests/test_model.proto";
// Define extension that is extending proto outside this package with a type
// defined in different file.

@ -18,32 +18,32 @@
#include <gtest/gtest.h>
#include "absl/status/statusor.h"
#include "absl/strings/string_view.h"
#include "hpb_generator/tests/child_model.upb.proto.h"
#include "hpb_generator/tests/no_package.upb.proto.h"
#include "hpb_generator/tests/test_model.upb.proto.h"
#include "protos/protos.h"
#include "protos/repeated_field.h"
#include "protos/repeated_field_iterator.h"
#include "protos/requires.h"
#include "google/protobuf/compiler/hpb/tests/child_model.upb.proto.h"
#include "google/protobuf/compiler/hpb/tests/no_package.upb.proto.h"
#include "google/protobuf/compiler/hpb/tests/test_extension.upb.proto.h"
#include "google/protobuf/compiler/hpb/tests/test_model.upb.proto.h"
#include "google/protobuf/hpb/hpb.h"
#include "google/protobuf/hpb/repeated_field.h"
#include "google/protobuf/hpb/requires.h"
#include "upb/mem/arena.h"
#include "upb/mem/arena.hpp"
namespace {
using ::protos::internal::Requires;
using ::protos_generator::test::protos::ChildModel1;
using ::protos_generator::test::protos::container_ext;
using ::protos_generator::test::protos::ContainerExtension;
using ::protos_generator::test::protos::other_ext;
using ::protos_generator::test::protos::RED;
using ::protos_generator::test::protos::TestEnum;
using ::protos_generator::test::protos::TestModel;
using ::protos_generator::test::protos::TestModel_Category;
using ::protos_generator::test::protos::TestModel_Category_IMAGES;
using ::protos_generator::test::protos::TestModel_Category_NEWS;
using ::protos_generator::test::protos::TestModel_Category_VIDEO;
using ::protos_generator::test::protos::theme;
using ::protos_generator::test::protos::ThemeExtension;
using ::hpb::internal::Requires;
using ::hpb_unittest::protos::ChildModel1;
using ::hpb_unittest::protos::container_ext;
using ::hpb_unittest::protos::ContainerExtension;
using ::hpb_unittest::protos::other_ext;
using ::hpb_unittest::protos::RED;
using ::hpb_unittest::protos::TestEnum;
using ::hpb_unittest::protos::TestModel;
using ::hpb_unittest::protos::TestModel_Category;
using ::hpb_unittest::protos::TestModel_Category_IMAGES;
using ::hpb_unittest::protos::TestModel_Category_NEWS;
using ::hpb_unittest::protos::TestModel_Category_VIDEO;
using ::hpb_unittest::protos::theme;
using ::hpb_unittest::protos::ThemeExtension;
using ::testing::ElementsAre;
TEST(CppGeneratedCode, Constructor) { TestModel test_model; }
@ -54,7 +54,7 @@ TEST(CppGeneratedCode, ImportedEnum) { EXPECT_EQ(3, TestEnum::DEVICE_MONITOR); }
TEST(CppGeneratedCode, Enum) { EXPECT_EQ(1, RED); }
TEST(CppGeneratedCode, EnumNoPackage) { EXPECT_EQ(1, ::protos_CELSIUS); }
TEST(CppGeneratedCode, EnumNoPackage) { EXPECT_EQ(1, ::hpb_CELSIUS); }
TEST(CppGeneratedCode, MessageEnumType) {
TestModel_Category category1 = TestModel_Category_IMAGES;
@ -67,13 +67,13 @@ TEST(CppGeneratedCode, MessageEnumValue) {
}
TEST(CppGeneratedCode, ArenaConstructor) {
::protos::Arena arena;
::hpb::Arena arena;
auto testModel = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(false, testModel.has_b1());
}
TEST(CppGeneratedCode, Booleans) {
::protos::Arena arena;
::hpb::Arena arena;
auto testModel = ::protos::CreateMessage<TestModel>(arena);
EXPECT_FALSE(testModel.b1());
testModel.set_b1(true);
@ -87,7 +87,7 @@ TEST(CppGeneratedCode, Booleans) {
}
TEST(CppGeneratedCode, ScalarInt32) {
::protos::Arena arena;
::hpb::Arena arena;
auto testModel = ::protos::CreateMessage<TestModel>(arena);
// Test int32 defaults.
EXPECT_EQ(testModel.value(), 0);
@ -130,7 +130,7 @@ TEST(CppGeneratedCode, Strings) {
}
TEST(CppGeneratedCode, ScalarUInt32) {
::protos::Arena arena;
::hpb::Arena arena;
auto testModel = ::protos::CreateMessage<TestModel>(arena);
// Test defaults.
EXPECT_EQ(testModel.optional_uint32(), 0);
@ -150,17 +150,17 @@ TEST(CppGeneratedCode, ScalarUInt32) {
}
TEST(CppGeneratedCode, ScalarInt64) {
::protos::Arena arena;
::hpb::Arena arena;
auto testModel = ::protos::CreateMessage<TestModel>(arena);
// Test defaults.
EXPECT_EQ(testModel.optional_int64(), 0);
EXPECT_FALSE(testModel.has_optional_int64());
// Set value.
testModel.set_optional_int64(0xFF00CCDDA0001000);
testModel.set_optional_int64(static_cast<int64_t>(0xFF00CCDDA0001000));
EXPECT_TRUE(testModel.has_optional_int64());
EXPECT_EQ(testModel.optional_int64(), 0xFF00CCDDA0001000);
// Change value.
testModel.set_optional_int64(0xFF00CCDD70002000);
testModel.set_optional_int64(static_cast<int64_t>(0xFF00CCDD70002000));
EXPECT_TRUE(testModel.has_optional_int64());
EXPECT_EQ(testModel.optional_int64(), 0xFF00CCDD70002000);
// Clear value.
@ -168,13 +168,13 @@ TEST(CppGeneratedCode, ScalarInt64) {
EXPECT_FALSE(testModel.has_optional_int64());
EXPECT_EQ(testModel.optional_int64(), 0);
// Set after clear.
testModel.set_optional_int64(0xFF00CCDDA0001000);
testModel.set_optional_int64(static_cast<int64_t>(0xFF00CCDDA0001000));
EXPECT_TRUE(testModel.has_optional_int64());
EXPECT_EQ(testModel.optional_int64(), 0xFF00CCDDA0001000);
}
TEST(CppGeneratedCode, ScalarFloat) {
::protos::Arena arena;
::hpb::Arena arena;
auto testModel = ::protos::CreateMessage<TestModel>(arena);
// Test defaults.
EXPECT_EQ(testModel.optional_float(), 0.0f);
@ -202,7 +202,7 @@ TEST(CppGeneratedCode, ScalarFloat) {
}
TEST(CppGeneratedCode, ScalarDouble) {
::protos::Arena arena;
::hpb::Arena arena;
auto testModel = ::protos::CreateMessage<TestModel>(arena);
// Test defaults.
EXPECT_EQ(testModel.optional_double(), 0.0);
@ -226,7 +226,7 @@ TEST(CppGeneratedCode, ScalarDouble) {
}
TEST(CppGeneratedCode, Enums) {
::protos::Arena arena;
::hpb::Arena arena;
auto testModel = ::protos::CreateMessage<TestModel>(arena);
// Check enum default value.
@ -254,7 +254,7 @@ TEST(CppGeneratedCode, Enums) {
}
TEST(CppGeneratedCode, FieldWithDefaultValue) {
::protos::Arena arena;
::hpb::Arena arena;
auto testModel = ::protos::CreateMessage<TestModel>(arena);
EXPECT_FALSE(testModel.has_int_value_with_default());
@ -269,7 +269,7 @@ TEST(CppGeneratedCode, FieldWithDefaultValue) {
}
TEST(CppGeneratedCode, OneOfFields) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_FALSE(test_model.has_oneof_member1());
@ -297,7 +297,7 @@ TEST(CppGeneratedCode, OneOfFields) {
}
TEST(CppGeneratedCode, Messages) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(false, test_model.has_child_model_1());
auto child_model = test_model.child_model_1();
@ -321,7 +321,7 @@ TEST(CppGeneratedCode, Messages) {
}
TEST(CppGeneratedCode, NestedMessages) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
auto nested_child = test_model.nested_child_1();
EXPECT_EQ(0, nested_child->nested_child_name().size());
@ -332,7 +332,7 @@ TEST(CppGeneratedCode, NestedMessages) {
}
TEST(CppGeneratedCode, RepeatedMessages) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(0, test_model.child_models_size());
// Should be able to clear repeated field when empty.
@ -361,7 +361,7 @@ TEST(CppGeneratedCode, RepeatedMessages) {
}
TEST(CppGeneratedCode, RepeatedScalar) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(0, test_model.value_array_size());
// Should be able to clear repeated field when empty.
@ -382,7 +382,7 @@ TEST(CppGeneratedCode, RepeatedScalar) {
}
TEST(CppGeneratedCode, RepeatedFieldClear) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
test_model.mutable_value_array()->push_back(5);
test_model.mutable_value_array()->push_back(16);
@ -393,7 +393,7 @@ TEST(CppGeneratedCode, RepeatedFieldClear) {
}
TEST(CppGeneratedCode, RepeatedFieldProxyForScalars) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(0, test_model.value_array().size());
EXPECT_EQ(0, test_model.mutable_value_array()->size());
@ -426,7 +426,7 @@ TEST(CppGeneratedCode, RepeatedFieldProxyForScalars) {
}
TEST(CppGeneratedCode, RepeatedScalarIterator) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
test_model.mutable_value_array()->push_back(5);
test_model.mutable_value_array()->push_back(16);
@ -493,7 +493,7 @@ TEST(CppGeneratedCode, RepeatedScalarIterator) {
}
TEST(CppGeneratedCode, RepeatedFieldProxyForStrings) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(0, test_model.repeated_string().size());
EXPECT_EQ(0, test_model.mutable_repeated_string()->size());
@ -528,7 +528,7 @@ TEST(CppGeneratedCode, RepeatedFieldProxyForStrings) {
}
TEST(CppGeneratedCode, RepeatedFieldProxyForMessages) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(0, test_model.child_models().size());
ChildModel1 child1;
@ -573,7 +573,7 @@ TEST(CppGeneratedCode, RepeatedFieldProxyForMessages) {
}
TEST(CppGeneratedCode, EmptyRepeatedFieldProxyForMessages) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(0, test_model.child_models().size());
ChildModel1 child1;
@ -586,7 +586,7 @@ TEST(CppGeneratedCode, EmptyRepeatedFieldProxyForMessages) {
}
TEST(CppGeneratedCode, RepeatedFieldProxyForMessagesIndexOperator) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(0, test_model.child_models().size());
ChildModel1 child1;
@ -604,7 +604,7 @@ TEST(CppGeneratedCode, RepeatedFieldProxyForMessagesIndexOperator) {
}
TEST(CppGeneratedCode, RepeatedStrings) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(0, test_model.repeated_string_size());
// Should be able to clear repeated field when empty.
@ -626,8 +626,8 @@ TEST(CppGeneratedCode, RepeatedStrings) {
TEST(CppGeneratedCode, MessageMapInt32KeyMessageValue) {
const int key_test_value = 3;
::protos::Arena arena;
::protos::Arena child_arena;
::hpb::Arena arena;
::hpb::Arena child_arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(0, test_model.child_map_size());
test_model.clear_child_map();
@ -648,7 +648,7 @@ TEST(CppGeneratedCode, MessageMapInt32KeyMessageValue) {
}
TEST(CppGeneratedCode, MessageMapStringKeyAndStringValue) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(0, test_model.str_to_str_map_size());
test_model.clear_str_to_str_map();
@ -664,7 +664,7 @@ TEST(CppGeneratedCode, MessageMapStringKeyAndStringValue) {
}
TEST(CppGeneratedCode, MessageMapStringKeyAndInt32Value) {
::protos::Arena arena;
::hpb::Arena arena;
auto test_model = ::protos::CreateMessage<TestModel>(arena);
EXPECT_EQ(0, test_model.str_to_int_map_size());
test_model.clear_str_to_int_map();
@ -725,13 +725,13 @@ TEST(CppGeneratedCode, SetExtension) {
}
TEST(CppGeneratedCode, SetExtensionWithPtr) {
::protos::Arena arena_model;
::hpb::Arena arena_model;
::protos::Ptr<TestModel> model =
::protos::CreateMessage<TestModel>(arena_model);
void* prior_message;
{
// Use a nested scope to make sure the arenas are fused correctly.
::protos::Arena arena;
::hpb::Arena arena;
::protos::Ptr<ThemeExtension> extension1 =
::protos::CreateMessage<ThemeExtension>(arena);
extension1->set_ext_name("Hello World");
@ -748,7 +748,7 @@ TEST(CppGeneratedCode, SetExtensionWithPtr) {
#ifndef _MSC_VER
TEST(CppGeneratedCode, SetExtensionShouldNotCompileForWrongType) {
::protos::Arena arena;
::hpb::Arena arena;
::protos::Ptr<TestModel> model = ::protos::CreateMessage<TestModel>(arena);
ThemeExtension extension1;
ContainerExtension extension2;
@ -769,7 +769,7 @@ TEST(CppGeneratedCode, SetExtensionShouldNotCompileForWrongType) {
#endif
TEST(CppGeneratedCode, SetExtensionWithPtrSameArena) {
::protos::Arena arena;
::hpb::Arena arena;
::protos::Ptr<TestModel> model = ::protos::CreateMessage<TestModel>(arena);
void* prior_message;
{
@ -1227,7 +1227,7 @@ TEST(CppGeneratedCode, HasExtensionAndRegistry) {
ASSERT_TRUE(::protos::SetExtension(&source, theme, extension1).ok());
// Now that we have a source model with extension data, serialize.
::protos::Arena arena;
::hpb::Arena arena;
std::string data = std::string(::protos::Serialize(&source, arena).value());
// Test with ExtensionRegistry
@ -1241,6 +1241,10 @@ TEST(CppGeneratedCode, FieldNumberConstants) {
EXPECT_EQ(225, TestModel::kChildMapFieldNumber);
}
TEST(CppGeneratedCode, ExtensionFieldNumberConstant) {
EXPECT_EQ(12003, ::protos::ExtensionNumber(ThemeExtension::theme_extension));
}
TEST(CppGeneratedCode, ClearConstMessageShouldFailForConstChild) {
TestModel model;
EXPECT_FALSE(CanCallClearMessage<decltype(model.child_model_1())>());

@ -7,9 +7,9 @@
syntax = "proto2";
package protos_generator.test;
package hpb_unittest;
import "hpb_generator/tests/child_model.proto";
import "google/protobuf/compiler/hpb/tests/child_model.proto";
message TestModelContainer {
repeated TestModel models = 1;

@ -4,7 +4,7 @@
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-bom</artifactId>
<version>4.28.0</version>
<version>4.29.0</version>
<packaging>pom</packaging>
<name>Protocol Buffers [BOM]</name>

@ -4,7 +4,7 @@
<parent>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-parent</artifactId>
<version>4.28.0</version>
<version>4.29.0</version>
</parent>
<artifactId>protobuf-java</artifactId>

@ -84,7 +84,9 @@ public abstract class AbstractMessage
@Override
public final String toString() {
return TextFormat.printer().printToString(this);
return TextFormat.printer()
.setFieldReporterLevel(TextFormat.Printer.FieldReporterLevel.ABSTRACT_TO_STRING)
.printToString(this);
}
@Override

@ -24,8 +24,7 @@ import java.io.IOException;
@CheckReturnValue
final class ArrayDecoders {
private ArrayDecoders() {
}
private ArrayDecoders() {}
/**
* A helper used to return multiple values in a Java function. Java doesn't natively support

@ -287,13 +287,12 @@ public abstract class ByteString implements Iterable<Byte>, Serializable {
while (formerBytes.hasNext() && latterBytes.hasNext()) {
int result =
Integer.valueOf(toInt(formerBytes.nextByte()))
.compareTo(toInt(latterBytes.nextByte()));
Integer.compare(toInt(formerBytes.nextByte()), toInt(latterBytes.nextByte()));
if (result != 0) {
return result;
}
}
return Integer.valueOf(former.size()).compareTo(Integer.valueOf(latter.size()));
return Integer.compare(former.size(), latter.size());
}
};

@ -26,6 +26,10 @@ public final class DebugFormat {
return TextFormat.printer()
.emittingSingleLine(this.isSingleLine)
.enablingSafeDebugFormat(true)
.setFieldReporterLevel(
this.isSingleLine
? TextFormat.Printer.FieldReporterLevel.DEBUG_SINGLE_LINE
: TextFormat.Printer.FieldReporterLevel.DEBUG_MULTILINE)
.printToString(message);
}

@ -2343,7 +2343,7 @@ public final class Descriptors {
new Comparator<EnumValueDescriptor>() {
@Override
public int compare(EnumValueDescriptor o1, EnumValueDescriptor o2) {
return Integer.valueOf(o1.getNumber()).compareTo(o2.getNumber());
return Integer.compare(o1.getNumber(), o2.getNumber());
}
};

@ -126,6 +126,7 @@ public class ExtensionRegistry extends ExtensionRegistryLite {
*
* @return Information about the extension if found, or {@code null} otherwise.
*/
@Deprecated
public ExtensionInfo findMutableExtensionByName(final String fullName) {
return mutableExtensionsByName.get(fullName);
}
@ -152,6 +153,7 @@ public class ExtensionRegistry extends ExtensionRegistryLite {
*
* @return Information about the extension if found, or {@code null} otherwise.
*/
@Deprecated
public ExtensionInfo findMutableExtensionByNumber(
final Descriptor containingType, final int fieldNumber) {
return mutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber));
@ -163,6 +165,7 @@ public class ExtensionRegistry extends ExtensionRegistryLite {
*
* @return Information about the extensions found, or {@code null} if there are none.
*/
@Deprecated
public Set<ExtensionInfo> getAllMutableExtensionsByExtendedType(final String fullName) {
HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>();
for (DescriptorIntPair pair : mutableExtensionsByNumber.keySet()) {

@ -78,7 +78,7 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
/** Get an immutable empty FieldSet. */
@SuppressWarnings("unchecked")
public static <T extends FieldSet.FieldDescriptorLite<T>> FieldSet<T> emptySet() {
return DEFAULT_INSTANCE;
return (FieldSet<T>) DEFAULT_INSTANCE;
}
/** Construct a new Builder. */
@ -86,8 +86,7 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
return new Builder<T>();
}
@SuppressWarnings("rawtypes")
private static final FieldSet DEFAULT_INSTANCE = new FieldSet(true);
private static final FieldSet<?> DEFAULT_INSTANCE = new FieldSet<>(true);
/** Returns {@code true} if empty, {@code false} otherwise. */
boolean isEmpty() {
@ -99,7 +98,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
if (isImmutable) {
return;
}
for (int i = 0; i < fields.getNumArrayEntries(); ++i) {
int n = fields.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; ++i) {
Entry<T, Object> entry = fields.getArrayEntryAt(i);
if (entry.getValue() instanceof GeneratedMessageLite) {
((GeneratedMessageLite<?, ?>) entry.getValue()).makeImmutable();
@ -149,7 +149,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
// We can't just call fields.clone because List objects in the map
// should not be shared.
FieldSet<T> clone = FieldSet.newFieldSet();
for (int i = 0; i < fields.getNumArrayEntries(); i++) {
int n = fields.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; i++) {
Map.Entry<T, Object> entry = fields.getArrayEntryAt(i);
clone.setField(entry.getKey(), entry.getValue());
}
@ -184,7 +185,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
private static <T extends FieldDescriptorLite<T>> SmallSortedMap<T, Object> cloneAllFieldsMap(
SmallSortedMap<T, Object> fields, boolean copyList, boolean resolveLazyFields) {
SmallSortedMap<T, Object> result = SmallSortedMap.newFieldMap();
for (int i = 0; i < fields.getNumArrayEntries(); i++) {
int n = fields.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; i++) {
cloneFieldEntry(result, fields.getArrayEntryAt(i), copyList, resolveLazyFields);
}
for (Map.Entry<T, Object> entry : fields.getOverflowEntries()) {
@ -262,7 +264,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
/**
* Useful for implementing {@link Message.Builder#setField(Descriptors.FieldDescriptor,Object)}.
*/
@SuppressWarnings({"unchecked", "rawtypes"})
// Avoid iterator allocation.
@SuppressWarnings({"ForeachList", "ForeachListWithUserVar"})
public void setField(final T descriptor, Object value) {
if (descriptor.isRepeated()) {
if (!(value instanceof List)) {
@ -272,10 +275,14 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
// Wrap the contents in a new list so that the caller cannot change
// the list's contents after setting it.
final List newList = new ArrayList<>();
newList.addAll((List) value);
for (final Object element : newList) {
List<?> list = (List<?>) value;
int listSize = list.size();
// Avoid extra allocations: no iterator, no intermediate array copy.
final List<Object> newList = new ArrayList<>(listSize);
for (int i = 0; i < listSize; i++) {
Object element = list.get(i);
verifyType(descriptor, element);
newList.add(element);
}
value = newList;
} else {
@ -426,7 +433,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
* caller to check that all required fields are present.
*/
public boolean isInitialized() {
for (int i = 0; i < fields.getNumArrayEntries(); i++) {
int n = fields.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; i++) {
if (!isInitialized(fields.getArrayEntryAt(i))) {
return false;
}
@ -439,12 +447,17 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
return true;
}
// Avoid iterator allocation.
@SuppressWarnings({"ForeachList", "ForeachListWithUserVar"})
private static <T extends FieldDescriptorLite<T>> boolean isInitialized(
final Map.Entry<T, Object> entry) {
final T descriptor = entry.getKey();
if (descriptor.getLiteJavaType() == WireFormat.JavaType.MESSAGE) {
if (descriptor.isRepeated()) {
for (final Object element : (List<?>) entry.getValue()) {
List<?> list = (List<?>) entry.getValue();
int listSize = list.size();
for (int i = 0; i < listSize; i++) {
Object element = list.get(i);
if (!isMessageFieldValueInitialized(element)) {
return false;
}
@ -484,7 +497,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
/** Like {@link Message.Builder#mergeFrom(Message)}, but merges from another {@link FieldSet}. */
public void mergeFrom(final FieldSet<T> other) {
for (int i = 0; i < other.fields.getNumArrayEntries(); i++) {
int n = other.fields.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; i++) {
mergeFromField(other.fields.getArrayEntryAt(i));
}
for (final Map.Entry<T, Object> entry : other.fields.getOverflowEntries()) {
@ -571,7 +585,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
/** See {@link Message#writeTo(CodedOutputStream)}. */
public void writeTo(final CodedOutputStream output) throws IOException {
for (int i = 0; i < fields.getNumArrayEntries(); i++) {
int n = fields.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; i++) {
final Map.Entry<T, Object> entry = fields.getArrayEntryAt(i);
writeField(entry.getKey(), entry.getValue(), output);
}
@ -582,7 +597,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
/** Like {@link #writeTo} but uses MessageSet wire format. */
public void writeMessageSetTo(final CodedOutputStream output) throws IOException {
for (int i = 0; i < fields.getNumArrayEntries(); i++) {
int n = fields.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; i++) {
writeMessageSetTo(fields.getArrayEntryAt(i), output);
}
for (final Map.Entry<T, Object> entry : fields.getOverflowEntries()) {
@ -716,6 +732,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
}
/** Write a single field. */
// Avoid iterator allocation.
@SuppressWarnings({"ForeachList", "ForeachListWithUserVar"})
public static void writeField(
final FieldDescriptorLite<?> descriptor, final Object value, final CodedOutputStream output)
throws IOException {
@ -723,6 +741,7 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
int number = descriptor.getNumber();
if (descriptor.isRepeated()) {
final List<?> valueList = (List<?>) value;
int valueListSize = valueList.size();
if (descriptor.isPacked()) {
if (valueList.isEmpty()) {
// The tag should not be written for empty packed fields.
@ -731,16 +750,19 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
output.writeTag(number, WireFormat.WIRETYPE_LENGTH_DELIMITED);
// Compute the total data size so the length can be written.
int dataSize = 0;
for (final Object element : valueList) {
for (int i = 0; i < valueListSize; i++) {
Object element = valueList.get(i);
dataSize += computeElementSizeNoTag(type, element);
}
output.writeUInt32NoTag(dataSize);
// Write the data itself, without any tags.
for (final Object element : valueList) {
for (int i = 0; i < valueListSize; i++) {
Object element = valueList.get(i);
writeElementNoTag(output, type, element);
}
} else {
for (final Object element : valueList) {
for (int i = 0; i < valueListSize; i++) {
Object element = valueList.get(i);
writeElement(output, type, number, element);
}
}
@ -759,7 +781,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
*/
public int getSerializedSize() {
int size = 0;
for (int i = 0; i < fields.getNumArrayEntries(); i++) {
int n = fields.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; i++) {
final Map.Entry<T, Object> entry = fields.getArrayEntryAt(i);
size += computeFieldSize(entry.getKey(), entry.getValue());
}
@ -772,7 +795,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
/** Like {@link #getSerializedSize} but uses MessageSet wire format. */
public int getMessageSetSerializedSize() {
int size = 0;
for (int i = 0; i < fields.getNumArrayEntries(); i++) {
int n = fields.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; i++) {
size += getMessageSetSerializedSize(fields.getArrayEntryAt(i));
}
for (final Map.Entry<T, Object> entry : fields.getOverflowEntries()) {
@ -889,17 +913,21 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
}
/** Compute the number of bytes needed to encode a particular field. */
// Avoid iterator allocation.
@SuppressWarnings({"ForeachList", "ForeachListWithUserVar"})
public static int computeFieldSize(final FieldDescriptorLite<?> descriptor, final Object value) {
WireFormat.FieldType type = descriptor.getLiteType();
int number = descriptor.getNumber();
if (descriptor.isRepeated()) {
List<?> valueList = (List<?>) value;
int valueListSize = valueList.size();
if (descriptor.isPacked()) {
if (valueList.isEmpty()) {
return 0;
}
int dataSize = 0;
for (final Object element : valueList) {
for (int i = 0; i < valueListSize; i++) {
Object element = valueList.get(i);
dataSize += computeElementSizeNoTag(type, element);
}
return dataSize
@ -907,7 +935,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
+ CodedOutputStream.computeUInt32SizeNoTag(dataSize);
} else {
int size = 0;
for (final Object element : valueList) {
for (int i = 0; i < valueListSize; i++) {
Object element = valueList.get(i);
size += computeElementSize(type, number, element);
}
return size;
@ -976,7 +1005,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
private static <T extends FieldDescriptorLite<T>> void replaceBuilders(
SmallSortedMap<T, Object> fieldMap, boolean partial) {
for (int i = 0; i < fieldMap.getNumArrayEntries(); i++) {
int n = fieldMap.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; i++) {
replaceBuilders(fieldMap.getArrayEntryAt(i), partial);
}
for (Map.Entry<T, Object> entry : fieldMap.getOverflowEntries()) {
@ -1102,7 +1132,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
* Useful for implementing {@link Message.Builder#setField(Descriptors.FieldDescriptor,
* Object)}.
*/
@SuppressWarnings({"unchecked", "rawtypes"})
// Avoid iterator allocation.
@SuppressWarnings({"unchecked", "ForeachList", "ForeachListWithUserVar"})
public void setField(final T descriptor, Object value) {
ensureIsMutable();
if (descriptor.isRepeated()) {
@ -1113,8 +1144,10 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
// Wrap the contents in a new list so that the caller cannot change
// the list's contents after setting it.
final List newList = new ArrayList((List) value);
for (final Object element : newList) {
final List<Object> newList = new ArrayList<>((List<Object>) value);
int newListSize = newList.size();
for (int i = 0; i < newListSize; i++) {
Object element = newList.get(i);
verifyType(descriptor, element);
hasNestedBuilders = hasNestedBuilders || element instanceof MessageLite.Builder;
}
@ -1268,7 +1301,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
* caller to check that all required fields are present.
*/
public boolean isInitialized() {
for (int i = 0; i < fields.getNumArrayEntries(); i++) {
int n = fields.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; i++) {
if (!FieldSet.isInitialized(fields.getArrayEntryAt(i))) {
return false;
}
@ -1286,7 +1320,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
*/
public void mergeFrom(final FieldSet<T> other) {
ensureIsMutable();
for (int i = 0; i < other.fields.getNumArrayEntries(); i++) {
int n = other.fields.getNumArrayEntries(); // Optimisation: hoist out of hot loop.
for (int i = 0; i < n; i++) {
mergeFromField(other.fields.getArrayEntryAt(i));
}
for (final Map.Entry<T, Object> entry : other.fields.getOverflowEntries()) {
@ -1294,7 +1329,8 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
}
}
@SuppressWarnings("unchecked")
// Avoid iterator allocation.
@SuppressWarnings({"unchecked", "ForeachList", "ForeachListWithUserVar"})
private void mergeFromField(final Map.Entry<T, Object> entry) {
final T descriptor = entry.getKey();
Object otherValue = entry.getValue();
@ -1305,11 +1341,14 @@ final class FieldSet<T extends FieldSet.FieldDescriptorLite<T>> {
throw new IllegalStateException("Lazy fields can not be repeated");
}
List<Object> value = (List<Object>) getFieldAllowBuilders(descriptor);
List<?> otherList = (List<?>) otherValue;
int otherListSize = otherList.size();
if (value == null) {
value = new ArrayList<>();
value = new ArrayList<>(otherListSize);
fields.put(descriptor, value);
}
for (Object element : (List<?>) otherValue) {
for (int i = 0; i < otherListSize; i++) {
Object element = otherList.get(i);
value.add(FieldSet.cloneIfMutable(element));
}
} else if (descriptor.getLiteJavaType() == WireFormat.JavaType.MESSAGE) {

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save