Merge branch 'main' into patch-2

pull/18103/head
Adam Cozzette 3 months ago
commit fa26b551f0
  1. 3
      .bazelignore
  2. 5
      .bazelrc
  3. 77
      .bcr/metadata.template.json
  4. 12
      .bcr/presubmit.yml
  5. 2
      .github/BUILD.bazel
  6. 4
      .github/dependabot.yml
  7. 50
      .github/scripts/validate_yaml.py
  8. 2
      .github/workflows/scorecard.yml
  9. 19
      .github/workflows/staleness_check.yml
  10. 14
      .github/workflows/test_bazel.yml
  11. 160
      .github/workflows/test_cpp.yml
  12. 6
      .github/workflows/test_csharp.yml
  13. 15
      .github/workflows/test_java.yml
  14. 14
      .github/workflows/test_objectivec.yml
  15. 39
      .github/workflows/test_php.yml
  16. 4
      .github/workflows/test_php_ext.yml
  17. 22
      .github/workflows/test_python.yml
  18. 25
      .github/workflows/test_release_branches.yml
  19. 18
      .github/workflows/test_ruby.yml
  20. 10
      .github/workflows/test_runner.yml
  21. 33
      .github/workflows/test_rust.yml
  22. 57
      .github/workflows/test_upb.yml
  23. 11
      .gitmodules
  24. 57
      BUILD.bazel
  25. 53
      CMakeLists.txt
  26. 54
      Cargo.bazel.lock
  27. 150
      MODULE.bazel
  28. 48
      Protobuf-C++.podspec
  29. 20
      Protobuf.podspec
  30. 23
      WORKSPACE
  31. 8
      WORKSPACE.bzlmod
  32. 23
      bazel/BUILD.bazel
  33. 9
      bazel/cc_proto_library.bzl
  34. 16
      bazel/common/BUILD
  35. 7
      bazel/common/proto_common.bzl
  36. 6
      bazel/common/proto_info.bzl
  37. 25
      bazel/common/proto_lang_toolchain_info.bzl
  38. 15
      bazel/java_lite_proto_library.bzl
  39. 15
      bazel/java_proto_library.bzl
  40. 106
      bazel/private/BUILD
  41. 198
      bazel/private/bazel_cc_proto_library.bzl
  42. 164
      bazel/private/bazel_java_proto_library_rule.bzl
  43. 3
      bazel/private/bazel_proto_library_rule.bzl
  44. 6
      bazel/private/cc_proto_aspect.bzl
  45. 143
      bazel/private/cc_proto_support.bzl
  46. 178
      bazel/private/java_lite_proto_library.bzl
  47. 62
      bazel/private/java_proto_support.bzl
  48. 4
      bazel/private/native.bzl
  49. 9
      bazel/private/proto_bazel_features.bzl
  50. 186
      bazel/private/proto_info.bzl
  51. 3
      bazel/private/proto_toolchain_rule.bzl
  52. 2
      bazel/private/toolchain_helpers.bzl
  53. 85
      bazel/private/toolchains/BUILD.bazel
  54. 52
      bazel/private/upb_proto_library_internal/aspect.bzl
  55. 4
      bazel/private/upb_proto_library_internal/cc_library_func.bzl
  56. 8
      bazel/py_proto_library.bzl
  57. 17
      bazel/system_python.bzl
  58. 3
      bazel/tests/testdata/BUILD
  59. 9
      bazel/toolchains/BUILD
  60. 2
      bazel/upb_c_proto_library.bzl
  61. 2
      benchmarks/BUILD
  62. 2
      benchmarks/BUILD.googleapis
  63. 2
      benchmarks/build_defs.bzl
  64. 42
      benchmarks/descriptor_sv.proto
  65. 3
      build_defs/cpp_opts.bzl
  66. 7
      build_defs/internal_shell.bzl
  67. 16
      build_defs/java_opts.bzl
  68. 61
      build_defs/kotlin_opts.bzl
  69. 4
      ci/Linux.bazelrc
  70. 4
      ci/Windows.bazelrc
  71. 9
      ci/common.bazelrc
  72. 6
      ci/macOS.bazelrc
  73. 25
      cmake/BUILD.bazel
  74. 26
      cmake/README.md
  75. 50
      cmake/abseil-cpp.cmake
  76. 53
      cmake/conformance.cmake
  77. 34
      cmake/dependencies.cmake
  78. 143
      cmake/dependencies_generator.py
  79. 62
      cmake/gtest.cmake
  80. 11
      cmake/protobuf-generate.cmake
  81. 16
      cmake/tests.cmake
  82. 2
      cmake/upb_generators.cmake
  83. 6
      compatibility/BUILD.bazel
  84. 6
      conformance/BUILD.bazel
  85. 4
      conformance/ConformanceJava.java
  86. 4
      conformance/ConformanceJavaLite.java
  87. 2
      conformance/README.md
  88. 8
      conformance/autoload.php
  89. 102
      conformance/binary_json_conformance_suite.cc
  90. 1
      conformance/binary_json_conformance_suite.h
  91. 1
      conformance/conformance_php.php
  92. 2
      conformance/conformance_rust.rs
  93. 2
      conformance/conformance_test.cc
  94. 2
      conformance/conformance_test_runner.cc
  95. 4
      conformance/defs.bzl
  96. 1
      conformance/failure_list_cpp.txt
  97. 4
      conformance/failure_list_java.txt
  98. 4
      conformance/failure_list_java_lite.txt
  99. 4
      conformance/failure_list_jruby.txt
  100. 11
      conformance/failure_list_jruby_ffi.txt
  101. Some files were not shown because too many files have changed in this diff Show More

@ -1,4 +1 @@
# These are fetched as external repositories.
third_party/abseil-cpp
third_party/googletest
_build/

@ -1,6 +1,7 @@
build --cxxopt=-std=c++17 --host_cxxopt=-std=c++17
build --copt="-Werror" --copt="-Wno-sign-compare" --copt="-Wno-sign-conversion" --copt="-Wno-error=sign-conversion"
# TODO: ErrorProne's SelfAssertions are violated in protobuf's test
build --javacopt=-Xep:SelfAssertion:WARN
build:dbg --compilation_mode=dbg
@ -24,6 +25,8 @@ build:ubsan --action_env=UBSAN_OPTIONS=halt_on_error=1:print_stacktrace=1
# Workaround for the fact that Bazel links with $CC, not $CXX
# https://github.com/bazelbuild/bazel/issues/11122#issuecomment-613746748
build:ubsan --copt=-fno-sanitize=function --copt=-fno-sanitize=vptr
# Abseil passes nullptr to memcmp with 0 size
build:ubsan --copt=-fno-sanitize=nonnull-attribute
# TODO: migrate all dependencies from WORKSPACE to MODULE.bazel
# https://github.com/protocolbuffers/protobuf/issues/14313

@ -1,6 +1,11 @@
{
"homepage": "https://github.com/protocolbuffers/protobuf",
"maintainers": [
{
"email": "protobuf-packages@google.com",
"github": "protobuf-team-bot",
"name": "Protobuf Team"
},
{
"email": "sandyzhang@google.com",
"github": "zhangskz",
@ -15,6 +20,78 @@
"email": "gberg@google.com",
"github": "googleberg",
"name": "Jerry Berg"
},
{
"email": "acozzette@google.com",
"github": "acozzette",
"name": "Adam Cozzette",
"do_not_notify": true
},
{
"email": "deannagarcia@google.com",
"github": "deannagarcia",
"name": "Deanna Garcia",
"do_not_notify": true
},
{
"email": "esrauch@google.com",
"github": "esrauchg",
"name": "Em Rauch",
"do_not_notify": true
},
{
"email": "haberman@google.com",
"github": "haberman",
"name": "Josh Haberman",
"do_not_notify": true
},
{
"email": "hongshin@google.com",
"github": "honglooker",
"name": "Hong Shin",
"do_not_notify": true
},
{
"email": "jatl@google.com",
"github": "JasonLunn",
"name": "Jason Lunn",
"do_not_notify": true
},
{
"email": "jieluo@google.com",
"github": "anandolee",
"name": "Jie Luo",
"do_not_notify": true
},
{
"email": "salo@google.com",
"github": "salo",
"name": "Eric Salo",
"do_not_notify": true
},
{
"email": "sbenza@google.com",
"github": "sbenza",
"name": "Samuel Benzaquen",
"do_not_notify": true
},
{
"email": "shaod@google.com",
"github": "shaod2",
"name": "Dennis Shao",
"do_not_notify": true
},
{
"email": "theodorerose@google.com",
"github": "theodorerose",
"name": "Theodore Rose",
"do_not_notify": true
},
{
"email": "tonyliaoss@google.com",
"github": "tonyliaoss",
"name": "Tony Liao",
"do_not_notify": true
}
],
"repository": ["github:protocolbuffers/protobuf"],

@ -1,6 +1,6 @@
matrix:
platform: ["debian10", "macos", "ubuntu2004", "windows"]
bazel: [6.x, 7.x]
bazel: [7.x]
tasks:
verify_targets:
@ -8,8 +8,8 @@ tasks:
platform: ${{ platform }}
bazel: ${{ bazel }}
build_flags:
- '--host_cxxopt=-std=c++14'
- '--cxxopt=-std=c++14'
- '--host_cxxopt=-std=c++17'
- '--cxxopt=-std=c++17'
build_targets:
- '@protobuf//:protobuf'
- '@protobuf//:protobuf_lite'
@ -23,14 +23,14 @@ bcr_test_module:
module_path: "examples"
matrix:
platform: ["debian10", "macos", "ubuntu2004", "windows"]
bazel: [6.x, 7.x]
bazel: [7.x]
tasks:
run_test_module:
name: "Run test module"
platform: ${{ platform }}
bazel: ${{ bazel }}
build_flags:
- '--host_cxxopt=-std=c++14'
- '--cxxopt=-std=c++14'
- '--host_cxxopt=-std=c++17'
- '--cxxopt=-std=c++17'
build_targets:
- "//..."

@ -1,5 +1,5 @@
# This information is extracted from the MacOS runner specs located at:
# https://github.com/actions/runner-images/blob/main/images/macos/macos-12-Readme.md
# https://github.com/actions/runner-images/blob/main/images/macos/macos-13-Readme.md
#
# When updating, also ensure the "xcode_destination" entries in
# `.github/workflows/test_objectivec.yml` are supported for the given versions

@ -4,5 +4,5 @@ updates:
directory: "/"
schedule:
interval: "weekly"
# Allow up to 3 opened pull requests for github-actions versions
open-pull-requests-limit: 3
# Don't allow non-security PRs to be opened.
open-pull-requests-limit: 0

@ -53,47 +53,23 @@ for file in yaml_files:
continuous_condition = 'inputs.continuous-prefix' in jobs[job]['name']
steps = jobs[job]['steps']
for step in steps:
if 'name' in step:
name = step['name']
elif 'with' in step and 'name' in step['with']:
name = step['with']['name']
else:
raise ValueError(
'Step in job %s from file %s does not have a name.' % (job, file)
)
if continuous_condition and 'continuous-run' not in step.get('if', ''):
raise ValueError(
'Step %s in job %s does not check the continuous-run condition'
% (step['name'], job)
'Step %s in job %s from file %s does not check the continuous-run'
' condition' % (name, job, file)
)
if not continuous_condition and 'continuous-run' in step.get('if', ''):
raise ValueError(
'Step %s in job %s checks the continuous-run condition but '
'the job does not contain the continuous-prefix'
% (step['name'], job)
'Step %s in job %s from file %s checks the continuous-run'
' condition but the job does not contain the continuous-prefix'
% (name, job, file)
)
print('PASSED: All steps in all jobs check the continuous-run condition.')
# Check to make sure the list of included branches matches the list of excluded
# branches in staleness_check.yml.
with open(
os.path.join(os.path.dirname(__file__), '../workflows/staleness_check.yml'),
'r',
) as f:
regex_pattern = r"'(\d+\.x)'"
data = yaml.safe_load(f)
matrix = data['jobs']['test']['strategy']['matrix']
included_branches = matrix['branch']
# Main should be included in all test runs
included_branches.remove('main')
excludes = matrix['exclude']
for entry in excludes:
match = re.search(regex_pattern, entry['branch'])
branch = match.group(1)
if branch not in included_branches:
raise ValueError(
'Branch %s is excluded for presubmit runs but is not in the list of'
' matrix branches in staleness_check.yml.' % branch
)
included_branches.remove(branch)
if included_branches:
raise ValueError(
'Branches %s are in the list of matrix branches but do not get excluded'
' for presubmit runs in staleness_check.yml.' % included_branches
)
print(
'PASSED: The list of included branches matches the list of excluded'
' branches in staleness_check.yml.'
)

@ -47,7 +47,7 @@ jobs:
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: SARIF file
path: results.sarif

@ -20,25 +20,14 @@ on:
permissions: {}
jobs:
test:
strategy:
fail-fast: false
matrix:
branch: [main, 25.x, 27.x, 28.x]
os: [{ name: Linux, value: ubuntu-latest}]
exclude:
# If we are in a presubmit run, only test main
- branch: ${{ !inputs.continuous-run && '25.x' }}
- branch: ${{ !inputs.continuous-run && '27.x' }}
- branch: ${{ !inputs.continuous-run && '28.x' }}
name: Test staleness ${{ matrix.os.name }} ${{ github.head_ref && 'PR' || matrix.branch }}
runs-on: ${{ matrix.os.value }}
name: Test staleness
runs-on: ubuntu-latest
if: ${{ github.event.repository.full_name == 'protocolbuffers/protobuf' }}
steps:
- name: Checkout ${{ github.head_ref && 'PR' || matrix.branch }}
- name: Checkout
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout || github.head_ref || matrix.branch }}
ref: ${{ inputs.safe-checkout || github.head_ref || github.ref }}
- name: Mark runs associated with commits
if: ${{ github.event_name != 'schedule' && github.event_name != 'workflow_dispatch' }}

@ -28,15 +28,21 @@ jobs:
matrix:
runner: [ ubuntu, windows, macos ]
bazelversion: [ '7.1.2' ]
bzlmod: [true, false ]
bzlmod: [ true, false ]
toolchain_resolution: [ "" ]
include:
- runner: ubuntu
bazelversion: '6.4.0'
# Not running Bazel 6 with bzlmod, because it doesn't support use_repo_rule in rules_jvm_external
bzlmod: false
continuous-only: true
- runner: ubuntu
bzlmod: false
toolchain_resolution: --incompatible_enable_proto_toolchain_resolution=true
- runner: ubuntu
bzlmod: true
toolchain_resolution: --incompatible_enable_proto_toolchain_resolution=true
runs-on: ${{ matrix.runner }}-latest
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Examples ${{ matrix.runner }} ${{ matrix.bazelversion }}${{ matrix.bzlmod && ' (bzlmod)' || '' }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Examples ${{ matrix.runner }} ${{ matrix.bazelversion }}${{ matrix.bzlmod && ' (bzlmod)' || '' }} ${{ matrix.toolchain_resolution && ' (toolchain resolution)' || '' }}
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
@ -63,4 +69,4 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: examples
version: ${{ matrix.bazelversion }}
bash: cd examples && bazel build //... $BAZEL_FLAGS --enable_bzlmod=${{ matrix.bzlmod }}
bash: cd examples && bazel build //... $BAZEL_FLAGS --enable_bzlmod=${{ matrix.bzlmod }} ${{ matrix.toolchain_resolution }}

@ -37,7 +37,7 @@ jobs:
- { name: No-RTTI, flags: --cxxopt=-fno-rtti, continuous-only: true }
include:
# Set defaults
- image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize@sha256:3d959f731dc5c54af4865c31ee2bd581ec40028adcdf4c038f3122581f595191
- image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize:6.4.0-27cf7b86212020d7e552bc13b1e084abb971da75
- targets: //pkg/... //src/... @com_google_protobuf_examples//... //third_party/utf8_range/... //conformance:conformance_framework_tests
# Override cases with custom images
@ -45,18 +45,19 @@ jobs:
cache_key: Bazel7
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.2-cf84e92285ca133b9c8104ad7b14d70e953cbb8e"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "Bazel7 with Bzlmod", flags: --enable_bzlmod --enable_workspace }
# TODO: remove -Wno-unreachable-code" when dropping C++14
- config: { name: "Bazel7 with Bzlmod", flags: --enable_bzlmod --enable_workspace --per_file_copt=.*/absl/strings/string_view.h@-Wno-unreachable-code --cxxopt="-Wno-self-assign-overloaded" }
cache_key: Bazel7bzlmod
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.2-cf84e92285ca133b9c8104ad7b14d70e953cbb8e"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "TCMalloc" }
cache_key: TcMalloc
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/tcmalloc@sha256:1c5133455481f4d1bb8afa477029604f41f1a3c46cebe4d9958cf1af95b5c87c"
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/tcmalloc:6.4.0-27cf7b86212020d7e552bc13b1e084abb971da75"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "aarch64" }
cache_key: TcMalloc
targets: "//src/... //src/google/protobuf/compiler:protoc_aarch64_test //third_party/utf8_range/..."
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:6.3.0-aarch64-68e662b3a56b881804dc4e9d45f949791cbc4b94"
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:6.4.0-aarch64-08714ed7a713068c8418003a2d95f423d4b1eac9"
name: ${{ matrix.config.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.config.name }}
runs-on: ${{ matrix.config.runner || 'ubuntu-latest' }}
steps:
@ -90,7 +91,7 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:${{ matrix.version }}-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:6.4.0-${{ matrix.version }}-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: cpp_linux/gcc-${{ matrix.version }}
bazel: test //pkg/... //src/... @com_google_protobuf_examples//... //third_party/utf8_range/... //conformance:conformance_framework_tests
@ -107,12 +108,11 @@ jobs:
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Cross compile protoc for ${{ matrix.arch }}
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-${{ matrix.arch }}
- name: Setup sccache
@ -123,14 +123,14 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:${{ matrix.arch }}-384d5abe83a791c6b1ce04f5d7bc0b1f84a30d38
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:6.4.0-${{ matrix.arch }}-08714ed7a713068c8418003a2d95f423d4b1eac9
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
entrypoint: bash
command: >
-c "set -ex;
sccache -z;
cmake . -DWITH_PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }}
-Dprotobuf_BUILD_LIBUPB=OFF -Dprotobuf_BUILD_CONFORMANCE=ON -DCMAKE_CXX_STANDARD=14
-Dprotobuf_BUILD_LIBUPB=OFF -Dprotobuf_BUILD_CONFORMANCE=ON -DCMAKE_CXX_STANDARD=17
-Dprotobuf_WITH_ZLIB=OFF ${{ env.SCCACHE_CMAKE_FLAGS }};
cmake --build . --parallel 20;
ctest --parallel 20;
@ -141,18 +141,19 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
- flags: -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=14
- flags: -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=17
- name: Ninja
flags: -G Ninja -DCMAKE_CXX_STANDARD=14
flags: -G Ninja -DCMAKE_CXX_STANDARD=17
continuous-only: true
- name: Shared
flags: -Dprotobuf_BUILD_SHARED_LIBS=ON -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=14
flags: -Dprotobuf_BUILD_SHARED_LIBS=ON -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=17
continuous-only: true
- name: C++17
flags: -DCMAKE_CXX_STANDARD=17
# TODO Re-enable this.
#- name: C++20
# flags: -DCMAKE_CXX_STANDARD=20
- name: C++20
flags: -DCMAKE_CXX_STANDARD=20
- name: Package
flags: -DCMAKE_CXX_STANDARD=17 -Dprotobuf_LOCAL_DEPENDENCIES_ONLY=ON
- name: Fetch
flags: -DCMAKE_CXX_STANDARD=17 -Dprotobuf_FORCE_FETCH_DEPENDENCIES=ON
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux CMake ${{ matrix.name}}
runs-on: ubuntu-latest
@ -174,45 +175,59 @@ jobs:
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.16.9-d9624f2aa83cba3eaf906f751d75b36aacb9aa82
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/test.sh ${{ matrix.flags}} ${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_BUILD_TESTS=ON -Dprotobuf_USE_EXTERNAL_GTEST=ON
-Dprotobuf_ABSL_PROVIDER=package
-Dprotobuf_BUILD_TESTS=ON ${{ matrix.package_flags }}
linux-cmake-install:
name: Linux CMake Install
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
type: [package, fetch]
include:
# Set defaults
- type: package
name: Install
flags: -Dprotobuf_LOCAL_DEPENDENCIES_ONLY=ON
- type: fetch
name: Install (Fetch)
flags: -Dprotobuf_FORCE_FETCH_DEPENDENCIES=ON
continuous-only: true
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }}Linux CMake ${{ matrix.name }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v3
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
cache-prefix: linux-cmake-install
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.16.9-d9624f2aa83cba3eaf906f751d75b36aacb9aa82
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/install.sh -DCMAKE_CXX_STANDARD=14 ${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_USE_EXTERNAL_GTEST=ON -Dprotobuf_ABSL_PROVIDER=package
/install.sh -DCMAKE_CXX_STANDARD=17 ${{ env.SCCACHE_CMAKE_FLAGS }}
${{ matrix.flags }}
-Dprotobuf_BUILD_SHARED_LIBS=ON \&\&
/test.sh
${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_REMOVE_INSTALLED_HEADERS=ON
-Dprotobuf_BUILD_PROTOBUF_BINARIES=OFF
-Dprotobuf_BUILD_CONFORMANCE=ON
-DCMAKE_CXX_STANDARD=14
-Dprotobuf_USE_EXTERNAL_GTEST=ON -Dprotobuf_ABSL_PROVIDER=package
-DCMAKE_CXX_STANDARD=17
${{ matrix.flags }}
# This test should always be skipped on presubmit
linux-cmake-examples:
@ -236,15 +251,15 @@ jobs:
if: ${{ inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.16.9-d9624f2aa83cba3eaf906f751d75b36aacb9aa82
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/install.sh -DCMAKE_CXX_STANDARD=14 ${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_USE_EXTERNAL_GTEST=ON -Dprotobuf_ABSL_PROVIDER=package
/install.sh -DCMAKE_CXX_STANDARD=17 ${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_LOCAL_DEPENDENCIES_ONLY=OFF
-Dprotobuf_BUILD_EXAMPLES=OFF \&\&
mkdir examples/build \&\&
cd examples/build \&\&
cmake .. -DCMAKE_CXX_STANDARD=14 \&\&
cmake .. -DCMAKE_CXX_STANDARD=17 \&\&
cmake --build .
linux-cmake-gcc:
@ -252,8 +267,6 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
- name: C++14
flags: -DCMAKE_CXX_STANDARD=14
- name: C++17
flags: -DCMAKE_CXX_STANDARD=17
continuous-only: true
@ -268,7 +281,6 @@ jobs:
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup sccache
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
@ -281,7 +293,7 @@ jobs:
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:12.2-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:6.4.0-12.2-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
entrypoint: bash
command: >-
@ -293,31 +305,6 @@ jobs:
ctest --verbose --parallel 20;
sccache -s'
linux-cmake-submodules:
name: Linux CMake Submodules
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake-submodules
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/test.sh ${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_BUILD_CONFORMANCE=ON -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=14
linux-cmake-32-bit:
name: Linux CMake 32-bit
runs-on: ubuntu-latest
@ -326,7 +313,6 @@ jobs:
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v3
@ -337,14 +323,14 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:8275360dc5d676f3470872d79087901c0e4153453976bea908a92c82e8d209ea
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:d6028ab408c49932836cdc514116f06886d7f6868a4d430630aa52adc5aee2fc
platform: linux/386
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/bin/bash -cex '
cd /workspace;
sccache -z;
cmake . -DCMAKE_CXX_STANDARD=14 ${{ env.SCCACHE_CMAKE_FLAGS }};
cmake . -DCMAKE_CXX_STANDARD=17 ${{ env.SCCACHE_CMAKE_FLAGS }};
cmake --build . --parallel 20;
ctest --verbose --parallel 20;
sccache -s'
@ -355,18 +341,18 @@ jobs:
matrix:
include:
- name: MacOS Bazel
os: macos-12
cache_key: macos-12
os: macos-13
cache_key: macos-13
bazel: test //src/... //third_party/utf8_range/... //conformance:conformance_framework_tests
- name: MacOS Bazel 7
os: macos-12
cache_key: macos-12-bazel7
os: macos-13
cache_key: macos-13-bazel7
bazel: test //src/... //third_party/utf8_range/... //conformance:conformance_framework_tests
bazel_version: '7.1.2'
continuous-only: true
- name: MacOS Apple Silicon (build only) Bazel
os: macos-12
cache_key: macos-12-arm
os: macos-13
cache_key: macos-13-arm
# Current github runners are all Intel based, so just build/compile
# for Apple Silicon to detect issues there.
bazel: build --cpu=darwin_arm64 //src/... //third_party/utf8_range/... //conformance:conformance_framework_tests
@ -374,7 +360,7 @@ jobs:
os: windows-2022
cache_key: windows-2022
bazel: test //src/... @com_google_protobuf_examples//... --test_tag_filters=-conformance --build_tag_filters=-conformance
- name: Windows Bazel 7
- name: Windows Bazel 7
os: windows-2022
cache_key: windows-2022-bazel7
bazel: test //src/... @com_google_protobuf_examples//... --test_tag_filters=-conformance --build_tag_filters=-conformance
@ -395,18 +381,17 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel: ${{ matrix.bazel }}
bazel-cache: cpp_${{ matrix.cache_key }}
version: ${{ matrix.bazel_version || '6.3.0' }}
version: ${{ matrix.bazel_version || '6.4.0' }}
non-linux-cmake:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
# TODO: investigate and fix
# - name: MacOS CMake
# os: macos-12
# flags: -DCMAKE_CXX_STANDARD=14
# cache-prefix: macos-cmake
- name: MacOS CMake
os: macos-13
cache-prefix: macos-cmake
continuous-only: true
- name: Windows CMake
os: windows-2022
flags: >-
@ -415,7 +400,6 @@ jobs:
-Dprotobuf_BUILD_EXAMPLES=ON
vsversion: '2022'
cache-prefix: windows-2022-cmake
continuous-only: true
- name: Windows CMake 2019
os: windows-2019
flags: >-
@ -444,7 +428,8 @@ jobs:
cache-prefix: windows-2022-cmake
- name: Windows CMake Install
os: windows-2022
install-flags: -G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF -Dprotobuf_BUILD_TESTS=OFF
install-flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
-Dprotobuf_REMOVE_INSTALLED_HEADERS=ON
@ -456,11 +441,10 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- name: Checkout pending changes
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run) }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup MSVC
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run) }}
@ -469,12 +453,6 @@ jobs:
arch: ${{ matrix.windows-arch || 'x64' }}
vsversion: ${{ matrix.vsversion }}
# Workaround for Abseil incompatibility with CMake 3.30 (b/352354235).
- name: Downgrade CMake
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run)}}
run: choco install cmake --version 3.29.6 --force
shell: bash
# Workaround for incompatibility between gcloud and windows-2019 runners.
- name: Install Python
if: ${{ matrix.python-version && (!matrix.continuous-only || inputs.continuous-run) }}
@ -487,7 +465,7 @@ jobs:
shell: bash
- name: Setup sccache
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run) }}
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: ${{ matrix.cache-prefix }}
@ -499,7 +477,9 @@ jobs:
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: cmake . ${{ matrix.install-flags }} ${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_ALLOW_CCACHE=ON
command: >-
cmake . -DCMAKE_CXX_STANDARD=17 ${{ matrix.install-flags }}
${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_ALLOW_CCACHE=ON
- name: Build for install
if: ${{ matrix.install-flags && (!matrix.continuous-only || inputs.continuous-run) }}
shell: bash
@ -522,7 +502,9 @@ jobs:
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: cmake . ${{ matrix.flags }} ${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_ALLOW_CCACHE=ON
command: >-
cmake . -DCMAKE_CXX_STANDARD=17 ${{ matrix.flags }}
${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_ALLOW_CCACHE=ON
- name: Build
if: ${{ !matrix.continuous-only || inputs.continuous-run }}

@ -16,7 +16,7 @@ jobs:
# If you wish to add continuous-only jobs you will need to import test-type above
linux:
name: Linux
runs-on: ubuntu-latest
runs-on: ubuntu-22-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@ -27,7 +27,7 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:3.1.415-6.0.100-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:6.4.0-3.1.415-6.0.100-08714ed7a713068c8418003a2d95f423d4b1eac9
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
entrypoint: /bin/bash
command: >-
@ -43,7 +43,7 @@ jobs:
- name: Run conformance tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:3.1.415-6.0.100-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:6.4.0-3.1.415-6.0.100-08714ed7a713068c8418003a2d95f423d4b1eac9
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: csharp_linux
bazel: test //csharp:conformance_test --action_env=DOTNET_CLI_TELEMETRY_OPTOUT=1 --test_env=DOTNET_CLI_HOME=/home/bazel

@ -30,19 +30,18 @@ jobs:
include:
- name: OpenJDK 8
cache_key: '8'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:8-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:6.4.0-8-27cf7b86212020d7e552bc13b1e084abb971da75
# TODO: b/318555165 - enable the layering check. Currently it does
# not work correctly with the toolchain in this Docker image.
targets: //java/... //java/internal:java_version //compatibility/... --features=-layering_check
continuous-only: true
- name: OpenJDK 11
cache_key: '11'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:11-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:6.4.0-11-27cf7b86212020d7e552bc13b1e084abb971da75
targets: //java/... //java/internal:java_version //compatibility/...
continuous-only: true
- name: OpenJDK 17
cache_key: '17'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:17-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:6.4.0-17-27cf7b86212020d7e552bc13b1e084abb971da75
targets: //java/... //java/internal:java_version //compatibility/...
- name: Bazel7
cache_key: 'bazel7nobzlmod'
@ -56,7 +55,7 @@ jobs:
flags: --enable_bzlmod --enable_workspace
- name: aarch64
cache_key: 'aarch64'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:6.4.0-aarch64-08714ed7a713068c8418003a2d95f423d4b1eac9
targets: //java/... //compatibility/... //src/google/protobuf/compiler:protoc_aarch64_test
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.name }}
@ -97,7 +96,7 @@ jobs:
protobuf-bom:
name: Protobuf Maven BOM
runs-on: ubuntu-latest
runs-on: ubuntu-22-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@ -106,12 +105,12 @@ jobs:
- name: Generate maven artifacts with bazel and install using maven
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:11-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:6.4.0-11-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: java_linux/11
bash: |
set -ex
bazel build //java:release
bazel build //java:release $BAZEL_FLAGS
mvn install:install-file -Dfile=java/bom/pom.xml -DpomFile=java/bom/pom.xml
mvn install:install-file -Dfile=java/pom.xml -DpomFile=java/pom.xml
mvn install:install-file -Dfile=bazel-bin/java/core/core_mvn-project.jar -DpomFile=bazel-bin/java/core/core_mvn-pom.xml

@ -36,7 +36,7 @@ jobs:
destination: "platform=macOS"
xc_project: "ProtocolBuffers_OSX.xcodeproj"
- platform: "iOS"
destination: "platform=iOS Simulator,name=iPhone 13,OS=latest"
destination: "platform=iOS Simulator,name=iPhone 14,OS=latest"
xc_project: "ProtocolBuffers_iOS.xcodeproj"
# We run presubmits on all "Debug" entries, but not on "Release" entries
- xc_config: "Debug"
@ -44,7 +44,7 @@ jobs:
continuous-only: true
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Xcode ${{ matrix.platform}} ${{ matrix.xc_config }}
runs-on: macos-12
runs-on: macos-13
env:
DEVELOPER_DIR: /Applications/Xcode_14.1.app/Contents/Developer
steps:
@ -86,14 +86,12 @@ jobs:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
PLATFORM: ["ios", "macos", "tvos", "watchos", "visionos"]
# Disabling visionOS for now: https://github.com/actions/runner-images/issues/10559
PLATFORM: ["ios", "macos", "tvos", "watchos"]
CONFIGURATION: ["Debug", "Release"]
include:
- OS: macos-12
- OS: macos-13
XCODE: "14.1"
- OS: macos-14
PLATFORM: "visionos"
XCODE: "15.2"
# We run presubmits on all "Debug" entries, but not on "Release" entries
- CONFIGURATION: "Debug"
- CONFIGURATION: "Release"
@ -149,7 +147,7 @@ jobs:
- platform: "macOS"
bazel_targets: //objectivec/...
name: ${{ matrix.config.continuous-only && inputs.continuous-prefix || '' }} Bazel ${{ matrix.platform }} ${{ matrix.config.name }}
runs-on: macos-12
runs-on: macos-13
steps:
- name: Checkout pending changes
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}

@ -55,7 +55,7 @@ jobs:
command: composer test \&\& composer test_c
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.name}}
runs-on: ubuntu-latest
runs-on: ubuntu-22-4core
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
@ -72,11 +72,20 @@ jobs:
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php:${{ matrix.version }}-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php:6.4.0-${{ matrix.version }}-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
extra-flags: -e COMPOSER_HOME=/workspace/composer-cache
command: ${{ matrix.command }}
- name: Run conformance tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php:6.4.0-${{ matrix.version }}-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: php_linux/${{ matrix.version }}
bazel: test //php:conformance_test //php:conformance_test_c --action_env=PATH --test_env=PATH
linux-32bit:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
@ -98,9 +107,9 @@ jobs:
test: 'test'
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux 32-bit ${{ matrix.version}}${{ matrix.suffix_name }}${{ matrix.test_name }}
runs-on: ubuntu-latest
runs-on: ubuntu-22-4core
env:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:836f2cedcfe351d9a30055076630408e61994fc7d783e8333a99570968990eeb
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:429f924aec315704b4233adcbe4b29006116f27769db98acd176b9eb69c31299
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
@ -113,7 +122,7 @@ jobs:
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-i386
@ -141,7 +150,7 @@ jobs:
linux-aarch64:
name: Linux aarch64
runs-on: ubuntu-latest
runs-on: ubuntu-22-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@ -152,7 +161,7 @@ jobs:
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-aarch64
@ -176,17 +185,23 @@ jobs:
composer test;
composer test_c'
- name: Run conformance tests
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: php_linux/${{ matrix.version }}
bazel: test //php:conformance_test //php:conformance_test_c --action_env=PATH --test_env=PATH
macos:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
- version: '8.2'
continuous-only: true
- version: '8.3'
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} MacOS PHP ${{ matrix.version }}
runs-on: macos-12
# noop
runs-on: macos-13
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
@ -204,7 +219,7 @@ jobs:
- name: Pin PHP version
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: shivammathur/setup-php@8872c784b04a1420e81191df5d64fbd59d3d3033 # 2.30.2
uses: shivammathur/setup-php@c541c155eee45413f5b09a52248675b1a2575231 # 2.31.1
with:
php-version: ${{ matrix.version }}
@ -238,4 +253,4 @@ jobs:
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: php_macos/${{ matrix.version }}
bazel: test //php:conformance_test_c --action_env=PATH --test_env=PATH
bazel: test //php:conformance_test //php:conformance_test_c --action_env=PATH --test_env=PATH

@ -42,7 +42,7 @@ jobs:
bazel build //php:release $BAZEL_FLAGS;
cp bazel-bin/php/protobuf-*.tgz .
- uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: protobuf-php-release
path: protobuf-*.tgz
@ -61,7 +61,7 @@ jobs:
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Build ${{ matrix.version }}
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 #4.1.8
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
name: protobuf-php-release

@ -28,8 +28,7 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
type: [ Pure, C++]
# TODO: b/309627662 - Add coverage for Python 3.12.
version: ["3.8", "3.9", "3.10", "3.11"]
version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
include:
- type: Pure
targets: //python/... //python:python_version_test
@ -43,12 +42,14 @@ jobs:
# TODO Enable this once conformance tests are fixed.
flags: --define=use_fast_cpp_protos=true --test_tag_filters=-conformance
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-63dd26c0c7a808d92673a3e52e848189d4ab0f17
- version: "3.8"
- version: "3.9"
continuous-only: true
- version: "3.10"
continuous-only: true
- version: "3.11"
continuous-only: true
- version: "3.12"
continuous-only: true
- version: "3.13"
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.type }} ${{ matrix.version }}
runs-on: ubuntu-latest
@ -62,7 +63,7 @@ jobs:
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/python:{0}-63dd26c0c7a808d92673a3e52e848189d4ab0f17', matrix.version) }}
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/python:7.1.2-{0}-d9624f2aa83cba3eaf906f751d75b36aacb9aa82', matrix.version) }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: python_linux/${{ matrix.type }}_${{ matrix.version }}
bazel: test ${{ matrix.targets }} ${{ matrix.flags }} --test_env=KOKORO_PYTHON_VERSION
@ -74,17 +75,18 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
type: [ Pure, C++]
# TODO Consider expanding this set of versions.
version: [ "3.12" ]
version: [ "3.12", "3.13" ]
include:
- type: Pure
targets: //python/... //python:python_version_test
- type: C++
targets: //python/... //python:python_version_test
flags: --define=use_fast_cpp_protos=true
- version: "3.13"
continuous-only: true
name: MacOS ${{ matrix.type }} ${{ matrix.version }}
runs-on: macos-12
runs-on: macos-13
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@ -92,7 +94,7 @@ jobs:
ref: ${{ inputs.safe-checkout }}
- name: Pin Python version
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f #v5.1.1
with:
python-version: ${{ matrix.version }}
cache: pip
@ -116,5 +118,5 @@ jobs:
bazel: >-
test ${{ matrix.targets }} ${{ matrix.flags }}
--test_env=KOKORO_PYTHON_VERSION=${{ matrix.version }}
--macos_minimum_os=10.9
--macos_minimum_os=11.0
exclude-targets: -//python/pb_unit_tests/...

@ -0,0 +1,25 @@
name: Release Branch Tests
on:
schedule:
# Run daily at 10 AM UTC (2 AM PDT)
- cron: 0 10 * * *
workflow_dispatch:
permissions: {}
jobs:
releases:
strategy:
fail-fast: false
matrix:
branch: [25.x, 28.x, 29.x]
runs-on: ubuntu-latest
permissions:
actions: write
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
name: Run Tests on ${{ matrix.branch }}
steps:
- run: gh workflow run test_runner.yml --ref ${{ matrix.branch }}

@ -51,7 +51,7 @@ jobs:
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:{0}-6.3.0-9848710ff1370795ee7517570a20b81e140112ec', matrix.ruby) }}
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:6.4.0-{0}-27cf7b86212020d7e552bc13b1e084abb971da75', matrix.ruby) }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: ruby_linux/${{ matrix.ruby }}_${{ matrix.bazel }}
bazel: test //ruby/... //ruby/tests:ruby_version --test_env=KOKORO_RUBY_VERSION --test_env=BAZEL=true ${{ matrix.ffi == 'FFI' && '--//ruby:ffi=enabled --test_env=PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI' || '' }}
@ -66,7 +66,7 @@ jobs:
linux-32bit:
name: Linux 32-bit
runs-on: ubuntu-latest
runs-on: ubuntu-20-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@ -77,7 +77,7 @@ jobs:
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-i386
@ -88,7 +88,7 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/bin/bash -cex '
gem install bundler -v 2.5.6;
gem install bundler -v 2.5.13;
cd /workspace/ruby;
bundle;
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake;
@ -97,7 +97,7 @@ jobs:
linux-aarch64:
name: Linux aarch64
runs-on: ubuntu-latest
runs-on: ubuntu-20-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@ -108,7 +108,7 @@ jobs:
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-aarch64
@ -119,7 +119,7 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/bin/bash -cex '
gem install bundler -v 2.5.6;
gem install bundler -v 2.5.13;
cd /workspace/ruby;
bundle;
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake;
@ -142,7 +142,7 @@ jobs:
- { version: "3.3", ffi: FFI }
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} MacOS Ruby ${{ matrix.version }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
runs-on: macos-12
runs-on: macos-13
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
@ -196,7 +196,7 @@ jobs:
if: ${{ inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:${{ matrix.ruby }}-6.3.0-9848710ff1370795ee7517570a20b81e140112ec
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:6.4.0-${{ matrix.ruby }}-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: ruby_install/${{ matrix.ruby }}_${{ matrix.bazel }}
bash: >

@ -20,8 +20,6 @@ on:
branches:
- main
- '[0-9]+.x'
# The 21.x and 22.x branches still use Kokoro
- '!2[12].x'
# For testing purposes so we can stage this on the `gha` branch.
- gha
@ -30,8 +28,6 @@ on:
branches:
- main
- '[0-9]+.x'
# The 21.x and 22.x branches still use Kokoro
- '!2[12].x'
# For testing purposes so we can stage this on the `gha` branch.
- gha
@ -40,8 +36,6 @@ on:
branches:
- main
- '[0-9]+.x'
# The 21.x branch still use Kokoro
- '!21.x'
# For testing purposes so we can stage this on the `gha` branch.
- gha
types: [labeled, opened, reopened, synchronize]
@ -54,7 +48,7 @@ permissions:
concurrency:
group: ${{ github.event_name }}-${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: ${{ contains(fromJSON('["pull_request", "pull_request_target", "workflow_dispatch"]'), github.event_name) }}
cancel-in-progress: ${{ contains(fromJSON('["pull_request", "pull_request_target", "workflow_dispatch", "schedule"]'), github.event_name) }}
jobs:
set-vars:
@ -105,7 +99,7 @@ jobs:
- name: Set Test Type Variables
id: set-test-type-vars
run: |
if [ "${{ github.event_name }}" == 'pull_request' ] || [ "${{ github.event_name }}" == 'pull_request_target' ]; then
if ([ "${{ github.event_name }}" == 'pull_request' ] || [ "${{ github.event_name }}" == 'pull_request_target' ]) && ${{ !contains(toJson(github.event.pull_request.body), '\n#test-continuous') }}; then
echo "continuous-run=" >> "$GITHUB_OUTPUT"
echo "continuous-prefix=[SKIPPED] (Continuous)" >> "$GITHUB_OUTPUT"
else

@ -14,8 +14,28 @@ permissions:
jobs:
# This job should be run on presubmit, if any continuous-only tests are added we will need to input test-type above
linux:
name: Linux
runs-on: ubuntu-latest
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
config:
- { name: "No bzlmod", flags: --noenable_bzlmod }
# TODO: b/379846319 - Fix the tests with bzlmod and enable this.
# - { name: "bzlmod", flags: --enable_bzlmod --enable_workspace }
- { name: Optimized, flags: --config=opt }
- { name: ASAN, flags: --config=asan }
include:
- targets: "//rust/... //src/google/protobuf/compiler/rust/..."
- image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.1-97f82260fd504923d8af642d567afb2d83a1959d"
- bazel_cmd: "test"
# Override cases with custom images
- config: { name: Cargo }
image: "us-docker.pkg.dev/protobuf-build/containers/release/linux/rust:6.3.0-1.74.0-8858126dd9480abf91e6ce8d6e41a5cd3c03882c"
bazel_cmd: "run"
targets: "//rust:cargo_test"
name: Linux ${{ matrix.config.name }}
runs-on: ubuntu-22-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@ -24,11 +44,10 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.1-97f82260fd504923d8af642d567afb2d83a1959d"
image: ${{ matrix.image }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: rust_linux
bazel: >-
test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17
//rust:protobuf_upb_test //rust:protobuf_cpp_test
//rust/test/rust_proto_library_unit_test:rust_upb_aspect_test
//src/google/protobuf/compiler/rust/...
${{ matrix.bazel_cmd }} --crosstool_top=//toolchain:clang_suite --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 --@rules_rust//rust/settings:experimental_use_cc_common_link=True
${{ matrix.targets }} ${{ matrix.config.flags }}

@ -50,7 +50,7 @@ jobs:
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize:${{ matrix.config.bazel_version || '6.3.0' }}-75f2a85ece6526cc3d54087018c0f1097d78d42b
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize:${{ matrix.config.bazel_version || '6.4.0' }}-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: upb-bazel
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //bazel/... //benchmarks/... //lua/... //python/... //upb/... //upb_generator/... ${{ matrix.config.flags }}
@ -69,7 +69,7 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:12.2-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17"
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:6.4.0-12.2-27cf7b86212020d7e552bc13b1e084abb971da75"
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-gcc"
bazel: >-
@ -87,7 +87,8 @@ jobs:
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
- name: Setup Python
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
cache: pip
cache-dependency-path: 'python/requirements.txt'
@ -97,7 +98,7 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-windows"
bazel: test --cxxopt=/std:c++17 --host_cxxopt=/std:c++17 //upb/... //upb_generator/... //python/...
version: 6.3.0
version: 6.4.0
exclude-targets: -//python:conformance_test -//upb/reflection:def_builder_test
macos:
@ -108,14 +109,16 @@ jobs:
- { name: "macOS", bazel-command: "test" }
- { name: "macOS ARM (build only)", bazel-command: "build", flags: "--cpu=darwin_arm64" }
name: ${{ matrix.config.name }}
runs-on: macos-12
runs-on: macos-13
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
- name: Setup Python
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: 3.12
cache: pip
cache-dependency-path: 'python/requirements.txt'
- name: Run tests
@ -124,7 +127,7 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-macos"
bazel: ${{ matrix.config.bazel-command }} --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 ${{ matrix.config.flags }} //bazel/... //benchmarks/... //lua/... //python/... //upb/... //upb_generator/...
version: 6.3.0
version: 6.4.0
no-python:
strategy:
@ -139,14 +142,14 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-no-python"
bash: >-
which python3 &&
mv `which python3` /tmp &&
! which python3 &&
bazel test $BAZEL_FLAGS --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //python/... -- -//python/dist:source_wheel
bazel test $BAZEL_FLAGS --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //python/... -- -//python/dist:source_wheel -//python:aarch64_test -//python:x86_64_test -//python:google/protobuf/pyext/_message.so -//python:proto_api
build_wheels:
name: Build Wheels
@ -160,17 +163,17 @@ jobs:
- name: Build Wheels
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/release-containers/linux/apple:6.3.0-53225851b051e66f8543e972c143f35be757a181
image: us-docker.pkg.dev/protobuf-build/release-containers/linux/apple:6.4.0-5be0f4fde927ca702ed4cebe096bfb632d6d9a36
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: upb-bazel-python
bazel: build --crosstool_top=//toolchain:clang_suite --//toolchain:release=true --symlink_prefix=/ -c dbg //python/dist //python/dist:test_wheel //python/dist:source_wheel
- name: Move Wheels
run: mkdir wheels && find _build/out \( -name 'protobuf*.whl' -o -name 'protobuf-*.tar.gz' \) -exec mv '{}' wheels ';'
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: python-wheels
path: wheels/
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0
with:
name: requirements
# Tests shouldn't have access to the whole upb repo, upload the one file we need
@ -185,26 +188,26 @@ jobs:
# a single wheel. As a result we can just test the oldest and newest
# supported Python versions and assume this gives us sufficient test
# coverage.
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: macos-12, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.9", architecture: x64, type: 'binary' }
- { os: macos-13, python-version: "3.9", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'binary' }
- { os: macos-13, python-version: "3.12", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'source', continuous-only: true }
- { os: macos-12, python-version: "3.8", architecture: x64, type: 'source', continuous-only: true }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'source', continuous-only: true }
- { os: macos-13, python-version: "3.12", architecture: x64, type: 'source', continuous-only: true }
- { os: ubuntu-latest, python-version: "3.13", architecture: x64, type: 'source', continuous-only: true }
- { os: macos-13, python-version: "3.13", architecture: x64, type: 'source', continuous-only: true }
# Windows uses the full API up until Python 3.10.
- { os: windows-2019, python-version: "3.8", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.9", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.10", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.11", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.12", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.9", architecture: x64, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.13", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.9", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.10", architecture: x64, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.11", architecture: x64, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.12", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.12", architecture: x64, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.13", architecture: x64, type: 'binary' }
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Test Wheels Python ${{ matrix.python-version }} ${{ matrix.os }} ${{ matrix.architecture }} ${{ matrix.type }}
needs: build_wheels
runs-on: ${{ matrix.os }}
@ -215,17 +218,18 @@ jobs:
steps:
- name: Download Wheels
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: actions/download-artifact@v3
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 #4.1.8
with:
name: python-wheels
path: wheels
- name: Download Requirements
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: actions/download-artifact@v3
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 #4.1.8
with:
name: requirements
path: requirements
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
- name: Setup Python
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
python-version: ${{ matrix.python-version }}
@ -275,18 +279,19 @@ jobs:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
python-version: ["3.8", "3.12"]
python-version: ["3.9", "3.13"]
runs-on: ubuntu-latest
if: ${{ github.event_name != 'pull_request_target' }}
steps:
- name: Download Wheels
uses: actions/download-artifact@v3
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 #4.1.8
with:
name: python-wheels
path: wheels
- name: Delete Binary Wheels
run: find wheels -type f | grep -v none-any | xargs rm
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
- name: Setup Python
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: ${{ matrix.python-version }}
- name: Setup Python venv

11
.gitmodules vendored

@ -1,11 +0,0 @@
[submodule "third_party/googletest"]
path = third_party/googletest
url = https://github.com/google/googletest.git
ignore = dirty
[submodule "third_party/abseil-cpp"]
path = third_party/abseil-cpp
url = https://github.com/abseil/abseil-cpp.git
branch = lts_2023_08_02
[submodule "third_party/jsoncpp"]
path = third_party/jsoncpp
url = https://github.com/open-source-parsers/jsoncpp.git

@ -24,6 +24,11 @@ license(
license_text = ":LICENSE",
)
exports_files(
["MODULE.bazel"],
visibility = ["//cmake:__pkg__"],
)
################################################################################
# Well Known Types Proto Library Rules
#
@ -266,7 +271,7 @@ alias(
alias(
name = "protobuf_nowkt",
actual = "//src/google/protobuf:protobuf_layering_check_legacy",
actual = "//src/google/protobuf",
deprecation = "Use //:protobuf instead",
visibility = ["//visibility:public"],
)
@ -642,53 +647,3 @@ filegroup(
srcs = glob(["**/*.bzl"]),
visibility = ["//visibility:public"],
)
################################################################################
# Packaging rules
################################################################################
# Files included in all source distributions
pkg_files(
name = "common_dist_files",
srcs = glob(
[
"*.bzl",
"cmake/*.cmake",
"cmake/*.in",
"editors/*",
],
allow_empty = True,
) + [
"BUILD.bazel",
"CMakeLists.txt",
"CONTRIBUTORS.txt",
"LICENSE",
"README.md",
"WORKSPACE",
"cmake/README.md",
"generate_descriptor_proto.sh",
"maven_install.json",
"//third_party:BUILD.bazel",
"//third_party:zlib.BUILD",
],
strip_prefix = strip_prefix.from_root(""),
visibility = ["//pkg:__pkg__"],
)
# Additional files for C#
pkg_files(
name = "csharp_dist_files",
srcs = [
"global.json",
],
visibility = ["//pkg:__pkg__"],
)
# Additional files for ObjC
pkg_files(
name = "objectivec_dist_files",
srcs = [
"Protobuf.podspec",
],
visibility = ["//pkg:__pkg__"],
)

@ -35,6 +35,8 @@ option(protobuf_BUILD_LIBUPB "Build libupb" ON)
option(protobuf_DISABLE_RTTI "Remove runtime type information in the binaries" OFF)
option(protobuf_TEST_XML_OUTDIR "Output directory for XML logs from tests." "")
option(protobuf_ALLOW_CCACHE "Adjust build flags to allow for ccache support." OFF)
option(protobuf_FORCE_FETCH_DEPENDENCIES "Force all dependencies to be downloaded from GitHub. Local installations will be ignored." OFF)
option(protobuf_LOCAL_DEPENDENCIES_ONLY "Prevent downloading any dependencies from GitHub. If this option is set, the dependency must be available locally as an installed package." OFF)
# We support Unity (Jumbo) builds best-effort.
option(protobuf_USE_UNITY_BUILD "Enable Unity (Jumbo) build for" OFF)
@ -84,7 +86,7 @@ if (protobuf_BUILD_SHARED_LIBS)
endif ()
# Version metadata
set(protobuf_VERSION_STRING "5.29.0")
set(protobuf_VERSION_STRING "5.30.0")
set(protobuf_DESCRIPTION "Protocol Buffers")
set(protobuf_CONTACT "protobuf@googlegroups.com")
@ -106,18 +108,21 @@ string(REGEX REPLACE "${protobuf_VERSION_REGEX}" "\\3"
string(REGEX REPLACE "${protobuf_VERSION_REGEX}" "\\5"
protobuf_VERSION_PRERELEASE "${protobuf_VERSION_STRING}")
message(STATUS "${protobuf_VERSION_PRERELEASE}")
if (protobuf_FORCE_FETCH_DEPENDENCIES AND protobuf_LOCAL_DEPENDENCIES_ONLY)
message(FATAL_ERROR "Conflicting options protobuf_FORCE_FETCH_DEPENDENCIES and protobuf_LOCAL_DEPENDENCIES_ONLY both set")
endif()
# Package version
set(protobuf_VERSION
"${protobuf_VERSION_MINOR}.${protobuf_VERSION_PATCH}")
if(protobuf_VERSION_PRERELEASE)
message(STATUS "${protobuf_VERSION_PRERELEASE}")
set(protobuf_VERSION "${protobuf_VERSION}.${protobuf_VERSION_PRERELEASE}")
else()
set(protobuf_VERSION "${protobuf_VERSION}.0")
endif()
message(STATUS "${protobuf_VERSION}")
message(STATUS "protobuf version: ${protobuf_VERSION}")
if(protobuf_VERBOSE)
message(STATUS "Configuration script parsing status [")
@ -153,14 +158,6 @@ file(REMOVE ${CMAKE_CURRENT_BINARY_DIR}/cmaketest.map)
find_package(Threads REQUIRED)
# We can install dependencies from submodules if we're running
# CMake v3.13 or newer.
if(CMAKE_VERSION VERSION_LESS 3.13)
set(_protobuf_INSTALL_SUPPORTED_FROM_MODULE OFF)
else()
set(_protobuf_INSTALL_SUPPORTED_FROM_MODULE ON)
endif()
set(_protobuf_FIND_ZLIB)
if (protobuf_WITH_ZLIB)
find_package(ZLIB)
@ -206,31 +203,11 @@ if (protobuf_BUILD_SHARED_LIBS)
set(protobuf_SHARED_OR_STATIC "SHARED")
else (protobuf_BUILD_SHARED_LIBS)
set(protobuf_SHARED_OR_STATIC "STATIC")
# The CMAKE_<LANG>_FLAGS(_<BUILD_TYPE>)? is meant to be user controlled.
# Prior to CMake 3.15, the MSVC runtime library was pushed into the same flags
# making programmatic control difficult. Prefer the functionality in newer
# CMake versions when available.
if(${CMAKE_VERSION} VERSION_GREATER 3.15 OR ${CMAKE_VERSION} VERSION_EQUAL 3.15)
if (protobuf_MSVC_STATIC_RUNTIME)
set(CMAKE_MSVC_RUNTIME_LIBRARY MultiThreaded$<$<CONFIG:Debug>:Debug>)
else()
set(CMAKE_MSVC_RUNTIME_LIBRARY MultiThreaded$<$<CONFIG:Debug>:Debug>DLL)
endif()
set(ABSL_MSVC_STATIC_RUNTIME ON)
if (protobuf_MSVC_STATIC_RUNTIME)
set(CMAKE_MSVC_RUNTIME_LIBRARY MultiThreaded$<$<CONFIG:Debug>:Debug>)
else()
# In case we are building static libraries, link also the runtime library statically
# so that MSVCR*.DLL is not required at runtime.
# https://msdn.microsoft.com/en-us/library/2kzt1wy3.aspx
# This is achieved by replacing msvc option /MD with /MT and /MDd with /MTd
# http://www.cmake.org/Wiki/CMake_FAQ#How_can_I_build_my_MSVC_application_with_a_static_runtime.3F
if (MSVC AND protobuf_MSVC_STATIC_RUNTIME)
foreach(flag_var
CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
if(${flag_var} MATCHES "/MD")
string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
endif(${flag_var} MATCHES "/MD")
endforeach(flag_var)
endif (MSVC AND protobuf_MSVC_STATIC_RUNTIME)
set(CMAKE_MSVC_RUNTIME_LIBRARY MultiThreaded$<$<CONFIG:Debug>:Debug>DLL)
endif()
endif (protobuf_BUILD_SHARED_LIBS)
@ -289,12 +266,6 @@ include_directories(
${protobuf_BINARY_DIR}/src
${protobuf_SOURCE_DIR}/src)
set(protobuf_ABSL_PROVIDER "module" CACHE STRING "Provider of absl library")
set_property(CACHE protobuf_ABSL_PROVIDER PROPERTY STRINGS "module" "package")
set(protobuf_JSONCPP_PROVIDER "module" CACHE STRING "Provider of jsoncpp library")
set_property(CACHE protobuf_JSONCPP_PROVIDER PROPERTY STRINGS "module" "package")
if (protobuf_BUILD_TESTS)
include(${protobuf_SOURCE_DIR}/cmake/gtest.cmake)
endif (protobuf_BUILD_TESTS)

@ -1,5 +1,5 @@
{
"checksum": "ca8913cc78d0ec771c537fae8d8e7b4505ab91bd61ddc886cc66dbeb264ff626",
"checksum": "89c489aa74f633247650bf28b86db6ec53c041968fd91758693748f553ef102c",
"crates": {
"aho-corasick 1.1.2": {
"name": "aho-corasick",
@ -17,7 +17,7 @@
"crate_name": "aho_corasick",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -73,7 +73,7 @@
"crate_name": "autocfg",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -107,7 +107,7 @@
"crate_name": "direct_cargo_bazel_deps",
"crate_root": ".direct_cargo_bazel_deps.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -164,7 +164,7 @@
"crate_name": "googletest",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -231,7 +231,7 @@
"crate_name": "googletest_macro",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -282,7 +282,7 @@
"crate_name": "memchr",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -329,7 +329,7 @@
"crate_name": "num_traits",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -341,7 +341,7 @@
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -374,6 +374,9 @@
"version": "0.2.17"
},
"build_script_attrs": {
"compile_data_glob": [
"**"
],
"data_glob": [
"**"
],
@ -410,7 +413,7 @@
"crate_name": "paste",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -422,7 +425,7 @@
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -448,6 +451,9 @@
"version": "1.0.14"
},
"build_script_attrs": {
"compile_data_glob": [
"**"
],
"data_glob": [
"**"
]
@ -475,7 +481,7 @@
"crate_name": "proc_macro2",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -487,7 +493,7 @@
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -523,6 +529,9 @@
"version": "1.0.69"
},
"build_script_attrs": {
"compile_data_glob": [
"**"
],
"data_glob": [
"**"
]
@ -550,7 +559,7 @@
"crate_name": "quote",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -605,7 +614,7 @@
"crate_name": "regex",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -687,7 +696,7 @@
"crate_name": "regex_automata",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -770,7 +779,7 @@
"crate_name": "regex_syntax",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -824,7 +833,7 @@
"crate_name": "rustversion",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -836,7 +845,7 @@
"crate_name": "build_script_build",
"crate_root": "build/build.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -862,6 +871,9 @@
"version": "1.0.14"
},
"build_script_attrs": {
"compile_data_glob": [
"**"
],
"data_glob": [
"**"
]
@ -889,7 +901,7 @@
"crate_name": "syn",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -958,7 +970,7 @@
"crate_name": "unicode_ident",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]

@ -3,7 +3,7 @@
module(
name = "protobuf",
version = "29.0-dev", # Automatically updated on release
version = "30.0-dev", # Automatically updated on release
compatibility_level = 1,
repo_name = "com_google_protobuf",
)
@ -12,85 +12,30 @@ module(
# Bzlmod follows MVS:
# https://bazel.build/versions/6.0.0/build/bzlmod#version-resolution
# Thus the highest version in their module graph is resolved.
bazel_dep(
name = "abseil-cpp",
version = "20230802.0.bcr.1",
repo_name = "com_google_absl",
)
bazel_dep(
name = "bazel_skylib",
version = "1.7.0",
)
bazel_dep(
name = "jsoncpp",
version = "1.9.5",
)
bazel_dep(
name = "rules_cc",
version = "0.0.9",
)
bazel_dep(
name = "rules_fuzzing",
version = "0.5.2",
)
bazel_dep(
name = "rules_java",
version = "5.3.5",
)
bazel_dep(
name = "rules_jvm_external",
version = "6.0",
)
bazel_dep(
name = "rules_kotlin",
version = "1.9.0",
)
bazel_dep(
name = "rules_license",
version = "0.0.8",
)
bazel_dep(
name = "rules_pkg",
version = "0.7.0",
)
bazel_dep(
name = "rules_python",
version = "0.28.0",
)
bazel_dep(
name = "rules_rust",
version = "0.45.1",
)
bazel_dep(
name = "platforms",
version = "0.0.8",
)
bazel_dep(
name = "zlib",
version = "1.3.1",
)
bazel_dep(
name = "bazel_features",
version = "1.13.0",
repo_name = "proto_bazel_features",
)
bazel_dep(name = "abseil-cpp", version = "20240722.0", repo_name = "com_google_absl")
bazel_dep(name = "bazel_skylib", version = "1.7.0")
bazel_dep(name = "jsoncpp", version = "1.9.6")
bazel_dep(name = "rules_cc", version = "0.0.16")
bazel_dep(name = "rules_fuzzing", version = "0.5.2")
bazel_dep(name = "rules_java", version = "8.3.2")
bazel_dep(name = "rules_jvm_external", version = "6.3")
bazel_dep(name = "rules_kotlin", version = "1.9.6")
bazel_dep(name = "rules_license", version = "1.0.0")
bazel_dep(name = "rules_pkg", version = "1.0.1")
bazel_dep(name = "rules_python", version = "0.28.0")
bazel_dep(name = "rules_rust", version = "0.51.0")
bazel_dep(name = "platforms", version = "0.0.8")
bazel_dep(name = "zlib", version = "1.3.1")
bazel_dep(name = "bazel_features", version = "1.17.0", repo_name = "proto_bazel_features")
bazel_dep(
name = "rules_shell",
version = "0.2.0",
)
# Proto toolchains
register_toolchains("//bazel/private/toolchains:all")
SUPPORTED_PYTHON_VERSIONS = [
"3.8",
"3.9",
"3.10",
"3.11",
@ -125,36 +70,21 @@ pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
use_repo(pip, "pip_deps")
rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(edition = "2021")
use_repo(rust, "rust_toolchains")
register_toolchains("@rust_toolchains//:all")
crate = use_extension("@rules_rust//crate_universe:extension.bzl", "crate")
crate.spec(
package = "googletest",
version = ">0.0.0",
)
crate.spec(
package = "paste",
version = ">=1",
)
crate.from_specs()
use_repo(
crate,
crate_index = "crates",
)
use_repo(crate, crate_index = "crates")
maven = use_extension("@rules_jvm_external//:extensions.bzl", "maven")
maven.install(
name = "protobuf_maven",
artifacts = [
"com.google.caliper:caliper:1.0-beta-3",
"com.google.code.findbugs:jsr305:3.0.2",
@ -174,32 +104,12 @@ maven.install(
"https://repo.maven.apache.org/maven2",
],
)
use_repo(maven, "maven")
use_repo(maven, "protobuf_maven")
# Development dependencies
bazel_dep(
name = "googletest",
version = "1.14.0",
dev_dependency = True,
repo_name = "com_google_googletest",
)
bazel_dep(
name = "rules_buf",
version = "0.3.0",
dev_dependency = True,
)
bazel_dep(
name = "rules_testing",
version = "0.6.0",
dev_dependency = True,
)
bazel_dep(name = "googletest", version = "1.14.0", dev_dependency = True, repo_name = "com_google_googletest")
bazel_dep(name = "rules_buf", version = "0.3.0", dev_dependency = True)
bazel_dep(name = "rules_testing", version = "0.6.0", dev_dependency = True)
# rules_proto are needed for @com_google_protobuf_v25.0 used in //compatibility/... tests
bazel_dep(
name = "rules_proto",
version = "4.0.0",
dev_dependency = True,
)
bazel_dep(name = "rules_proto", version = "4.0.0", dev_dependency = True)

@ -1,48 +0,0 @@
Pod::Spec.new do |s|
s.name = 'Protobuf-C++'
s.version = '5.29.0'
s.summary = 'Protocol Buffers v3 runtime library for C++.'
s.homepage = 'https://github.com/google/protobuf'
s.license = 'BSD-3-Clause'
s.authors = { 'The Protocol Buffers contributors' => 'protobuf@googlegroups.com' }
# Ensure developers won't hit CocoaPods/CocoaPods#11402 with the resource
# bundle for the privacy manifest.
s.cocoapods_version = '>= 1.12.0'
s.source = { :git => 'https://github.com/google/protobuf.git',
:tag => "v#{s.version}" }
s.source_files = 'src/google/protobuf/*.{h,cc,inc}',
'src/google/protobuf/stubs/*.{h,cc}',
'src/google/protobuf/io/*.{h,cc}',
'src/google/protobuf/util/*.{h,cc}'
# Excluding all the tests in the directories above
s.exclude_files = 'src/google/**/*_test.{h,cc,inc}',
'src/google/**/*_unittest.{h,cc}',
'src/google/protobuf/test_util*.{h,cc}',
'src/google/protobuf/map_lite_test_util.{h,cc}',
'src/google/protobuf/map_test_util*.{h,cc,inc}',
'src/google/protobuf/reflection_tester.{h,cc}'
s.resource_bundle = {
"Protobuf-C++_Privacy" => "PrivacyInfo.xcprivacy"
}
s.header_mappings_dir = 'src'
s.ios.deployment_target = '12.0'
s.osx.deployment_target = '10.13'
s.tvos.deployment_target = '12.0'
s.watchos.deployment_target = '6.0'
s.visionos.deployment_target = '1.0'
s.pod_target_xcconfig = {
# Do not let src/google/protobuf/stubs/time.h override system API
'USE_HEADERMAP' => 'NO',
'ALWAYS_SEARCH_USER_PATHS' => 'NO',
'HEADER_SEARCH_PATHS' => '"$(PODS_TARGET_SRCROOT)/src"'
}
end

@ -5,7 +5,7 @@
# dependent projects use the :git notation to refer to the library.
Pod::Spec.new do |s|
s.name = 'Protobuf'
s.version = '3.29.0'
s.version = '4.30.0'
s.summary = 'Protocol Buffers v.3 runtime library for Objective-C.'
s.homepage = 'https://github.com/protocolbuffers/protobuf'
s.license = 'BSD-3-Clause'
@ -18,17 +18,7 @@ Pod::Spec.new do |s|
s.source = { :git => 'https://github.com/protocolbuffers/protobuf.git',
:tag => "v#{s.version}" }
s.source_files = 'objectivec/*.{h,m,swift}',
'objectivec/google/protobuf/Any.pbobjc.h',
'objectivec/google/protobuf/Api.pbobjc.h',
'objectivec/google/protobuf/Duration.pbobjc.h',
'objectivec/google/protobuf/Empty.pbobjc.h',
'objectivec/google/protobuf/FieldMask.pbobjc.h',
'objectivec/google/protobuf/SourceContext.pbobjc.h',
'objectivec/google/protobuf/Struct.pbobjc.h',
'objectivec/google/protobuf/Timestamp.pbobjc.h',
'objectivec/google/protobuf/Type.pbobjc.h',
'objectivec/google/protobuf/Wrappers.pbobjc.h'
s.source_files = 'objectivec/*.{h,m,swift}'
# The following would cause duplicate symbol definitions. GPBProtocolBuffers is expected to be
# left out, as it's an umbrella implementation file.
s.exclude_files = 'objectivec/GPBProtocolBuffers.m'
@ -44,8 +34,10 @@ Pod::Spec.new do |s|
s.user_target_xcconfig = { 'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS=1' }
s.pod_target_xcconfig = { 'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS=1' }
s.ios.deployment_target = '10.0'
s.osx.deployment_target = '10.13'
s.ios.deployment_target = '15.0'
s.osx.deployment_target = '11.0'
# The following are best-effort / community supported, and are not covered by
# our official support policies: https://protobuf.dev/support/version-support/
s.tvos.deployment_target = '12.0'
s.watchos.deployment_target = '6.0'
s.visionos.deployment_target = '1.0'

@ -5,10 +5,6 @@ workspace(name = "com_google_protobuf")
# buildifier: disable=duplicated-name
local_repository(name = "com_google_protobuf", path = ".")
# Second self-reference that makes it possible to load proto rules from @protobuf.
# buildifier: disable=duplicated-name
local_repository(name = "protobuf", path = ".")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
local_repository(
@ -63,6 +59,7 @@ rules_jvm_external_setup()
load("@rules_jvm_external//:defs.bzl", "maven_install")
maven_install(
name = "protobuf_maven",
artifacts = PROTOBUF_MAVEN_ARTIFACTS,
# For updating instructions, see:
# https://github.com/bazelbuild/rules_jvm_external#updating-maven_installjson
@ -73,7 +70,7 @@ maven_install(
],
)
load("@maven//:defs.bzl", "pinned_maven_install")
load("@protobuf_maven//:defs.bzl", "pinned_maven_install")
pinned_maven_install()
@ -94,6 +91,12 @@ load("@build_bazel_apple_support//lib:repositories.bzl", "apple_support_dependen
apple_support_dependencies()
load("@rules_java//java:repositories.bzl", "rules_java_dependencies", "rules_java_toolchains")
rules_java_dependencies()
rules_java_toolchains()
load("@rules_cc//cc:repositories.bzl", "rules_cc_dependencies")
rules_cc_dependencies()
@ -110,10 +113,10 @@ kt_register_toolchains()
http_archive(
name = "rules_ruby",
urls = [
"https://github.com/protocolbuffers/rules_ruby/archive/b7f3e9756f3c45527be27bc38840d5a1ba690436.zip"
"https://github.com/protocolbuffers/rules_ruby/archive/588d9dd40487277e2560ece09fe310d7c0ecb4a6.zip"
],
strip_prefix = "rules_ruby-b7f3e9756f3c45527be27bc38840d5a1ba690436",
sha256 = "347927fd8de6132099fcdc58e8f7eab7bde4eb2fd424546b9cd4f1c6f8f8bad8",
strip_prefix = "rules_ruby-588d9dd40487277e2560ece09fe310d7c0ecb4a6",
integrity = "sha256-Lh/xxR6WsKJnS92sYkpJDBtdS6DNrCbi0kuUxBffG6E=",
)
load("@rules_ruby//ruby:defs.bzl", "ruby_runtime")
@ -202,8 +205,8 @@ fuzzing_py_deps_install_deps()
http_archive(
name = "rules_rust",
integrity = "sha256-F8U7+AC5MvMtPKGdLLnorVM84cDXKfDRgwd7/dq3rUY=",
urls = ["https://github.com/bazelbuild/rules_rust/releases/download/0.46.0/rules_rust-v0.46.0.tar.gz"],
integrity = "sha256-BCrPtzRpstGEj+FI2Bw0IsYepHqeGQDxyew29R6OcZM=",
urls = ["https://github.com/bazelbuild/rules_rust/releases/download/0.51.0/rules_rust-v0.51.0.tar.gz"],
)
load("@rules_rust//rust:repositories.bzl", "rules_rust_dependencies", "rust_register_toolchains")

@ -9,10 +9,10 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "rules_ruby",
urls = [
"https://github.com/protocolbuffers/rules_ruby/archive/b7f3e9756f3c45527be27bc38840d5a1ba690436.zip"
"https://github.com/protocolbuffers/rules_ruby/archive/588d9dd40487277e2560ece09fe310d7c0ecb4a6.zip"
],
strip_prefix = "rules_ruby-b7f3e9756f3c45527be27bc38840d5a1ba690436",
sha256 = "347927fd8de6132099fcdc58e8f7eab7bde4eb2fd424546b9cd4f1c6f8f8bad8",
strip_prefix = "rules_ruby-588d9dd40487277e2560ece09fe310d7c0ecb4a6",
integrity = "sha256-Lh/xxR6WsKJnS92sYkpJDBtdS6DNrCbi0kuUxBffG6E=",
)
load("@rules_ruby//ruby:defs.bzl", "ruby_runtime")
@ -21,7 +21,7 @@ ruby_runtime("system_ruby")
register_toolchains("@system_ruby//:toolchain")
# Follwing are just needed to run conformance tests, not really needed to support them via MODULE.bazel
# Following are just needed to run conformance tests, not really needed to support them via MODULE.bazel
# For testing runtime against old gencode from a previous major version.
http_archive(

@ -13,24 +13,31 @@ bzl_library(
name = "proto_library_bzl",
srcs = ["proto_library.bzl"],
visibility = ["//visibility:public"],
deps = [
"//bazel/private:bazel_proto_library_rule_bzl",
"@proto_bazel_features//:features",
],
)
bzl_library(
name = "cc_proto_library_bzl",
srcs = ["cc_proto_library.bzl"],
visibility = ["//visibility:public"],
deps = ["//bazel/private:bazel_cc_proto_library_bzl"],
)
bzl_library(
name = "java_proto_library_bzl",
srcs = ["java_proto_library.bzl"],
visibility = ["//visibility:public"],
deps = ["//bazel/private:bazel_java_proto_library_rule_bzl"],
)
bzl_library(
name = "java_lite_proto_library_bzl",
srcs = ["java_lite_proto_library.bzl"],
visibility = ["//visibility:public"],
deps = ["//bazel/private:java_lite_proto_library_bzl"],
)
bzl_library(
@ -56,3 +63,19 @@ bzl_library(
visibility = ["//visibility:public"],
deps = ["//bazel/private:upb_proto_library_internal_bzl"],
)
# The data in this target is exposed in //bazel/private:for_bazel_tests
filegroup(
name = "for_bazel_tests",
testonly = True,
srcs = [
"BUILD.bazel",
":cc_proto_library_bzl",
":java_lite_proto_library_bzl",
":proto_library_bzl",
":py_proto_library_bzl",
"//bazel/common:for_bazel_tests",
"//bazel/toolchains:for_bazel_tests",
],
visibility = ["//bazel/private:__pkg__"],
)

@ -1,3 +1,10 @@
"""cc_proto_library rule"""
cc_proto_library = native.cc_proto_library
load("//bazel/private:bazel_cc_proto_library.bzl", _cc_proto_library = "cc_proto_library") # buildifier: disable=bzl-visibility
def cc_proto_library(**kwattrs):
# Only use Starlark rules when they are removed from Bazel
if not hasattr(native, "cc_proto_library"):
_cc_proto_library(**kwattrs)
else:
native.cc_proto_library(**kwattrs) # buildifier: disable=native-cc-proto

@ -10,6 +10,7 @@ bzl_library(
visibility = ["//visibility:public"],
deps = [
":proto_lang_toolchain_info_bzl",
"//bazel/private:native_bzl",
"//bazel/private:toolchain_helpers_bzl",
"@proto_bazel_features//:features",
],
@ -22,7 +23,8 @@ bzl_library(
],
visibility = ["//visibility:public"],
deps = [
"//bazel/private:native_bzl",
"//bazel/private:proto_info_bzl",
"@proto_bazel_features//:features",
],
)
@ -38,9 +40,13 @@ bzl_library(
)
filegroup(
name = "bazel_osx_p4deps",
srcs = glob(["**"]) + ["@proto_bazel_features//:features"],
visibility = [
"//bazel:__pkg__",
name = "for_bazel_tests",
testonly = True,
srcs = [
"BUILD",
"proto_common_bzl",
"proto_info_bzl",
"proto_lang_toolchain_info_bzl",
],
visibility = ["//bazel:__pkg__"],
)

@ -9,6 +9,7 @@
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:native.bzl", "native_proto_common")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
def _import_virtual_proto_path(path):
@ -62,6 +63,7 @@ def _get_import_path(proto_file):
Args:
proto_file: (File) The .proto file
Returns:
(str) import path
"""
@ -347,5 +349,8 @@ proto_common = struct(
get_import_path = _get_import_path,
ProtoLangToolchainInfo = ProtoLangToolchainInfo,
INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = toolchains.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION,
INCOMPATIBLE_PASS_TOOLCHAIN_TYPE = True,
INCOMPATIBLE_PASS_TOOLCHAIN_TYPE = (
getattr(native_proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False) or
not hasattr(native_proto_common, "ProtoLangToolchainInfo")
),
)

@ -1,5 +1,7 @@
"""ProtoInfo"""
load("//bazel/private:native.bzl", "NativeProtoInfo")
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/private:proto_info.bzl", _ProtoInfo = "ProtoInfo") # buildifier: disable=bzl-visibility
ProtoInfo = NativeProtoInfo
# This resolves to Starlark ProtoInfo in Bazel 8 or with --incompatible_enable_autoload flag
ProtoInfo = getattr(bazel_features.globals, "ProtoInfo", None) or _ProtoInfo

@ -1,5 +1,26 @@
"""ProtoLangToolchainInfo"""
load("//bazel/private:native.bzl", "native_proto_common")
load("//bazel/private:native.bzl", "native_proto_common") # buildifier: disable=bzl-visibility
ProtoLangToolchainInfo = native_proto_common.ProtoLangToolchainInfo
# Use Starlark implementation only if native_proto_common.ProtoLangToolchainInfo doesn't exist
ProtoLangToolchainInfo = getattr(native_proto_common, "ProtoLangToolchainInfo", provider(
doc = """Specifies how to generate language-specific code from .proto files.
Used by LANG_proto_library rules.""",
fields = dict(
out_replacement_format_flag = """(str) Format string used when passing output to the plugin
used by proto compiler.""",
output_files = """("single","multiple","legacy") Format out_replacement_format_flag with
a path to single file or a directory in case of multiple files.""",
plugin_format_flag = "(str) Format string used when passing plugin to proto compiler.",
plugin = "(FilesToRunProvider) Proto compiler plugin.",
runtime = "(Target) Runtime.",
provided_proto_sources = "(list[File]) Proto sources provided by the toolchain.",
proto_compiler = "(FilesToRunProvider) Proto compiler.",
protoc_opts = "(list[str]) Options to pass to proto compiler.",
progress_message = "(str) Progress message to set on the proto compiler action.",
mnemonic = "(str) Mnemonic to set on the proto compiler action.",
allowlist_different_package = """(Target) Allowlist to create lang_proto_library in a
different package than proto_library""",
toolchain_type = """(Label) Toolchain type that was used to obtain this info""",
),
))

@ -1,3 +1,16 @@
# Copyright (c) 2009-2024, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""java_lite_proto_library rule"""
java_lite_proto_library = native.java_lite_proto_library
load("//bazel/private:java_lite_proto_library.bzl", _java_lite_proto_library = "java_lite_proto_library") # buildifier: disable=bzl-visibility
def java_lite_proto_library(**kwattrs):
# Only use Starlark rules when they are removed from Bazel
if not hasattr(native, "java_lite_proto_library"):
_java_lite_proto_library(**kwattrs)
else:
native.java_lite_proto_library(**kwattrs)

@ -1,3 +1,16 @@
# Copyright (c) 2009-2024, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""java_proto_library rule"""
java_proto_library = native.java_proto_library
load("//bazel/private:bazel_java_proto_library_rule.bzl", _java_proto_library = "java_proto_library") # buildifier: disable=bzl-visibility
def java_proto_library(**kwattrs):
# Only use Starlark rules when they are removed from Bazel
if not hasattr(native, "java_proto_library"):
_java_proto_library(**kwattrs)
else:
native.java_proto_library(**kwattrs)

@ -1,10 +1,31 @@
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
load("//bazel/private:native_bool_flag.bzl", "native_bool_flag")
load(":native_bool_flag.bzl", "native_bool_flag")
package(default_applicable_licenses = ["//:license"])
toolchain_type(
name = "proto_toolchain_type",
visibility = ["//visibility:public"],
)
toolchain_type(
name = "cc_toolchain_type",
visibility = ["//visibility:public"],
)
toolchain_type(
name = "java_toolchain_type",
visibility = ["//visibility:public"],
)
toolchain_type(
name = "javalite_toolchain_type",
visibility = ["//visibility:public"],
)
toolchain_type(
name = "python_toolchain_type",
visibility = ["//visibility:public"],
)
bzl_library(
@ -31,6 +52,12 @@ bzl_library(
visibility = ["//bazel:__subpackages__"],
)
bzl_library(
name = "proto_info_bzl",
srcs = ["proto_info.bzl"],
visibility = ["//bazel:__subpackages__"],
)
bzl_library(
name = "bazel_proto_library_rule_bzl",
srcs = [
@ -38,15 +65,61 @@ bzl_library(
],
visibility = ["//bazel:__subpackages__"],
deps = [
":toolchain_helpers_bzl",
"//bazel/common:proto_common_bzl",
"//bazel/common:proto_info_bzl",
"//bazel/private:toolchain_helpers_bzl",
"@bazel_skylib//lib:paths",
"@bazel_skylib//rules:common_settings",
"@proto_bazel_features//:features",
],
)
bzl_library(
name = "bazel_java_proto_library_rule_bzl",
srcs = [
"bazel_java_proto_library_rule.bzl",
"java_proto_support.bzl",
],
visibility = ["//bazel:__subpackages__"],
deps = [
":toolchain_helpers_bzl",
"//bazel/common:proto_common_bzl",
"//bazel/common:proto_info_bzl",
"@rules_java//java/common",
],
)
bzl_library(
name = "java_lite_proto_library_bzl",
srcs = [
"java_lite_proto_library.bzl",
"java_proto_support.bzl",
],
visibility = ["//bazel:__subpackages__"],
deps = [
":toolchain_helpers_bzl",
"//bazel/common:proto_common_bzl",
"//bazel/common:proto_info_bzl",
"@rules_java//java/common",
],
)
bzl_library(
name = "bazel_cc_proto_library_bzl",
srcs = [
"bazel_cc_proto_library.bzl",
"cc_proto_support.bzl",
],
visibility = ["//bazel:__subpackages__"],
deps = [
":toolchain_helpers_bzl",
"//bazel/common:proto_common_bzl",
"//bazel/common:proto_info_bzl",
"@proto_bazel_features//:features",
"@rules_cc//cc:find_cc_toolchain_bzl",
],
)
bzl_library(
name = "proto_toolchain_rule_bzl",
srcs = [
@ -54,9 +127,9 @@ bzl_library(
],
visibility = ["//bazel:__subpackages__"],
deps = [
":toolchain_helpers_bzl",
"//bazel/common:proto_common_bzl",
"//bazel/common:proto_lang_toolchain_info_bzl",
"//bazel/private:toolchain_helpers_bzl",
],
)
@ -75,6 +148,14 @@ bzl_library(
],
)
bzl_library(
name = "cc_proto_aspect_bzl",
srcs = ["cc_proto_aspect.bzl"],
deps = [
":bazel_cc_proto_library_bzl",
],
)
bzl_library(
name = "toolchain_helpers_bzl",
srcs = [
@ -110,10 +191,21 @@ native_bool_flag(
visibility = ["//bazel:__subpackages__"],
)
bzl_library(
name = "native_bool_flag_bzl",
srcs = ["native_bool_flag.bzl"],
visibility = ["//visibility:private"],
deps = ["@bazel_skylib//rules:common_settings"],
)
filegroup(
name = "bazel_osx_p4deps",
srcs = glob(["**"]),
visibility = [
"//bazel:__pkg__",
name = "for_bazel_tests",
testonly = True,
srcs = [
"BUILD",
":native_bool_flag_bzl",
"//bazel:for_bazel_tests",
"//bazel/private/toolchains:for_bazel_tests",
],
visibility = ["//visibility:public"],
)

@ -0,0 +1,198 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Bazel's implementation of cc_proto_library"""
load("@rules_cc//cc:find_cc_toolchain.bzl", "use_cc_toolchain")
load("@rules_cc//cc/common:cc_info.bzl", "CcInfo")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:cc_proto_support.bzl", "cc_proto_compile_and_link")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
_CC_PROTO_TOOLCHAIN = Label("//bazel/private:cc_toolchain_type")
_ProtoCcFilesInfo = provider(fields = ["files"], doc = "Provide cc proto files.")
_ProtoCcHeaderInfo = provider(fields = ["headers"], doc = "Provide cc proto headers.")
def _get_output_files(actions, proto_info, suffixes):
result = []
for suffix in suffixes:
result.extend(proto_common.declare_generated_files(
actions = actions,
proto_info = proto_info,
extension = suffix,
))
return result
# TODO: Make this code actually work.
def _get_strip_include_prefix(ctx, proto_info):
proto_root = proto_info.proto_source_root
if proto_root == "." or proto_root == ctx.label.workspace_root:
return ""
strip_include_prefix = ""
if proto_root.startswith(ctx.bin_dir.path):
proto_root = proto_root[len(ctx.bin_dir.path) + 1:]
elif proto_root.startswith(ctx.genfiles_dir.path):
proto_root = proto_root[len(ctx.genfiles_dir.path) + 1:]
if proto_root.startswith(ctx.label.workspace_root):
proto_root = proto_root[len(ctx.label.workspace_root):]
strip_include_prefix = "//" + proto_root
return strip_include_prefix
def _aspect_impl(target, ctx):
proto_info = target[ProtoInfo]
proto_configuration = ctx.fragments.proto
sources = []
headers = []
textual_hdrs = []
proto_toolchain = toolchains.find_toolchain(ctx, "_aspect_cc_proto_toolchain", _CC_PROTO_TOOLCHAIN)
should_generate_code = proto_common.experimental_should_generate_code(proto_info, proto_toolchain, "cc_proto_library", target.label)
if should_generate_code:
if len(proto_info.direct_sources) != 0:
# Bazel 7 didn't expose cc_proto_library_source_suffixes used by Kythe
# gradually falling back to .pb.cc
if type(proto_configuration.cc_proto_library_source_suffixes) == "builtin_function_or_method":
source_suffixes = [".pb.cc"]
header_suffixes = [".pb.h"]
else:
source_suffixes = proto_configuration.cc_proto_library_source_suffixes
header_suffixes = proto_configuration.cc_proto_library_header_suffixes
sources = _get_output_files(ctx.actions, proto_info, source_suffixes)
headers = _get_output_files(ctx.actions, proto_info, header_suffixes)
header_provider = _ProtoCcHeaderInfo(headers = depset(headers))
else:
# If this proto_library doesn't have sources, it provides the combined headers of all its
# direct dependencies. Thus, if a direct dependency does have sources, the generated files
# are also provided by this library. If a direct dependency does not have sources, it will
# do the same thing, so that effectively this library looks through all source-less
# proto_libraries and provides all generated headers of the proto_libraries with sources
# that it depends on.
transitive_headers = []
for dep in getattr(ctx.rule.attr, "deps", []):
if _ProtoCcHeaderInfo in dep:
textual_hdrs.extend(dep[_ProtoCcHeaderInfo].headers.to_list())
transitive_headers.append(dep[_ProtoCcHeaderInfo].headers)
header_provider = _ProtoCcHeaderInfo(headers = depset(transitive = transitive_headers))
else: # shouldn't generate code
header_provider = _ProtoCcHeaderInfo(headers = depset())
proto_common.compile(
actions = ctx.actions,
proto_info = proto_info,
proto_lang_toolchain_info = proto_toolchain,
generated_files = sources + headers,
experimental_output_files = "multiple",
)
deps = []
if proto_toolchain.runtime:
deps = [proto_toolchain.runtime]
deps.extend(getattr(ctx.rule.attr, "deps", []))
cc_info, libraries, temps = cc_proto_compile_and_link(
ctx = ctx,
deps = deps,
sources = sources,
headers = headers,
textual_hdrs = textual_hdrs,
strip_include_prefix = _get_strip_include_prefix(ctx, proto_info),
)
return [
cc_info,
_ProtoCcFilesInfo(files = depset(sources + headers + libraries)),
OutputGroupInfo(temp_files_INTERNAL_ = temps),
header_provider,
]
cc_proto_aspect = aspect(
implementation = _aspect_impl,
attr_aspects = ["deps"],
fragments = ["cpp", "proto"],
required_providers = [ProtoInfo],
provides = [CcInfo],
attrs = toolchains.if_legacy_toolchain({"_aspect_cc_proto_toolchain": attr.label(
default = configuration_field(fragment = "proto", name = "proto_toolchain_for_cc"),
)}),
toolchains = use_cc_toolchain() + toolchains.use_toolchain(_CC_PROTO_TOOLCHAIN),
)
def _cc_proto_library_impl(ctx):
    """Rule implementation for `cc_proto_library`.

    Validates that exactly one `proto_library` is listed in `deps`, checks that
    the proto is collocated with its toolchain, and forwards the providers that
    `cc_proto_aspect` attached to that dependency.

    Args:
        ctx: (RuleContext) The rule context.

    Returns:
        ([DefaultInfo, CcInfo, OutputGroupInfo]) Providers forwarded from the
        single `proto_library` dependency.
    """
    if len(ctx.attr.deps) != 1:
        fail(
            "'deps' attribute must contain exactly one label " +
            "(we didn't name it 'dep' for consistency). " +
            "The main use-case for multiple deps is to create a rule that contains several " +
            # BUGFIX: trailing space added so the message doesn't read "harderto".
            "other targets. This makes dependency bloat more likely. It also makes it harder " +
            "to remove unused deps.",
            attr = "deps",
        )
    dep = ctx.attr.deps[0]

    proto_toolchain = toolchains.find_toolchain(ctx, "_aspect_cc_proto_toolchain", _CC_PROTO_TOOLCHAIN)
    proto_common.check_collocated(ctx.label, dep[ProtoInfo], proto_toolchain)

    return [DefaultInfo(files = dep[_ProtoCcFilesInfo].files), dep[CcInfo], dep[OutputGroupInfo]]
# Public rule: thin wrapper that applies `cc_proto_aspect` to its single
# `proto_library` dependency and re-exports the aspect's providers.
cc_proto_library = rule(
    implementation = _cc_proto_library_impl,
    doc = """
<p>
<code>cc_proto_library</code> generates C++ code from <code>.proto</code> files.
</p>

<p>
<code>deps</code> must point to <a href="protocol-buffer.html#proto_library"><code>proto_library
</code></a> rules.
</p>

<p>
Example:
</p>

<pre>
<code class="lang-starlark">
cc_library(
    name = "lib",
    deps = [":foo_cc_proto"],
)

cc_proto_library(
    name = "foo_cc_proto",
    deps = [":foo_proto"],
)

proto_library(
    name = "foo_proto",
)
</code>
</pre>
""",
    attrs = {
        "deps": attr.label_list(
            aspects = [cc_proto_aspect],
            allow_rules = ["proto_library"],
            allow_files = False,
            doc = """
The list of <a href="protocol-buffer.html#proto_library"><code>proto_library</code></a>
rules to generate C++ code for.""",
        ),
    # Legacy toolchain attribute; only present when toolchain resolution is off.
    } | toolchains.if_legacy_toolchain({
        "_aspect_cc_proto_toolchain": attr.label(
            default = configuration_field(fragment = "proto", name = "proto_toolchain_for_cc"),
        ),
    }),
    provides = [CcInfo],
    toolchains = toolchains.use_toolchain(_CC_PROTO_TOOLCHAIN),
)

@ -0,0 +1,164 @@
# Copyright (c) 2009-2024, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""The implementation of the `java_proto_library` rule and its aspect."""
load("@rules_java//java/common:java_info.bzl", "JavaInfo")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:java_proto_support.bzl", "JavaProtoAspectInfo", "java_compile_for_protos", "java_info_merge_for_protos")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
_JAVA_PROTO_TOOLCHAIN = Label("//bazel/private:java_toolchain_type")
def _filter_provider(provider, *attrs):
    """Collects `provider` from every target in the given attribute lists.

    Args:
        provider: The provider type to look up on each target.
        *attrs: One or more lists of targets (e.g. `ctx.rule.attr.deps`).

    Returns:
        (list) Provider instances in attribute order, skipping targets that do
        not advertise the provider.
    """
    collected = []
    for attr in attrs:
        for dep in attr:
            if provider in dep:
                collected.append(dep[provider])
    return collected
def _bazel_java_proto_aspect_impl(target, ctx):
    """Generates and compiles Java code for a proto_library.

    The function runs protobuf compiler on the `proto_library` target using
    `proto_lang_toolchain` specified by `--proto_toolchain_for_java` flag.
    This generates a source jar.

    After that the source jar is compiled, respecting `deps` and `exports` of
    the `proto_library`.

    Args:
        target: (Target) The `proto_library` target (any target providing `ProtoInfo`).
        ctx: (RuleContext) The rule context.

    Returns:
        ([JavaInfo, JavaProtoAspectInfo]) A JavaInfo describing compiled Java
        version of `proto_library` and `JavaProtoAspectInfo` with all source and
        runtime jars.
    """
    proto_toolchain_info = toolchains.find_toolchain(ctx, "_aspect_java_proto_toolchain", _JAVA_PROTO_TOOLCHAIN)
    source_jar = None
    # Code generation may be suppressed (e.g. blacklisted/WKT protos already
    # bundled in the runtime); in that case only deps/exports are merged below.
    if proto_common.experimental_should_generate_code(target[ProtoInfo], proto_toolchain_info, "java_proto_library", target.label):
        # Generate source jar using proto compiler.
        source_jar = ctx.actions.declare_file(ctx.label.name + "-speed-src.jar")
        proto_common.compile(
            ctx.actions,
            target[ProtoInfo],
            proto_toolchain_info,
            [source_jar],
            experimental_output_files = "single",
        )

    # Compile Java sources (or just merge if there aren't any)
    deps = _filter_provider(JavaInfo, ctx.rule.attr.deps)
    exports = _filter_provider(JavaInfo, ctx.rule.attr.exports)
    # The runtime is only needed when code was actually generated.
    if source_jar and proto_toolchain_info.runtime:
        deps.append(proto_toolchain_info.runtime[JavaInfo])
    java_info, jars = java_compile_for_protos(
        ctx,
        "-speed.jar",
        source_jar,
        deps,
        exports,
    )

    # Accumulate source/runtime jars from the whole proto_library dep graph.
    transitive_jars = [dep[JavaProtoAspectInfo].jars for dep in ctx.rule.attr.deps if JavaProtoAspectInfo in dep]
    return [
        java_info,
        JavaProtoAspectInfo(jars = depset(jars, transitive = transitive_jars)),
    ]
# Aspect propagated along `deps` and `exports` of `proto_library` targets;
# produces a compiled JavaInfo and the jar-collecting JavaProtoAspectInfo.
bazel_java_proto_aspect = aspect(
    implementation = _bazel_java_proto_aspect_impl,
    # Legacy toolchain attribute; only present when toolchain resolution is off.
    attrs = toolchains.if_legacy_toolchain({
        "_aspect_java_proto_toolchain": attr.label(
            default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java"),
        ),
    }),
    toolchains = ["@bazel_tools//tools/jdk:toolchain_type"] + toolchains.use_toolchain(_JAVA_PROTO_TOOLCHAIN),
    attr_aspects = ["deps", "exports"],
    required_providers = [ProtoInfo],
    provides = [JavaInfo, JavaProtoAspectInfo],
    fragments = ["java"],
)
def bazel_java_proto_library_rule(ctx):
    """Merges results of `java_proto_aspect` in `deps`.

    Args:
        ctx: (RuleContext) The rule context.

    Returns:
        ([JavaInfo, DefaultInfo, OutputGroupInfo])
    """
    toolchain = toolchains.find_toolchain(ctx, "_aspect_java_proto_toolchain", _JAVA_PROTO_TOOLCHAIN)

    # Validate collocation for every dependency and gather its providers.
    java_infos = []
    aspect_jars = []
    for dep in ctx.attr.deps:
        proto_common.check_collocated(ctx.label, dep[ProtoInfo], toolchain)
        java_infos.append(dep[JavaInfo])
        aspect_jars.append(dep[JavaProtoAspectInfo].jars)

    merged_java_info = java_info_merge_for_protos(java_infos, merge_java_outputs = False)

    src_and_runtime_jars = depset(transitive = aspect_jars)
    runtime_jars = depset(transitive = [merged_java_info.transitive_runtime_jars])

    return [
        merged_java_info,
        DefaultInfo(
            files = src_and_runtime_jars,
            runfiles = ctx.runfiles(transitive_files = runtime_jars),
        ),
        OutputGroupInfo(default = depset()),
    ]
# Public rule: merges the JavaInfos produced by `bazel_java_proto_aspect`
# across all `deps`.
java_proto_library = rule(
    implementation = bazel_java_proto_library_rule,
    doc = """
<p>
<code>java_proto_library</code> generates Java code from <code>.proto</code> files.
</p>

<p>
<code>deps</code> must point to <a href="protocol-buffer.html#proto_library"><code>proto_library
</code></a> rules.
</p>

<p>
Example:
</p>

<pre class="code">
<code class="lang-starlark">
java_library(
    name = "lib",
    runtime_deps = [":foo_java_proto"],
)

java_proto_library(
    name = "foo_java_proto",
    deps = [":foo_proto"],
)

proto_library(
    name = "foo_proto",
)
</code>
</pre>
""",
    attrs = {
        "deps": attr.label_list(
            providers = [ProtoInfo],
            aspects = [bazel_java_proto_aspect],
            doc = """
The list of <a href="protocol-buffer.html#proto_library"><code>proto_library</code></a>
rules to generate Java code for.
""",
        ),
        # buildifier: disable=attr-license (calling attr.license())
        "licenses": attr.license() if hasattr(attr, "license") else attr.string_list(),
    # Legacy toolchain attribute; only present when toolchain resolution is off.
    } | toolchains.if_legacy_toolchain({
        "_aspect_java_proto_toolchain": attr.label(
            default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java"),
        ),
    }),  # buildifier: disable=attr-licenses (attribute called licenses)
    provides = [JavaInfo],
    toolchains = toolchains.use_toolchain(_JAVA_PROTO_TOOLCHAIN),
)

@ -166,8 +166,7 @@ def _write_descriptor_set(ctx, proto_info, deps, exports, descriptor_set):
if ctx.attr._experimental_proto_descriptor_sets_include_source_info[BuildSettingInfo].value:
args.add("--include_source_info")
if hasattr(ctx.attr, "_retain_options") and ctx.attr._retain_options:
args.add("--retain_options")
args.add("--retain_options")
strict_deps = ctx.attr._strict_proto_deps[BuildSettingInfo].value
if strict_deps:

@ -0,0 +1,6 @@
"""Exposes cc_proto_aspect to rules_rust"""
load("//bazel/private:bazel_cc_proto_library.bzl", _cc_proto_aspect = "cc_proto_aspect") # buildifier: disable=bzl-visibility
load("//bazel/private:native.bzl", _native_cc_proto_aspect = "native_cc_proto_aspect") # buildifier: disable=bzl-visibility
# Prefer the Starlark aspect; fall back to the native one on Bazel versions
# that still ship a builtin cc_proto_library.
cc_proto_aspect = _cc_proto_aspect if not hasattr(native, "cc_proto_library") else _native_cc_proto_aspect

@ -0,0 +1,143 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Supporting C++ compilation of generated code"""
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cc_toolchain")
load("@rules_cc//cc/common:cc_common.bzl", "cc_common")
load("@rules_cc//cc/common:cc_info.bzl", "CcInfo")
def get_feature_configuration(ctx, has_sources, extra_requested_features = []):
    """Returns C++ feature configuration for compiling and linking generated C++ files.

    Args:
        ctx: (RuleCtx) rule context.
        has_sources: (bool) Has the proto_library sources.
        extra_requested_features: (list[str]) Additionally requested features.

    Returns:
        (FeatureConfiguration) C++ feature configuration
    """
    # TODO: Remove LAYERING_CHECK once we have verified that there are direct
    # dependencies for all generated #includes.
    enabled = ctx.features + extra_requested_features
    disabled = ctx.disabled_features + ["parse_headers", "layering_check"]

    # Header modules only make sense when the library actually compiles sources.
    if has_sources:
        enabled = enabled + ["header_modules"]
    else:
        disabled = disabled + ["header_modules"]

    return cc_common.configure_features(
        ctx = ctx,
        cc_toolchain = find_cc_toolchain(ctx),
        requested_features = enabled,
        unsupported_features = disabled,
    )
def _get_libraries_from_linking_outputs(linking_outputs, feature_configuration):
    """Collects the library artifacts produced by a linking step.

    Args:
        linking_outputs: (CcLinkingOutputs) Result of
            `cc_common.create_linking_context_from_compilation_outputs`.
        feature_configuration: (FeatureConfiguration) Used to detect Windows,
            where dynamic libraries are not built by default.

    Returns:
        (list[File]) Static, pic-static and (non-Windows) dynamic/interface
        library files, skipping any that were not produced.
    """
    library = linking_outputs.library_to_link
    if not library:
        return []

    files = [
        artifact
        for artifact in [library.static_library, library.pic_static_library]
        if artifact
    ]

    # On Windows, dynamic library is not built by default, so don't add them to files_to_build.
    on_windows = cc_common.is_enabled(
        feature_configuration = feature_configuration,
        feature_name = "targets_windows",
    )
    if not on_windows:
        # Prefer the resolved symlink form of each artifact when present.
        dynamic = library.resolved_symlink_dynamic_library or library.dynamic_library
        interface = library.resolved_symlink_interface_library or library.interface_library
        files.extend([artifact for artifact in [dynamic, interface] if artifact])

    return files
def cc_proto_compile_and_link(ctx, deps, sources, headers, disallow_dynamic_library = None, feature_configuration = None, alwayslink = False, **kwargs):
    """Creates C++ compilation and linking actions for C++ proto sources.

    Args:
        ctx: rule context
        deps: (list[CcInfo]) List of libraries to be added as dependencies to compilation and linking
            actions.
        sources: (list[File]) List of C++ sources files.
        headers: (list[File]) List of C++ headers files.
        disallow_dynamic_library: (bool) Are dynamic libraries disallowed.
        feature_configuration: (FeatureConfiguration) feature configuration to use.
        alwayslink: (bool) Should the library be always linked.
        **kwargs: Additional arguments passed to the compilation. See cc_common.compile.

    Returns:
        (CcInfo, list[File], list[File])
        - CcInfo provider with compilation context and linking context
        - A list of linked libraries related to this proto
        - A list of temporary files generated during compilation
    """
    cc_toolchain = find_cc_toolchain(ctx)
    feature_configuration = feature_configuration or get_feature_configuration(ctx, bool(sources))
    if disallow_dynamic_library == None:
        # TODO: Configure output artifact with action_config
        # once proto compile action is configurable from the crosstool.
        disallow_dynamic_library = not cc_common.is_enabled(
            feature_name = "supports_dynamic_linker",
            feature_configuration = feature_configuration,
        )

    (compilation_context, compilation_outputs) = cc_common.compile(
        actions = ctx.actions,
        feature_configuration = feature_configuration,
        cc_toolchain = cc_toolchain,
        srcs = sources,
        public_hdrs = headers,
        compilation_contexts = [dep[CcInfo].compilation_context for dep in deps if CcInfo in dep],
        name = ctx.label.name,
        # Don't instrument the generated C++ files even when --collect_code_coverage is set.
        # If we actually start generating coverage instrumentation for .proto files based on coverage
        # data from the generated C++ files, this will have to be removed. Currently, the work done
        # to instrument those files and execute the instrumentation is all for nothing, and it can
        # be quite a bit of extra computation even when that's not made worse by performance bugs,
        # as in b/64963386.
        # code_coverage_enabled = False (cc_common.compile disables code_coverage by default)
        **kwargs
    )

    # Only a library with sources produces linking outputs; a source-less proto
    # just forwards the merged linking contexts of its dependencies.
    if sources:
        linking_context, linking_outputs = cc_common.create_linking_context_from_compilation_outputs(
            actions = ctx.actions,
            feature_configuration = feature_configuration,
            cc_toolchain = cc_toolchain,
            compilation_outputs = compilation_outputs,
            linking_contexts = [dep[CcInfo].linking_context for dep in deps if CcInfo in dep],
            name = ctx.label.name,
            disallow_dynamic_library = disallow_dynamic_library,
            alwayslink = alwayslink,
        )
        libraries = _get_libraries_from_linking_outputs(linking_outputs, feature_configuration)
    else:
        linking_context = cc_common.merge_linking_contexts(
            linking_contexts = [dep[CcInfo].linking_context for dep in deps if CcInfo in dep],
        )
        libraries = []

    # Debug context and temp files are only exposed to protobuf on an allowlist
    # basis by newer Bazel versions.
    debug_context = None
    temps = []
    if bazel_features.cc.protobuf_on_allowlist:
        debug_context = cc_common.merge_debug_context(
            [cc_common.create_debug_context(compilation_outputs)] +
            [dep[CcInfo].debug_context() for dep in deps if CcInfo in dep],
        )
        temps = compilation_outputs.temps()

    return CcInfo(
        compilation_context = compilation_context,
        linking_context = linking_context,
        debug_context = debug_context,
    ), libraries, temps

@ -0,0 +1,178 @@
# Copyright (c) 2009-2024, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""A Starlark implementation of the java_lite_proto_library rule."""
load("@rules_java//java/common:java_common.bzl", "java_common")
load("@rules_java//java/common:java_info.bzl", "JavaInfo")
load("@rules_java//java/common:proguard_spec_info.bzl", "ProguardSpecInfo")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:java_proto_support.bzl", "JavaProtoAspectInfo", "java_compile_for_protos", "java_info_merge_for_protos")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
_PROTO_TOOLCHAIN_ATTR = "_aspect_proto_toolchain_for_javalite"
_JAVA_LITE_PROTO_TOOLCHAIN = Label("//bazel/private:javalite_toolchain_type")
def _aspect_impl(target, ctx):
    """Generates and compiles Java code for a proto_library dependency graph.

    Args:
        target: (Target) The `proto_library` target.
        ctx: (RuleContext) The rule context.

    Returns:
        ([JavaInfo, JavaProtoAspectInfo]) A JavaInfo describing compiled Java
        version of `proto_library` and `JavaProtoAspectInfo` with all source and
        runtime jars.
    """
    deps = [dep[JavaInfo] for dep in ctx.rule.attr.deps]
    exports = [exp[JavaInfo] for exp in ctx.rule.attr.exports]
    proto_toolchain_info = toolchains.find_toolchain(
        ctx,
        # Use the shared constant so the attribute name stays in sync with the
        # aspect's `attrs` declaration below.
        _PROTO_TOOLCHAIN_ATTR,
        _JAVA_LITE_PROTO_TOOLCHAIN,
    )
    source_jar = None
    # Code generation may be suppressed for protos already bundled in the
    # runtime; then only deps/exports are merged by java_compile_for_protos.
    if proto_common.experimental_should_generate_code(target[ProtoInfo], proto_toolchain_info, "java_lite_proto_library", target.label):
        source_jar = ctx.actions.declare_file(ctx.label.name + "-lite-src.jar")
        proto_common.compile(
            ctx.actions,
            target[ProtoInfo],
            proto_toolchain_info,
            [source_jar],
            experimental_output_files = "single",
        )
    runtime = proto_toolchain_info.runtime
    if runtime:
        deps.append(runtime[JavaInfo])
    java_info, jars = java_compile_for_protos(
        ctx,
        "-lite.jar",
        source_jar,
        deps,
        exports,
        injecting_rule_kind = "java_lite_proto_library",
    )

    # Accumulate source/runtime jars from the whole proto_library dep graph.
    transitive_jars = [dep[JavaProtoAspectInfo].jars for dep in ctx.rule.attr.deps]
    return [
        java_info,
        JavaProtoAspectInfo(jars = depset(jars, transitive = transitive_jars)),
    ]
# Aspect propagated along `deps` and `exports`; compiles JavaLite code for
# every reachable `proto_library`.
_java_lite_proto_aspect = aspect(
    implementation = _aspect_impl,
    attr_aspects = ["deps", "exports"],
    # Legacy toolchain attribute; only present when toolchain resolution is off.
    attrs = toolchains.if_legacy_toolchain({
        _PROTO_TOOLCHAIN_ATTR: attr.label(
            default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java_lite"),
        ),
    }),
    fragments = ["java"],
    required_providers = [ProtoInfo],
    provides = [JavaInfo, JavaProtoAspectInfo],
    toolchains = ["@bazel_tools//tools/jdk:toolchain_type"] +
                 toolchains.use_toolchain(_JAVA_LITE_PROTO_TOOLCHAIN),
)
def _rule_impl(ctx):
    """Merges results of `java_proto_aspect` in `deps`.

    `java_lite_proto_library` is identical to `java_proto_library` in every respect, except it
    builds JavaLite protos.
    Implementation of this rule is built on the implementation of `java_proto_library`.

    Args:
        ctx: (RuleContext) The rule context.

    Returns:
        ([JavaInfo, DefaultInfo, OutputGroupInfo, ProguardSpecInfo])
    """
    proto_toolchain_info = toolchains.find_toolchain(
        ctx,
        # Use the shared constant so the attribute name stays in sync with the
        # rule's `attrs` declaration below.
        _PROTO_TOOLCHAIN_ATTR,
        _JAVA_LITE_PROTO_TOOLCHAIN,
    )
    for dep in ctx.attr.deps:
        proto_common.check_collocated(ctx.label, dep[ProtoInfo], proto_toolchain_info)

    # Proguard specs come from the runtime when one is configured; otherwise an
    # empty spec is provided so consumers can rely on the provider's presence.
    runtime = proto_toolchain_info.runtime
    if runtime:
        proguard_provider_specs = runtime[ProguardSpecInfo]
    else:
        proguard_provider_specs = ProguardSpecInfo(depset())

    java_info = java_info_merge_for_protos([dep[JavaInfo] for dep in ctx.attr.deps], merge_java_outputs = False)

    transitive_src_and_runtime_jars = depset(transitive = [dep[JavaProtoAspectInfo].jars for dep in ctx.attr.deps])
    transitive_runtime_jars = depset(transitive = [java_info.transitive_runtime_jars])

    # Mark the merged JavaInfo as Android-compatible where the API exists.
    if hasattr(java_common, "add_constraints"):
        java_info = java_common.add_constraints(java_info, constraints = ["android"])

    return [
        java_info,
        DefaultInfo(
            files = transitive_src_and_runtime_jars,
            runfiles = ctx.runfiles(transitive_files = transitive_runtime_jars),
        ),
        OutputGroupInfo(default = depset()),
        proguard_provider_specs,
    ]
# Public rule: merges the JavaInfos produced by `_java_lite_proto_aspect`
# across all `deps`, additionally exposing Proguard specs.
java_lite_proto_library = rule(
    implementation = _rule_impl,
    doc = """
<p>
<code>java_lite_proto_library</code> generates Java code from <code>.proto</code> files.
</p>

<p>
<code>deps</code> must point to <a href="protocol-buffer.html#proto_library"><code>proto_library
</code></a> rules.
</p>

<p>
Example:
</p>

<pre class="code">
<code class="lang-starlark">
java_library(
    name = "lib",
    runtime_deps = [":foo"],
)

java_lite_proto_library(
    name = "foo",
    deps = [":bar"],
)

proto_library(
    name = "bar",
)
</code>
</pre>
""",
    attrs = {
        "deps": attr.label_list(providers = [ProtoInfo], aspects = [_java_lite_proto_aspect], doc = """
The list of <a href="protocol-buffer.html#proto_library"><code>proto_library</code></a>
rules to generate Java code for.
"""),
    # Legacy toolchain attribute; only present when toolchain resolution is off.
    } | toolchains.if_legacy_toolchain({
        _PROTO_TOOLCHAIN_ATTR: attr.label(
            default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java_lite"),
        ),
    }),
    provides = [JavaInfo],
    toolchains = toolchains.use_toolchain(_JAVA_LITE_PROTO_TOOLCHAIN),
)

@ -0,0 +1,62 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Support for compiling protoc generated Java code."""
load("@rules_java//java/private:proto_support.bzl", "compile", "merge") # buildifier: disable=bzl-visibility
# The provider is used to collect source and runtime jars in the `proto_library` dependency graph.
JavaProtoAspectInfo = provider("JavaProtoAspectInfo", fields = ["jars"])

# Re-export rules_java's JavaInfo merge helper under a proto-specific name.
java_info_merge_for_protos = merge
def java_compile_for_protos(ctx, output_jar_suffix, source_jar = None, deps = [], exports = [], injecting_rule_kind = "java_proto_library"):
    """Compiles Java source jar returned by proto compiler.

    Use this call for java_xxx_proto_library. It uses java_common.compile with
    some checks disabled (via javacopts) and jspecify disabled, so that the
    generated code passes.

    It also takes care that input source jar is not repackaged with a different
    name.

    When `source_jar` is `None`, the function only merges `deps` and `exports`.

    Args:
        ctx: (RuleContext) Used to call `java_common.compile`
        output_jar_suffix: (str) How to name the output jar. For example: `-speed.jar`.
        source_jar: (File) Input source jar (may be `None`).
        deps: (list[JavaInfo]) `deps` of the `proto_library`.
        exports: (list[JavaInfo]) `exports` of the `proto_library`.
        injecting_rule_kind: (str) Rule kind requesting the compilation.
            It's embedded into META-INF of the produced runtime jar, for debugging.

    Returns:
        ((JavaInfo, list[File])) JavaInfo of this target and list containing source
        and runtime jar, when they are created.
    """
    if source_jar == None:
        # If there are no proto sources just pass along the compilation dependencies.
        merged = merge(deps + exports, merge_java_outputs = False, merge_source_jars = False)
        return merged, []

    # Name the output jar after the label, keeping any "/"-prefixed path part.
    prefix, slash, basename = ctx.label.name.rpartition("/")
    output_jar = ctx.actions.declare_file(prefix + slash + "lib" + basename + output_jar_suffix)
    toolchain = ctx.toolchains["@bazel_tools//tools/jdk:toolchain_type"].java
    compiled = compile(
        ctx = ctx,
        output = output_jar,
        java_toolchain = toolchain,
        source_jars = [source_jar],
        deps = deps,
        exports = exports,
        # Reuse the input jar as the output source jar so the sources are not
        # repackaged under a different name.
        output_source_jar = source_jar,
        injecting_rule_kind = injecting_rule_kind,
        javac_opts = toolchain._compatible_javacopts.get("proto", depset()),
        enable_jspecify = False,
        include_compilation_info = False,
    )
    return compiled, [source_jar, output_jar]

@ -1,5 +1,5 @@
"""Renames toplevel symbols so they can be exported in Starlark under the same name"""
# Alias for the builtin ProtoInfo provider.
NativeProtoInfo = ProtoInfo

# Alias for the builtin proto_common module.
native_proto_common = proto_common_do_not_use

# Alias for the builtin C++ proto aspect.
native_cc_proto_aspect = cc_proto_aspect

@ -8,11 +8,15 @@
"""Vendored version of bazel_features for protobuf, to keep a one-step setup"""
_PROTO_BAZEL_FEATURES = """bazel_features = struct(
cc = struct(
protobuf_on_allowlist = {protobuf_on_allowlist},
),
proto = struct(
starlark_proto_info = {starlark_proto_info},
),
globals = struct(
PackageSpecificationInfo = {PackageSpecificationInfo},
ProtoInfo = getattr(getattr(native, 'legacy_globals', None), 'ProtoInfo', {ProtoInfo})
),
)
"""
@ -29,6 +33,9 @@ def _proto_bazel_features_impl(rctx):
starlark_proto_info = major_version_int >= 7
PackageSpecificationInfo = major_version_int > 6 or (major_version_int == 6 and minor_version_int >= 4)
protobuf_on_allowlist = major_version_int > 7
ProtoInfo = "ProtoInfo" if major_version_int < 8 else "None"
rctx.file("BUILD.bazel", """
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
bzl_library(
@ -41,6 +48,8 @@ exports_files(["features.bzl"])
rctx.file("features.bzl", _PROTO_BAZEL_FEATURES.format(
starlark_proto_info = repr(starlark_proto_info),
PackageSpecificationInfo = "PackageSpecificationInfo" if PackageSpecificationInfo else "None",
protobuf_on_allowlist = repr(protobuf_on_allowlist),
ProtoInfo = ProtoInfo,
))
proto_bazel_features = repository_rule(

@ -0,0 +1,186 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""
Definition of ProtoInfo provider.
"""
_warning = """ Don't use this field. It's intended for internal use and will be changed or removed
without warning."""
def _uniq(iterable):
    """Returns the elements of `iterable` with duplicates removed, preserving
    the order of first occurrence."""
    seen = {}
    result = []
    for element in iterable:
        if element not in seen:
            seen[element] = None
            result.append(element)
    return result
def _join(*path):
    """Joins the non-empty path segments with "/"."""
    segments = [p for p in path if p != ""]
    return "/".join(segments)

def _empty_to_dot(path):
    """Maps an empty path to ".", leaving any other path unchanged."""
    if path:
        return path
    return "."

def _from_root(root, repo, relpath):
    """Constructs an exec path from root to relpath"""
    if root:
        # `relpath` is a directory with a generated file or an output directory:
        # - when in main repo: `{root}/package/path`
        # - when in an external repository: `{root}/external/repo/package/path`
        # - with sibling layout: `{root}/package/path`
        middle = "" if repo.startswith("../") else repo
        return _join(root, middle, relpath)

    # `relpath` is a directory with an input source file, the exec path is one of:
    # - when in main repo: `package/path`
    # - when in an external repository: `external/repo/package/path`
    # - with sibling layout: `../repo/package/path`
    return _join(repo, relpath)
def _create_proto_info(*, srcs, deps, descriptor_set, proto_path = "", workspace_root = "", bin_dir = None, allow_exports = None):
    """Constructs ProtoInfo.

    Args:
        srcs: ([File]) List of .proto files (possibly under _virtual path)
        deps: ([ProtoInfo]) List of dependencies
        descriptor_set: (File) Descriptor set for this Proto
        proto_path: (str) Path that should be stripped from files in srcs. When
            stripping is needed, the files should be symlinked into `_virtual_imports/target_name`
            directory. Only such paths are accepted.
        workspace_root: (str) Set to ctx.workspace_root if this is not the main repository.
        bin_dir: (str) Set to ctx.bin_dir if _virtual_imports are used.
        allow_exports: (Target) The packages where this proto_library can be exported.

    Returns:
        (ProtoInfo)
    """

    # Validate parameters: every src must be a File sharing this target's
    # workspace_root and living under `workspace_root/proto_path`.
    src_prefix = _join(workspace_root.replace("external/", "../"), proto_path)
    for src in srcs:
        if type(src) != "File":
            fail("srcs parameter expects a list of Files")
        if src.owner.workspace_root != workspace_root:
            fail("srcs parameter expects all files to have the same workspace_root: ", workspace_root)
        if not src.short_path.startswith(src_prefix):
            fail("srcs parameter expects all files start with %s" % src_prefix)
    if type(descriptor_set) != "File":
        fail("descriptor_set parameter expected to be a File")
    # A non-empty proto_path is only legal in the `.../_virtual_imports/<name>`
    # form, and then the generated-file root (bin_dir) must be known.
    if proto_path:
        if "_virtual_imports/" not in proto_path:
            fail("proto_path needs to contain '_virtual_imports' directory")
        if proto_path.split("/")[-2] != "_virtual_imports":
            fail("proto_path needs to be formed like '_virtual_imports/target_name'")
        if not bin_dir:
            fail("bin_dir parameter should be set when _virtual_imports are used")

    direct_proto_sources = srcs
    # "preorder" keeps this target's sources ahead of its dependencies'.
    transitive_proto_sources = depset(
        direct = direct_proto_sources,
        transitive = [dep._transitive_proto_sources for dep in deps],
        order = "preorder",
    )
    transitive_sources = depset(
        direct = srcs,
        transitive = [dep.transitive_sources for dep in deps],
        order = "preorder",
    )

    # There can be more than 1 direct proto_path, for example when there's
    # a generated and non-generated .proto file in srcs
    root_paths = _uniq([src.root.path for src in srcs])
    transitive_proto_path = depset(
        direct = [_empty_to_dot(_from_root(root, workspace_root, proto_path)) for root in root_paths],
        transitive = [dep.transitive_proto_path for dep in deps],
    )

    # A source-less ("proxy") library forwards its deps' check_deps_sources.
    if srcs:
        check_deps_sources = depset(direct = srcs)
    else:
        check_deps_sources = depset(transitive = [dep.check_deps_sources for dep in deps])

    transitive_descriptor_sets = depset(
        direct = [descriptor_set],
        transitive = [dep.transitive_descriptor_sets for dep in deps],
    )

    # Layering checks.
    if srcs:
        exported_sources = depset(direct = direct_proto_sources)
    else:
        exported_sources = depset(transitive = [dep._exported_sources for dep in deps])

    if "_virtual_imports/" in proto_path:
        #TODO: remove bin_dir from proto_source_root (when users assuming it's there are migrated)
        proto_source_root = _empty_to_dot(_from_root(bin_dir, workspace_root, proto_path))
    elif workspace_root.startswith("../"):
        # Sibling repository layout: workspace_root is already part of proto_path.
        proto_source_root = proto_path
    else:
        proto_source_root = _empty_to_dot(_join(workspace_root, proto_path))

    proto_info = dict(
        direct_sources = srcs,
        transitive_sources = transitive_sources,
        direct_descriptor_set = descriptor_set,
        transitive_descriptor_sets = transitive_descriptor_sets,
        proto_source_root = proto_source_root,
        transitive_proto_path = transitive_proto_path,
        check_deps_sources = check_deps_sources,
        transitive_imports = transitive_sources,
        _direct_proto_sources = direct_proto_sources,
        _transitive_proto_sources = transitive_proto_sources,
        _exported_sources = exported_sources,
    )
    # `allow_exports` is an optional field; only set it when provided.
    if allow_exports:
        proto_info["allow_exports"] = allow_exports
    return proto_info
# Provider definition; `_create_proto_info` is the validating constructor
# (init), and the raw constructor returned as the second element is discarded.
ProtoInfo, _ = provider(
    doc = "Encapsulates information provided by a `proto_library.`",
    fields = {
        "direct_sources": "(list[File]) The `.proto` source files from the `srcs` attribute.",
        "transitive_sources": """(depset[File]) The `.proto` source files from this rule and all
            its dependent protocol buffer rules.""",
        "direct_descriptor_set": """(File) The descriptor set of the direct sources. If no srcs,
            contains an empty file.""",
        "transitive_descriptor_sets": """(depset[File]) A set of descriptor set files of all
            dependent `proto_library` rules, and this one's. This is not the same as passing
            --include_imports to proto-compiler. Will be empty if no dependencies.""",
        "proto_source_root": """(str) The directory relative to which the `.proto` files defined in
            the `proto_library` are defined. For example, if this is `a/b` and the rule has the
            file `a/b/c/d.proto` as a source, that source file would be imported as
            `import c/d.proto`

            In principle, the `proto_source_root` directory itself should always
            be relative to the output directory (`ctx.bin_dir`).

            This is at the moment not true for `proto_libraries` using (additional and/or strip)
            import prefixes. `proto_source_root` is in this case prefixed with the output
            directory. For example, the value is similar to
            `bazel-out/k8-fastbuild/bin/a/_virtual_includes/b` for an input file in
            `a/_virtual_includes/b/c.proto` that should be imported as `c.proto`.

            When using the value please account for both cases in a general way.
            That is assume the value is either prefixed with the output directory or not.
            This will make it possible to fix `proto_library` in the future.
            """,
        "transitive_proto_path": """(depset(str) A set of `proto_source_root`s collected from the
            transitive closure of this rule.""",
        "check_deps_sources": """(depset[File]) The `.proto` sources from the 'srcs' attribute.
            If the library is a proxy library that has no sources, it contains the
            `check_deps_sources` from this library's direct deps.""",
        "allow_exports": """(Target) The packages where this proto_library can be exported.""",

        # Deprecated fields:
        "transitive_imports": """(depset[File]) Deprecated: use `transitive_sources` instead.""",

        # Internal fields:
        "_direct_proto_sources": """(list[File]) The `ProtoSourceInfo`s from the `srcs`
            attribute.""" + _warning,
        "_transitive_proto_sources": """(depset[File]) The `ProtoSourceInfo`s from this
            rule and all its dependent protocol buffer rules.""" + _warning,
        "_exported_sources": """(depset[File]) A set of `ProtoSourceInfo`s that may be
            imported by another `proto_library` depending on this one.""" + _warning,
    },
    init = _create_proto_info,
)

@ -7,6 +7,7 @@
#
"""A Starlark implementation of the proto_toolchain rule."""
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
@ -26,7 +27,7 @@ def _impl(ctx):
protoc_opts = ctx.fragments.proto.experimental_protoc_opts,
progress_message = ctx.attr.progress_message,
mnemonic = ctx.attr.mnemonic,
toolchain_type = toolchains.PROTO_TOOLCHAIN,
**(dict(toolchain_type = toolchains.PROTO_TOOLCHAIN) if proto_common.INCOMPATIBLE_PASS_TOOLCHAIN_TYPE else {})
),
),
]

@ -45,5 +45,5 @@ toolchains = struct(
find_toolchain = _find_toolchain,
if_legacy_toolchain = _if_legacy_toolchain,
INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = _incompatible_toolchain_resolution,
PROTO_TOOLCHAIN = "//bazel/private:proto_toolchain_type",
PROTO_TOOLCHAIN = Label("//bazel/private:proto_toolchain_type"),
)

@ -0,0 +1,85 @@
load("//bazel/toolchains:proto_lang_toolchain.bzl", "proto_lang_toolchain")
load("//bazel/toolchains:proto_toolchain.bzl", "proto_toolchain")
# Keep this file as small as possible and free of any unnecessary loads
# It is loaded by every use of protobuf repository, and loads here can force
# fetching of additional external repositories
# It's also intentionally using toolchain instead of proto_lang_toolchain,
# because the former does not resolve dependencies until toolchain resolution
# needs them
proto_toolchain(
name = "protoc_sources",
exec_compatible_with = [],
proto_compiler = "//:protoc",
)
toolchain(
name = "cc_source_toolchain",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//:cc_toolchain",
toolchain_type = "//bazel/private:cc_toolchain_type",
)
toolchain(
name = "java_source_toolchain",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//java/core:toolchain",
toolchain_type = "//bazel/private:java_toolchain_type",
)
toolchain(
name = "javalite_source_toolchain",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//java/lite:toolchain",
toolchain_type = "//bazel/private:javalite_toolchain_type",
)
toolchain(
name = "python_source_toolchain",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//python:python_toolchain",
toolchain_type = "//bazel/private:python_toolchain_type",
)
# Following toolchain registrations are for builtin Bazel 7 rules
# which defined them in other repositories.
toolchain(
name = "cc_source_toolchain_bazel7",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//:cc_toolchain",
toolchain_type = "@rules_cc//cc/proto:toolchain_type",
)
toolchain(
name = "java_source_toolchain_bazel7",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//java/core:toolchain",
toolchain_type = "@rules_java//java/proto:toolchain_type",
)
toolchain(
name = "javalite_source_toolchain_bazel7",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//java/lite:toolchain",
toolchain_type = "@rules_java//java/proto:lite_toolchain_type",
)
filegroup(
name = "for_bazel_tests",
testonly = True,
srcs = [
"BUILD.bazel",
],
visibility = [
"//bazel/private:__pkg__",
],
)

@ -13,7 +13,6 @@ GeneratedSrcsInfo = provider(
fields = {
"srcs": "list of srcs",
"hdrs": "list of hdrs",
"thunks": "Experimental, do not use. List of srcs defining C API. Incompatible with hdrs.",
},
)
@ -47,7 +46,6 @@ def _merge_generated_srcs(srcs):
return GeneratedSrcsInfo(
srcs = _concat_lists([s.srcs for s in srcs]),
hdrs = _concat_lists([s.hdrs for s in srcs]),
thunks = _concat_lists([s.thunks for s in srcs]),
)
def _get_implicit_weak_field_sources(ctx, proto_info):
@ -96,11 +94,10 @@ def _get_feature_configuration(ctx, cc_toolchain, proto_info):
def _generate_srcs_list(ctx, generator, proto_info):
if len(proto_info.direct_sources) == 0:
return GeneratedSrcsInfo(srcs = [], hdrs = [], thunks = [], includes = [])
return GeneratedSrcsInfo(srcs = [], hdrs = [], includes = [])
ext = "." + generator
srcs = []
thunks = []
hdrs = proto_common.declare_generated_files(
ctx.actions,
extension = ext + ".h",
@ -115,27 +112,10 @@ def _generate_srcs_list(ctx, generator, proto_info):
extension = ext + ".c",
proto_info = proto_info,
)
if generator == "upb":
thunks = proto_common.declare_generated_files(
ctx.actions,
extension = ext + ".thunks.c",
proto_info = proto_info,
)
ctx.actions.run_shell(
inputs = hdrs,
outputs = thunks,
command = " && ".join([
"sed 's/UPB_INLINE //' {} > {}".format(hdr.path, thunk.path)
for (hdr, thunk) in zip(hdrs, thunks)
]),
progress_message = "Generating thunks for upb protos API for: " + ctx.label.name,
mnemonic = "GenUpbProtosThunks",
)
return GeneratedSrcsInfo(
srcs = srcs,
hdrs = hdrs,
thunks = thunks,
)
def _generate_upb_protos(ctx, generator, proto_info, feature_configuration):
@ -162,9 +142,7 @@ def _generate_upb_protos(ctx, generator, proto_info, feature_configuration):
return srcs
def _generate_name(ctx, generator, thunks = False):
if thunks:
return ctx.rule.attr.name + "." + generator + ".thunks"
def _generate_name(ctx, generator):
return ctx.rule.attr.name + "." + generator
def _get_dep_cc_infos(target, ctx, generator, cc_provider, dep_cc_provider):
@ -195,24 +173,9 @@ def _compile_upb_protos(ctx, files, generator, dep_ccinfos, cc_provider, proto_i
dep_ccinfos = dep_ccinfos,
)
if files.thunks:
cc_info_with_thunks = cc_library_func(
ctx = ctx,
name = _generate_name(ctx, generator, files.thunks),
hdrs = [],
srcs = files.thunks,
includes = [output_dir(ctx, proto_info)],
copts = ctx.attr._copts[UpbProtoLibraryCoptsInfo].copts,
dep_ccinfos = dep_ccinfos + [cc_info],
)
return cc_provider(
cc_info = cc_info,
cc_info_with_thunks = cc_info_with_thunks,
)
else:
return cc_provider(
cc_info = cc_info,
)
return cc_provider(
cc_info = cc_info,
)
_GENERATORS = ["upb", "upbdefs", "upb_minitable"]
@ -223,7 +186,6 @@ def _get_hint_providers(ctx, generator):
possible_owners = []
for generator in _GENERATORS:
possible_owners.append(ctx.label.relative(_generate_name(ctx, generator)))
possible_owners.append(ctx.label.relative(_generate_name(ctx, generator, thunks = True)))
if hasattr(cc_common, "CcSharedLibraryHintInfo"):
return [cc_common.CcSharedLibraryHintInfo(owners = possible_owners)]
@ -252,7 +214,7 @@ def upb_proto_aspect_impl(
`cc_info` field. The aspect will ensure that each compilation action can compile and link
against this provider's cc_info for all proto_library() deps.
dep_cc_provider: For aspects that depend on other aspects, this is the provider of the aspect
that we depend on. The aspect wil be able to include the header files from this provider.
that we depend on. The aspect will be able to include the header files from this provider.
file_provider: A provider that this aspect will attach to the target to expose the source
files generated by this aspect. These files are primarily useful for returning in
DefaultInfo(), so users who build the upb_*proto_library() rule directly can view the
@ -266,7 +228,7 @@ def upb_proto_aspect_impl(
if not getattr(ctx.rule.attr, "srcs", []):
# This target doesn't declare any sources, reexport all its deps instead.
# This is known as an "alias library":
# https://bazel.build/reference/be/protocol-buffer#proto_library.srcs
# https://bazel.build/versions/6.4.0/reference/be/protocol-buffer#proto_library.srcs
files = _merge_generated_srcs([dep[file_provider].srcs for dep in ctx.rule.attr.deps])
wrapped_cc_info = cc_provider(
cc_info = cc_common.merge_cc_infos(direct_cc_infos = dep_ccinfos),

@ -5,7 +5,7 @@ load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain", "use_c
def upb_use_cpp_toolchain():
return use_cpp_toolchain()
def cc_library_func(ctx, name, hdrs, srcs, copts, includes, dep_ccinfos):
def cc_library_func(ctx, name, hdrs, srcs, copts, dep_ccinfos, includes = []):
"""Like cc_library(), but callable from rules.
Args:
@ -14,8 +14,8 @@ def cc_library_func(ctx, name, hdrs, srcs, copts, includes, dep_ccinfos):
hdrs: Public headers that can be #included from other rules.
srcs: C/C++ source files.
copts: Additional options for cc compilation.
includes: Additional include paths.
dep_ccinfos: CcInfo providers of dependencies we should build/link against.
includes: Additional include paths.
Returns:
CcInfo provider for this compilation.

@ -5,7 +5,7 @@ load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
PY_PROTO_TOOLCHAIN = "@rules_python//python/proto:toolchain_type"
_PY_PROTO_TOOLCHAIN = Label("//bazel/private:python_toolchain_type")
_PyProtoInfo = provider(
doc = "Encapsulates information needed by the Python proto rules.",
@ -50,9 +50,9 @@ def _py_proto_aspect_impl(target, ctx):
))
if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
toolchain = ctx.toolchains[PY_PROTO_TOOLCHAIN]
toolchain = ctx.toolchains[_PY_PROTO_TOOLCHAIN]
if not toolchain:
fail("No toolchains registered for '%s'." % PY_PROTO_TOOLCHAIN)
fail("No toolchains registered for '%s'." % _PY_PROTO_TOOLCHAIN)
proto_lang_toolchain_info = toolchain.proto
else:
proto_lang_toolchain_info = getattr(ctx.attr, "_aspect_proto_toolchain")[proto_common.ProtoLangToolchainInfo]
@ -126,7 +126,7 @@ _py_proto_aspect = aspect(
attr_aspects = ["deps"],
required_providers = [ProtoInfo],
provides = [_PyProtoInfo],
toolchains = toolchains.use_toolchain(PY_PROTO_TOOLCHAIN),
toolchains = toolchains.use_toolchain(_PY_PROTO_TOOLCHAIN),
)
def _py_proto_library_rule(ctx):

@ -1,17 +0,0 @@
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Temporary alias to repository rule for using Python 3.x headers from the system."""
load(
"//python/dist:system_python.bzl",
_system_python = "system_python",
)
# TODO: Temporary alias. This is deprecated and to be removed in a future
# release. Users should now get system_python from protobuf_deps.bzl.
system_python = _system_python

@ -1,3 +1,6 @@
load("//bazel:proto_library.bzl", "proto_library")
load("//bazel/toolchains:proto_lang_toolchain.bzl", "proto_lang_toolchain")
package(
default_applicable_licenses = ["//:license"],
default_visibility = ["//visibility:public"],

@ -28,8 +28,13 @@ bzl_library(
)
filegroup(
name = "bazel_osx_p4deps",
srcs = glob(["**"]),
name = "for_bazel_tests",
testonly = True,
srcs = [
"BUILD",
"proto_lang_toolchain_bzl",
"proto_toolchain_bzl",
],
visibility = [
"//bazel:__pkg__",
],

@ -8,7 +8,7 @@ load("//bazel/private:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl
UpbWrappedCcInfo = provider(
"Provider for cc_info for protos",
fields = ["cc_info", "cc_info_with_thunks"],
fields = ["cc_info"],
)
_UpbWrappedGeneratedSrcsInfo = provider(

@ -235,7 +235,7 @@ genrule(
),
outs = ["size_data.txt"],
# We want --format=GNU which counts rodata with data, not text.
cmd = "size $$($$OSTYPE == 'linux-gnu' ? '--format=GNU -d' : '') $(SRCS) > $@",
cmd = "size $$([ $$OSTYPE == 'linux-gnu' ] && echo '--format=GNU -d' || echo '') $(SRCS) > $@",
# "size" sometimes isn't available remotely.
local = 1,
tags = ["no-remote-exec"],

@ -29,7 +29,7 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
load(
"@protobuf//bazel:proto_library.bzl",
"@com_google_protobuf//bazel:proto_library.bzl",
"proto_library",
)

@ -20,7 +20,7 @@ def tmpl_cc_binary(name, gen, args, replacements = [], **kwargs):
)
if _is_google3:
kwargs["malloc"] = "//base:system_malloc"
kwargs["malloc"] = "@bazel_tools//tools/cpp:malloc"
kwargs["features"] = ["-static_linking_mode"]
native.cc_binary(
name = name,

@ -22,7 +22,6 @@ option java_package = "com.google.protobuf";
option java_outer_classname = "DescriptorProtos";
option csharp_namespace = "Google.Protobuf.Reflection";
option objc_class_prefix = "GPB";
option cc_enable_arenas = true;
// The protocol compiler can output a FileDescriptorSet containing the .proto
// files it parses.
@ -34,13 +33,16 @@ message FileDescriptorSet {
message FileDescriptorProto {
optional string name = 1
[ctype = STRING_PIECE]; // file name, relative to root of source tree
optional string package = 2
[ctype = STRING_PIECE]; // e.g. "foo", "foo.bar", etc.
// Names of files imported by this file.
repeated string dependency = 3 [ctype = STRING_PIECE];
// Indexes of the public imported files in the dependency list above.
repeated int32 public_dependency = 10;
// Indexes of the weak imported files in the dependency list.
// For Google-internal migration only. Do not use.
repeated int32 weak_dependency = 11;
@ -50,7 +52,6 @@ message FileDescriptorProto {
repeated EnumDescriptorProto enum_type = 5;
repeated ServiceDescriptorProto service = 6;
repeated FieldDescriptorProto extension = 7;
optional FileOptions options = 8;
// This field contains optional information about the original source code.
@ -70,20 +71,17 @@ message DescriptorProto {
repeated FieldDescriptorProto field = 2;
repeated FieldDescriptorProto extension = 6;
repeated DescriptorProto nested_type = 3;
repeated EnumDescriptorProto enum_type = 4;
message ExtensionRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Exclusive.
optional ExtensionRangeOptions options = 3;
}
repeated ExtensionRange extension_range = 5;
repeated ExtensionRange extension_range = 5;
repeated OneofDescriptorProto oneof_decl = 8;
optional MessageOptions options = 7;
// Range of reserved tag numbers. Reserved tag numbers may not be used by
@ -93,7 +91,9 @@ message DescriptorProto {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Exclusive.
}
repeated ReservedRange reserved_range = 9;
// Reserved field names, which may not be used by fields in the same message.
// A given name may only be reserved once.
repeated string reserved_name = 10 [ctype = STRING_PIECE];
@ -114,10 +114,12 @@ message FieldDescriptorProto {
// Order is weird for historical reasons.
TYPE_DOUBLE = 1;
TYPE_FLOAT = 2;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
// negative values are likely.
TYPE_INT64 = 3;
TYPE_UINT64 = 4;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
// negative values are likely.
TYPE_INT32 = 5;
@ -125,6 +127,7 @@ message FieldDescriptorProto {
TYPE_FIXED32 = 7;
TYPE_BOOL = 8;
TYPE_STRING = 9;
// Tag-delimited aggregate.
// Group type is deprecated and not supported in proto3. However, Proto3
// implementations should still be able to parse the group wire format and
@ -150,6 +153,7 @@ message FieldDescriptorProto {
}
optional string name = 1 [ctype = STRING_PIECE];
optional int32 number = 3;
optional Label label = 4;
@ -214,6 +218,7 @@ message FieldDescriptorProto {
// Describes a oneof.
message OneofDescriptorProto {
optional string name = 1 [ctype = STRING_PIECE];
optional OneofOptions options = 2;
}
@ -222,7 +227,6 @@ message EnumDescriptorProto {
optional string name = 1 [ctype = STRING_PIECE];
repeated EnumValueDescriptorProto value = 2;
optional EnumOptions options = 3;
// Range of reserved numeric values. Reserved values may not be used by
@ -249,16 +253,16 @@ message EnumDescriptorProto {
// Describes a value within an enum.
message EnumValueDescriptorProto {
optional string name = 1 [ctype = STRING_PIECE];
optional int32 number = 2;
optional int32 number = 2;
optional EnumValueOptions options = 3;
}
// Describes a service.
message ServiceDescriptorProto {
optional string name = 1 [ctype = STRING_PIECE];
repeated MethodDescriptorProto method = 2;
repeated MethodDescriptorProto method = 2;
optional ServiceOptions options = 3;
}
@ -269,12 +273,14 @@ message MethodDescriptorProto {
// Input and output type names. These are resolved in the same way as
// FieldDescriptorProto.type_name, but must refer to a message type.
optional string input_type = 2 [ctype = STRING_PIECE];
optional string output_type = 3 [ctype = STRING_PIECE];
optional MethodOptions options = 4;
// Identifies if client streams multiple client messages
optional bool client_streaming = 5 [default = false];
// Identifies if server streams multiple server messages
optional bool server_streaming = 6 [default = false];
}
@ -351,6 +357,7 @@ message FileOptions {
CODE_SIZE = 2; // Use ReflectionOps to implement these methods.
LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
}
optional OptimizeMode optimize_for = 9 [default = SPEED];
// Sets the Go package where structs generated from this .proto will be
@ -371,8 +378,11 @@ message FileOptions {
// these default to false. Old code which depends on generic services should
// explicitly set them to true.
optional bool cc_generic_services = 16 [default = false];
optional bool java_generic_services = 17 [default = false];
optional bool py_generic_services = 18 [default = false];
optional bool php_generic_services = 42 [default = false];
// Is this file deprecated?
@ -499,14 +509,14 @@ message FieldOptions {
// options below. This option is not yet implemented in the open source
// release -- sorry, we'll try to include it in a future version!
optional CType ctype = 1 [default = STRING];
enum CType {
// Default mode.
STRING = 0;
CORD = 1;
STRING_PIECE = 2;
}
// The packed option can be enabled for repeated primitive fields to enable
// a more efficient representation on the wire. Rather than repeatedly
// writing the tag and type for each element, the entire array is encoded as
@ -526,6 +536,7 @@ message FieldOptions {
// This option is an enum to permit additional types to be added, e.g.
// goog.math.Integer.
optional JSType jstype = 6 [default = JS_NORMAL];
enum JSType {
// Use the default type.
JS_NORMAL = 0;
@ -554,7 +565,6 @@ message FieldOptions {
// call from multiple threads concurrently, while non-const methods continue
// to require exclusive access.
//
//
// Note that implementations may choose not to check required fields within
// a lazy sub-message. That is, calling IsInitialized() on the outer message
// may return true even if the inner message has missing required fields.
@ -666,6 +676,7 @@ message MethodOptions {
NO_SIDE_EFFECTS = 1; // implies idempotent
IDEMPOTENT = 2; // idempotent, but may have side effects
}
optional IdempotencyLevel idempotency_level = 34
[default = IDEMPOTENCY_UNKNOWN];
@ -690,13 +701,16 @@ message UninterpretedOption {
// "foo.(bar.baz).qux".
message NamePart {
optional string name_part = 1 [ctype = STRING_PIECE];
optional bool is_extension = 2;
}
repeated NamePart name = 2;
// The value of the uninterpreted option, in whatever type the tokenizer
// identified it as during parsing. Exactly one of these should be set.
optional string identifier_value = 3 [ctype = STRING_PIECE];
optional uint64 positive_int_value = 4;
optional int64 negative_int_value = 5;
optional double double_value = 6;
@ -754,6 +768,7 @@ message SourceCodeInfo {
// ignore those that it doesn't understand, as more types of locations could
// be recorded in the future.
repeated Location location = 1;
message Location {
// Identifies which part of the FileDescriptorProto was defined at this
// location.
@ -835,7 +850,9 @@ message SourceCodeInfo {
//
// // ignored detached comments.
optional string leading_comments = 3 [ctype = STRING_PIECE];
optional string trailing_comments = 4 [ctype = STRING_PIECE];
repeated string leading_detached_comments = 6 [ctype = STRING_PIECE];
}
}
@ -847,6 +864,7 @@ message GeneratedCodeInfo {
// An Annotation connects some span of text in generated code to an element
// of its generating .proto file.
repeated Annotation annotation = 1;
message Annotation {
// Identifies the element in the original source .proto file. This field
// is formatted the same as SourceCodeInfo.Location.path.

@ -17,10 +17,7 @@ COPTS = select({
"/wd4996", # The compiler encountered a deprecated declaration.
],
"//conditions:default": [
"-DHAVE_ZLIB",
"-Woverloaded-virtual",
"-Wno-sign-compare",
"-Wno-nonnull",
],
})

@ -3,6 +3,9 @@ Internal tools to migrate shell commands to Bazel as an intermediate step
to wider Bazelification.
"""
load("@rules_shell//shell:sh_binary.bzl", "sh_binary")
load("@rules_shell//shell:sh_test.bzl", "sh_test")
def inline_sh_binary(
name,
srcs = [],
@ -41,7 +44,7 @@ def inline_sh_binary(
testonly = kwargs["testonly"] if "testonly" in kwargs else None,
)
native.sh_binary(
sh_binary(
name = name,
srcs = [name + "_genrule"],
data = srcs + tools + deps,
@ -86,7 +89,7 @@ def inline_sh_test(
testonly = kwargs["testonly"] if "testonly" in kwargs else None,
)
native.sh_test(
sh_test(
name = name,
srcs = [name + "_genrule"],
data = srcs + tools + deps,

@ -5,7 +5,7 @@ load("@rules_jvm_external//:defs.bzl", "java_export")
load("//:protobuf_version.bzl", "PROTOBUF_JAVA_VERSION")
load("//java/osgi:osgi.bzl", "osgi_java_library")
JAVA_OPTS = [
JAVA_RELEASE_OPTS = [
"-source 8",
"-target 8",
"-Xep:Java8ApiChecker:ERROR",
@ -16,13 +16,21 @@ BUNDLE_LICENSE = "https://opensource.org/licenses/BSD-3-Clause"
def protobuf_java_export(**kwargs):
java_export(
javacopts = JAVA_OPTS,
javacopts = JAVA_RELEASE_OPTS,
# https://github.com/bazelbuild/rules_jvm_external/issues/1245
javadocopts = [
"-notimestamp",
"-use",
"-quiet",
"-Xdoclint:-missing",
"-encoding",
"UTF8",
],
**kwargs
)
def protobuf_java_library(**kwargs):
java_library(
javacopts = JAVA_OPTS,
**kwargs
)
@ -68,7 +76,7 @@ def protobuf_versioned_java_library(
java_library target.
"""
osgi_java_library(
javacopts = JAVA_OPTS,
javacopts = JAVA_RELEASE_OPTS,
automatic_module_name = automatic_module_name,
bundle_doc_url = BUNDLE_DOC_URL,
bundle_license = BUNDLE_LICENSE,

@ -0,0 +1,61 @@
"""Protobuf-specific kotlin build rules."""
load("//:protobuf_version.bzl", "PROTOBUF_JAVA_VERSION")
load("//java/osgi:kotlin_osgi.bzl", "osgi_kt_jvm_library")
BUNDLE_DOC_URL = "https://developers.google.com/protocol-buffers/"
BUNDLE_LICENSE = "https://opensource.org/licenses/BSD-3-Clause"
def protobuf_versioned_kt_jvm_library(
automatic_module_name,
bundle_description,
bundle_name,
bundle_symbolic_name,
bundle_additional_imports = [],
bundle_additional_exports = [],
**kwargs):
"""Extends `kt_jvm_library` to add OSGi headers to the MANIFEST.MF using bndlib
This macro should be usable as a drop-in replacement for kt_jvm_library.
The additional arguments are given the bndlib tool to generate an OSGi-compliant manifest file.
See [bnd documentation](https://bnd.bndtools.org/chapters/110-introduction.html)
Takes all the args that are standard for a kt_jvm_library target plus the following.
Args:
bundle_description: (required) The Bundle-Description header defines a short
description of this bundle.
automatic_module_name: (required) The Automatic-Module-Name header that represents
the name of the module when this bundle is used as an automatic
module.
bundle_name: (required) The Bundle-Name header defines a readable name for this
bundle. This should be a short, human-readable name that can
contain spaces.
bundle_symbolic_name: (required) The Bundle-SymbolicName header specifies a
non-localizable name for this bundle. The bundle symbolic name
together with a version must identify a unique bundle though it can
be installed multiple times in a framework. The bundle symbolic
name should be based on the reverse domain name convention.
bundle_additional_exports: The Export-Package header contains a
declaration of exported packages. These are additional export
package statements to be added before the default wildcard export
"*;version={$Bundle-Version}".
bundle_additional_imports: The Import-Package header declares the
imported packages for this bundle. These are additional import
package statements to be added before the default wildcard import
"*".
**kwargs: Additional key-word arguments that are passed to the internal
kt_jvm_library target.
"""
osgi_kt_jvm_library(
automatic_module_name = automatic_module_name,
bundle_doc_url = BUNDLE_DOC_URL,
bundle_license = BUNDLE_LICENSE,
bundle_version = PROTOBUF_JAVA_VERSION,
bundle_description = bundle_description,
bundle_name = bundle_name,
bundle_symbolic_name = bundle_symbolic_name,
bundle_additional_exports = bundle_additional_exports,
bundle_additional_imports = bundle_additional_imports + ["sun.misc;resolution:=optional"],
**kwargs
)

@ -1,4 +1,6 @@
import common.bazelrc
build --cxxopt=-std=c++14 --host_cxxopt=-std=c++14
build --cxxopt=-std=c++17 --host_cxxopt=-std=c++17
build --cxxopt="-Woverloaded-virtual"
build --copt="-Werror" --copt="-Wno-sign-compare" --copt="-Wno-sign-conversion" --copt="-Wno-error=sign-conversion" --copt="-Wno-deprecated-declarations"

@ -1,5 +1,7 @@
import common.bazelrc
# Workaround for maximum path length issues
build --cxxopt=/std:c++17 --host_cxxopt=/std:c++17
startup --output_user_root=C:/tmp --windows_enable_symlinks
common --enable_runfiles
common --enable_runfiles

@ -1,3 +1,6 @@
# TODO: ErrorProne's SelfAssertions are violated in tests
build --javacopt=-Xep:SelfAssertion:WARN
build:dbg --compilation_mode=dbg
build:opt --compilation_mode=opt
@ -27,6 +30,8 @@ build:ubsan --action_env=UBSAN_OPTIONS=halt_on_error=1:print_stacktrace=1
# Workaround for the fact that Bazel links with $CC, not $CXX
# https://github.com/bazelbuild/bazel/issues/11122#issuecomment-613746748
build:ubsan --copt=-fno-sanitize=function --copt=-fno-sanitize=vptr
# Abseil passes nullptr to memcmp with 0 size
build:ubsan --copt=-fno-sanitize=nonnull-attribute
# Workaround Bazel 7 remote cache issues.
# See https://github.com/bazelbuild/bazel/issues/20161
@ -75,6 +80,10 @@ build --incompatible_use_host_features
# https://github.com/protocolbuffers/protobuf/issues/14313
common --noenable_bzlmod
# For easier debugging of build failures.
common --announce_rc
build --verbose_failures
# Important: this flag ensures that we remain compliant with the C++ layering
# check.
build --features=layering_check

@ -1,6 +1,8 @@
import common.bazelrc
build --cxxopt=-std=c++14 --host_cxxopt=-std=c++14
build --cxxopt=-std=c++17 --host_cxxopt=-std=c++17
build --cxxopt="-Woverloaded-virtual"
build --copt="-Werror" --copt="-Wno-sign-compare" --copt="-Wno-sign-conversion" --copt="-Wno-error=sign-conversion" --copt="-Wno-deprecated-declarations"
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1
common --xcode_version_config=@com_google_protobuf//.github:host_xcodes
common --xcode_version_config=@com_google_protobuf//.github:host_xcodes

@ -0,0 +1,25 @@
load("@rules_python//python:defs.bzl", "py_binary")
load("//upb/cmake:build_defs.bzl", "staleness_test")
py_binary(
name = "dependencies_generator",
srcs = ["dependencies_generator.py"],
)
genrule(
name = "generate_dependencies",
srcs = ["//:MODULE.bazel"],
outs = ["generated-in/dependencies.cmake"],
cmd = "$(location :dependencies_generator) " +
"$(location //:MODULE.bazel) $@",
tools = [":dependencies_generator"],
)
staleness_test(
name = "test_dependencies_staleness",
outs = [
"dependencies.cmake",
],
generated_pattern = "generated-in/%s",
tags = ["manual"],
)

@ -82,13 +82,6 @@ Go to the project folder:
C:\Path\to\src> cd protobuf
C:\Path\to\src\protobuf>
Remember to update any submodules if you are using git clone (you can skip this
step if you are using a release .tar.gz or .zip package):
```console
C:\Path\to\src\protobuf> git submodule update --init --recursive
```
Good. Now you are ready for *CMake* configuration.
## CMake Configuration
@ -117,8 +110,8 @@ Create a temporary *build* folder and change your working directory to it:
C:\Path\to\build\protobuf>
During configuration you will also be specifying where CMake should expect to
find your Abseil installation. To do so, first set `-Dprotobuf_ABSL_PROVIDER=package`
and then set `-DCMAKE_PREFIX_PATH` to the path where you installed Abseil.
find your Abseil installation. To do so, set `-DCMAKE_PREFIX_PATH` to the path
where you installed Abseil.
For example:
@ -126,10 +119,14 @@ For example:
C:\Path\to\build\protobuf> cmake -S. -Bcmake-out \
-DCMAKE_INSTALL_PREFIX=/tmp/protobuf \
-DCMAKE_CXX_STANDARD=14 \
-Dprotobuf_ABSL_PROVIDER=package \
-DCMAKE_PREFIX_PATH=/tmp/absl # Path to where I installed Abseil
```
If the installation of a dependency can't be found, CMake will default to
downloading and building a copy from GitHub. To prevent this and make it an
error condition, you can optionally set
`-Dprotobuf_LOCAL_DEPENDENCIES_ONLY=ON`.
The *Makefile* and *Ninja* generators can build the project in only one configuration, so you need to build
a separate folder for each configuration.
@ -156,15 +153,14 @@ It will generate *Visual Studio* solution file *protobuf.sln* in current directo
Unit tests are being built along with the rest of protobuf. The unit tests require Google Mock (now a part of Google Test).
A copy of [Google Test](https://github.com/google/googletest) is included as a Git submodule in the `third-party/googletest` folder.
(You do need to initialize the Git submodules as explained above.)
By default, a local copy of [Google Test](https://github.com/google/googletest)
will be downloaded during CMake configuration.
Alternately, you may want to use protobuf in a larger set-up, you may want to use that standard CMake approach where
you build and install a shared copy of Google Test.
After you've built and installed your Google Test copy, you need add the following definition to your *cmake* command line
during the configuration step: `-Dprotobuf_USE_EXTERNAL_GTEST=ON`.
This will cause the standard CMake `find_package(GTest REQUIRED)` to be used.
After you've built and installed your Google Test copy, the standard CMake
`find_package(GTest)` will use it.
[find_package](https://cmake.org/cmake/help/latest/command/find_package.html) will search in a default location,
which on Windows is *C:\Program Files*. This is most likely not what you want. You will want instead to search for

@ -10,31 +10,36 @@ if(protobuf_BUILD_TESTS)
set(ABSL_FIND_GOOGLETEST OFF)
endif()
if(TARGET absl::strings)
# If Abseil is included already, skip including it.
# (https://github.com/protocolbuffers/protobuf/issues/10435)
elseif(protobuf_ABSL_PROVIDER STREQUAL "module")
if(NOT ABSL_ROOT_DIR)
set(ABSL_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party/abseil-cpp)
if (NOT TARGET absl::strings)
if (NOT protobuf_FORCE_FETCH_DEPENDENCIES)
# Use "CONFIG" as there is no built-in cmake module for absl.
find_package(absl CONFIG)
endif()
if(EXISTS "${ABSL_ROOT_DIR}/CMakeLists.txt")
if(protobuf_INSTALL)
# Fallback to fetching Abseil from github if it's not found locally.
if (NOT absl_FOUND AND NOT protobuf_LOCAL_DEPENDENCIES_ONLY)
include(${protobuf_SOURCE_DIR}/cmake/dependencies.cmake)
message(STATUS "Fallback to downloading Abseil ${abseil-cpp-version} from GitHub")
include(FetchContent)
FetchContent_Declare(
absl
GIT_REPOSITORY "https://github.com/abseil/abseil-cpp.git"
GIT_TAG "${abseil-cpp-version}"
)
if (protobuf_INSTALL)
# When protobuf_INSTALL is enabled and Abseil will be built as a module,
# Abseil will be installed along with protobuf for convenience.
set(ABSL_ENABLE_INSTALL ON)
endif()
add_subdirectory(${ABSL_ROOT_DIR} third_party/abseil-cpp)
else()
message(WARNING "protobuf_ABSL_PROVIDER is \"module\" but ABSL_ROOT_DIR is wrong")
FetchContent_MakeAvailable(absl)
endif()
if(protobuf_INSTALL AND NOT _protobuf_INSTALL_SUPPORTED_FROM_MODULE)
message(WARNING "protobuf_INSTALL will be forced to FALSE because protobuf_ABSL_PROVIDER is \"module\" and CMake version (${CMAKE_VERSION}) is less than 3.13.")
set(protobuf_INSTALL FALSE)
endif()
elseif(protobuf_ABSL_PROVIDER STREQUAL "package")
# Use "CONFIG" as there is no built-in cmake module for absl.
find_package(absl REQUIRED CONFIG)
endif()
if (NOT TARGET absl::strings)
message(FATAL_ERROR "Cannot find abseil-cpp dependency that's needed to build protobuf.\n")
endif()
set(_protobuf_FIND_ABSL "if(NOT TARGET absl::strings)\n find_package(absl CONFIG)\nendif()")
if (BUILD_SHARED_LIBS AND MSVC)
@ -45,13 +50,8 @@ if (BUILD_SHARED_LIBS AND MSVC)
# Once https://github.com/abseil/abseil-cpp/pull/1466 is merged and released
# in the minimum version of abseil required by protobuf, it is possible to
# always link absl::abseil_dll and absl::abseil_test_dll and remove the if
if(protobuf_ABSL_PROVIDER STREQUAL "package")
set(protobuf_ABSL_USED_TARGETS absl::abseil_dll)
set(protobuf_ABSL_USED_TEST_TARGETS absl::abseil_test_dll)
else()
set(protobuf_ABSL_USED_TARGETS abseil_dll)
set(protobuf_ABSL_USED_TEST_TARGETS abseil_test_dll)
endif()
set(protobuf_ABSL_USED_TARGETS absl::abseil_dll)
set(protobuf_ABSL_USED_TEST_TARGETS absl::abseil_test_dll)
else()
set(protobuf_ABSL_USED_TARGETS
absl::absl_check

@ -1,15 +1,31 @@
if (protobuf_JSONCPP_PROVIDER STREQUAL "module")
if (NOT EXISTS "${protobuf_SOURCE_DIR}/third_party/jsoncpp/CMakeLists.txt")
message(FATAL_ERROR
"Cannot find third_party/jsoncpp directory that's needed to "
"build conformance tests. If you use git, make sure you have cloned "
"submodules:\n"
" git submodule update --init --recursive\n"
"If instead you want to skip them, run cmake with:\n"
" cmake -Dprotobuf_BUILD_CONFORMANCE=OFF\n")
# Don't run jsoncpp tests.
set(JSONCPP_WITH_TESTS OFF)
if (NOT TARGET jsoncpp_lib)
if (NOT protobuf_FORCE_FETCH_DEPENDENCIES)
find_package(jsoncpp)
endif()
# Fallback to fetching Googletest from github if it's not found locally.
if (NOT jsoncpp_FOUND AND NOT protobuf_LOCAL_DEPENDENCIES_ONLY)
include(${protobuf_SOURCE_DIR}/cmake/dependencies.cmake)
message(STATUS "Fallback to downloading jsoncpp ${jsoncpp-version} from GitHub")
include(FetchContent)
FetchContent_Declare(
jsoncpp
GIT_REPOSITORY "https://github.com/open-source-parsers/jsoncpp.git"
GIT_TAG "${jsoncpp-version}"
)
FetchContent_MakeAvailable(jsoncpp)
endif()
elseif(protobuf_JSONCPP_PROVIDER STREQUAL "package")
find_package(jsoncpp REQUIRED)
endif()
if (NOT TARGET jsoncpp_lib)
message(FATAL_ERROR
"Cannot find jsoncpp dependency that's needed to build conformance tests.\n"
"If instead you want to skip these tests, run cmake with:\n"
" cmake -Dprotobuf_BUILD_CONFORMANCE=OFF\n")
endif()
file(MAKE_DIRECTORY ${protobuf_BINARY_DIR}/conformance)
@ -129,18 +145,13 @@ add_test(NAME conformance_cpp_test
--text_format_failure_list ${protobuf_SOURCE_DIR}/conformance/text_format_failure_list_cpp.txt
--output_dir ${protobuf_TEST_XML_OUTDIR}
--maximum_edition 2023
${CMAKE_CURRENT_BINARY_DIR}/conformance_cpp
$<TARGET_FILE:conformance_cpp>
DEPENDS conformance_test_runner conformance_cpp)
set(JSONCPP_WITH_TESTS OFF CACHE BOOL "Disable tests")
if(protobuf_JSONCPP_PROVIDER STREQUAL "module")
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/third_party/jsoncpp third_party/jsoncpp)
target_include_directories(conformance_test_runner PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/third_party/jsoncpp/include)
if(BUILD_SHARED_LIBS)
target_link_libraries(conformance_test_runner jsoncpp_lib)
else()
target_link_libraries(conformance_test_runner jsoncpp_static)
endif()
if(BUILD_SHARED_LIBS)
target_link_libraries(conformance_test_runner jsoncpp_lib)
else()
target_link_libraries(conformance_test_runner jsoncpp)
target_link_libraries(conformance_test_runner jsoncpp_static)
endif()

@ -0,0 +1,34 @@
# Auto-generated by @//cmake:make_dependencies
#
# This file contains lists of external dependencies based on our Bazel
# config. It should be included from a hand-written CMake file that uses
# them.
#
# Changes to this file will be overwritten based on Bazel definitions.
if(${CMAKE_VERSION} VERSION_GREATER 3.10 OR ${CMAKE_VERSION} VERSION_EQUAL 3.10)
include_guard()
endif()
set(abseil-cpp-version "20240722.0")
set(bazel_skylib-version "1.7.0")
set(jsoncpp-version "1.9.6")
set(rules_cc-version "0.0.16")
set(rules_fuzzing-version "0.5.2")
set(rules_java-version "8.3.2")
set(rules_jvm_external-version "6.3")
set(rules_kotlin-version "1.9.6")
set(rules_license-version "1.0.0")
set(rules_pkg-version "1.0.1")
set(rules_python-version "0.28.0")
set(rules_rust-version "0.51.0")
set(platforms-version "0.0.8")
set(zlib-version "1.3.1")
set(bazel_features-version "1.17.0")
set(rules_shell-version "0.2.0")
set(googletest-version "1.14.0")
set(rules_buf-version "0.3.0")
set(rules_testing-version "0.6.0")
set(rules_proto-version "4.0.0")

@ -0,0 +1,143 @@
#!/usr/bin/python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2023 Google LLC. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google LLC nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A tool to convert MODULE.bazel -> CMakeLists.txt.
This tool is very protobuf-specific at the moment, and should not be seen as a
generic Bazel -> CMake converter.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import textwrap
class ExtensionFunctions(object):
"""A fake extension that we can use to get the functions we need."""
def toolchain(self, *args, **kwargs):
pass
def parse(self, *args, **kwargs):
pass
def spec(self, *args, **kwargs):
pass
def from_specs(self, *args, **kwargs):
pass
def install(self, *args, **kwargs):
pass
class ModuleFileFunctions(object):
"""A fake MODULE file that we can exec() to get the functions we need."""
def __init__(self, converter):
self.converter = converter
def module(self, *args, **kwargs):
pass
def bazel_dep(self, name, version, **kwargs):
self.converter.toplevel += textwrap.dedent(
"""\
set(%(name)s-version "%(version)s")
"""
% {
"name": name,
"version": version,
}
)
def register_toolchains(self, *args):
pass
def use_repo(self, *args, **kwargs):
pass
def use_extension(self, *args):
return ExtensionFunctions()
class Converter(object):
def __init__(self):
self.toplevel = ""
self.if_lua = ""
def convert(self):
return self.template % {
"toplevel": converter.toplevel,
}
template = textwrap.dedent("""\
# Auto-generated by @//cmake:make_dependencies
#
# This file contains lists of external dependencies based on our Bazel
# config. It should be included from a hand-written CMake file that uses
# them.
#
# Changes to this file will be overwritten based on Bazel definitions.
if(${CMAKE_VERSION} VERSION_GREATER 3.10 OR ${CMAKE_VERSION} VERSION_EQUAL 3.10)
include_guard()
endif()
%(toplevel)s
""")
data = {}
converter = Converter()
def GetDict(obj):
ret = {}
for k in dir(obj):
if not k.startswith("_"):
ret[k] = getattr(obj, k)
return ret
# We take the MODULE path as a command-line argument to ensure that we can find
# it regardless of how exactly Bazel was invoked.
exec(open(sys.argv[1]).read(), GetDict(ModuleFileFunctions(converter)))
with open(sys.argv[2], "w") as f:
f.write(converter.convert())

@ -1,48 +1,28 @@
option(protobuf_USE_EXTERNAL_GTEST "Use external Google Test (i.e. not the one in third_party/googletest)" OFF)
if (protobuf_USE_EXTERNAL_GTEST)
find_package(GTest REQUIRED CONFIG)
else()
if (NOT EXISTS "${protobuf_SOURCE_DIR}/third_party/googletest/CMakeLists.txt")
message(FATAL_ERROR
"Cannot find third_party/googletest directory that's needed to "
"build tests. If you use git, make sure you have cloned submodules:\n"
" git submodule update --init --recursive\n"
"If instead you want to skip tests, run cmake with:\n"
" cmake -Dprotobuf_BUILD_TESTS=OFF\n")
if (NOT TARGET GTest::gmock)
if (NOT protobuf_FORCE_FETCH_DEPENDENCIES)
find_package(GTest CONFIG)
endif()
set(googlemock_source_dir "${protobuf_SOURCE_DIR}/third_party/googletest/googlemock")
set(googletest_source_dir "${protobuf_SOURCE_DIR}/third_party/googletest/googletest")
include_directories(
${googlemock_source_dir}
${googletest_source_dir}
${googletest_source_dir}/include
${googlemock_source_dir}/include
)
# Fallback to fetching Googletest from github if it's not found locally.
if (NOT GTest_FOUND AND NOT protobuf_LOCAL_DEPENDENCIES_ONLY)
include(${protobuf_SOURCE_DIR}/cmake/dependencies.cmake)
message(STATUS "Fallback to downloading GTest ${googletest-version} from GitHub")
add_library(gmock ${protobuf_SHARED_OR_STATIC}
"${googlemock_source_dir}/src/gmock-all.cc"
"${googletest_source_dir}/src/gtest-all.cc"
)
if (protobuf_BUILD_SHARED_LIBS)
set_target_properties(gmock
PROPERTIES
COMPILE_DEFINITIONS
"GTEST_CREATE_SHARED_LIBRARY=1"
include(FetchContent)
FetchContent_Declare(
googletest
GIT_REPOSITORY "https://github.com/google/googletest.git"
GIT_TAG "v${googletest-version}"
)
endif()
if (protobuf_INSTALL)
set(protobuf_INSTALL_TESTS ON)
# Due to https://github.com/google/googletest/issues/4384, we can't name this
# GTest for use with find_package until 1.15.0.
FetchContent_MakeAvailable(googletest)
endif()
endif()
target_link_libraries(gmock ${CMAKE_THREAD_LIBS_INIT})
add_library(gmock_main STATIC "${googlemock_source_dir}/src/gmock_main.cc")
target_link_libraries(gmock_main gmock)
add_library(GTest::gmock ALIAS gmock)
add_library(GTest::gmock_main ALIAS gmock_main)
add_library(GTest::gtest ALIAS gmock)
add_library(GTest::gtest_main ALIAS gmock_main)
if (NOT TARGET GTest::gmock)
message(FATAL_ERROR
"Cannot find googletest dependency that's needed to build tests.\n"
"If instead you want to skip tests, run cmake with:\n"
" cmake -Dprotobuf_BUILD_TESTS=OFF\n")
endif()

@ -2,11 +2,11 @@ function(protobuf_generate)
include(CMakeParseArguments)
set(_options APPEND_PATH)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR PLUGIN PLUGIN_OPTIONS DEPENDENCIES)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR PLUGIN PLUGIN_OPTIONS PROTOC_EXE)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS PROTOC_OPTIONS)
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS PROTOC_OPTIONS DEPENDENCIES)
cmake_parse_arguments(protobuf_generate "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")
@ -83,6 +83,11 @@ function(protobuf_generate)
endforeach()
endif()
if(NOT protobuf_generate_PROTOC_EXE)
# Default to using the CMake executable
set(protobuf_generate_PROTOC_EXE protobuf::protoc)
endif()
foreach(DIR ${protobuf_generate_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
@ -143,7 +148,7 @@ function(protobuf_generate)
add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
COMMAND ${protobuf_generate_PROTOC_EXE}
ARGS ${protobuf_generate_PROTOC_OPTIONS} --${protobuf_generate_LANGUAGE}_out ${_plugin_options}:${protobuf_generate_PROTOC_OUT_DIR} ${_plugin} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} ${protobuf_PROTOC_EXE} ${protobuf_generate_DEPENDENCIES}
COMMENT ${_comment}

@ -220,6 +220,8 @@ add_custom_target(restore-installed-headers)
file(GLOB_RECURSE _local_hdrs
"${PROJECT_SOURCE_DIR}/src/*.h"
"${PROJECT_SOURCE_DIR}/src/*.inc"
)
file(GLOB_RECURSE _local_upb_hdrs
"${PROJECT_SOURCE_DIR}/upb/*.h"
)
@ -235,6 +237,7 @@ list(APPEND _exclude_hdrs ${test_util_hdrs} ${lite_test_util_hdrs} ${common_test
${compiler_test_utils_hdrs} ${upb_test_util_files})
foreach(_hdr ${_exclude_hdrs})
list(REMOVE_ITEM _local_hdrs ${_hdr})
list(REMOVE_ITEM _local_upb_hdrs ${_hdr})
endforeach()
foreach(_hdr ${_local_hdrs})
@ -250,6 +253,19 @@ foreach(_hdr ${_local_hdrs})
copy "${_tmp_file}" "${_hdr}")
endforeach()
foreach(_hdr ${_local_upb_hdrs})
string(REPLACE "${protobuf_SOURCE_DIR}/upb" "" _file ${_hdr})
set(_tmp_file "${CMAKE_BINARY_DIR}/tmp-install-test/${_file}")
add_custom_command(TARGET remove-installed-headers PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E remove -f "${_hdr}")
add_custom_command(TARGET save-installed-headers PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E
copy "${_hdr}" "${_tmp_file}" || true)
add_custom_command(TARGET restore-installed-headers PRE_BUILD
COMMAND ${CMAKE_COMMAND} -E
copy "${_tmp_file}" "${_hdr}")
endforeach()
add_dependencies(remove-installed-headers save-installed-headers)
if(protobuf_REMOVE_INSTALLED_HEADERS)
# Make sure we remove all the headers *before* any codegen occurs.

@ -16,8 +16,6 @@ foreach(generator upb upbdefs upb_minitable)
)
target_include_directories(protoc-gen-${generator} PRIVATE ${bootstrap_cmake_dir})
target_link_libraries(protoc-gen-${generator}
${protobuf_LIB_PROTOBUF}
${protobuf_LIB_PROTOC}
${protobuf_LIB_UPB}
${protobuf_ABSL_USED_TARGETS}
)

@ -33,12 +33,6 @@ java_runtime_conformance(
gencode_version = "main",
)
# Generates a build_test named "conformance_v3.25.0"
java_runtime_conformance(
name = "java_conformance_v3.25.0",
gencode_version = "3.25.0",
)
# Breaking change detection for well-known types and descriptor.proto.
buf_breaking_test(
name = "any_proto_breaking",

@ -10,6 +10,9 @@ load(
load("@rules_ruby//ruby:defs.bzl", "ruby_binary")
load("//:protobuf.bzl", "internal_csharp_proto_library", "internal_objc_proto_library", "internal_php_proto_library", "internal_py_proto_library", "internal_ruby_proto_library")
load("//bazel:cc_proto_library.bzl", "cc_proto_library")
load("//bazel:java_lite_proto_library.bzl", "java_lite_proto_library")
load("//bazel:java_proto_library.bzl", "java_proto_library")
load("//bazel:proto_library.bzl", "proto_library")
load("//build_defs:internal_shell.bzl", "inline_sh_binary")
load("//ruby:defs.bzl", "internal_ruby_proto_library")
@ -343,7 +346,7 @@ inline_sh_binary(
"conformance_php.php",
],
cmd = """
php -d include_path=conformance:src/google/protobuf \\
php -d include_path=php/generated:conformance:src/google/protobuf:editions/golden \\
-d auto_prepend_file=$(rootpath autoload.php) \\
$(rootpath conformance_php.php)
""",
@ -351,6 +354,7 @@ inline_sh_binary(
deps = [
":conformance_php_proto",
"//:test_messages_proto3_php_proto",
"//editions:test_messages_proto3_editions_php_proto",
"//php:source_files",
],
)

@ -70,7 +70,7 @@ class ConformanceJava {
}
private enum BinaryDecoderType {
BTYE_STRING_DECODER,
BYTE_STRING_DECODER,
BYTE_ARRAY_DECODER,
ARRAY_BYTE_BUFFER_DECODER,
READONLY_ARRAY_BYTE_BUFFER_DECODER,
@ -84,7 +84,7 @@ class ConformanceJava {
ByteString bytes, BinaryDecoderType type, Parser<T> parser, ExtensionRegistry extensions)
throws InvalidProtocolBufferException {
switch (type) {
case BTYE_STRING_DECODER:
case BYTE_STRING_DECODER:
case BYTE_ARRAY_DECODER:
return parser.parseFrom(bytes, extensions);
case ARRAY_BYTE_BUFFER_DECODER:

@ -67,7 +67,7 @@ class ConformanceJavaLite {
}
private enum BinaryDecoderType {
BTYE_STRING_DECODER,
BYTE_STRING_DECODER,
BYTE_ARRAY_DECODER,
ARRAY_BYTE_BUFFER_DECODER,
READONLY_ARRAY_BYTE_BUFFER_DECODER,
@ -84,7 +84,7 @@ class ConformanceJavaLite {
ExtensionRegistryLite extensions)
throws InvalidProtocolBufferException {
switch (type) {
case BTYE_STRING_DECODER:
case BYTE_STRING_DECODER:
case BYTE_ARRAY_DECODER:
return parser.parseFrom(bytes, extensions);
case ARRAY_BYTE_BUFFER_DECODER:

@ -70,7 +70,7 @@ C#:
Objective-C (Mac only):
$ `bazel test //objectivec:conformance_test --macos_minimum_os=10.9
$ `bazel test //objectivec:conformance_test --macos_minimum_os=11.0`
Ruby:

@ -7,8 +7,12 @@ define("GOOGLE_GPBMETADATA_NAMESPACE", "GPBMetadata\\Google\\Protobuf\\");
function protobuf_autoloader_impl($class, $prefix) {
$length = strlen($prefix);
if ((substr($class, 0, $length) === $prefix)) {
$path = 'php/src/' . implode('/', array_map('ucwords', explode('\\', $class))) . '.php';
include_once $path;
$path = 'src/' . implode('/', array_map('ucwords', explode('\\', $class))) . '.php';
if (file_exists('php/' . $path)) {
include_once 'php/' . $path;
} else {
include_once 'php/generated/' . $path;
}
}
}

@ -26,6 +26,7 @@
#include "absl/strings/str_format.h"
#include "absl/strings/string_view.h"
#include "absl/strings/substitute.h"
#include "json/config.h"
#include "json/reader.h"
#include "json/value.h"
#include "conformance/conformance.pb.h"
@ -659,12 +660,16 @@ void BinaryAndJsonConformanceSuiteImpl<
suite_.ReportFailure(test, level, request, response);
return;
}
Json::Reader reader;
Json::CharReaderBuilder builder;
Json::Value value;
if (!reader.parse(response.json_payload(), value)) {
Json::String err;
const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
if (!reader->parse(
response.json_payload().c_str(),
response.json_payload().c_str() + response.json_payload().length(),
&value, &err)) {
test.set_failure_message(
absl::StrCat("JSON payload cannot be parsed as valid JSON: ",
reader.getFormattedErrorMessages()));
absl::StrCat("JSON payload cannot be parsed as valid JSON: ", err));
suite_.ReportFailure(test, level, request, response);
return;
}
@ -1361,6 +1366,43 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::TestIllegalTags() {
}
}
template <typename MessageType>
void BinaryAndJsonConformanceSuiteImpl<MessageType>::TestUnmatchedGroup() {
ExpectParseFailureForProto(tag(201, WireFormatLite::WIRETYPE_END_GROUP),
"UnmatchedEndGroup", REQUIRED);
ExpectParseFailureForProto(tag(1234, WireFormatLite::WIRETYPE_END_GROUP),
"UnmatchedEndGroupUnknown", REQUIRED);
ExpectParseFailureForProto(tag(1, WireFormatLite::WIRETYPE_END_GROUP),
"UnmatchedEndGroupWrongType", REQUIRED);
ExpectParseFailureForProto(
len(18, tag(1234, WireFormatLite::WIRETYPE_END_GROUP)),
"UnmatchedEndGroupNestedLen", REQUIRED);
ExpectParseFailureForProto(
group(201, tag(202, WireFormatLite::WIRETYPE_END_GROUP)),
"UnmatchedEndGroupNested", REQUIRED);
ExpectParseFailureForProto(
absl::StrCat(tag(1, WireFormatLite::WIRETYPE_END_GROUP),
len(2, "hello world")),
"UnmatchedEndGroupWithData", REQUIRED);
ExpectParseFailureForProto(tag(201, WireFormatLite::WIRETYPE_START_GROUP),
"UnmatchedStartGroup", REQUIRED);
ExpectParseFailureForProto(tag(1234, WireFormatLite::WIRETYPE_START_GROUP),
"UnmatchedStartGroupUnknown", REQUIRED);
ExpectParseFailureForProto(tag(1, WireFormatLite::WIRETYPE_START_GROUP),
"UnmatchedStartGroupWrongType", REQUIRED);
ExpectParseFailureForProto(
len(18, tag(1234, WireFormatLite::WIRETYPE_START_GROUP)),
"UnmatchedStartGroupNestedLen", REQUIRED);
ExpectParseFailureForProto(
group(201, tag(202, WireFormatLite::WIRETYPE_START_GROUP)),
"UnmatchedStartGroupNested", REQUIRED);
ExpectParseFailureForProto(
absl::StrCat(tag(1, WireFormatLite::WIRETYPE_START_GROUP),
len(2, "hello world")),
"UnmatchedStartGroupWithData", REQUIRED);
}
template <typename MessageType>
void BinaryAndJsonConformanceSuiteImpl<MessageType>::TestUnknownWireType() {
for (uint8_t type : {0x6, 0x7}) {
@ -1548,7 +1590,7 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::RunAllTests() {
}
TestIllegalTags();
TestUnmatchedGroup();
TestUnknownWireType();
int64_t kInt64Min = -9223372036854775808ULL;
@ -2284,6 +2326,10 @@ void BinaryAndJsonConformanceSuiteImpl<
RunValidJsonTest("Int32FieldStringValueEscaped", REQUIRED,
R"({"optionalInt32": "2\u003147483647"})",
"optional_int32: 2147483647");
RunValidJsonTest("Int32FieldStringValueZero", REQUIRED,
R"({"optionalInt32": "0"})", "optional_int32: 0");
RunValidJsonTest("Int32FieldQuotedExponentialValue", REQUIRED,
R"({"optionalInt32": "1e5"})", "optional_int32: 100000");
// Parsers reject out-of-bound integer values.
ExpectParseFailureForJson("Int32FieldTooLarge", REQUIRED,
@ -2298,7 +2344,8 @@ void BinaryAndJsonConformanceSuiteImpl<
R"({"optionalInt64": "-9223372036854775809"})");
ExpectParseFailureForJson("Uint64FieldTooLarge", REQUIRED,
R"({"optionalUint64": "18446744073709551616"})");
// Parser reject non-integer numeric values as well.
// Parser reject non-integer numeric values.
ExpectParseFailureForJson("Int32FieldNotInteger", REQUIRED,
R"({"optionalInt32": 0.5})");
ExpectParseFailureForJson("Uint32FieldNotInteger", REQUIRED,
@ -2308,6 +2355,22 @@ void BinaryAndJsonConformanceSuiteImpl<
ExpectParseFailureForJson("Uint64FieldNotInteger", REQUIRED,
R"({"optionalUint64": "0.5"})");
// Parser reject non-numeric string values.
ExpectParseFailureForJson("Int32FieldStringValuePartiallyNumeric", REQUIRED,
R"({"optionalInt32": "12abc"})");
ExpectParseFailureForJson("Int32FieldStringValueNonNumeric", REQUIRED,
R"({"optionalInt32": "abc"})");
// Parser reject empty string values.
ExpectParseFailureForJson("Int32FieldEmptyString", REQUIRED,
R"({"optionalInt32": ""})");
ExpectParseFailureForJson("Uint32FieldEmptyString", REQUIRED,
R"({"optionalUint32": ""})");
ExpectParseFailureForJson("Int64FieldEmptyString", REQUIRED,
R"({"optionalInt64": ""})");
ExpectParseFailureForJson("Uint64FieldEmptyString", REQUIRED,
R"({"optionalUint64": ""})");
// Integers but represented as float values are accepted.
RunValidJsonTest("Int32FieldFloatTrailingZero", REQUIRED,
R"({"optionalInt32": 100000.000})",
@ -2402,6 +2465,9 @@ void BinaryAndJsonConformanceSuiteImpl<
// Values can be quoted.
RunValidJsonTest("FloatFieldQuotedValue", REQUIRED,
R"({"optionalFloat": "1"})", "optional_float: 1");
RunValidJsonTest("FloatFieldQuotedExponentialValue", REQUIRED,
R"({"optionalFloat": "1.175494e-38"})",
"optional_float: 1.175494e-38");
// Special values.
RunValidJsonTest("FloatFieldNan", REQUIRED, R"({"optionalFloat": "NaN"})",
"optional_float: nan");
@ -2431,12 +2497,23 @@ void BinaryAndJsonConformanceSuiteImpl<
R"({"optionalFloat": Infinity})");
ExpectParseFailureForJson("FloatFieldNegativeInfinityNotQuoted", RECOMMENDED,
R"({"optionalFloat": -Infinity})");
// Parsers should reject out-of-bound values.
ExpectParseFailureForJson("FloatFieldTooSmall", REQUIRED,
R"({"optionalFloat": -3.502823e+38})");
ExpectParseFailureForJson("FloatFieldTooLarge", REQUIRED,
R"({"optionalFloat": 3.502823e+38})");
// Parsers should reject empty string values.
ExpectParseFailureForJson("FloatFieldEmptyString", REQUIRED,
R"({"optionalFloat": ""})");
// Parser reject non-numeric string values.
ExpectParseFailureForJson("FloatFieldStringValuePartiallyNumeric", REQUIRED,
R"({"optionalFloat": "12abc"})");
ExpectParseFailureForJson("FloatFieldStringValueNonNumeric", REQUIRED,
R"({"optionalFloat": "abc"})");
// Double fields.
RunValidJsonTest("DoubleFieldMinPositiveValue", REQUIRED,
R"({"optionalDouble": 2.22507e-308})",
@ -2453,6 +2530,9 @@ void BinaryAndJsonConformanceSuiteImpl<
// Values can be quoted.
RunValidJsonTest("DoubleFieldQuotedValue", REQUIRED,
R"({"optionalDouble": "1"})", "optional_double: 1");
RunValidJsonTest("DoubleFieldQuotedExponentialValue", REQUIRED,
R"({"optionalDouble": "2.22507e-308"})",
"optional_double: 2.22507e-308");
// Special values.
RunValidJsonTest("DoubleFieldNan", REQUIRED, R"({"optionalDouble": "NaN"})",
"optional_double: nan");
@ -2489,6 +2569,16 @@ void BinaryAndJsonConformanceSuiteImpl<
ExpectParseFailureForJson("DoubleFieldTooLarge", REQUIRED,
R"({"optionalDouble": +1.89769e+308})");
// Parsers should reject empty string values.
ExpectParseFailureForJson("DoubleFieldEmptyString", REQUIRED,
R"({"optionalDouble": ""})");
// Parser reject non-numeric string values.
ExpectParseFailureForJson("DoubleFieldStringValuePartiallyNumeric", REQUIRED,
R"({"optionalDouble": "12abc"})");
ExpectParseFailureForJson("DoubleFieldStringValueNonNumeric", REQUIRED,
R"({"optionalDouble": "abc"})");
// Enum fields.
RunValidJsonTest("EnumField", REQUIRED, R"({"optionalNestedEnum": "FOO"})",
"optional_nested_enum: FOO");

@ -143,6 +143,7 @@ class BinaryAndJsonConformanceSuiteImpl {
ConformanceLevel level);
void TestPrematureEOFForType(google::protobuf::FieldDescriptor::Type type);
void TestIllegalTags();
void TestUnmatchedGroup();
void TestUnknownWireType();
void TestOneofMessage();
void TestUnknownMessage();

@ -86,6 +86,7 @@ function doTest($request)
case 'protobuf_test_messages.editions.proto3.TestAllTypesProto3':
$test_message = new TestAllTypesProto3Editions();
break;
case 'protobuf_test_messages.proto2.TestAllTypesProto2':
case 'protobuf_test_messages.editions.proto2.TestAllTypesProto2':
$response->setSkipped('PHP doesn\'t support proto2');
return $response;

@ -8,7 +8,7 @@ use conformance_rust_proto::{ConformanceRequest, ConformanceResponse, WireFormat
use protobuf::prelude::*;
use protobuf::Optional::{Set, Unset};
use protobuf::ParseError;
use protobuf::{Message, ParseError};
use std::io::{self, ErrorKind, Read, Write};
use test_messages_edition2023_rust_proto::TestAllTypesEdition2023;

@ -182,7 +182,7 @@ bool CheckSetEmpty(const absl::btree_map<std::string, TestStatus>& set_to_check,
namespace google {
namespace protobuf {
constexpr int kMaximumWildcardExpansions = 5;
constexpr int kMaximumWildcardExpansions = 10;
ConformanceTestSuite::ConformanceRequestSetting::ConformanceRequestSetting(
ConformanceLevel level, conformance::WireFormat input_format,

@ -157,7 +157,7 @@ void UsageError() {
" output files.\n\n");
fprintf(stderr, " --test <test_name> Only run\n");
fprintf(stderr,
" the specified test. Mulitple tests\n"
" the specified test. Multiple tests\n"
" can be specified by repeating the \n"
" flag.\n\n");
fprintf(stderr,

@ -3,6 +3,8 @@
PLEASE DO NOT DEPEND ON THE CONTENTS OF THIS FILE, IT IS UNSTABLE.
"""
load("@rules_shell//shell:sh_test.bzl", "sh_test")
def conformance_test(
name,
testee,
@ -31,7 +33,7 @@ def conformance_test(
if maximum_edition:
args = args + ["--maximum_edition %s" % maximum_edition]
native.sh_test(
sh_test(
name = name,
srcs = ["//conformance:bazel_conformance_test_runner.sh"],
data = [testee] + failure_lists + [

@ -35,3 +35,4 @@ Recommended.*.FieldMaskNumbersDontRoundTrip.JsonOutput
Recommended.*.FieldMaskPathsDontRoundTrip.JsonOutput # Should have failed to serialize, but didn't.
Recommended.*.FieldMaskTooManyUnderscore.JsonOutput # Should have failed to serialize, but didn't.
Recommended.*.JsonInput.FieldMaskInvalidCharacter # Should have failed to parse, but didn't.
Required.*.JsonInput.Int32FieldQuotedExponentialValue.* # Failed to parse input or produce output.

@ -43,3 +43,7 @@ Required.*.JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotBool
Required.*.JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotInt # Should have failed to parse, but didn't.
Required.*.JsonInput.StringFieldNotAString # Should have failed to parse, but didn't.
Required.*.ProtobufInput.UnknownOrdering.ProtobufOutput # Unknown field mismatch
Required.*.ProtobufInput.UnmatchedEndGroup # Should have failed to parse, but didn't.
Required.*.ProtobufInput.UnmatchedEndGroupUnknown # Should have failed to parse, but didn't.
Required.*.ProtobufInput.UnmatchedEndGroupWithData # Should have failed to parse, but didn't.
Required.*.ProtobufInput.UnmatchedEndGroupWrongType # Should have failed to parse, but didn't.

@ -6,3 +6,7 @@
Required.*.ProtobufInput.PrematureEofInDelimitedDataForKnownNonRepeatedValue.MESSAGE # Should have failed to parse, but didn't.
Required.*.ProtobufInput.PrematureEofInDelimitedDataForKnownRepeatedValue.MESSAGE # Should have failed to parse, but didn't.
Required.*.ProtobufInput.UnmatchedEndGroup # Should have failed to parse, but didn't.
Required.*.ProtobufInput.UnmatchedEndGroupUnknown # Should have failed to parse, but didn't.
Required.*.ProtobufInput.UnmatchedEndGroupWithData # Should have failed to parse, but didn't.
Required.*.ProtobufInput.UnmatchedEndGroupWrongType # Should have failed to parse, but didn't.

@ -144,3 +144,7 @@ Required.Editions_Proto2.ProtobufInput.UnknownOrdering.ProtobufOutput
Required.Editions_Proto3.ProtobufInput.UnknownOrdering.ProtobufOutput
Required.Proto2.ProtobufInput.UnknownOrdering.ProtobufOutput
Required.Proto3.ProtobufInput.UnknownOrdering.ProtobufOutput
Required.*.ProtobufInput.UnmatchedEndGroup # Should have failed to parse, but didn't.
Required.*.ProtobufInput.UnmatchedEndGroupUnknown # Should have failed to parse, but didn't.
Required.*.ProtobufInput.UnmatchedEndGroupWithData # Should have failed to parse, but didn't.
Required.*.ProtobufInput.UnmatchedEndGroupWrongType # Should have failed to parse, but didn't.

@ -0,0 +1,11 @@
Required.*.JsonInput.DoubleFieldEmptyString # Should have failed to parse, but didn't.
Required.*.JsonInput.FloatFieldEmptyString # Should have failed to parse, but didn't.
Required.*.JsonInput.Int32FieldEmptyString # Should have failed to parse, but didn't.
Required.*.JsonInput.Int64FieldEmptyString # Should have failed to parse, but didn't.
Required.*.JsonInput.Uint32FieldEmptyString # Should have failed to parse, but didn't.
Required.*.JsonInput.Uint64FieldEmptyString # Should have failed to parse, but didn't.
Required.*.JsonInput.DoubleFieldStringValueNonNumeric # Should have failed to parse, but didn't.
Required.*.JsonInput.DoubleFieldStringValuePartiallyNumeric # Should have failed to parse, but didn't.
Required.*.JsonInput.FloatFieldStringValueNonNumeric # Should have failed to parse, but didn't.
Required.*.JsonInput.FloatFieldStringValuePartiallyNumeric # Should have failed to parse, but didn't.
Required.*.JsonInput.Int32FieldQuotedExponentialValue.* # Failed to parse input or produce output.

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save