Merge branch 'main' into patch-1

pull/17399/head
Toby Schneider, 2 months ago, committed by GitHub
commit 77bea54bdc
Changed files (lines changed in parentheses):
  1. .bazelrc (6)
  2. .bcr/metadata.template.json (77)
  3. .bcr/presubmit.yml (8)
  4. .github/dependabot.yml (4)
  5. .github/scripts/validate_yaml.py (67)
  6. .github/scripts/validate_yaml_test.sh (9)
  7. .github/workflows/janitor.yml (3)
  8. .github/workflows/staleness_check.yml (19)
  9. .github/workflows/test_bazel.yml (30)
  10. .github/workflows/test_cpp.yml (134)
  11. .github/workflows/test_csharp.yml (8)
  12. .github/workflows/test_java.yml (93)
  13. .github/workflows/test_objectivec.yml (45)
  14. .github/workflows/test_php.yml (77)
  15. .github/workflows/test_php_ext.yml (22)
  16. .github/workflows/test_python.yml (26)
  17. .github/workflows/test_release_branches.yml (25)
  18. .github/workflows/test_ruby.yml (55)
  19. .github/workflows/test_runner.yml (114)
  20. .github/workflows/test_rust.yml (15)
  21. .github/workflows/test_upb.yml (80)
  22. .github/workflows/test_yaml.yml (25)
  23. BUILD.bazel (7)
  24. CMakeLists.txt (2)
  25. Cargo.bazel.lock (78)
  26. Cargo.lock (8)
  27. MODULE.bazel (172)
  28. Protobuf-C++.podspec (2)
  29. Protobuf.podspec (4)
  30. WORKSPACE (55)
  31. WORKSPACE.bzlmod (45)
  32. bazel/BUILD.bazel (1)
  33. bazel/cc_proto_library.bzl (9)
  34. bazel/common/BUILD (16)
  35. bazel/common/proto_common.bzl (354)
  36. bazel/common/proto_info.bzl (6)
  37. bazel/common/proto_lang_toolchain_info.bzl (25)
  38. bazel/java_lite_proto_library.bzl (15)
  39. bazel/java_proto_library.bzl (15)
  40. bazel/private/BUILD (140)
  41. bazel/private/BUILD.bazel (42)
  42. bazel/private/bazel_cc_proto_library.bzl (197)
  43. bazel/private/bazel_java_proto_library_rule.bzl (164)
  44. bazel/private/bazel_proto_library_rule.bzl (356)
  45. bazel/private/cc_proto_support.bzl (141)
  46. bazel/private/java_lite_proto_library.bzl (178)
  47. bazel/private/java_proto_support.bzl (62)
  48. bazel/private/native.bzl (2)
  49. bazel/private/native_bool_flag.bzl (35)
  50. bazel/private/proto_bazel_features.bzl (59)
  51. bazel/private/proto_info.bzl (186)
  52. bazel/private/proto_lang_toolchain_rule.bzl (155)
  53. bazel/private/proto_toolchain_rule.bzl (14)
  54. bazel/private/toolchain_helpers.bzl (49)
  55. bazel/private/toolchains/BUILD.bazel (74)
  56. bazel/private/upb_proto_library_internal/aspect.bzl (56)
  57. bazel/private/upb_proto_library_internal/cc_library_func.bzl (26)
  58. bazel/proto_library.bzl (21)
  59. bazel/py_proto_library.bzl (18)
  60. bazel/tests/BUILD (5)
  61. bazel/tests/proto_common_compile_tests.bzl (368)
  62. bazel/tests/testdata/BUILD (135)
  63. bazel/tests/testdata/compile_rule.bzl (57)
  64. bazel/toolchains/BUILD (13)
  65. bazel/toolchains/proto_lang_toolchain.bzl (17)
  66. bazel/toolchains/proto_toolchain.bzl (10)
  67. bazel/upb_c_proto_library.bzl (4)
  68. bazel/upb_minitable_proto_library.bzl (2)
  69. bazel/upb_proto_reflection_library.bzl (2)
  70. benchmarks/BUILD (7)
  71. benchmarks/build_defs.bzl (6)
  72. build_defs/cpp_opts.bzl (2)
  73. build_defs/java_opts.bzl (16)
  74. ci/Linux.bazelrc (1)
  75. ci/common.bazelrc (6)
  76. ci/macOS.bazelrc (1)
  77. cmake/conformance.cmake (2)
  78. cmake/install.cmake (7)
  79. cmake/protobuf-generate.cmake (9)
  80. cmake/upb.pc.cmake (10)
  81. cmake/upb_generators.cmake (1)
  82. compatibility/BUILD.bazel (144)
  83. compatibility/buf.yaml (1)
  84. compatibility/v3.25.0/generic_test_protos-speed.srcjar (binary)
  85. compatibility/v3.25.0/java_test_protos-speed.srcjar (binary)
  86. compatibility/v3.25.0/lite_test_protos-speed.srcjar (binary)
  87. conformance/BUILD.bazel (37)
  88. conformance/ConformanceJava.java (4)
  89. conformance/ConformanceJavaLite.java (4)
  90. conformance/autoload.php (8)
  91. conformance/binary_json_conformance_suite.cc (171)
  92. conformance/conformance.proto (14)
  93. conformance/conformance_cpp.cc (14)
  94. conformance/conformance_php.php (1)
  95. conformance/conformance_python.py (44)
  96. conformance/conformance_rust.rs (61)
  97. conformance/conformance_test.cc (414)
  98. conformance/conformance_test.h (137)
  99. conformance/conformance_test_runner.cc (199)
  100. conformance/failure_list_cpp.txt (130)
Some files were not shown because too many files have changed in this diff.

@@ -1,5 +1,9 @@
build --cxxopt=-std=c++17 --host_cxxopt=-std=c++17
# Needed for java_lite_proto_library, which uses ProguardSpecProvider.
# Once the provider is ported to Starlark, the flag may be removed.
common --experimental_google_legacy_api
build:dbg --compilation_mode=dbg
build:opt --compilation_mode=opt
@@ -22,6 +26,8 @@ build:ubsan --action_env=UBSAN_OPTIONS=halt_on_error=1:print_stacktrace=1
# Workaround for the fact that Bazel links with $CC, not $CXX
# https://github.com/bazelbuild/bazel/issues/11122#issuecomment-613746748
build:ubsan --copt=-fno-sanitize=function --copt=-fno-sanitize=vptr
# Abseil passes nullptr to memcmp with 0 size
build:ubsan --copt=-fno-sanitize=nonnull-attribute
# TODO: migrate all dependencies from WORKSPACE to MODULE.bazel
# https://github.com/protocolbuffers/protobuf/issues/14313

@@ -1,6 +1,11 @@
{
"homepage": "https://github.com/protocolbuffers/protobuf",
"maintainers": [
{
"email": "protobuf-packages@google.com",
"github": "protobuf-team-bot",
"name": "Protobuf Team"
},
{
"email": "sandyzhang@google.com",
"github": "zhangskz",
@@ -15,6 +20,78 @@
"email": "gberg@google.com",
"github": "googleberg",
"name": "Jerry Berg"
},
{
"email": "acozette@google.com",
"github": "acozette",
"name": "Adam Cozette",
"do_not_notify": true
},
{
"email": "deannagarcia@google.com",
"github": "deannagarcia",
"name": "Deanna Garcia",
"do_not_notify": true
},
{
"email": "esrauch@google.com",
"github": "esrauchg",
"name": "Em Rauch",
"do_not_notify": true
},
{
"email": "haberman@google.com",
"github": "haberman",
"name": "Josh Haberman",
"do_not_notify": true
},
{
"email": "hongshin@google.com",
"github": "honglooker",
"name": "Hong Shin",
"do_not_notify": true
},
{
"email": "jatl@google.com",
"github": "JasonLunn",
"name": "Jason Lunn",
"do_not_notify": true
},
{
"email": "jieluo@google.com",
"github": "anandolee",
"name": "Jie Luo",
"do_not_notify": true
},
{
"email": "salo@google.com",
"github": "salo",
"name": "Eric Salo",
"do_not_notify": true
},
{
"email": "sbenza@google.com",
"github": "sbenza",
"name": "Samuel Benzaquen",
"do_not_notify": true
},
{
"email": "shaod@google.com",
"github": "shaod2",
"name": "Dennis Shao",
"do_not_notify": true
},
{
"email": "theodorerose@google.com",
"github": "theodorerose",
"name": "Theodore Rose",
"do_not_notify": true
},
{
"email": "tonyliaoss@google.com",
"github": "tonyliaoss",
"name": "Tony Liao",
"do_not_notify": true
}
],
"repository": ["github:protocolbuffers/protobuf"],

@@ -1,6 +1,6 @@
matrix:
platform: ["debian10", "macos", "ubuntu2004", "windows"]
bazel: [6.x, 7.x]
bazel: [7.x]
tasks:
verify_targets:
@@ -10,11 +10,12 @@ tasks:
build_flags:
- '--host_cxxopt=-std=c++14'
- '--cxxopt=-std=c++14'
- '--experimental_google_legacy_api'
build_targets:
- '@protobuf//:protobuf'
- '@protobuf//:protobuf_lite'
- '@protobuf//:protobuf_python'
- '@protobuf//:protobuf_rust'
- '@protobuf//:protobuf_java'
- '@protobuf//:protoc'
- '@protobuf//:test_messages_proto2_cc_proto'
- '@protobuf//:test_messages_proto3_cc_proto'
@@ -23,7 +24,7 @@ bcr_test_module:
module_path: "examples"
matrix:
platform: ["debian10", "macos", "ubuntu2004", "windows"]
bazel: [6.x, 7.x]
bazel: [7.x]
tasks:
run_test_module:
name: "Run test module"
@@ -32,5 +33,6 @@ bcr_test_module:
build_flags:
- '--host_cxxopt=-std=c++14'
- '--cxxopt=-std=c++14'
- '--experimental_google_legacy_api'
build_targets:
- "//..."

@@ -4,5 +4,5 @@ updates:
directory: "/"
schedule:
interval: "weekly"
# Allow up to 3 opened pull requests for github-actions versions
open-pull-requests-limit: 3
# Don't allow non-security PRs to be opened.
open-pull-requests-limit: 0

@@ -0,0 +1,67 @@
"""Validate the YAML files for GitHub Actions workflows.
TODO: b/359303016 - convert to use unittest
"""
import os
import re
import yaml
# Ensure every job is in the list of blocking jobs.
with open(
os.path.join(os.path.dirname(__file__), '../workflows/test_runner.yml'), 'r'
) as f:
data = yaml.safe_load(f)
# List of all YAML files that are used by jobs in the test_runner.yml file.
yaml_files = []
# Get a list of all jobs in the test_runner, except for the blocking job and
# the tag removal job, which is not always run.
all_jobs = list(data['jobs'].keys())
all_jobs.remove('all-blocking-tests')
all_jobs.remove('remove-tag')
passed = True
blocking_jobs = data['jobs']['all-blocking-tests']['needs']
for job in all_jobs:
if 'uses' in data['jobs'][job]:
yaml_files.append(
os.path.join(
os.path.dirname(__file__),
'../workflows',
os.path.basename(data['jobs'][job]['uses']),
)
)
if job not in blocking_jobs:
passed = False
raise ValueError('Job %s is not in the list of blocking jobs.' % job)
print('PASSED: All jobs are in the list of blocking jobs.')
# Ensure every job with a continuous prefix conditions every step on whether we
# are in a continuous run.
for file in yaml_files:
with open(file, 'r') as f:
data = yaml.safe_load(f)
jobs = data['jobs']
for job in jobs:
if 'steps' not in jobs[job]:
continue
continuous_condition = 'inputs.continuous-prefix' in jobs[job]['name']
steps = jobs[job]['steps']
for step in steps:
if continuous_condition and 'continuous-run' not in step.get('if', ''):
raise ValueError(
'Step %s in job %s does not check the continuous-run condition'
% (step['name'], job)
)
if not continuous_condition and 'continuous-run' in step.get('if', ''):
raise ValueError(
'Step %s in job %s checks the continuous-run condition but '
'the job does not contain the continuous-prefix'
% (step['name'], job)
)
print('PASSED: All steps in all jobs check the continuous-run condition.')
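
For reference, the convention this validator enforces is the pattern the workflow hunks below adopt: a matrix entry marked continuous-only has its displayed job name prefixed with inputs.continuous-prefix, and every step of such a job is gated on inputs.continuous-run. A minimal sketch of the pattern, condensed from the test_bazel.yml changes below (job and step names are illustrative only):

jobs:
  example-tests:
    strategy:
      matrix:
        include:
          - name: Debug
            continuous-only: true  # run on continuous only, skipped on presubmit
    # Empty prefix on presubmit; the runner-supplied prefix string on continuous runs.
    name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} ${{ matrix.name }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout pending changes
        # Run unless this entry is continuous-only and this is a presubmit run.
        if: ${{ !matrix.continuous-only || inputs.continuous-run }}
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}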

@@ -0,0 +1,9 @@
#!/bin/bash
source googletest.sh || exit 1
script=${TEST_SRCDIR}/google3/third_party/protobuf/github/validate_yaml
$script || die "Failed to execute $script"
echo "PASS"

@@ -45,7 +45,8 @@ jobs:
This issue is labeled `inactive` because the last activity was over
90 days ago.
90 days ago. This issue will be closed and archived after 14
additional days without activity.
close-issue-message: >
We triage inactive PRs and issues in order to make it easier to find
active work. If this issue should remain active or becomes active

@@ -6,6 +6,11 @@ on:
- cron: 0 10 * * *
workflow_call:
inputs:
continuous-run:
required: true
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: false
description: "The SHA key for the commit we want to run over"
@@ -15,20 +20,14 @@ on:
permissions: {}
jobs:
test:
strategy:
fail-fast: false
matrix:
branch: [main, 25.x, 27.x]
os: [{ name: Linux, value: ubuntu-latest}]
name: Test staleness ${{ matrix.os.name }} ${{ github.head_ref && 'PR' || matrix.branch }}
runs-on: ${{ matrix.os.value }}
name: Test staleness
runs-on: ubuntu-latest
if: ${{ github.event.repository.full_name == 'protocolbuffers/protobuf' }}
steps:
- name: Checkout ${{ github.head_ref && 'PR' || matrix.branch }}
- name: Checkout
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout || github.head_ref || matrix.branch }}
ref: ${{ inputs.safe-checkout || github.head_ref || github.ref }}
- name: Mark runs associated with commits
if: ${{ github.event_name != 'schedule' && github.event_name != 'workflow_dispatch' }}

@@ -3,10 +3,20 @@ name: Bazel Tests
on:
workflow_call:
inputs:
continuous-run:
required: true
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
continuous-prefix:
required: true
description: "The string continuous-only tests should be prefixed with when displaying test
results."
type: string
permissions:
contents: read
@@ -18,37 +28,45 @@ jobs:
matrix:
runner: [ ubuntu, windows, macos ]
bazelversion: [ '7.1.2' ]
bzlmod: [true, false ]
bzlmod: [ true, false ]
toolchain_resolution: [ "" ]
include:
- runner: ubuntu
bazelversion: '6.4.0'
bzlmod: true
# Not running Bazel 6 with bzlmod, because it doesn't support use_repo_rule in rules_jvm_external
bzlmod: false
- runner: ubuntu
bazelversion: '6.4.0'
bzlmod: false
toolchain_resolution: --incompatible_enable_proto_toolchain_resolution=true
- runner: ubuntu
bzlmod: true
toolchain_resolution: --incompatible_enable_proto_toolchain_resolution=true
runs-on: ${{ matrix.runner }}-latest
name: Examples ${{ matrix.runner }} ${{ matrix.bazelversion }}${{ matrix.bzlmod && ' (bzlmod)' || '' }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Examples ${{ matrix.runner }} ${{ matrix.bazelversion }}${{ matrix.bzlmod && ' (bzlmod)' || '' }} ${{ matrix.toolchain_resolution && ' (toolchain resolution)' || '' }}
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Windows startup flags
if: runner.os == 'Windows'
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run) }}
working-directory: examples
shell: bash
run: echo "startup --output_user_root=C:/ --windows_enable_symlinks" >> .bazelrc
- name: Configure Bazel version
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
working-directory: examples
shell: bash
run: echo "${{ matrix.bazelversion }}" > .bazelversion
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: examples
version: ${{ matrix.bazelversion }}
bash: cd examples && bazel build //... $BAZEL_FLAGS --enable_bzlmod=${{ matrix.bzlmod }}
bash: cd examples && bazel build //... $BAZEL_FLAGS --enable_bzlmod=${{ matrix.bzlmod }} ${{ matrix.toolchain_resolution }}
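
The continuous-run, safe-checkout, and continuous-prefix inputs declared at the top of this workflow are supplied by the calling workflow. The test_runner.yml hunks are not shown in this excerpt, so the caller below is a hypothetical sketch: the prefix literal and the check-tag output name are placeholders, not values taken from this diff.

jobs:
  bazel:
    uses: ./.github/workflows/test_bazel.yml
    with:
      # Empty string on presubmit; any non-empty string marks a continuous run.
      continuous-run: ${{ github.event_name == 'schedule' && 'continuous' || '' }}
      continuous-prefix: ${{ github.event_name == 'schedule' && '[Continuous]' || '' }}
      safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}  # hypothetical upstream job output
    secrets: inherit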

@@ -3,10 +3,21 @@ name: C++ Tests
on:
workflow_call:
inputs:
continuous-run:
required: true
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
continuous-prefix:
required: true
description: "The string continuous-only tests should be prefixed with when displaying test
results."
type: string
permissions:
contents: read
@@ -18,41 +29,51 @@ jobs:
matrix:
config:
- { name: Optimized, flags: --config=opt }
- { name: Debug, flags: --config=dbg }
- { name: Debug, flags: --config=dbg, continuous-only: true }
- { name: ASAN, flags: --config=asan, runner: ubuntu-22-4core }
- { name: MSAN, flags: --config=docker-msan, runner: ubuntu-22-4core }
- { name: TSAN, flags: --config=tsan, runner: ubuntu-22-4core }
- { name: UBSAN, flags: --config=ubsan }
- { name: No-RTTI, flags: --cxxopt=-fno-rtti }
- { name: MSAN, flags: --config=docker-msan, runner: ubuntu-22-4core, continuous-only: true }
- { name: TSAN, flags: --config=tsan, runner: ubuntu-22-4core, continuous-only: true }
- { name: UBSAN, flags: --config=ubsan, runner: ubuntu-22-4core, continuous-only: true }
- { name: No-RTTI, flags: --cxxopt=-fno-rtti, continuous-only: true }
include:
# Set defaults
- image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize@sha256:3d959f731dc5c54af4865c31ee2bd581ec40028adcdf4c038f3122581f595191
- targets: //pkg/... //src/... @com_google_protobuf_examples//... //third_party/utf8_range/...
- image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize:6.4.0-27cf7b86212020d7e552bc13b1e084abb971da75
- targets: //pkg/... //src/... @com_google_protobuf_examples//... //third_party/utf8_range/... //conformance:conformance_framework_tests
# Override cases with custom images
- config: { name: "Bazel7" }
- config: { name: "Bazel7", flags: --noenable_bzlmod }
cache_key: Bazel7
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.2-cf84e92285ca133b9c8104ad7b14d70e953cbb8e"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "Bazel7 with Bzlmod", flags: --enable_bzlmod --enable_workspace }
cache_key: Bazel7bzlmod
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.2-cf84e92285ca133b9c8104ad7b14d70e953cbb8e"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "TCMalloc" }
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/tcmalloc@sha256:1c5133455481f4d1bb8afa477029604f41f1a3c46cebe4d9958cf1af95b5c87c"
cache_key: TcMalloc
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/tcmalloc:6.4.0-27cf7b86212020d7e552bc13b1e084abb971da75"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "aarch64" }
cache_key: TcMalloc
targets: "//src/... //src/google/protobuf/compiler:protoc_aarch64_test //third_party/utf8_range/..."
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:6.3.0-aarch64-68e662b3a56b881804dc4e9d45f949791cbc4b94"
name: Linux ${{ matrix.config.name }}
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:6.4.0-aarch64-08714ed7a713068c8418003a2d95f423d4b1eac9"
name: ${{ matrix.config.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.config.name }}
runs-on: ${{ matrix.config.runner || 'ubuntu-latest' }}
steps:
- name: Checkout pending changes
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: cpp_linux/${{ matrix.config.name }}
bazel-cache: cpp_linux/${{ matrix.cache_key }}
bazel: test ${{ matrix.targets }} ${{ matrix.config.flags }}
exclude-targets: ${{ matrix.exclude-targets }}
linux-gcc:
strategy:
@@ -69,10 +90,10 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:${{ matrix.version }}-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:6.4.0-${{ matrix.version }}-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: cpp_linux/gcc-${{ matrix.version }}
bazel: test //pkg/... //src/... @com_google_protobuf_examples//... //third_party/utf8_range/...
bazel: test //pkg/... //src/... @com_google_protobuf_examples//... //third_party/utf8_range/... //conformance:conformance_framework_tests
linux-release:
strategy:
@@ -91,7 +112,7 @@ jobs:
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-${{ matrix.arch }}
- name: Setup sccache
@@ -102,7 +123,7 @@
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:${{ matrix.arch }}-384d5abe83a791c6b1ce04f5d7bc0b1f84a30d38
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:6.4.0-${{ matrix.arch }}-08714ed7a713068c8418003a2d95f423d4b1eac9
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
entrypoint: bash
command: >
@@ -123,32 +144,37 @@ jobs:
- flags: -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=14
- name: Ninja
flags: -G Ninja -DCMAKE_CXX_STANDARD=14
continuous-only: true
- name: Shared
flags: -Dprotobuf_BUILD_SHARED_LIBS=ON -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=14
continuous-only: true
- name: C++17
flags: -DCMAKE_CXX_STANDARD=17
# TODO Re-enable this.
#- name: C++20
# flags: -DCMAKE_CXX_STANDARD=20
name: Linux CMake ${{ matrix.name}}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux CMake ${{ matrix.name}}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup sccache
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-384d5abe83a791c6b1ce04f5d7bc0b1f84a30d38
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/test.sh ${{ matrix.flags}} ${{ env.SCCACHE_CMAKE_FLAGS }}
@@ -174,7 +200,7 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-384d5abe83a791c6b1ce04f5d7bc0b1f84a30d38
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/install.sh -DCMAKE_CXX_STANDARD=14 ${{ env.SCCACHE_CMAKE_FLAGS }}
@@ -188,25 +214,29 @@ jobs:
-DCMAKE_CXX_STANDARD=14
-Dprotobuf_USE_EXTERNAL_GTEST=ON -Dprotobuf_ABSL_PROVIDER=package
# This test should always be skipped on presubmit
linux-cmake-examples:
name: Linux CMake Examples
name: ${{ inputs.continuous-prefix }} Linux CMake Examples
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
if: ${{ inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup sccache
if: ${{ inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake-examples
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
if: ${{ inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-384d5abe83a791c6b1ce04f5d7bc0b1f84a30d38
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/install.sh -DCMAKE_CXX_STANDARD=14 ${{ env.SCCACHE_CMAKE_FLAGS }}
@@ -226,28 +256,32 @@ jobs:
flags: -DCMAKE_CXX_STANDARD=14
- name: C++17
flags: -DCMAKE_CXX_STANDARD=17
continuous-only: true
- name: C++20
flags: -DCMAKE_CXX_STANDARD=20
name: Linux CMake GCC ${{ matrix.name }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux CMake GCC ${{ matrix.name }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup sccache
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake-gcc
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:12.2-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:6.4.0-12.2-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
entrypoint: bash
command: >-
@@ -278,7 +312,7 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-384d5abe83a791c6b1ce04f5d7bc0b1f84a30d38
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/test.sh ${{ env.SCCACHE_CMAKE_FLAGS }}
@@ -303,7 +337,7 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:8275360dc5d676f3470872d79087901c0e4153453976bea908a92c82e8d209ea
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:429f924aec315704b4233adcbe4b29006116f27769db98acd176b9eb69c31299
platform: linux/386
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
@@ -323,41 +357,45 @@ jobs:
- name: MacOS Bazel
os: macos-12
cache_key: macos-12
bazel: test //src/... //third_party/utf8_range/...
bazel: test //src/... //third_party/utf8_range/... //conformance:conformance_framework_tests
- name: MacOS Bazel 7
os: macos-12
cache_key: macos-12-bazel7
bazel: test //src/... //third_party/utf8_range/...
bazel: test //src/... //third_party/utf8_range/... //conformance:conformance_framework_tests
bazel_version: '7.1.2'
continuous-only: true
- name: MacOS Apple Silicon (build only) Bazel
os: macos-12
cache_key: macos-12-arm
# Current github runners are all Intel based, so just build/compile
# for Apple Silicon to detect issues there.
bazel: build --cpu=darwin_arm64 //src/... //third_party/utf8_range/...
bazel: build --cpu=darwin_arm64 //src/... //third_party/utf8_range/... //conformance:conformance_framework_tests
- name: Windows Bazel
os: windows-2022
cache_key: windows-2022
bazel: test //src/... @com_google_protobuf_examples//... --test_tag_filters=-conformance --build_tag_filters=-conformance
- name: Windows Bazel 7
os: windows-2022
cache_key: windows-2022-bazel7
bazel: test //src/... @com_google_protobuf_examples//... --test_tag_filters=-conformance --build_tag_filters=-conformance
bazel_version: '7.1.2'
name: ${{ matrix.name }}
continuous-only: true
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} ${{ matrix.name }}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel: ${{ matrix.bazel }}
bazel-cache: cpp_${{ matrix.cache_key }}
version: ${{ matrix.bazel_version || '6.3.0' }}
version: ${{ matrix.bazel_version || '6.4.0' }}
non-linux-cmake:
strategy:
@@ -377,6 +415,7 @@ jobs:
-Dprotobuf_BUILD_EXAMPLES=ON
vsversion: '2022'
cache-prefix: windows-2022-cmake
continuous-only: true
- name: Windows CMake 2019
os: windows-2019
flags: >-
@@ -387,6 +426,7 @@ jobs:
cache-prefix: windows-2019-cmake
# windows-2019 has python3.7 installed, which is incompatible with the latest gcloud
python-version: '3.9'
continuous-only: true
- name: Windows CMake 32-bit
os: windows-2022
flags: >-
@@ -394,6 +434,7 @@ jobs:
vsversion: '2022'
windows-arch: 'win32'
cache-prefix: windows-2022-win32-cmake
continuous-only: true
- name: Windows CMake Shared
os: windows-2022
flags: >-
@@ -410,34 +451,43 @@ jobs:
-Dprotobuf_BUILD_PROTOBUF_BINARIES=OFF
vsversion: '2022'
cache-prefix: windows-2022-cmake
name: ${{ matrix.name }}
continuous-only: true
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} ${{ matrix.name }}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout pending changes
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run) }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup MSVC
if: ${{ runner.os == 'Windows' }}
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run) }}
uses: ilammy/msvc-dev-cmd@cec98b9d092141f74527d0afa6feb2af698cfe89 # v1.12.1
with:
arch: ${{ matrix.windows-arch || 'x64' }}
vsversion: ${{ matrix.vsversion }}
# Workaround for Abseil incompatibility with CMake 3.30 (b/352354235).
- name: Downgrade CMake
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run)}}
run: choco install cmake --version 3.29.6 --force
shell: bash
# Workaround for incompatibility between gcloud and windows-2019 runners.
- name: Install Python
if: ${{ matrix.python-version }}
if: ${{ matrix.python-version && (!matrix.continuous-only || inputs.continuous-run) }}
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: ${{ matrix.python-version }}
- name: Use custom python for gcloud
if: ${{ matrix.python-version }}
if: ${{ matrix.python-version && (!matrix.continuous-only || inputs.continuous-run) }}
run: echo "CLOUDSDK_PYTHON=${Python3_ROOT_DIR}\\python3" >> $GITHUB_ENV
shell: bash
- name: Setup sccache
if: ${{ runner.os == 'Windows' && (!matrix.continuous-only || inputs.continuous-run) }}
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: ${{ matrix.cache-prefix }}
@@ -445,42 +495,46 @@ jobs:
# Install phase.
- name: Configure CMake for install
if: matrix.install-flags
if: ${{ matrix.install-flags && (!matrix.continuous-only || inputs.continuous-run) }}
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: cmake . ${{ matrix.install-flags }} ${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_ALLOW_CCACHE=ON
- name: Build for install
if: matrix.install-flags
if: ${{ matrix.install-flags && (!matrix.continuous-only || inputs.continuous-run) }}
shell: bash
run: VERBOSE=1 cmake --build . --parallel 20
- name: Install
if: matrix.install-flags
if: ${{ matrix.install-flags && (!matrix.continuous-only || inputs.continuous-run) }}
shell: bash
run: cmake --build . --target install
- name: Report and clear sccache stats
if: matrix.install-flags
if: ${{ matrix.install-flags && (!matrix.continuous-only || inputs.continuous-run) }}
shell: bash
run: sccache -s && sccache -z
- name: Clear CMake cache
if: matrix.install-flags
if: ${{ matrix.install-flags && (!matrix.continuous-only || inputs.continuous-run) }}
shell: bash
run: cmake --build . --target clean && rm CMakeCache.txt
- name: Configure CMake
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: cmake . ${{ matrix.flags }} ${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_ALLOW_CCACHE=ON
- name: Build
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
shell: bash
run: VERBOSE=1 cmake --build . --parallel 20
- name: Test
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
shell: bash
run: ctest --verbose --parallel 20 -C Debug
- name: Report sccache stats
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
shell: bash
run: sccache -s

@@ -12,9 +12,11 @@ permissions:
contents: read
jobs:
# All C# jobs are currently run on presubmit
# If you wish to add continuous-only jobs you will need to import test-type above
linux:
name: Linux
runs-on: ubuntu-latest
runs-on: ubuntu-22-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@@ -25,7 +27,7 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:3.1.415-6.0.100-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:6.4.0-3.1.415-6.0.100-08714ed7a713068c8418003a2d95f423d4b1eac9
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
entrypoint: /bin/bash
command: >-
@@ -41,7 +43,7 @@ jobs:
- name: Run conformance tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:3.1.415-6.0.100-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:6.4.0-3.1.415-6.0.100-08714ed7a713068c8418003a2d95f423d4b1eac9
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: csharp_linux
bazel: test //csharp:conformance_test --action_env=DOTNET_CLI_TELEMETRY_OPTOUT=1 --test_env=DOTNET_CLI_HOME=/home/bazel

@@ -3,10 +3,21 @@ name: Java Tests
on:
workflow_call:
inputs:
continuous-run:
required: true
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
continuous-prefix:
required: true
description: "The string continuous-only tests should be prefixed with when displaying test
results."
type: string
permissions:
contents: read
@@ -18,38 +29,51 @@ jobs:
matrix:
include:
- name: OpenJDK 8
version: '8'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:8-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
cache_key: '8'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:6.4.0-8-27cf7b86212020d7e552bc13b1e084abb971da75
# TODO: b/318555165 - enable the layering check. Currently it does
# not work correctly with the toolchain in this Docker image.
targets: //java/... //java/internal:java_version //compatibility/... --features=-layering_check
- name: OpenJDK 11
version: '11'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:11-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
cache_key: '11'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:6.4.0-11-27cf7b86212020d7e552bc13b1e084abb971da75
targets: //java/... //java/internal:java_version //compatibility/...
continuous-only: true
- name: OpenJDK 17
version: '17'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:17-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
cache_key: '17'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:6.4.0-17-27cf7b86212020d7e552bc13b1e084abb971da75
targets: //java/... //java/internal:java_version //compatibility/...
- name: Bazel7
cache_key: 'bazel7nobzlmod'
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.2-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
targets: //java/... //java/internal:java_version //compatibility/...
flags: --noenable_bzlmod
- name: Bazel7 with Bzlmod
cache_key: 'bazel7bzlmod'
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.2-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
targets: //java/... //java/internal:java_version //compatibility/...
flags: --enable_bzlmod --enable_workspace
- name: aarch64
version: 'aarch64'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-63dd26c0c7a808d92673a3e52e848189d4ab0f17
cache_key: 'aarch64'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:6.4.0-aarch64-08714ed7a713068c8418003a2d95f423d4b1eac9
targets: //java/... //compatibility/... //src/google/protobuf/compiler:protoc_aarch64_test
name: Linux ${{ matrix.name }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.name }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
image: ${{ matrix.image }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: java_linux/${{ matrix.version }}
bazel: test ${{ matrix.targets }} --test_env=KOKORO_JAVA_VERSION
bazel-cache: java_linux/${{ matrix.cache_key }}
bazel: test ${{ matrix.targets }} ${{ matrix.flags }} --test_env=KOKORO_JAVA_VERSION
# TODO restore this test (or a better one) when gRPC has rebuilt with 26.x
# linkage-monitor:
@@ -69,52 +93,31 @@ jobs:
# # TODO: b/318555165 - enable the layering check. Currently it does
# # not work correctly with the toolchain in this Docker image.
# bazel: test --test_output=all //java:linkage_monitor --spawn_strategy=standalone --features=-layering_check
protobuf-bom:
name: Protobuf Maven BOM
runs-on: ubuntu-latest
runs-on: ubuntu-22-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Build protoc
id: build-protoc
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-x86_64
- name: Move protoc into place and clean up
run: |
mv ${{ steps.build-protoc.outputs.protoc }} protoc
sudo rm -rf _build
- name: Install snapshot version locally (not using generated pom.xml)
run: |
mvn -e -B -Dhttps.protocols=TLSv1.2 install -Dmaven.test.skip=true
working-directory: java
- name: Generate pom.xml files from the template
- name: Generate maven artifacts with bazel and install using maven
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:11-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:6.4.0-11-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: java_linux/11
# protobuf-java and protobuf-java-util are the member of the BOM
bash: |
bazel build //java/core:core_mvn-pom //java/util:util_mvn-pom
cp bazel-bin/java/core/core_mvn-pom.xml .
cp bazel-bin/java/util/util_mvn-pom.xml .
- name: Copy the generated pom.xml files to the local Maven repository
shell: bash
run: |
LOCAL_MAVEN_GROUP_DIR="${HOME}/.m2/repository/com/google/protobuf"
VERSION=$(grep "<version>" core_mvn-pom.xml | sed "s/<version>\(.*\)<\/version>/\1/" | xargs)
cp core_mvn-pom.xml ${LOCAL_MAVEN_GROUP_DIR}/protobuf-java/${VERSION}/protobuf-java-${VERSION}.pom
cp util_mvn-pom.xml ${LOCAL_MAVEN_GROUP_DIR}/protobuf-java-util/${VERSION}/protobuf-java-util-${VERSION}.pom
set -ex
bazel build //java:release $BAZEL_FLAGS
mvn install:install-file -Dfile=java/bom/pom.xml -DpomFile=java/bom/pom.xml
mvn install:install-file -Dfile=java/pom.xml -DpomFile=java/pom.xml
mvn install:install-file -Dfile=bazel-bin/java/core/core_mvn-project.jar -DpomFile=bazel-bin/java/core/core_mvn-pom.xml
mvn install:install-file -Dfile=bazel-bin/java/core/lite_mvn-project.jar -DpomFile=bazel-bin/java/core/lite_mvn-pom.xml
mvn install:install-file -Dfile=bazel-bin/java/kotlin-lite/kotlin-lite_mvn-project.jar -DpomFile=bazel-bin/java/kotlin-lite/kotlin-lite_mvn-pom.xml
mvn install:install-file -Dfile=bazel-bin/java/kotlin/kotlin_mvn-project.jar -DpomFile=bazel-bin/java/kotlin/kotlin_mvn-pom.xml
mvn install:install-file -Dfile=bazel-bin/java/util/util_mvn-project.jar -DpomFile=bazel-bin/java/util/util_mvn-pom.xml
- name: Clean up
run: |
sudo rm -rf _build
- name: Validate Protobuf BOM
uses: googleapis/java-cloud-bom/tests/validate-bom@fd56f04bb0bc581776a74031591f0b3bc5e7920a # v26.13.0
with:
bom-path: java/bom/pom.xml

@@ -3,10 +3,21 @@ name: Objective-C Tests
on:
workflow_call:
inputs:
continuous-run:
required: true
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
continuous-prefix:
required: true
description: "The string continuous-only tests should be prefixed with when displaying test
results."
type: string
permissions:
contents: read
@@ -27,24 +38,31 @@ jobs:
- platform: "iOS"
destination: "platform=iOS Simulator,name=iPhone 13,OS=latest"
xc_project: "ProtocolBuffers_iOS.xcodeproj"
# We run presubmits on all "Debug" entries, but not on "Release" entries
- xc_config: "Debug"
- xc_config: "Release"
continuous-only: true
name: Xcode ${{ matrix.platform}} ${{ matrix.xc_config }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Xcode ${{ matrix.platform}} ${{ matrix.xc_config }}
runs-on: macos-12
env:
DEVELOPER_DIR: /Applications/Xcode_14.1.app/Contents/Developer
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup ccache
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/ccache@v3
with:
cache-prefix: objectivec_${{ matrix.platform }}_${{ matrix.xc_config }}
support-modules: true
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bash@v3
env:
CC: ${{ github.workspace }}/ci/clang_wrapper
@@ -57,10 +75,10 @@ jobs:
-scheme ProtocolBuffers \
-configuration ${{ matrix.xc_config }} \
-destination "${{ matrix.destination }}" \
test \
| xcpretty
test
- name: Report ccache stats
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
shell: bash
run: ccache -s -v
@@ -68,24 +86,29 @@ jobs:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
PLATFORM: ["ios", "macos", "tvos", "watchos", "visionos"]
# Disabling visionOS for now: https://github.com/actions/runner-images/issues/10559
PLATFORM: ["ios", "macos", "tvos", "watchos"]
CONFIGURATION: ["Debug", "Release"]
include:
- OS: macos-12
XCODE: "14.1"
- OS: macos-14
PLATFORM: "visionos"
XCODE: "15.2"
name: CocoaPods ${{ matrix.PLATFORM }} ${{ matrix.CONFIGURATION }}
# We run presubmits on all "Debug" entries, but not on "Release" entries
- CONFIGURATION: "Debug"
- CONFIGURATION: "Release"
continuous-only: true
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} CocoaPods ${{ matrix.PLATFORM }} ${{ matrix.CONFIGURATION }}
runs-on: ${{ matrix.OS }}
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Xcode version
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: sudo xcode-select -switch /Applications/Xcode_${{ matrix.XCODE }}.app
- name: Pod lib lint
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
@@ -105,6 +128,7 @@ jobs:
- name: Optimized
flags: --config=opt
bazel_action: test
continuous-only: true
- name: Debug
flags: --config=dbg
bazel_action: test
@@ -113,6 +137,7 @@ jobs:
- name: Apple_Silicon_Optimized
flags: --config=opt --cpu=darwin_arm64
bazel_action: build
continuous-only: true
- name: Apple_Silicon_Debug
flags: --config=dbg --cpu=darwin_arm64
bazel_action: build
@@ -121,14 +146,16 @@ jobs:
include:
- platform: "macOS"
bazel_targets: //objectivec/...
name: Bazel ${{ matrix.platform }} ${{ matrix.config.name }}
name: ${{ matrix.config.continuous-only && inputs.continuous-prefix || '' }} Bazel ${{ matrix.platform }} ${{ matrix.config.name }}
runs-on: macos-12
steps:
- name: Checkout pending changes
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: bazel ${{ matrix.config.bazel_action }}
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}

@@ -4,10 +4,21 @@ name: PHP Tests
on:
workflow_call:
inputs:
continuous-run:
required: true
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
continuous-prefix:
required: true
description: "The string continuous-only tests should be prefixed with when displaying test
results."
type: string
permissions:
contents: read
@@ -26,40 +37,55 @@ jobs:
version: 8.1.14-dbg
version-short: "8.1"
command: composer test \&\& composer test_c
continuous-only: true
- name: 8.1 Memory Leak
version: 8.1.14-dbg
version-short: "8.1"
# Run specialized memory leak & multirequest tests.
command: composer test_c \&\& tests/multirequest.sh \&\& tests/memory_leak_test.sh
continuous-only: true
- name: 8.1 Valgrind
version: 8.1.14-dbg
version-short: "8.1"
command: composer test_valgrind
continuous-only: true
- name: 8.3 Optimized
version: "8.3.1"
version-short: "8.3"
command: composer test \&\& composer test_c
name: Linux ${{ matrix.name}}
runs-on: ubuntu-latest
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.name}}
runs-on: ubuntu-22-4core
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup composer
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/composer-setup@v3
with:
cache-prefix: php-${{ matrix.version-short }}
directory: php
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php:${{ matrix.version }}-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php:6.4.0-${{ matrix.version }}-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
extra-flags: -e COMPOSER_HOME=/workspace/composer-cache
command: ${{ matrix.command }}
- name: Run conformance tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php:6.4.0-${{ matrix.version }}-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: php_linux/${{ matrix.version }}
bazel: test //php:conformance_test //php:conformance_test_c --action_env=PATH --test_env=PATH
linux-32bit:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
@@ -73,34 +99,42 @@ jobs:
include:
- suffix: '-zts'
suffix_name: ' Thread Safe'
continuous-only: true
- test: 'test_c'
test_name: ' Extension'
continuous-only: true
- suffix: ''
test: 'test'
name: Linux 32-bit ${{ matrix.version}}${{ matrix.suffix_name }}${{ matrix.test_name }}
runs-on: ubuntu-latest
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux 32-bit ${{ matrix.version}}${{ matrix.suffix_name }}${{ matrix.test_name }}
runs-on: ubuntu-22-4core
env:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:836f2cedcfe351d9a30055076630408e61994fc7d783e8333a99570968990eeb
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:429f924aec315704b4233adcbe4b29006116f27769db98acd176b9eb69c31299
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Cross compile protoc for i386
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-i386
- name: Setup composer
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/composer-setup@v3
with:
cache-prefix: php-${{ matrix.version }}
directory: php
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: ${{ env.image }}
@@ -116,7 +150,7 @@ jobs:
linux-aarch64:
name: Linux aarch64
runs-on: ubuntu-latest
runs-on: ubuntu-22-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@@ -127,7 +161,7 @@ jobs:
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-aarch64
@@ -151,41 +185,57 @@ jobs:
composer test;
composer test_c'
- name: Run conformance tests
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: php_linux/${{ matrix.version }}
bazel: test //php:conformance_test //php:conformance_test_c --action_env=PATH --test_env=PATH
macos:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
version: ['8.2', '8.3']
include:
- version: '8.3'
name: MacOS PHP ${{ matrix.version }}
runs-on: macos-12
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} MacOS PHP ${{ matrix.version }}
# noop
runs-on: macos-13
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Uninstall problematic libgd
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: brew uninstall --ignore-dependencies gd
- name: Install dependencies
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: brew install coreutils gd
- name: Pin PHP version
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: shivammathur/setup-php@8872c784b04a1420e81191df5d64fbd59d3d3033 # 2.30.2
with:
php-version: ${{ matrix.version }}
- name: Check PHP version
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: php --version | grep ${{ matrix.version }} || (echo "Invalid PHP version - $(php --version)" && exit 1)
- name: Setup composer
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/composer-setup@v3
with:
cache-prefix: php-${{ matrix.version }}
directory: php
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
@@ -198,8 +248,9 @@ jobs:
popd
- name: Run conformance tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: php_macos/${{ matrix.version }}
bazel: test //php:conformance_test_c --action_env=PATH --test_env=PATH
bazel: test //php:conformance_test //php:conformance_test_c --action_env=PATH --test_env=PATH

@@ -3,10 +3,21 @@ name: PHP Extension Tests
on:
workflow_call:
inputs:
continuous-run:
required: true
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
continuous-prefix:
required: true
description: "The string continuous-only tests should be prefixed with when displaying test
results."
type: string
permissions:
contents: read # to fetch code (actions/checkout)
@@ -41,15 +52,22 @@ jobs:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
version: ["8.1", "8.2", "8.3"]
name: Build ${{ matrix.version }}
include:
- version: "8.1"
continuous-only: true
- version: "8.2"
continuous-only: true
- version: "8.3"
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Build ${{ matrix.version }}
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
name: protobuf-php-release
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php-extension:${{ matrix.version }}-a48f26c08d9a803dd0177dda63563f6ea6f7b2d4

@@ -3,10 +3,21 @@ name: Python Tests
on:
workflow_call:
inputs:
continuous-run:
required: true
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
continuous-prefix:
required: true
description: "The string continuous-only tests should be prefixed with when displaying test
results."
type: string
permissions:
contents: read
@@ -17,8 +28,7 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
type: [ Pure, C++]
# TODO: b/309627662 - Add coverage for Python 3.12.
version: ["3.8", "3.9", "3.10", "3.11"]
version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
include:
- type: Pure
targets: //python/... //python:python_version_test
@@ -32,18 +42,26 @@
# TODO Enable this once conformance tests are fixed.
flags: --define=use_fast_cpp_protos=true --test_tag_filters=-conformance
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-63dd26c0c7a808d92673a3e52e848189d4ab0f17
- version: "3.8"
- version: "3.9"
continuous-only: true
- version: "3.10"
continuous-only: true
- version: "3.11"
name: Linux ${{ matrix.type }} ${{ matrix.version }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.type }} ${{ matrix.version }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/python:{0}-63dd26c0c7a808d92673a3e52e848189d4ab0f17', matrix.version) }}
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/python:6.4.0-{0}-27cf7b86212020d7e552bc13b1e084abb971da75', matrix.version) }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: python_linux/${{ matrix.type }}_${{ matrix.version }}
bazel: test ${{ matrix.targets }} ${{ matrix.flags }} --test_env=KOKORO_PYTHON_VERSION

@@ -0,0 +1,25 @@
name: Release Branch Tests
on:
schedule:
# Run daily at 10 AM UTC (2 AM PDT)
- cron: 0 10 * * *
workflow_dispatch:
permissions: {}
jobs:
releases:
strategy:
fail-fast: false
matrix:
branch: [25.x, 28.x, 29.x]
runs-on: ubuntu-latest
permissions:
actions: write
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
name: Run Tests on ${{ matrix.branch }}
steps:
- run: gh workflow run test_runner.yml --ref ${{ matrix.branch }}

@@ -3,10 +3,21 @@ name: Ruby Tests
on:
workflow_call:
inputs:
continuous-run:
required: true
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
continuous-prefix:
required: true
description: "The string continuous-only tests should be prefixed with when displaying test
results."
type: string
permissions:
contents: read
@@ -20,31 +31,34 @@ jobs:
# Test both FFI and Native implementations on the highest and lowest
# Ruby versions for CRuby and JRuby, but only on Bazel 5.x.
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: NATIVE }
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: FFI }
- { name: Ruby 3.1, ruby: ruby-3.1.0 }
- { name: Ruby 3.2, ruby: ruby-3.2.0 }
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: FFI, continuous-only: true }
- { name: Ruby 3.1, ruby: ruby-3.1.0, continuous-only: true }
- { name: Ruby 3.2, ruby: ruby-3.2.0, continuous-only: true }
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: NATIVE }
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: FFI }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: NATIVE }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: FFI }
name: Linux ${{ matrix.name }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Linux ${{ matrix.name }} ${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:{0}-6.3.0-9848710ff1370795ee7517570a20b81e140112ec', matrix.ruby) }}
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:6.4.0-{0}-27cf7b86212020d7e552bc13b1e084abb971da75', matrix.ruby) }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: ruby_linux/${{ matrix.ruby }}_${{ matrix.bazel }}
bazel: test //ruby/... //ruby/tests:ruby_version --test_env=KOKORO_RUBY_VERSION --test_env=BAZEL=true ${{ matrix.ffi == 'FFI' && '--//ruby:ffi=enabled --test_env=PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI' || '' }}
# Useful tool for troubleshooting, but the action introduces flakes as well,
# e.g. https://github.com/actions/upload-artifact/issues/569
# - name: Archive log artifacts
# if: ${{ matrix.presubmit || inputs.test-type == 'continuous' }}
# uses: actions/upload-artifact@v4
# with:
# name: test-logs-${{ matrix.ruby }}_${{ matrix.ffi || 'NATIVE' }}
@ -52,7 +66,7 @@ jobs:
linux-32bit:
name: Linux 32-bit
runs-on: ubuntu-latest
runs-on: ubuntu-20-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@ -63,7 +77,7 @@ jobs:
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-i386
@ -74,7 +88,7 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/bin/bash -cex '
gem install bundler -v 2.5.6;
gem install bundler -v 2.5.13;
cd /workspace/ruby;
bundle;
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake;
@ -83,7 +97,7 @@ jobs:
linux-aarch64:
name: Linux aarch64
runs-on: ubuntu-latest
runs-on: ubuntu-20-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@ -94,7 +108,7 @@ jobs:
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-aarch64
@ -105,7 +119,7 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/bin/bash -cex '
gem install bundler -v 2.5.6;
gem install bundler -v 2.5.13;
cd /workspace/ruby;
bundle;
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake;
@ -121,35 +135,40 @@ jobs:
# Ruby versions for CRuby, but only on Bazel 5.x.
# Quote version numbers, otherwise 3.0 will render as 3
- { version: "3.0", ffi: NATIVE }
- { version: "3.0", ffi: FFI }
- { version: "3.1" }
- { version: "3.2" }
- { version: "3.0", ffi: FFI, continuous-only: true }
- { version: "3.1", continuous-only: true }
- { version: "3.2", continuous-only: true }
- { version: "3.3", ffi: NATIVE }
- { version: "3.3", ffi: FFI }
name: MacOS Ruby ${{ matrix.version }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} MacOS Ruby ${{ matrix.version }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
runs-on: macos-12
steps:
- name: Checkout pending changes
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Pin Ruby version
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: ruby/setup-ruby@961f85197f92e4842e3cb92a4f97bd8e010cdbaf # v1.165.0
with:
ruby-version: ${{ matrix.version }}
- name: Validate version
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: ruby --version | grep ${{ matrix.version }} || (echo "Invalid Ruby version - $(ruby --version)" && exit 1)
- name: Run tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: ruby_macos/${{ matrix.version }}
bazel: test //ruby/... --test_env=KOKORO_RUBY_VERSION=${{ matrix.version }} --test_env=BAZEL=true ${{ matrix.ffi == 'FFI' && '--//ruby:ffi=enabled --test_env=PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI' || '' }}
# This test should always be skipped on presubmit
test_ruby_gems:
strategy:
fail-fast: false
@ -165,17 +184,19 @@ jobs:
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: FFI }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: NATIVE }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: FFI }
name: Install ${{ matrix.name }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
name: ${{ inputs.continuous-prefix }} Install ${{ matrix.name }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
if: ${{ inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
if: ${{ inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:${{ matrix.ruby }}-6.3.0-9848710ff1370795ee7517570a20b81e140112ec
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:6.4.0-${{ matrix.ruby }}-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: ruby_install/${{ matrix.ruby }}_${{ matrix.bazel }}
bash: >

@ -48,7 +48,7 @@ on:
# manual
workflow_dispatch:
permissions:
contents: read
@ -57,8 +57,8 @@ concurrency:
cancel-in-progress: ${{ contains(fromJSON('["pull_request", "pull_request_target", "workflow_dispatch"]'), github.event_name) }}
jobs:
check-tag:
name: Check for Safety
set-vars:
name: Set Variables
# Avoid running tests twice on PR updates. If the PR is coming from our
# repository, it's safe and we can use `pull_request`. Otherwise, we should
@ -77,6 +77,15 @@ jobs:
# Store the sha for checkout so we can easily use it later. For safe
# events, this will be blank and use the defaults.
checkout-sha: ${{ steps.safe-checkout.outputs.sha }}
# Stores a string to be used as a boolean denoting whether this is a
# continuous run. An empty string denotes that the run is on presubmit,
# otherwise we are in a continuous run. This helps us determine which
# tests to block on.
continuous-run: ${{ steps.set-test-type-vars.outputs.continuous-run }}
# Stores a string that will serve as the prefix for all continuous tests.
# Either way we prepend "(Continuous)", but if we are in a presubmit
# run, we also mark them "[SKIPPED]".
continuous-prefix: ${{ steps.set-test-type-vars.outputs.continuous-prefix }}
steps:
- name: Check
# Trivially pass for safe PRs, and explicitly error for unsafe ones
@ -93,9 +102,20 @@ jobs:
${{ github.event_name != 'pull_request_target' }} ||
echo "sha=${{ github.event.pull_request.head.sha }}" >> $GITHUB_OUTPUT
- name: Set Test Type Variables
id: set-test-type-vars
run: |
if ([ "${{ github.event_name }}" == 'pull_request' ] || [ "${{ github.event_name }}" == 'pull_request_target' ]) && ${{ !contains(toJson(github.event.pull_request.body), '\n#test-continuous') }}; then
echo "continuous-run=" >> "$GITHUB_OUTPUT"
echo "continuous-prefix=[SKIPPED] (Continuous)" >> "$GITHUB_OUTPUT"
else
echo "continuous-run=continuous" >> "$GITHUB_OUTPUT"
echo "continuous-prefix=(Continuous)" >> "$GITHUB_OUTPUT"
fi
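
In Python terms the decision above reduces to the following sketch (simplified: the step actually
checks for a '\n#test-continuous' line in the JSON-encoded PR body):

# event_name and pr_body stand in for github.event_name and
# github.event.pull_request.body; the names are illustrative only.
def set_test_type_vars(event_name, pr_body):
    is_pr = event_name in ("pull_request", "pull_request_target")
    opted_in = "#test-continuous" in (pr_body or "")
    if is_pr and not opted_in:
        # Presubmit: continuous-only tests are skipped and labeled as such.
        return {"continuous-run": "", "continuous-prefix": "[SKIPPED] (Continuous)"}
    # Pushes, scheduled runs, and PRs that opt in get the continuous suite.
    return {"continuous-run": "continuous", "continuous-prefix": "(Continuous)"}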
remove-tag:
name: Remove safety tag
needs: [check-tag]
needs: [set-vars]
if: github.event.action == 'labeled'
runs-on: ubuntu-latest
permissions:
@ -106,103 +126,141 @@ jobs:
fail_on_error: true
labels: ':a: safe for tests'
validate-yaml:
name: Validate YAML
needs: [set-vars]
uses: ./.github/workflows/test_yaml.yml
with:
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
# Note: this pattern of passing the head sha is vulnerable to PWN requests for
# pull_request_target events. We carefully limit those workflows to require a
# human stamp before continuing.
bazel:
name: Bazel
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/test_bazel.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
cpp:
name: C++
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/test_cpp.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
java:
name: Java
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/test_java.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
python:
name: Python
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/test_python.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
ruby:
name: Ruby
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/test_ruby.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
php:
name: PHP
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/test_php.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
php-ext:
name: PHP Extension
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/test_php_ext.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
csharp:
name: C#
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/test_csharp.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
secrets: inherit
objectivec:
name: Objective-C
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/test_objectivec.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
rust:
name: Rust
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/test_rust.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
secrets: inherit
upb:
name: μpb
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/test_upb.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
continuous-prefix: ${{ needs.set-vars.outputs.continuous-prefix }}
secrets: inherit
staleness:
name: Staleness
needs: [check-tag]
needs: [set-vars]
uses: ./.github/workflows/staleness_check.yml
# Staleness tests have scheduled runs during off-hours to avoid race conditions.
if: ${{ github.event_name != 'schedule' }}
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
continuous-run: ${{ needs.set-vars.outputs.continuous-run }}
safe-checkout: ${{ needs.set-vars.outputs.checkout-sha }}
secrets: inherit
# This test depends on all blocking tests and indicates whether they all succeeded.
all-blocking-tests:
name: All Blocking Tests${{ github.event_name == 'pull_request_target' && ' (fork)' || ''}}
needs: [set-vars, validate-yaml, bazel, cpp, java, python, ruby, php, php-ext, csharp, objectivec, rust, upb, staleness]
runs-on: ubuntu-latest
steps:
- name: Check test results
run: "${{ !contains(join(needs.*.result, ' '), 'failure') && !contains(join(needs.*.result, ' '), 'cancelled') }}"
# This workflow must run even if one or more of the dependent workflows
# failed.
if: always()

@ -12,9 +12,10 @@ permissions:
contents: read
jobs:
# This job should be run on presubmit. If any continuous-only tests are added, we will need to input test-type above.
linux:
name: Linux
runs-on: ubuntu-latest
runs-on: ubuntu-22-4core
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
@ -27,6 +28,16 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: rust_linux
bazel: >-
test //rust:protobuf_upb_test //rust:protobuf_cpp_test
test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 --@rules_rust//rust/settings:experimental_use_cc_common_link=True
//rust:all
//rust/test/cpp/interop/...
//rust/test/rust_proto_library_unit_test:rust_upb_aspect_test
//src/google/protobuf/compiler/rust/...
- name: Run Cargo tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: "us-docker.pkg.dev/protobuf-build/containers/release/linux/rust:6.3.0-1.74.0-8858126dd9480abf91e6ce8d6e41a5cd3c03882c"
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: rust_linux
bazel: >-
run --crosstool_top=//toolchain:clang_suite --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //rust:cargo_test

@ -3,10 +3,21 @@ name: μpb Tests
on:
workflow_call:
inputs:
continuous-run:
required: true
description: "Boolean string denoting whether this run is continuous --
empty string for presubmit, non-empty string for continuous."
type: string
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
continuous-prefix:
required: true
description: "The string continuous-only tests should be prefixed with when displaying test
results."
type: string
permissions:
contents: read
@ -17,27 +28,29 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
config:
- { name: "Bazel 7", bazel_version: "7.1.1" }
- { name: "Bazel 7", bazel_version: "7.1.1", continuous-only: true }
- { name: "Fastbuild" }
- { name: "Optimized", flags: "-c opt" }
- { name: "Optimized", flags: "-c opt", continuous-only: true }
- { name: "ASAN", flags: "--config=asan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/...", runner: ubuntu-22-4core }
- { name: "UBSAN", flags: "--config=ubsan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/... -//lua/..." }
- { name: "UBSAN", flags: "--config=ubsan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/... -//lua/...", continuous-only: true }
- { name: "32-bit", flags: "--copt=-m32 --linkopt=-m32", exclude-targets: "-//benchmarks:benchmark -//python/..." }
# TODO: Add 32-bit ASAN test
# TODO: Restore the FastTable tests
name: ${{ matrix.config.name }}
name: ${{ matrix.config.continuous-only && inputs.continuous-prefix || '' }} ${{ matrix.config.name }}
runs-on: ${{ matrix.config.runner || 'ubuntu-latest' }}
steps:
- name: Checkout pending changes
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
if: ${{ !matrix.config.continuous-only || inputs.continuous-run }}
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize:${{ matrix.config.bazel_version || '6.3.0' }}-75f2a85ece6526cc3d54087018c0f1097d78d42b
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize:${{ matrix.config.bazel_version || '6.4.0' }}-27cf7b86212020d7e552bc13b1e084abb971da75
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: upb-bazel
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //bazel/... //benchmarks/... //lua/... //python/... //upb/... //upb_generator/... ${{ matrix.config.flags }}
@ -56,10 +69,13 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:12.2-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17"
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:6.4.0-12.2-27cf7b86212020d7e552bc13b1e084abb971da75"
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-gcc"
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 -c opt //bazel/... //benchmarks/... //lua/... //python/... //upb/... //upb_generator/...
bazel: >-
test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 -c opt
--copt="-Wno-error=maybe-uninitialized" --copt="-Wno-error=attributes"
//bazel/... //benchmarks/... //lua/... //python/... //upb/... //upb_generator/...
windows:
strategy:
@ -81,7 +97,7 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-windows"
bazel: test --cxxopt=/std:c++17 --host_cxxopt=/std:c++17 //upb/... //upb_generator/... //python/...
version: 6.3.0
version: 6.4.0
exclude-targets: -//python:conformance_test -//upb/reflection:def_builder_test
macos:
@ -108,7 +124,7 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-macos"
bazel: ${{ matrix.config.bazel-command }} --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 ${{ matrix.config.flags }} //bazel/... //benchmarks/... //lua/... //python/... //upb/... //upb_generator/...
version: 6.3.0
version: 6.4.0
no-python:
strategy:
@ -123,7 +139,7 @@ jobs:
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.4.0-cf84e92285ca133b9c8104ad7b14d70e953cbb8e
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-no-python"
bash: >-
@ -144,7 +160,7 @@ jobs:
- name: Build Wheels
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/release-containers/linux/apple:6.3.0-53225851b051e66f8543e972c143f35be757a181
image: us-docker.pkg.dev/protobuf-build/release-containers/linux/apple:6.4.0-5be0f4fde927ca702ed4cebe096bfb632d6d9a36
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: upb-bazel-python
bazel: build --crosstool_top=//toolchain:clang_suite --//toolchain:release=true --symlink_prefix=/ -c dbg //python/dist //python/dist:test_wheel //python/dist:source_wheel
@ -161,8 +177,6 @@ jobs:
path: python/requirements.txt
test_wheels:
name: Test Wheels
needs: build_wheels
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
@ -175,22 +189,24 @@ jobs:
- { os: macos-12, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'binary' }
- { os: macos-13, python-version: "3.12", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'source' }
- { os: macos-12, python-version: "3.8", architecture: x64, type: 'source' }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'source' }
- { os: macos-13, python-version: "3.12", architecture: x64, type: 'source' }
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'source', continuous-only: true }
- { os: macos-12, python-version: "3.8", architecture: x64, type: 'source', continuous-only: true }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'source', continuous-only: true }
- { os: macos-13, python-version: "3.12", architecture: x64, type: 'source', continuous-only: true }
# Windows uses the full API up until Python 3.10.
- { os: windows-2019, python-version: "3.8", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.9", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.10", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.11", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.12", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.8", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.9", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.10", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.11", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.12", architecture: x86, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.9", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.10", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.11", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.9", architecture: x64, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.10", architecture: x64, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.11", architecture: x64, type: 'binary', continuous-only: true }
- { os: windows-2019, python-version: "3.12", architecture: x64, type: 'binary' }
name: ${{ matrix.continuous-only && inputs.continuous-prefix || '' }} Test Wheels Python ${{ matrix.python-version }} ${{ matrix.os }} ${{ matrix.architecture }} ${{ matrix.type }}
needs: build_wheels
runs-on: ${{ matrix.os }}
if: ${{ github.event_name != 'pull_request_target' }}
defaults:
@ -198,20 +214,24 @@ jobs:
shell: bash
steps:
- name: Download Wheels
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: actions/download-artifact@v3
with:
name: python-wheels
path: wheels
- name: Download Requirements
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
uses: actions/download-artifact@v3
with:
name: requirements
path: requirements
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
with:
python-version: ${{ matrix.python-version }}
architecture: ${{ matrix.architecture }}
- name: Setup Python venv
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: |
python -m pip install --upgrade pip
python -m venv env
@ -221,24 +241,28 @@ jobs:
- name: Install tzdata
run: pip install tzdata
# Only needed on Windows; Linux ships with tzdata.
if: ${{ contains(matrix.os, 'windows') }}
if: ${{ contains(matrix.os, 'windows') && (!matrix.continuous-only || inputs.continuous-run) }}
- name: Install requirements
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: pip install -r requirements/requirements.txt
- name: Install Protobuf Binary Wheel
if: ${{ matrix.type == 'binary' && (!matrix.continuous-only || inputs.continuous-run) }}
run: pip install -vvv --no-index --find-links wheels protobuf
if: ${{ matrix.type == 'binary' }}
- name: Install Protobuf Source Wheel
if: ${{ matrix.type == 'source' && (!matrix.continuous-only || inputs.continuous-run) }}
run: |
cd wheels
tar -xzvf *.tar.gz
cd protobuf-*/
pip install .
if: ${{ matrix.type == 'source' }}
- name: Test that module is importable
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: python -v -c 'from google._upb import _message; assert "google._upb._message.MessageMeta" in str(_message.MessageMeta)'
- name: Install Protobuf Test Wheel
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: pip install -vvv --no-index --find-links wheels protobuftests
- name: Run the unit tests
if: ${{ !matrix.continuous-only || inputs.continuous-run }}
run: |
TESTS=$(pip show -f protobuftests | grep pb_unit_tests.*py$ | sed 's,/,.,g' | sed 's,\\,.,g' | sed -E 's,.py$,,g')
for test in $TESTS; do
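
The loop body is truncated in this hunk. The TESTS pipeline above converts file paths reported by
`pip show -f protobuftests` into importable module names; a plain-Python sketch of the same
transform:

import re

def to_module(path):
    # Mirrors: sed 's,/,.,g' | sed 's,\\,.,g' | sed -E 's,.py$,,g'
    return re.sub(r"\.py$", "", path.replace("/", ".").replace("\\", "."))

assert to_module("pb_unit_tests/descriptor_test.py") == "pb_unit_tests.descriptor_test"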

@ -0,0 +1,25 @@
name: Validate YAML
on:
workflow_call:
inputs:
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read
jobs:
validate-yaml:
name: Validate YAML
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run python validation script
run: |
python .github/scripts/validate_yaml.py
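
The validation script itself is outside this diff. A minimal sketch of the general shape, assuming
PyYAML and checking only that every workflow parses; the real .github/scripts/validate_yaml.py
enforces repository-specific rules:

import glob, sys

import yaml  # PyYAML, an assumed dependency for this sketch

def main():
    failures = 0
    for path in sorted(glob.glob(".github/workflows/*.yml")):
        try:
            with open(path) as f:
                yaml.safe_load(f)
        except yaml.YAMLError as e:
            print("%s: %s" % (path, e))
            failures += 1
    return 1 if failures else 0

if __name__ == "__main__":
    sys.exit(main())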

@ -1,6 +1,7 @@
# Bazel (https://bazel.build/) BUILD file for Protobuf.
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library")
load("@rules_license//rules:license.bzl", "license")
load("@rules_pkg//pkg:mappings.bzl", "pkg_files", "strip_prefix")
load("//bazel:cc_proto_library.bzl", "cc_proto_library")
load("//bazel:java_lite_proto_library.bzl", "java_lite_proto_library")
@ -17,6 +18,12 @@ exports_files([
"PrivacyInfo.xcprivacy",
])
license(
name = "license",
package_name = "protobuf",
license_text = ":LICENSE",
)
################################################################################
# Well Known Types Proto Library Rules
#

@ -84,7 +84,7 @@ if (protobuf_BUILD_SHARED_LIBS)
endif ()
# Version metadata
set(protobuf_VERSION_STRING "5.28.0")
set(protobuf_VERSION_STRING "5.30.0")
set(protobuf_DESCRIPTION "Protocol Buffers")
set(protobuf_CONTACT "protobuf@googlegroups.com")

@ -1,5 +1,5 @@
{
"checksum": "b4edbf28ea96b685a90948d9efaa14d55f0f01e27b5d774b3ecd6eff3c231517",
"checksum": "89c489aa74f633247650bf28b86db6ec53c041968fd91758693748f553ef102c",
"crates": {
"aho-corasick 1.1.2": {
"name": "aho-corasick",
@ -17,7 +17,7 @@
"crate_name": "aho_corasick",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -73,7 +73,7 @@
"crate_name": "autocfg",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -107,7 +107,7 @@
"crate_name": "direct_cargo_bazel_deps",
"crate_root": ".direct_cargo_bazel_deps.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -123,7 +123,7 @@
"deps": {
"common": [
{
"id": "googletest 0.11.0",
"id": "googletest 0.12.0",
"target": "googletest"
}
],
@ -145,15 +145,15 @@
"license_ids": [],
"license_file": null
},
"googletest 0.11.0": {
"googletest 0.12.0": {
"name": "googletest",
"version": "0.11.0",
"version": "0.12.0",
"package_url": "https://github.com/google/googletest-rust",
"repository": {
"Git": {
"remote": "https://github.com/google/googletest-rust",
"commitish": {
"Rev": "471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f"
"Rev": "b407f3b5774defb8917d714bfb7af485e117d621"
},
"strip_prefix": "googletest"
}
@ -164,7 +164,7 @@
"crate_name": "googletest",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -194,7 +194,7 @@
"proc_macro_deps": {
"common": [
{
"id": "googletest_macro 0.11.0",
"id": "googletest_macro 0.12.0",
"target": "googletest_macro"
},
{
@ -204,7 +204,7 @@
],
"selects": {}
},
"version": "0.11.0"
"version": "0.12.0"
},
"license": "Apache-2.0",
"license_ids": [
@ -212,15 +212,15 @@
],
"license_file": "LICENSE"
},
"googletest_macro 0.11.0": {
"googletest_macro 0.12.0": {
"name": "googletest_macro",
"version": "0.11.0",
"version": "0.12.0",
"package_url": "https://github.com/google/googletest-rust",
"repository": {
"Git": {
"remote": "https://github.com/google/googletest-rust",
"commitish": {
"Rev": "471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f"
"Rev": "b407f3b5774defb8917d714bfb7af485e117d621"
},
"strip_prefix": "googletest_macro"
}
@ -231,7 +231,7 @@
"crate_name": "googletest_macro",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -258,7 +258,7 @@
"selects": {}
},
"edition": "2021",
"version": "0.11.0"
"version": "0.12.0"
},
"license": "Apache-2.0",
"license_ids": [
@ -282,7 +282,7 @@
"crate_name": "memchr",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -329,7 +329,7 @@
"crate_name": "num_traits",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -341,7 +341,7 @@
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -374,6 +374,9 @@
"version": "0.2.17"
},
"build_script_attrs": {
"compile_data_glob": [
"**"
],
"data_glob": [
"**"
],
@ -410,7 +413,7 @@
"crate_name": "paste",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -422,7 +425,7 @@
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -448,6 +451,9 @@
"version": "1.0.14"
},
"build_script_attrs": {
"compile_data_glob": [
"**"
],
"data_glob": [
"**"
]
@ -475,7 +481,7 @@
"crate_name": "proc_macro2",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -487,7 +493,7 @@
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -523,6 +529,9 @@
"version": "1.0.69"
},
"build_script_attrs": {
"compile_data_glob": [
"**"
],
"data_glob": [
"**"
]
@ -550,7 +559,7 @@
"crate_name": "quote",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -605,7 +614,7 @@
"crate_name": "regex",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -687,7 +696,7 @@
"crate_name": "regex_automata",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -770,7 +779,7 @@
"crate_name": "regex_syntax",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -824,7 +833,7 @@
"crate_name": "rustversion",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -836,7 +845,7 @@
"crate_name": "build_script_build",
"crate_root": "build/build.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -862,6 +871,9 @@
"version": "1.0.14"
},
"build_script_attrs": {
"compile_data_glob": [
"**"
],
"data_glob": [
"**"
]
@ -889,7 +901,7 @@
"crate_name": "syn",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -958,7 +970,7 @@
"crate_name": "unicode_ident",
"crate_root": "src/lib.rs",
"srcs": {
"allow_empty": false,
"allow_empty": true,
"include": [
"**/*.rs"
]
@ -1092,8 +1104,8 @@
]
},
"direct_deps": [
"googletest 0.11.0",
"googletest 0.12.0",
"paste 1.0.14"
],
"direct_dev_deps": []
}
}

Cargo.lock (generated)

@ -27,8 +27,8 @@ dependencies = [
[[package]]
name = "googletest"
version = "0.11.0"
source = "git+https://github.com/google/googletest-rust?rev=471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f#471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f"
version = "0.12.0"
source = "git+https://github.com/google/googletest-rust?rev=b407f3b5774defb8917d714bfb7af485e117d621#b407f3b5774defb8917d714bfb7af485e117d621"
dependencies = [
"googletest_macro",
"num-traits",
@ -38,8 +38,8 @@ dependencies = [
[[package]]
name = "googletest_macro"
version = "0.11.0"
source = "git+https://github.com/google/googletest-rust?rev=471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f#471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f"
version = "0.12.0"
source = "git+https://github.com/google/googletest-rust?rev=b407f3b5774defb8917d714bfb7af485e117d621#b407f3b5774defb8917d714bfb7af485e117d621"
dependencies = [
"quote",
"syn",

@ -3,7 +3,7 @@
module(
name = "protobuf",
version = "28.0-dev", # Automatically updated on release
version = "30.0-dev", # Automatically updated on release
compatibility_level = 1,
repo_name = "com_google_protobuf",
)
@ -12,21 +12,85 @@ module(
# Bzlmod follows MVS:
# https://bazel.build/versions/6.0.0/build/bzlmod#version-resolution
# Thus the highest version in their module graph is resolved.
bazel_dep(name = "abseil-cpp", version = "20230802.0.bcr.1", repo_name = "com_google_absl")
bazel_dep(name = "bazel_skylib", version = "1.4.1")
bazel_dep(name = "jsoncpp", version = "1.9.5")
bazel_dep(name = "rules_cc", version = "0.0.9")
bazel_dep(name = "rules_fuzzing", version = "0.5.2")
bazel_dep(name = "rules_java", version = "5.3.5")
bazel_dep(name = "rules_jvm_external", version = "5.1")
bazel_dep(name = "rules_pkg", version = "0.7.0")
bazel_dep(name = "rules_python", version = "0.28.0")
bazel_dep(name = "rules_rust", version = "0.45.1")
bazel_dep(name = "platforms", version = "0.0.8")
bazel_dep(name = "zlib", version = "1.3.1")
# TODO: remove after toolchain types are moved to protobuf
bazel_dep(name = "rules_proto", version = "4.0.0")
bazel_dep(
name = "abseil-cpp",
version = "20230802.0.bcr.1",
repo_name = "com_google_absl",
)
bazel_dep(
name = "bazel_skylib",
version = "1.7.0",
)
bazel_dep(
name = "jsoncpp",
version = "1.9.5",
)
bazel_dep(
name = "rules_cc",
version = "0.0.9",
)
bazel_dep(
name = "rules_fuzzing",
version = "0.5.2",
)
bazel_dep(
name = "rules_java",
version = "7.11.1",
)
bazel_dep(
name = "rules_jvm_external",
version = "6.3",
)
bazel_dep(
name = "rules_kotlin",
version = "1.9.6",
)
bazel_dep(
name = "rules_license",
version = "1.0.0",
)
bazel_dep(
name = "rules_pkg",
version = "1.0.1",
)
bazel_dep(
name = "rules_python",
version = "0.28.0",
)
bazel_dep(
name = "rules_rust",
version = "0.51.0",
)
bazel_dep(
name = "platforms",
version = "0.0.8",
)
bazel_dep(
name = "zlib",
version = "1.3.1",
)
bazel_dep(
name = "bazel_features",
version = "1.17.0",
repo_name = "proto_bazel_features",
)
# Proto toolchains
register_toolchains("//bazel/private/toolchains:all")
SUPPORTED_PYTHON_VERSIONS = [
"3.8",
@ -35,7 +99,9 @@ SUPPORTED_PYTHON_VERSIONS = [
"3.11",
"3.12",
]
python = use_extension("@rules_python//python/extensions:python.bzl", "python")
[
python.toolchain(
is_default = python_version == SUPPORTED_PYTHON_VERSIONS[-1],
@ -43,8 +109,14 @@ python = use_extension("@rules_python//python/extensions:python.bzl", "python")
)
for python_version in SUPPORTED_PYTHON_VERSIONS
]
use_repo(python, system_python = "python_{}".format(SUPPORTED_PYTHON_VERSIONS[-1].replace(".", "_")))
use_repo(
python,
system_python = "python_{}".format(SUPPORTED_PYTHON_VERSIONS[-1].replace(".", "_")),
)
pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
[
pip.parse(
hub_name = "pip_deps",
@ -53,20 +125,84 @@ pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
)
for python_version in SUPPORTED_PYTHON_VERSIONS
]
use_repo(pip, "pip_deps")
rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(edition = "2021")
use_repo(rust, "rust_toolchains")
register_toolchains("@rust_toolchains//:all")
crate = use_extension("@rules_rust//crate_universe:extension.bzl", "crate")
crate.spec(
package = "googletest",
version = ">0.0.0",
)
crate.spec(
package = "paste",
version = ">=1",
)
crate.from_specs()
use_repo(crate, crate_index = "crates")
use_repo(
crate,
crate_index = "crates",
)
maven = use_extension("@rules_jvm_external//:extensions.bzl", "maven")
maven.install(
artifacts = [
"com.google.caliper:caliper:1.0-beta-3",
"com.google.code.findbugs:jsr305:3.0.2",
"com.google.code.gson:gson:2.8.9",
"com.google.errorprone:error_prone_annotations:2.5.1",
"com.google.j2objc:j2objc-annotations:2.8",
"com.google.guava:guava:32.0.1-jre",
"com.google.guava:guava-testlib:32.0.1-jre",
"com.google.truth:truth:1.1.2",
"junit:junit:4.13.2",
"org.mockito:mockito-core:4.3.1",
"biz.aQute.bnd:biz.aQute.bndlib:6.4.0",
"info.picocli:picocli:4.6.3",
],
repositories = [
"https://repo1.maven.org/maven2",
"https://repo.maven.apache.org/maven2",
],
)
use_repo(maven, "maven")
# Development dependencies
bazel_dep(
name = "googletest",
version = "1.14.0",
dev_dependency = True,
repo_name = "com_google_googletest",
)
bazel_dep(
name = "rules_buf",
version = "0.3.0",
dev_dependency = True,
)
bazel_dep(
name = "rules_testing",
version = "0.6.0",
dev_dependency = True,
)
# rules_proto is needed for @com_google_protobuf_v25.0 used in //compatibility/... tests
bazel_dep(
name = "rules_proto",
version = "4.0.0",
dev_dependency = True,
)

@ -1,6 +1,6 @@
Pod::Spec.new do |s|
s.name = 'Protobuf-C++'
s.version = '5.28.0'
s.version = '5.30.0'
s.summary = 'Protocol Buffers v3 runtime library for C++.'
s.homepage = 'https://github.com/google/protobuf'
s.license = 'BSD-3-Clause'

@ -5,7 +5,7 @@
# dependent projects use the :git notation to refer to the library.
Pod::Spec.new do |s|
s.name = 'Protobuf'
s.version = '3.28.0'
s.version = '3.30.0'
s.summary = 'Protocol Buffers v.3 runtime library for Objective-C.'
s.homepage = 'https://github.com/protocolbuffers/protobuf'
s.license = 'BSD-3-Clause'
@ -45,7 +45,7 @@ Pod::Spec.new do |s|
s.pod_target_xcconfig = { 'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS=1' }
s.ios.deployment_target = '10.0'
s.osx.deployment_target = '10.12'
s.osx.deployment_target = '10.13'
s.tvos.deployment_target = '12.0'
s.watchos.deployment_target = '6.0'
s.visionos.deployment_target = '1.0'

@ -41,10 +41,10 @@ http_archive(
http_archive(
name = "com_google_googletest",
sha256 = "730215d76eace9dd49bf74ce044e8daa065d175f1ac891cc1d6bb184ef94e565",
strip_prefix = "googletest-f53219cdcb7b084ef57414efea92ee5b71989558",
sha256 = "7315acb6bf10e99f332c8a43f00d5fbb1ee6ca48c52f6b936991b216c586aaad",
strip_prefix = "googletest-1.15.0",
urls = [
"https://github.com/google/googletest/archive/f53219cdcb7b084ef57414efea92ee5b71989558.tar.gz" # 2023-03-16
"https://github.com/google/googletest/releases/download/v1.15.0/googletest-1.15.0.tar.gz" # 2024-07-15
],
)
@ -99,11 +99,11 @@ load("@rules_cc//cc:repositories.bzl", "rules_cc_dependencies")
rules_cc_dependencies()
# For `kt_jvm_library`
load("@io_bazel_rules_kotlin//kotlin:repositories.bzl", "kotlin_repositories")
load("@rules_kotlin//kotlin:repositories.bzl", "kotlin_repositories")
kotlin_repositories()
load("@io_bazel_rules_kotlin//kotlin:core.bzl", "kt_register_toolchains")
load("@rules_kotlin//kotlin:core.bzl", "kt_register_toolchains")
kt_register_toolchains()
@ -202,8 +202,8 @@ fuzzing_py_deps_install_deps()
http_archive(
name = "rules_rust",
integrity = "sha256-F8U7+AC5MvMtPKGdLLnorVM84cDXKfDRgwd7/dq3rUY=",
urls = ["https://github.com/bazelbuild/rules_rust/releases/download/0.46.0/rules_rust-v0.46.0.tar.gz"],
integrity = "sha256-BCrPtzRpstGEj+FI2Bw0IsYepHqeGQDxyew29R6OcZM=",
urls = ["https://github.com/bazelbuild/rules_rust/releases/download/0.51.0/rules_rust-v0.51.0.tar.gz"],
)
load("@rules_rust//rust:repositories.bzl", "rules_rust_dependencies", "rust_register_toolchains")
@ -220,7 +220,7 @@ crates_repository(
packages = {
"googletest": crate.spec(
git = "https://github.com/google/googletest-rust",
rev = "471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f",
rev = "b407f3b5774defb8917d714bfb7af485e117d621",
),
"paste": crate.spec(
version = ">=1",
@ -238,10 +238,39 @@ http_archive(
url = "https://github.com/protocolbuffers/protobuf/releases/download/v25.0/protobuf-25.0.tar.gz",
)
# Needed as a dependency of @com_google_protobuf_v25.x, which was before
# utf8_range was merged in.
# Needed as a dependency of @com_google_protobuf_v25.0
load("@com_google_protobuf_v25.0//:protobuf_deps.bzl", protobuf_v25_deps="protobuf_deps")
protobuf_v25_deps()
# Needed for testing only
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "rules_testing",
sha256 = "02c62574631876a4e3b02a1820cb51167bb9cdcdea2381b2fa9d9b8b11c407c4",
strip_prefix = "rules_testing-0.6.0",
url = "https://github.com/bazelbuild/rules_testing/releases/download/v0.6.0/rules_testing-v0.6.0.tar.gz",
)
# For checking breaking changes to well-known types from the previous release version.
load("//:protobuf_version.bzl", "PROTOBUF_PREVIOUS_RELEASE")
http_archive(
name = "com_google_protobuf_previous_release",
strip_prefix = "protobuf-" + PROTOBUF_PREVIOUS_RELEASE,
url = "https://github.com/protocolbuffers/protobuf/releases/download/v{0}/protobuf-{0}.tar.gz".format(PROTOBUF_PREVIOUS_RELEASE),
)
http_archive(
name = "utf8_range",
strip_prefix = "utf8_range-d863bc33e15cba6d873c878dcca9e6fe52b2f8cb",
url = "https://github.com/protocolbuffers/utf8_range/archive/d863bc33e15cba6d873c878dcca9e6fe52b2f8cb.zip",
name = "rules_buf",
integrity = "sha256-Hr64Q/CaYr0E3ptAjEOgdZd1yc+cBjp7OG1wzuf3DIs=",
strip_prefix = "rules_buf-0.3.0",
urls = [
"https://github.com/bufbuild/rules_buf/archive/refs/tags/v0.3.0.zip",
],
)
load("@rules_buf//buf:repositories.bzl", "rules_buf_dependencies", "rules_buf_toolchains")
rules_buf_dependencies()
rules_buf_toolchains(version = "v1.32.1")

@ -0,0 +1,45 @@
# This is a WORKSPACE file used by bzlmod in combination with MODULE.bazel.
# It's used for a gradual migration and it should be empty.
# Don't remove this file. If the file doesn't exist, bzlmod falls back to the WORKSPACE file.
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
# TODO: either replace rules_ruby with a maintained version on BCR
# or use bzlmod extensions to depend on this specific repo
http_archive(
name = "rules_ruby",
urls = [
"https://github.com/protocolbuffers/rules_ruby/archive/b7f3e9756f3c45527be27bc38840d5a1ba690436.zip"
],
strip_prefix = "rules_ruby-b7f3e9756f3c45527be27bc38840d5a1ba690436",
sha256 = "347927fd8de6132099fcdc58e8f7eab7bde4eb2fd424546b9cd4f1c6f8f8bad8",
)
load("@rules_ruby//ruby:defs.bzl", "ruby_runtime")
ruby_runtime("system_ruby")
register_toolchains("@system_ruby//:toolchain")
# The following are only needed to run conformance tests, not to support them via MODULE.bazel
# For testing runtime against old gencode from a previous major version.
http_archive(
name = "com_google_protobuf_v25.0",
strip_prefix = "protobuf-25.0",
url = "https://github.com/protocolbuffers/protobuf/releases/download/v25.0/protobuf-25.0.tar.gz",
)
# Needed as a dependency of @com_google_protobuf_v25.0
load("@com_google_protobuf_v25.0//:protobuf_deps.bzl", protobuf_v25_deps="protobuf_deps")
protobuf_v25_deps()
# Needed for checking breaking changes from the previous release version.
load("//:protobuf_version.bzl", "PROTOBUF_PREVIOUS_RELEASE")
http_archive(
name = "com_google_protobuf_previous_release",
strip_prefix = "protobuf-" + PROTOBUF_PREVIOUS_RELEASE,
url = "https://github.com/protocolbuffers/protobuf/releases/download/v{0}/protobuf-{0}.tar.gz".format(PROTOBUF_PREVIOUS_RELEASE),
)

@ -40,6 +40,7 @@ bzl_library(
deps = [
"//bazel/common:proto_common_bzl",
"//bazel/common:proto_info_bzl",
"//bazel/private:toolchain_helpers_bzl",
"@rules_python//python:py_info_bzl",
],
)

@ -1,3 +1,10 @@
"""cc_proto_library rule"""
cc_proto_library = native.cc_proto_library
load("//bazel/private:bazel_cc_proto_library.bzl", _cc_proto_library = "cc_proto_library") # buildifier: disable=bzl-visibility
def cc_proto_library(**kwattrs):
# Use the Starlark rule only when the native one has been removed from Bazel
if not hasattr(native, "cc_proto_library"):
_cc_proto_library(**kwattrs)
else:
native.cc_proto_library(**kwattrs) # buildifier: disable=native-cc-proto
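
Call sites are unaffected by which implementation the macro selects. Hypothetical BUILD usage (a
sketch, not part of this change):

load("//bazel:cc_proto_library.bzl", "cc_proto_library")
load("//bazel:proto_library.bzl", "proto_library")

proto_library(
    name = "foo_proto",
    srcs = ["foo.proto"],
)

cc_proto_library(
    name = "foo_cc_proto",
    deps = [":foo_proto"],
)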

@ -1,5 +1,7 @@
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
package(default_applicable_licenses = ["//:license"])
bzl_library(
name = "proto_common_bzl",
srcs = [
@ -7,7 +9,9 @@ bzl_library(
],
visibility = ["//visibility:public"],
deps = [
"//bazel/private:native_bzl",
":proto_lang_toolchain_info_bzl",
"//bazel/private:toolchain_helpers_bzl",
"@proto_bazel_features//:features",
],
)
@ -29,6 +33,14 @@ bzl_library(
],
visibility = ["//visibility:public"],
deps = [
":proto_common.bzl",
"//bazel/private:native_bzl",
],
)
filegroup(
name = "bazel_osx_p4deps",
srcs = glob(["**"]) + ["@proto_bazel_features//:features"],
visibility = [
"//bazel:__pkg__",
],
)

@ -1,5 +1,355 @@
"""proto_common"""
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Definition of proto_common module, together with bazel providers for proto rules."""
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:native.bzl", "native_proto_common")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
proto_common = native_proto_common
def _import_virtual_proto_path(path):
"""Imports all paths for virtual imports.
They're of the form:
'bazel-out/k8-fastbuild/bin/external/foo/e/_virtual_imports/e' or
'bazel-out/foo/k8-fastbuild/bin/e/_virtual_imports/e'"""
if path.count("/") > 4:
return "-I%s" % path
return None
def _import_repo_proto_path(path):
"""Imports all paths for generated files in external repositories.
They are of the form:
'bazel-out/k8-fastbuild/bin/external/foo' or
'bazel-out/foo/k8-fastbuild/bin'"""
path_count = path.count("/")
if path_count > 2 and path_count <= 4:
return "-I%s" % path
return None
def _import_main_output_proto_path(path):
"""Imports all paths for generated files or source files in external repositories.
They're of the form:
'bazel-out/k8-fastbuild/bin'
'external/foo'
'../foo'
"""
if path.count("/") <= 2 and path != ".":
return "-I%s" % path
return None
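
The three classifiers partition each entry of transitive_proto_path by its slash count, most
specific first. Collapsed into one plain-Python function for illustration (a sketch; the real
code stays in Starlark):

def import_flag(path):
    n = path.count("/")
    if n > 4:  # virtual imports
        return "-I%s" % path
    if 2 < n <= 4:  # generated files in external repositories
        return "-I%s" % path
    if n <= 2 and path != ".":  # main output tree or external sources
        return "-I%s" % path
    return None  # "." is covered by the final "-I." that _compile adds

assert import_flag("bazel-out/k8-fastbuild/bin/external/foo/e/_virtual_imports/e") is not None
assert import_flag("bazel-out/k8-fastbuild/bin/external/foo") is not None
assert import_flag("../foo") is not None and import_flag(".") is None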
def _remove_repo(file):
"""Removes `../repo/` prefix from path, e.g. `../repo/package/path -> package/path`"""
short_path = file.short_path
workspace_root = file.owner.workspace_root
if workspace_root:
if workspace_root.startswith("external/"):
workspace_root = "../" + workspace_root.removeprefix("external/")
return short_path.removeprefix(workspace_root + "/")
return short_path
def _get_import_path(proto_file):
"""Returns the import path of a .proto file
This is the path as used for the file that can be used in an `import` statement in another
.proto file.
Args:
proto_file: (File) The .proto file
Returns:
(str) import path
"""
repo_path = _remove_repo(proto_file)
index = repo_path.find("_virtual_imports/")
if index >= 0:
index = repo_path.find("/", index + len("_virtual_imports/"))
repo_path = repo_path[index + 1:]
return repo_path
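
A plain-Python replica of the `_virtual_imports` stripping, carrying through the example from
`_remove_repo` above (a sketch):

def get_import_path(repo_path):
    # Strip everything up to and including "_virtual_imports/<target>/".
    index = repo_path.find("_virtual_imports/")
    if index >= 0:
        index = repo_path.find("/", index + len("_virtual_imports/"))
        repo_path = repo_path[index + 1:]
    return repo_path

assert get_import_path("pkg/_virtual_imports/p/dir/a.proto") == "dir/a.proto"
assert get_import_path("pkg/dir/a.proto") == "pkg/dir/a.proto"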
def _output_directory(proto_info, root):
proto_source_root = proto_info.proto_source_root
if proto_source_root.startswith(root.path):
#TODO: remove this branch when bin_dir is removed from proto_source_root
proto_source_root = proto_source_root.removeprefix(root.path).removeprefix("/")
if proto_source_root == "" or proto_source_root == ".":
return root.path
return root.path + "/" + proto_source_root
def _check_collocated(label, proto_info, proto_lang_toolchain_info):
"""Checks if lang_proto_library is collocated with proto_library.
Exceptions are allowed by an allowlist defined on `proto_lang_toolchain` and
by an allowlist defined on `proto_library`'s `allow_exports` attribute.
If the checks are not successful, the function fails.
Args:
label: (Label) The label of lang_proto_library
proto_info: (ProtoInfo) The ProtoInfo from the proto_library dependency.
proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
Obtained from a `proto_lang_toolchain` target.
"""
_PackageSpecificationInfo = bazel_features.globals.PackageSpecificationInfo
if not _PackageSpecificationInfo:
if proto_lang_toolchain_info.allowlist_different_package or getattr(proto_info, "allow_exports", None):
fail("Allowlist checks not supported before Bazel 6.4.0")
return
if (proto_info.direct_descriptor_set.owner.package != label.package and
proto_lang_toolchain_info.allowlist_different_package):
if not proto_lang_toolchain_info.allowlist_different_package[_PackageSpecificationInfo].contains(label):
fail(("lang_proto_library '%s' may only be created in the same package " +
"as proto_library '%s'") % (label, proto_info.direct_descriptor_set.owner))
if (proto_info.direct_descriptor_set.owner.package != label.package and
hasattr(proto_info, "allow_exports")):
if not proto_info.allow_exports[_PackageSpecificationInfo].contains(label):
fail(("lang_proto_library '%s' may only be created in the same package " +
"as proto_library '%s'") % (label, proto_info.direct_descriptor_set.owner))
def _compile(
actions,
proto_info,
proto_lang_toolchain_info,
generated_files,
plugin_output = None,
additional_args = None,
additional_tools = [],
additional_inputs = depset(),
additional_proto_lang_toolchain_info = None,
resource_set = None,
experimental_exec_group = None,
experimental_progress_message = None,
experimental_output_files = "legacy"):
"""Creates proto compile action for compiling *.proto files to language specific sources.
Args:
actions: (ActionFactory) Obtained by ctx.actions, used to register the actions.
proto_info: (ProtoInfo) The ProtoInfo from proto_library to generate the sources for.
proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
Obtained from a `proto_lang_toolchain` target or constructed ad-hoc.
generated_files: (list[File]) The output files generated by the proto compiler.
Callee needs to declare files using `ctx.actions.declare_file`.
See also: `proto_common.declare_generated_files`.
plugin_output: (File|str) Deprecated: Set `proto_lang_toolchain.output_files`
and remove the parameter.
For backwards compatibility, when the proto_lang_toolchain isn't updated
the value is used.
additional_args: (Args) Additional arguments to add to the action.
Accepts a ctx.actions.args() object that is added at the beginning
of the command line.
additional_tools: (list[File]) Additional tools to add to the action.
additional_inputs: (Depset[File]) Additional input files to add to the action.
resource_set: (func) A callback function that is passed to the created action.
See `ctx.actions.run`, `resource_set` parameter for full definition of
the callback.
experimental_exec_group: (str) Sets `exec_group` on proto compile action.
Avoid using this parameter.
experimental_progress_message: Overrides progress_message from the toolchain.
Don't use this parameter. It's only intended for the transition.
experimental_output_files: (str) Overwrites output_files from the toolchain.
Don't use this parameter. It's only intended for the transition.
"""
if type(generated_files) != type([]):
fail("generated_files is expected to be a list of Files")
if not generated_files:
return # nothing to do
if experimental_output_files not in ["single", "multiple", "legacy"]:
fail('experimental_output_files expected to be one of ["single", "multiple", "legacy"]')
args = actions.args()
args.use_param_file(param_file_arg = "@%s")
args.set_param_file_format("multiline")
tools = list(additional_tools)
if experimental_output_files != "legacy":
output_files = experimental_output_files
else:
output_files = getattr(proto_lang_toolchain_info, "output_files", "legacy")
if output_files != "legacy":
if proto_lang_toolchain_info.out_replacement_format_flag:
if output_files == "single":
if len(generated_files) > 1:
fail("generated_files only expected a single file")
plugin_output = generated_files[0]
else:
plugin_output = _output_directory(proto_info, generated_files[0].root)
if plugin_output:
args.add(plugin_output, format = proto_lang_toolchain_info.out_replacement_format_flag)
if proto_lang_toolchain_info.plugin:
tools.append(proto_lang_toolchain_info.plugin)
args.add(proto_lang_toolchain_info.plugin.executable, format = proto_lang_toolchain_info.plugin_format_flag)
# Protoc searches the -I paths for .protos in the order they are given and then
# uses the path within the directory as the package.
# This requires ordering the paths from most specific (longest) to least
# specific ones, so that no path in the list is a prefix of any of the
# following paths in the list.
# For example: 'bazel-out/k8-fastbuild/bin/external/foo' needs to be listed
# before 'bazel-out/k8-fastbuild/bin'. If not, protoc will discover the file under
# the shorter path and use 'external/foo/...' as its package path.
args.add_all(proto_info.transitive_proto_path, map_each = _import_virtual_proto_path)
args.add_all(proto_info.transitive_proto_path, map_each = _import_repo_proto_path)
args.add_all(proto_info.transitive_proto_path, map_each = _import_main_output_proto_path)
args.add("-I.") # Needs to come last
args.add_all(proto_lang_toolchain_info.protoc_opts)
args.add_all(proto_info.direct_sources)
if additional_args:
additional_args.use_param_file(param_file_arg = "@%s")
additional_args.set_param_file_format("multiline")
actions.run(
mnemonic = proto_lang_toolchain_info.mnemonic,
progress_message = experimental_progress_message if experimental_progress_message else proto_lang_toolchain_info.progress_message,
executable = proto_lang_toolchain_info.proto_compiler,
arguments = [args, additional_args] if additional_args else [args],
inputs = depset(transitive = [proto_info.transitive_sources, additional_inputs]),
outputs = generated_files,
tools = tools,
use_default_shell_env = True,
resource_set = resource_set,
exec_group = experimental_exec_group,
toolchain = _toolchain_type(proto_lang_toolchain_info),
)
_BAZEL_TOOLS_PREFIX = "external/bazel_tools/"
def _experimental_filter_sources(proto_info, proto_lang_toolchain_info):
if not proto_info.direct_sources:
return [], []
# Collect a set of provided protos
provided_proto_sources = proto_lang_toolchain_info.provided_proto_sources
provided_paths = {}
for src in provided_proto_sources:
path = src.path
# For listed protos bundled with the Bazel tools repository, their exec paths start
# with external/bazel_tools/. This prefix needs to be removed first, because the protos in
# user repositories will not have that prefix.
if path.startswith(_BAZEL_TOOLS_PREFIX):
provided_paths[path[len(_BAZEL_TOOLS_PREFIX):]] = None
else:
provided_paths[path] = None
# Filter proto files
proto_files = proto_info._direct_proto_sources
excluded = []
included = []
for proto_file in proto_files:
if proto_file.path in provided_paths:
excluded.append(proto_file)
else:
included.append(proto_file)
return included, excluded
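
A plain-Python sketch of the filtering above: sources the toolchain already provides are excluded
from code generation, everything else is included; the prefix handling mirrors
_BAZEL_TOOLS_PREFIX:

def filter_sources(direct_paths, provided_paths, prefix="external/bazel_tools/"):
    provided = set()
    for p in provided_paths:
        provided.add(p[len(prefix):] if p.startswith(prefix) else p)
    included = [p for p in direct_paths if p not in provided]
    excluded = [p for p in direct_paths if p in provided]
    return included, excluded

inc, exc = filter_sources(
    ["google/protobuf/any.proto", "myapp/api.proto"],
    ["external/bazel_tools/google/protobuf/any.proto"])
assert inc == ["myapp/api.proto"] and exc == ["google/protobuf/any.proto"]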
def _experimental_should_generate_code(
proto_info,
proto_lang_toolchain_info,
rule_name,
target_label):
"""Checks if the code should be generated for the given proto_library.
The code shouldn't be generated only when the toolchain already provides it
to the language through its runtime dependency.
It fails when the proto_library contains mixed proto files, some of which should
and some of which shouldn't generate code.
Args:
proto_info: (ProtoInfo) The ProtoInfo from proto_library to check the generation for.
proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
Obtained from a `proto_lang_toolchain` target or constructed ad-hoc.
rule_name: (str) Name of the rule used in the failure message.
target_label: (Label) The label of the target used in the failure message.
Returns:
(bool) True when the code should be generated.
"""
included, excluded = _experimental_filter_sources(proto_info, proto_lang_toolchain_info)
if included and excluded:
fail(("The 'srcs' attribute of '%s' contains protos for which '%s' " +
"shouldn't generate code (%s), in addition to protos for which it should (%s).\n" +
"Separate '%s' into 2 proto_library rules.") % (
target_label,
rule_name,
", ".join([f.short_path for f in excluded]),
", ".join([f.short_path for f in included]),
target_label,
))
return bool(included)
def _declare_generated_files(
actions,
proto_info,
extension,
name_mapper = None):
"""Declares generated files with a specific extension.
Use this in lang_proto_library rules when the protocol compiler generates files
that correspond to .proto file names.
The function replaces the ".proto" extension with the given one (e.g. ".pb.cc") and
declares the new output files.
Args:
actions: (ActionFactory) Obtained by ctx.actions, used to declare the files.
proto_info: (ProtoInfo) The ProtoInfo to declare the files for.
extension: (str) The extension to use for generated files.
name_mapper: (str->str) A function mapped over the base filename without
the extension. Use it to replace characters in the name that
cause problems in a specific programming language.
Returns:
(list[File]) The list of declared files.
"""
proto_sources = proto_info.direct_sources
outputs = []
for src in proto_sources:
basename_no_ext = src.basename[:-(len(src.extension) + 1)]
if name_mapper:
basename_no_ext = name_mapper(basename_no_ext)
# Note that two proto_library rules can have the same source file, so this is actually a
# shared action. NB: This can probably result in action conflicts if the proto_library rules
# are not the same.
outputs.append(actions.declare_file(basename_no_ext + extension, sibling = src))
return outputs
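# Hypothetical name_mapper sketch: a language whose identifiers can't contain
# dashes might pass a mapper like this, so "my-service.proto" yields
# "my_service" plus the requested extension.
def _example_dash_to_underscore(basename):
    return basename.replace("-", "_")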
def _toolchain_type(proto_lang_toolchain_info):
if toolchains.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
return getattr(proto_lang_toolchain_info, "toolchain_type", None)
else:
return None
proto_common = struct(
compile = _compile,
declare_generated_files = _declare_generated_files,
check_collocated = _check_collocated,
experimental_should_generate_code = _experimental_should_generate_code,
experimental_filter_sources = _experimental_filter_sources,
get_import_path = _get_import_path,
ProtoLangToolchainInfo = ProtoLangToolchainInfo,
INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = toolchains.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION,
INCOMPATIBLE_PASS_TOOLCHAIN_TYPE = (
getattr(native_proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False) or
not hasattr(native_proto_common, "ProtoLangToolchainInfo")
),
)

@ -1,5 +1,7 @@
"""ProtoInfo"""
load("//bazel/private:native.bzl", "NativeProtoInfo")
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/private:proto_info.bzl", _ProtoInfo = "ProtoInfo") # buildifier: disable=bzl-visibility
ProtoInfo = NativeProtoInfo
# This resolves to Starlark ProtoInfo in Bazel 8 or with --incompatible_enable_autoload flag
ProtoInfo = getattr(bazel_features.globals, "ProtoInfo", None) or _ProtoInfo

@ -1,5 +1,26 @@
"""ProtoLangToolchainInfo"""
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/private:native.bzl", "native_proto_common") # buildifier: disable=bzl-visibility
ProtoLangToolchainInfo = proto_common.ProtoLangToolchainInfo
# Use Starlark implementation only if native_proto_common.ProtoLangToolchainInfo doesn't exist
ProtoLangToolchainInfo = getattr(native_proto_common, "ProtoLangToolchainInfo", provider(
doc = """Specifies how to generate language-specific code from .proto files.
Used by LANG_proto_library rules.""",
fields = dict(
out_replacement_format_flag = """(str) Format string used when passing output to the plugin
used by proto compiler.""",
output_files = """("single","multiple","legacy") Format out_replacement_format_flag with
a path to a single file or a directory in the case of multiple files.""",
plugin_format_flag = "(str) Format string used when passing plugin to proto compiler.",
plugin = "(FilesToRunProvider) Proto compiler plugin.",
runtime = "(Target) Runtime.",
provided_proto_sources = "(list[File]) Proto sources provided by the toolchain.",
proto_compiler = "(FilesToRunProvider) Proto compiler.",
protoc_opts = "(list[str]) Options to pass to proto compiler.",
progress_message = "(str) Progress message to set on the proto compiler action.",
mnemonic = "(str) Mnemonic to set on the proto compiler action.",
allowlist_different_package = """(Target) Allowlist to create lang_proto_library in a
different package than proto_library""",
toolchain_type = """(Label) Toolchain type that was used to obtain this info""",
),
))
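# Hypothetical sketch (flag values and the _proto_compiler attribute are made
# up): instead of reading the provider from a proto_lang_toolchain target, a
# rule may construct it ad-hoc.
def _example_adhoc_toolchain_info(ctx):
    return ProtoLangToolchainInfo(
        out_replacement_format_flag = "--example_out=%s",
        output_files = "single",
        proto_compiler = ctx.executable._proto_compiler,  # assumed attribute
        protoc_opts = [],
        plugin = None,
        progress_message = "Generating example sources for %{label}",
        mnemonic = "GenExampleProto",
    )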

@ -1,3 +1,16 @@
# Copyright (c) 2009-2024, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""java_lite_proto_library rule"""
java_lite_proto_library = native.java_lite_proto_library
load("//bazel/private:java_lite_proto_library.bzl", _java_lite_proto_library = "java_lite_proto_library") # buildifier: disable=bzl-visibility
def java_lite_proto_library(**kwattrs):
# Only use Starlark rules when they are removed from Bazel
if not hasattr(native, "java_lite_proto_library"):
_java_lite_proto_library(**kwattrs)
else:
native.java_lite_proto_library(**kwattrs)

@ -1,3 +1,16 @@
# Copyright (c) 2009-2024, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""java_proto_library rule"""
java_proto_library = native.java_proto_library
load("//bazel/private:bazel_java_proto_library_rule.bzl", _java_proto_library = "java_proto_library") # buildifier: disable=bzl-visibility
def java_proto_library(**kwattrs):
# Only use Starlark rules when they are removed from Bazel
if not hasattr(native, "java_proto_library"):
_java_proto_library(**kwattrs)
else:
native.java_proto_library(**kwattrs)

@ -0,0 +1,140 @@
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
load("//bazel/private:native_bool_flag.bzl", "native_bool_flag")
package(default_applicable_licenses = ["//:license"])
toolchain_type(
name = "proto_toolchain_type",
visibility = ["//visibility:public"],
)
toolchain_type(
name = "cc_toolchain_type",
visibility = ["//visibility:public"],
)
toolchain_type(
name = "java_toolchain_type",
visibility = ["//visibility:public"],
)
toolchain_type(
name = "javalite_toolchain_type",
visibility = ["//visibility:public"],
)
toolchain_type(
name = "python_toolchain_type",
visibility = ["//visibility:public"],
)
bzl_library(
name = "upb_proto_library_internal_bzl",
srcs = [
"upb_proto_library_internal/aspect.bzl",
"upb_proto_library_internal/cc_library_func.bzl",
"upb_proto_library_internal/copts.bzl",
"upb_proto_library_internal/rule.bzl",
],
visibility = ["//bazel:__pkg__"],
deps = [
"//bazel/common:proto_common_bzl",
"@bazel_skylib//lib:paths",
"@bazel_tools//tools/cpp:toolchain_utils.bzl",
],
)
bzl_library(
name = "native_bzl",
srcs = [
"native.bzl",
],
visibility = ["//bazel:__subpackages__"],
)
bzl_library(
name = "bazel_proto_library_rule_bzl",
srcs = [
"bazel_proto_library_rule.bzl",
],
visibility = ["//bazel:__subpackages__"],
deps = [
"//bazel/common:proto_common_bzl",
"//bazel/common:proto_info_bzl",
"//bazel/private:toolchain_helpers_bzl",
"@bazel_skylib//lib:paths",
"@bazel_skylib//rules:common_settings",
"@proto_bazel_features//:features",
],
)
bzl_library(
name = "proto_toolchain_rule_bzl",
srcs = [
"proto_toolchain_rule.bzl",
],
visibility = ["//bazel:__subpackages__"],
deps = [
"//bazel/common:proto_common_bzl",
"//bazel/common:proto_lang_toolchain_info_bzl",
"//bazel/private:toolchain_helpers_bzl",
],
)
bzl_library(
name = "proto_lang_toolchain_rule_bzl",
srcs = [
"proto_lang_toolchain_rule.bzl",
],
visibility = ["//bazel:__subpackages__"],
deps = [
":toolchain_helpers_bzl",
"//bazel/common:proto_common_bzl",
"//bazel/common:proto_info_bzl",
"//bazel/common:proto_lang_toolchain_info_bzl",
"@proto_bazel_features//:features",
],
)
bzl_library(
name = "toolchain_helpers_bzl",
srcs = [
"toolchain_helpers.bzl",
],
visibility = ["//bazel:__subpackages__"],
deps = [
":native_bzl",
"//bazel/common:proto_lang_toolchain_info_bzl",
],
)
native_bool_flag(
name = "experimental_proto_descriptor_sets_include_source_info",
flag = "experimental_proto_descriptor_sets_include_source_info",
match_value = "true",
visibility = ["//bazel:__subpackages__"],
)
native_bool_flag(
name = "strict_proto_deps",
flag = "strict_proto_deps",
match_value = "off",
result = False,
visibility = ["//bazel:__subpackages__"],
)
native_bool_flag(
name = "strict_public_imports",
flag = "strict_public_imports",
match_value = "off",
result = False,
visibility = ["//bazel:__subpackages__"],
)
filegroup(
name = "bazel_osx_p4deps",
srcs = glob(["**"]),
visibility = [
"//bazel:__pkg__",
],
)

@ -1,42 +0,0 @@
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
licenses(["notice"])
bzl_library(
name = "upb_proto_library_internal_bzl",
srcs = [
"upb_proto_library_internal/aspect.bzl",
"upb_proto_library_internal/cc_library_func.bzl",
"upb_proto_library_internal/copts.bzl",
"upb_proto_library_internal/rule.bzl",
],
visibility = ["//bazel:__pkg__"],
deps = [
"//bazel/common:proto_common_bzl",
"@bazel_skylib//lib:paths",
"@bazel_tools//tools/cpp:toolchain_utils.bzl",
],
)
bzl_library(
name = "native_bzl",
srcs = [
"native.bzl",
],
visibility = ["//bazel:__subpackages__"],
)
bzl_library(
name = "proto_toolchain_rule_bzl",
srcs = [
"proto_toolchain_rule.bzl",
],
visibility = ["//bazel:__subpackages__"],
)

@ -0,0 +1,197 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Bazel's implementation of cc_proto_library"""
load("@rules_cc//cc:find_cc_toolchain.bzl", "use_cc_toolchain")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:cc_proto_support.bzl", "cc_proto_compile_and_link")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
_CC_PROTO_TOOLCHAIN = "//bazel/private:cc_toolchain_type"
_ProtoCcFilesInfo = provider(fields = ["files"], doc = "Provide cc proto files.")
_ProtoCcHeaderInfo = provider(fields = ["headers"], doc = "Provide cc proto headers.")
def _get_output_files(actions, proto_info, suffixes):
result = []
for suffix in suffixes:
result.extend(proto_common.declare_generated_files(
actions = actions,
proto_info = proto_info,
extension = suffix,
))
return result
# TODO: Make this code actually work.
def _get_strip_include_prefix(ctx, proto_info):
proto_root = proto_info.proto_source_root
if proto_root == "." or proto_root == ctx.label.workspace_root:
return ""
strip_include_prefix = ""
if proto_root.startswith(ctx.bin_dir.path):
proto_root = proto_root[len(ctx.bin_dir.path) + 1:]
elif proto_root.startswith(ctx.genfiles_dir.path):
proto_root = proto_root[len(ctx.genfiles_dir.path) + 1:]
if proto_root.startswith(ctx.label.workspace_root):
proto_root = proto_root[len(ctx.label.workspace_root):]
strip_include_prefix = "//" + proto_root
return strip_include_prefix
def _aspect_impl(target, ctx):
proto_info = target[ProtoInfo]
proto_configuration = ctx.fragments.proto
sources = []
headers = []
textual_hdrs = []
proto_toolchain = toolchains.find_toolchain(ctx, "_aspect_cc_proto_toolchain", _CC_PROTO_TOOLCHAIN)
should_generate_code = proto_common.experimental_should_generate_code(proto_info, proto_toolchain, "cc_proto_library", target.label)
if should_generate_code:
if len(proto_info.direct_sources) != 0:
# Bazel 7 didn't expose cc_proto_library_source_suffixes used by Kythe,
# gracefully falling back to .pb.cc
if type(proto_configuration.cc_proto_library_source_suffixes) == "builtin_function_or_method":
source_suffixes = [".pb.cc"]
header_suffixes = [".pb.h"]
else:
source_suffixes = proto_configuration.cc_proto_library_source_suffixes
header_suffixes = proto_configuration.cc_proto_library_header_suffixes
sources = _get_output_files(ctx.actions, proto_info, source_suffixes)
headers = _get_output_files(ctx.actions, proto_info, header_suffixes)
header_provider = _ProtoCcHeaderInfo(headers = depset(headers))
else:
# If this proto_library doesn't have sources, it provides the combined headers of all its
# direct dependencies. Thus, if a direct dependency does have sources, the generated files
# are also provided by this library. If a direct dependency does not have sources, it will
# do the same thing, so that effectively this library looks through all source-less
# proto_libraries and provides all generated headers of the proto_libraries with sources
# that it depends on.
transitive_headers = []
for dep in getattr(ctx.rule.attr, "deps", []):
if _ProtoCcHeaderInfo in dep:
textual_hdrs.extend(dep[_ProtoCcHeaderInfo].headers.to_list())
transitive_headers.append(dep[_ProtoCcHeaderInfo].headers)
header_provider = _ProtoCcHeaderInfo(headers = depset(transitive = transitive_headers))
else: # shouldn't generate code
header_provider = _ProtoCcHeaderInfo(headers = depset())
proto_common.compile(
actions = ctx.actions,
proto_info = proto_info,
proto_lang_toolchain_info = proto_toolchain,
generated_files = sources + headers,
experimental_output_files = "multiple",
)
deps = []
if proto_toolchain.runtime:
deps = [proto_toolchain.runtime]
deps.extend(getattr(ctx.rule.attr, "deps", []))
cc_info, libraries, temps = cc_proto_compile_and_link(
ctx = ctx,
deps = deps,
sources = sources,
headers = headers,
textual_hdrs = textual_hdrs,
strip_include_prefix = _get_strip_include_prefix(ctx, proto_info),
)
return [
cc_info,
_ProtoCcFilesInfo(files = depset(sources + headers + libraries)),
OutputGroupInfo(temp_files_INTERNAL_ = temps),
header_provider,
]
cc_proto_aspect = aspect(
implementation = _aspect_impl,
attr_aspects = ["deps"],
fragments = ["cpp", "proto"],
required_providers = [ProtoInfo],
provides = [CcInfo],
attrs = toolchains.if_legacy_toolchain({"_aspect_cc_proto_toolchain": attr.label(
default = configuration_field(fragment = "proto", name = "proto_toolchain_for_cc"),
)}),
toolchains = use_cc_toolchain() + toolchains.use_toolchain(_CC_PROTO_TOOLCHAIN),
)
def _cc_proto_library_impl(ctx):
if len(ctx.attr.deps) != 1:
fail(
"'deps' attribute must contain exactly one label " +
"(we didn't name it 'dep' for consistency). " +
"The main use-case for multiple deps is to create a rule that contains several " +
"other targets. This makes dependency bloat more likely. It also makes it harder" +
"to remove unused deps.",
attr = "deps",
)
dep = ctx.attr.deps[0]
proto_toolchain = toolchains.find_toolchain(ctx, "_aspect_cc_proto_toolchain", _CC_PROTO_TOOLCHAIN)
proto_common.check_collocated(ctx.label, dep[ProtoInfo], proto_toolchain)
return [DefaultInfo(files = dep[_ProtoCcFilesInfo].files), dep[CcInfo], dep[OutputGroupInfo]]
cc_proto_library = rule(
implementation = _cc_proto_library_impl,
doc = """
<p>
<code>cc_proto_library</code> generates C++ code from <code>.proto</code> files.
</p>
<p>
<code>deps</code> must point to <a href="protocol-buffer.html#proto_library"><code>proto_library
</code></a> rules.
</p>
<p>
Example:
</p>
<pre>
<code class="lang-starlark">
cc_library(
name = "lib",
deps = [":foo_cc_proto"],
)
cc_proto_library(
name = "foo_cc_proto",
deps = [":foo_proto"],
)
proto_library(
name = "foo_proto",
)
</code>
</pre>
""",
attrs = {
"deps": attr.label_list(
aspects = [cc_proto_aspect],
allow_rules = ["proto_library"],
allow_files = False,
doc = """
The list of <a href="protocol-buffer.html#proto_library"><code>proto_library</code></a>
rules to generate C++ code for.""",
),
} | toolchains.if_legacy_toolchain({
"_aspect_cc_proto_toolchain": attr.label(
default = configuration_field(fragment = "proto", name = "proto_toolchain_for_cc"),
),
}),
provides = [CcInfo],
toolchains = toolchains.use_toolchain(_CC_PROTO_TOOLCHAIN),
)

@ -0,0 +1,164 @@
# Copyright (c) 2009-2024, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""The implementation of the `java_proto_library` rule and its aspect."""
load("@rules_java//java/common:java_info.bzl", "JavaInfo")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:java_proto_support.bzl", "JavaProtoAspectInfo", "java_compile_for_protos", "java_info_merge_for_protos")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
_JAVA_PROTO_TOOLCHAIN = "//bazel/private:java_toolchain_type"
def _filter_provider(provider, *attrs):
return [dep[provider] for attr in attrs for dep in attr if provider in dep]
def _bazel_java_proto_aspect_impl(target, ctx):
"""Generates and compiles Java code for a proto_library.
The function runs protobuf compiler on the `proto_library` target using
`proto_lang_toolchain` specified by `--proto_toolchain_for_java` flag.
This generates a source jar.
After that the source jar is compiled, respecting `deps` and `exports` of
the `proto_library`.
Args:
target: (Target) The `proto_library` target (any target providing `ProtoInfo`).
ctx: (RuleContext) The rule context.
Returns:
([JavaInfo, JavaProtoAspectInfo]) A JavaInfo describing the compiled Java
version of the `proto_library`, and a `JavaProtoAspectInfo` with all source
and runtime jars.
"""
proto_toolchain_info = toolchains.find_toolchain(ctx, "_aspect_java_proto_toolchain", _JAVA_PROTO_TOOLCHAIN)
source_jar = None
if proto_common.experimental_should_generate_code(target[ProtoInfo], proto_toolchain_info, "java_proto_library", target.label):
# Generate source jar using proto compiler.
source_jar = ctx.actions.declare_file(ctx.label.name + "-speed-src.jar")
proto_common.compile(
ctx.actions,
target[ProtoInfo],
proto_toolchain_info,
[source_jar],
experimental_output_files = "single",
)
# Compile Java sources (or just merge if there aren't any)
deps = _filter_provider(JavaInfo, ctx.rule.attr.deps)
exports = _filter_provider(JavaInfo, ctx.rule.attr.exports)
if source_jar and proto_toolchain_info.runtime:
deps.append(proto_toolchain_info.runtime[JavaInfo])
java_info, jars = java_compile_for_protos(
ctx,
"-speed.jar",
source_jar,
deps,
exports,
)
transitive_jars = [dep[JavaProtoAspectInfo].jars for dep in ctx.rule.attr.deps if JavaProtoAspectInfo in dep]
return [
java_info,
JavaProtoAspectInfo(jars = depset(jars, transitive = transitive_jars)),
]
bazel_java_proto_aspect = aspect(
implementation = _bazel_java_proto_aspect_impl,
attrs = toolchains.if_legacy_toolchain({
"_aspect_java_proto_toolchain": attr.label(
default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java"),
),
}),
toolchains = ["@bazel_tools//tools/jdk:toolchain_type"] + toolchains.use_toolchain(_JAVA_PROTO_TOOLCHAIN),
attr_aspects = ["deps", "exports"],
required_providers = [ProtoInfo],
provides = [JavaInfo, JavaProtoAspectInfo],
fragments = ["java"],
)
def bazel_java_proto_library_rule(ctx):
"""Merges results of `java_proto_aspect` in `deps`.
Args:
ctx: (RuleContext) The rule context.
Returns:
([JavaInfo, DefaultInfo, OutputGroupInfo])
"""
proto_toolchain = toolchains.find_toolchain(ctx, "_aspect_java_proto_toolchain", _JAVA_PROTO_TOOLCHAIN)
for dep in ctx.attr.deps:
proto_common.check_collocated(ctx.label, dep[ProtoInfo], proto_toolchain)
java_info = java_info_merge_for_protos([dep[JavaInfo] for dep in ctx.attr.deps], merge_java_outputs = False)
transitive_src_and_runtime_jars = depset(transitive = [dep[JavaProtoAspectInfo].jars for dep in ctx.attr.deps])
transitive_runtime_jars = depset(transitive = [java_info.transitive_runtime_jars])
return [
java_info,
DefaultInfo(
files = transitive_src_and_runtime_jars,
runfiles = ctx.runfiles(transitive_files = transitive_runtime_jars),
),
OutputGroupInfo(default = depset()),
]
java_proto_library = rule(
implementation = bazel_java_proto_library_rule,
doc = """
<p>
<code>java_proto_library</code> generates Java code from <code>.proto</code> files.
</p>
<p>
<code>deps</code> must point to <a href="protocol-buffer.html#proto_library"><code>proto_library
</code></a> rules.
</p>
<p>
Example:
</p>
<pre class="code">
<code class="lang-starlark">
java_library(
name = "lib",
runtime_deps = [":foo_java_proto"],
)
java_proto_library(
name = "foo_java_proto",
deps = [":foo_proto"],
)
proto_library(
name = "foo_proto",
)
</code>
</pre>
""",
attrs = {
"deps": attr.label_list(
providers = [ProtoInfo],
aspects = [bazel_java_proto_aspect],
doc = """
The list of <a href="protocol-buffer.html#proto_library"><code>proto_library</code></a>
rules to generate Java code for.
""",
),
# buildifier: disable=attr-license (calling attr.license())
"licenses": attr.license() if hasattr(attr, "license") else attr.string_list(),
} | toolchains.if_legacy_toolchain({
"_aspect_java_proto_toolchain": attr.label(
default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java"),
),
}), # buildifier: disable=attr-licenses (attribute called licenses)
provides = [JavaInfo],
toolchains = toolchains.use_toolchain(_JAVA_PROTO_TOOLCHAIN),
)

@ -0,0 +1,356 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""
Implementation of proto_library rule.
"""
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
STRICT_DEPS_FLAG_TEMPLATE = (
#
"--direct_dependencies_violation_msg=" +
"%%s is imported, but %s doesn't directly depend on a proto_library that 'srcs' it."
)
def _check_srcs_package(target_package, srcs):
"""Check that .proto files in sources are from the same package.
This is done to avoid clashes with the generated sources."""
#TODO: this does not work with filegroups that contain files that are not in the package
for src in srcs:
if target_package != src.label.package:
fail("Proto source with label '%s' must be in same package as consuming rule." % src.label)
def _get_import_prefix(ctx):
"""Gets and verifies import_prefix attribute if it is declared."""
import_prefix = ctx.attr.import_prefix
if not paths.is_normalized(import_prefix):
fail("should be normalized (without uplevel references or '.' path segments)", attr = "import_prefix")
if paths.is_absolute(import_prefix):
fail("should be a relative path", attr = "import_prefix")
return import_prefix
def _get_strip_import_prefix(ctx):
"""Gets and verifies strip_import_prefix."""
strip_import_prefix = ctx.attr.strip_import_prefix
if not paths.is_normalized(strip_import_prefix):
fail("should be normalized (without uplevel references or '.' path segments)", attr = "strip_import_prefix")
if paths.is_absolute(strip_import_prefix):
strip_import_prefix = strip_import_prefix[1:]
else: # Relative to current package
strip_import_prefix = _join(ctx.label.package, strip_import_prefix)
return strip_import_prefix.removesuffix("/")
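# Worked example (illustrative): in package "a/b", strip_import_prefix = "/a"
# strips the repository-relative prefix "a", so a/b/c.proto becomes b/c.proto;
# adding import_prefix = "x" then makes it importable as "x/b/c.proto".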
def _proto_library_impl(ctx):
# Verifies attributes.
_check_srcs_package(ctx.label.package, ctx.attr.srcs)
srcs = ctx.files.srcs
deps = [dep[ProtoInfo] for dep in ctx.attr.deps]
exports = [dep[ProtoInfo] for dep in ctx.attr.exports]
import_prefix = _get_import_prefix(ctx)
strip_import_prefix = _get_strip_import_prefix(ctx)
check_for_reexport = deps + exports if not srcs else exports
_PackageSpecificationInfo = bazel_features.globals.PackageSpecificationInfo
for proto in check_for_reexport:
if getattr(proto, "allow_exports", None):
if not _PackageSpecificationInfo:
fail("Allowlist checks not supported before Bazel 6.4.0")
if not proto.allow_exports[_PackageSpecificationInfo].contains(ctx.label):
fail("proto_library '%s' can't be reexported in package '//%s'" % (proto.direct_descriptor_set.owner, ctx.label.package))
proto_path, virtual_srcs = _process_srcs(ctx, srcs, import_prefix, strip_import_prefix)
descriptor_set = ctx.actions.declare_file(ctx.label.name + "-descriptor-set.proto.bin")
proto_info = ProtoInfo(
srcs = virtual_srcs,
deps = deps,
descriptor_set = descriptor_set,
proto_path = proto_path,
workspace_root = ctx.label.workspace_root,
bin_dir = ctx.bin_dir.path,
allow_exports = ctx.attr.allow_exports,
)
_write_descriptor_set(ctx, proto_info, deps, exports, descriptor_set)
# We assume that the proto sources will not have conflicting artifacts
# with the same root relative path
data_runfiles = ctx.runfiles(
files = [proto_info.direct_descriptor_set],
transitive_files = depset(transitive = [proto_info.transitive_sources]),
)
return [
proto_info,
DefaultInfo(
files = depset([proto_info.direct_descriptor_set]),
default_runfiles = ctx.runfiles(), # empty
data_runfiles = data_runfiles,
),
]
def _process_srcs(ctx, srcs, import_prefix, strip_import_prefix):
"""Returns proto_path and sources, optionally symlinking them to _virtual_imports.
Returns:
(str, [File]) A pair of proto_path and virtual_sources.
"""
if import_prefix != "" or strip_import_prefix != "":
# Use virtual source roots
return _symlink_to_virtual_imports(ctx, srcs, import_prefix, strip_import_prefix)
else:
# No virtual source roots
return "", srcs
def _join(*path):
return "/".join([p for p in path if p != ""])
def _symlink_to_virtual_imports(ctx, srcs, import_prefix, strip_import_prefix):
"""Symlinks srcs to _virtual_imports.
Returns:
A pair (proto_path, direct_sources).
"""
virtual_imports = _join("_virtual_imports", ctx.label.name)
proto_path = _join(ctx.label.package, virtual_imports)
if ctx.label.workspace_name == "":
full_strip_import_prefix = strip_import_prefix
else:
full_strip_import_prefix = _join("..", ctx.label.workspace_name, strip_import_prefix)
if full_strip_import_prefix:
full_strip_import_prefix += "/"
virtual_srcs = []
for src in srcs:
# Remove strip_import_prefix
if not src.short_path.startswith(full_strip_import_prefix):
fail(".proto file '%s' is not under the specified strip prefix '%s'" %
(src.short_path, full_strip_import_prefix))
import_path = src.short_path[len(full_strip_import_prefix):]
# Add import_prefix
virtual_src = ctx.actions.declare_file(_join(virtual_imports, import_prefix, import_path))
ctx.actions.symlink(
output = virtual_src,
target_file = src,
progress_message = "Symlinking virtual .proto sources for %{label}",
)
virtual_srcs.append(virtual_src)
return proto_path, virtual_srcs
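# Illustrative layout: each source ends up symlinked under
# <package>/_virtual_imports/<target_name>/<import_prefix>/<import_path>
# in the output tree, and the returned proto_path points at the
# _virtual_imports/<target_name> directory.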
def _write_descriptor_set(ctx, proto_info, deps, exports, descriptor_set):
"""Writes descriptor set."""
if proto_info.direct_sources == []:
ctx.actions.write(descriptor_set, "")
return
dependencies_descriptor_sets = depset(transitive = [dep.transitive_descriptor_sets for dep in deps])
args = ctx.actions.args()
if ctx.attr._experimental_proto_descriptor_sets_include_source_info[BuildSettingInfo].value:
args.add("--include_source_info")
args.add("--retain_options")
strict_deps = ctx.attr._strict_proto_deps[BuildSettingInfo].value
if strict_deps:
if proto_info.direct_sources:
strict_importable_sources = depset(
direct = proto_info._direct_proto_sources,
transitive = [dep._exported_sources for dep in deps],
)
else:
strict_importable_sources = None
if strict_importable_sources:
args.add_joined(
"--direct_dependencies",
strict_importable_sources,
map_each = proto_common.get_import_path,
join_with = ":",
)
# Example: `--direct_dependencies a.proto:b.proto`
else:
# The proto compiler requires an empty list to turn on strict deps checking
args.add("--direct_dependencies=")
# Set `-direct_dependencies_violation_msg=`
args.add(ctx.label, format = STRICT_DEPS_FLAG_TEMPLATE)
strict_imports = ctx.attr._strict_public_imports[BuildSettingInfo].value
if strict_imports:
public_import_protos = depset(transitive = [export._exported_sources for export in exports])
if not public_import_protos:
# This line is necessary to trigger the check.
args.add("--allowed_public_imports=")
else:
args.add_joined(
"--allowed_public_imports",
public_import_protos,
map_each = proto_common.get_import_path,
join_with = ":",
)
if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
toolchain = ctx.toolchains[toolchains.PROTO_TOOLCHAIN]
if not toolchain:
fail("Protocol compiler toolchain could not be resolved.")
proto_lang_toolchain_info = toolchain.proto
else:
proto_lang_toolchain_info = proto_common.ProtoLangToolchainInfo(
out_replacement_format_flag = "--descriptor_set_out=%s",
output_files = "single",
mnemonic = "GenProtoDescriptorSet",
progress_message = "Generating Descriptor Set proto_library %{label}",
proto_compiler = ctx.executable._proto_compiler,
protoc_opts = ctx.fragments.proto.experimental_protoc_opts,
plugin = None,
)
proto_common.compile(
ctx.actions,
proto_info,
proto_lang_toolchain_info,
generated_files = [descriptor_set],
additional_inputs = dependencies_descriptor_sets,
additional_args = args,
)
proto_library = rule(
_proto_library_impl,
# TODO: proto_common docs are missing
# TODO: ProtoInfo link doesn't work and docs are missing
doc = """
<p>If using Bazel, please load the rule from <a href="https://github.com/bazelbuild/rules_proto">
https://github.com/bazelbuild/rules_proto</a>.
<p>Use <code>proto_library</code> to define libraries of protocol buffers which
may be used from multiple languages. A <code>proto_library</code> may be listed
in the <code>deps</code> clause of supported rules, such as
<code>java_proto_library</code>.
<p>When compiled on the command-line, a <code>proto_library</code> creates a file
named <code>foo-descriptor-set.proto.bin</code>, which is the descriptor set for
the messages in the rule's <code>srcs</code>. The file is a serialized
<code>FileDescriptorSet</code>, which is described in
<a href="https://developers.google.com/protocol-buffers/docs/techniques#self-description">
https://developers.google.com/protocol-buffers/docs/techniques#self-description</a>.
<p>It only contains information about the <code>.proto</code> files directly
mentioned by a <code>proto_library</code> rule; the collection of transitive
descriptor sets is available through the
<code>[ProtoInfo].transitive_descriptor_sets</code> Starlark provider.
See documentation in <code>proto_info.bzl</code>.
<p>Recommended code organization:
<ul>
<li>One <code>proto_library</code> rule per <code>.proto</code> file.
<li>A file named <code>foo.proto</code> will be in a rule named <code>foo_proto</code>,
which is located in the same package.
<li>A <code>[language]_proto_library</code> that wraps a <code>proto_library</code>
named <code>foo_proto</code> should be called <code>foo_[language]_proto</code>,
and be located in the same package.
</ul>""",
attrs = {
"srcs": attr.label_list(
allow_files = [".proto", ".protodevel"],
flags = ["DIRECT_COMPILE_TIME_INPUT"],
# TODO: Should .protodevel be advertised or deprecated?
doc = """
The list of <code>.proto</code> and <code>.protodevel</code> files that are
processed to create the target. This is usually a non-empty list. One use case
where <code>srcs</code> can be empty is an <i>alias library</i>: a
proto_library rule having one or more other proto_library rules in <code>deps</code>.
This pattern can be used to e.g. export a public API under a persistent name.
),
"deps": attr.label_list(
providers = [ProtoInfo],
doc = """
The list of other <code>proto_library</code> rules that the target depends upon.
A <code>proto_library</code> may only depend on other <code>proto_library</code>
targets. It may not depend on language-specific libraries.""",
),
"exports": attr.label_list(
providers = [ProtoInfo],
doc = """
List of proto_library targets that can be referenced via "import public" in the
proto source.
It's an error if you use "import public" but do not list the corresponding library
in the exports attribute.
Note that you have to list the library both in deps and exports since not all
lang_proto_library implementations have been changed yet.
),
"strip_import_prefix": attr.string(
default = "/",
doc = """
The prefix to strip from the paths of the .proto files in this rule.
<p>When set, .proto source files in the <code>srcs</code> attribute of this rule are
accessible at their path with this prefix cut off.
<p>If it's a relative path (not starting with a slash), it's taken as a package-relative
one. If it's an absolute one, it's understood as a repository-relative path.
<p>The prefix in the <code>import_prefix</code> attribute is added after this prefix is
stripped.""",
),
"import_prefix": attr.string(
doc = """
The prefix to add to the paths of the .proto files in this rule.
<p>When set, the .proto source files in the <code>srcs</code> attribute of this rule are
accessible at the value of this attribute prepended to their repository-relative path.
<p>The prefix in the <code>strip_import_prefix</code> attribute is removed before this
prefix is added.""",
),
"allow_exports": attr.label(
cfg = "exec",
providers = [bazel_features.globals.PackageSpecificationInfo] if bazel_features.globals.PackageSpecificationInfo else [],
doc = """
An optional allowlist that prevents this proto_library from being reexported or
used in a lang_proto_library that is not in one of the listed packages.
),
"data": attr.label_list(
allow_files = True,
flags = ["SKIP_CONSTRAINTS_OVERRIDE"],
),
# buildifier: disable=attr-license (calling attr.license())
"licenses": attr.license() if hasattr(attr, "license") else attr.string_list(),
"_experimental_proto_descriptor_sets_include_source_info": attr.label(
default = "//bazel/private:experimental_proto_descriptor_sets_include_source_info",
),
"_strict_proto_deps": attr.label(
default =
"//bazel/private:strict_proto_deps",
),
"_strict_public_imports": attr.label(
default = "//bazel/private:strict_public_imports",
),
} | toolchains.if_legacy_toolchain({
"_proto_compiler": attr.label(
cfg = "exec",
executable = True,
allow_files = True,
default = configuration_field("proto", "proto_compiler"),
),
}), # buildifier: disable=attr-licenses (attribute called licenses)
fragments = ["proto"],
provides = [ProtoInfo],
toolchains = toolchains.use_toolchain(toolchains.PROTO_TOOLCHAIN),
)

@ -0,0 +1,141 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Supporting C++ compilation of generated code"""
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cc_toolchain")
def get_feature_configuration(ctx, has_sources, extra_requested_features = []):
"""Returns C++ feature configuration for compiling and linking generated C++ files.
Args:
ctx: (RuleCtx) rule context.
has_sources: (bool) Whether the proto_library has sources.
extra_requested_features: (list[str]) Additionally requested features.
Returns:
(FeatureConfiguration) C++ feature configuration
"""
cc_toolchain = find_cc_toolchain(ctx)
requested_features = ctx.features + extra_requested_features
# TODO: Remove LAYERING_CHECK once we have verified that there are direct
# dependencies for all generated #includes.
unsupported_features = ctx.disabled_features + ["parse_headers", "layering_check"]
if has_sources:
requested_features.append("header_modules")
else:
unsupported_features.append("header_modules")
return cc_common.configure_features(
ctx = ctx,
cc_toolchain = cc_toolchain,
requested_features = requested_features,
unsupported_features = unsupported_features,
)
def _get_libraries_from_linking_outputs(linking_outputs, feature_configuration):
library_to_link = linking_outputs.library_to_link
if not library_to_link:
return []
outputs = []
if library_to_link.static_library:
outputs.append(library_to_link.static_library)
if library_to_link.pic_static_library:
outputs.append(library_to_link.pic_static_library)
# On Windows, dynamic library is not built by default, so don't add them to files_to_build.
if not cc_common.is_enabled(feature_configuration = feature_configuration, feature_name = "targets_windows"):
if library_to_link.resolved_symlink_dynamic_library:
outputs.append(library_to_link.resolved_symlink_dynamic_library)
elif library_to_link.dynamic_library:
outputs.append(library_to_link.dynamic_library)
if library_to_link.resolved_symlink_interface_library:
outputs.append(library_to_link.resolved_symlink_interface_library)
elif library_to_link.interface_library:
outputs.append(library_to_link.interface_library)
return outputs
def cc_proto_compile_and_link(ctx, deps, sources, headers, disallow_dynamic_library = None, feature_configuration = None, alwayslink = False, **kwargs):
"""Creates C++ compilation and linking actions for C++ proto sources.
Args:
ctx: rule context
deps: (list[CcInfo]) List of libraries to be added as dependencies to compilation and linking
actions.
sources: (list[File]) List of C++ source files.
headers: (list[File]) List of C++ header files.
disallow_dynamic_library: (bool) Whether dynamic libraries are disallowed.
feature_configuration: (FeatureConfiguration) Feature configuration to use.
alwayslink: (bool) Whether the library should always be linked.
**kwargs: Additional arguments passed to the compilation. See cc_common.compile.
Returns:
(CcInfo, list[File], list[File])
- CcInfo provider with compilation context and linking context
- A list of linked libraries related to this proto
- A list of temporary files generated during compilation
"""
cc_toolchain = find_cc_toolchain(ctx)
feature_configuration = feature_configuration or get_feature_configuration(ctx, bool(sources))
if disallow_dynamic_library == None:
# TODO: Configure output artifact with action_config
# once proto compile action is configurable from the crosstool.
disallow_dynamic_library = not cc_common.is_enabled(
feature_name = "supports_dynamic_linker",
feature_configuration = feature_configuration,
)
(compilation_context, compilation_outputs) = cc_common.compile(
actions = ctx.actions,
feature_configuration = feature_configuration,
cc_toolchain = cc_toolchain,
srcs = sources,
public_hdrs = headers,
compilation_contexts = [dep[CcInfo].compilation_context for dep in deps if CcInfo in dep],
name = ctx.label.name,
# Don't instrument the generated C++ files even when --collect_code_coverage is set.
# If we actually start generating coverage instrumentation for .proto files based on coverage
# data from the generated C++ files, this will have to be removed. Currently, the work done
# to instrument those files and execute the instrumentation is all for nothing, and it can
# be quite a bit of extra computation even when that's not made worse by performance bugs,
# as in b/64963386.
# code_coverage_enabled = False (cc_common.compile disables code_coverage by default)
**kwargs
)
if sources:
linking_context, linking_outputs = cc_common.create_linking_context_from_compilation_outputs(
actions = ctx.actions,
feature_configuration = feature_configuration,
cc_toolchain = cc_toolchain,
compilation_outputs = compilation_outputs,
linking_contexts = [dep[CcInfo].linking_context for dep in deps if CcInfo in dep],
name = ctx.label.name,
disallow_dynamic_library = disallow_dynamic_library,
alwayslink = alwayslink,
)
libraries = _get_libraries_from_linking_outputs(linking_outputs, feature_configuration)
else:
linking_context = cc_common.merge_linking_contexts(
linking_contexts = [dep[CcInfo].linking_context for dep in deps if CcInfo in dep],
)
libraries = []
debug_context = None
temps = []
if bazel_features.cc.protobuf_on_allowlist:
debug_context = cc_common.merge_debug_context(
[cc_common.create_debug_context(compilation_outputs)] +
[dep[CcInfo].debug_context() for dep in deps if CcInfo in dep],
)
temps = compilation_outputs.temps()
return CcInfo(
compilation_context = compilation_context,
linking_context = linking_context,
debug_context = debug_context,
), libraries, temps

@ -0,0 +1,178 @@
# Copyright (c) 2009-2024, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""A Starlark implementation of the java_lite_proto_library rule."""
load("@rules_java//java/common:java_common.bzl", "java_common")
load("@rules_java//java/common:java_info.bzl", "JavaInfo")
load("@rules_java//java/common:proguard_spec_info.bzl", "ProguardSpecInfo")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:java_proto_support.bzl", "JavaProtoAspectInfo", "java_compile_for_protos", "java_info_merge_for_protos")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
_PROTO_TOOLCHAIN_ATTR = "_aspect_proto_toolchain_for_javalite"
_JAVA_LITE_PROTO_TOOLCHAIN = "//bazel/private:javalite_toolchain_type"
def _aspect_impl(target, ctx):
"""Generates and compiles Java code for a proto_library dependency graph.
Args:
target: (Target) The `proto_library` target.
ctx: (RuleContext) The rule context.
Returns:
([JavaInfo, JavaProtoAspectInfo]) A JavaInfo describing the compiled Java
version of the `proto_library`, and a `JavaProtoAspectInfo` with all source
and runtime jars.
"""
deps = [dep[JavaInfo] for dep in ctx.rule.attr.deps]
exports = [exp[JavaInfo] for exp in ctx.rule.attr.exports]
proto_toolchain_info = toolchains.find_toolchain(
ctx,
"_aspect_proto_toolchain_for_javalite",
_JAVA_LITE_PROTO_TOOLCHAIN,
)
source_jar = None
if proto_common.experimental_should_generate_code(target[ProtoInfo], proto_toolchain_info, "java_lite_proto_library", target.label):
source_jar = ctx.actions.declare_file(ctx.label.name + "-lite-src.jar")
proto_common.compile(
ctx.actions,
target[ProtoInfo],
proto_toolchain_info,
[source_jar],
experimental_output_files = "single",
)
runtime = proto_toolchain_info.runtime
if runtime:
deps.append(runtime[JavaInfo])
java_info, jars = java_compile_for_protos(
ctx,
"-lite.jar",
source_jar,
deps,
exports,
injecting_rule_kind = "java_lite_proto_library",
)
transitive_jars = [dep[JavaProtoAspectInfo].jars for dep in ctx.rule.attr.deps]
return [
java_info,
JavaProtoAspectInfo(jars = depset(jars, transitive = transitive_jars)),
]
_java_lite_proto_aspect = aspect(
implementation = _aspect_impl,
attr_aspects = ["deps", "exports"],
attrs = toolchains.if_legacy_toolchain({
_PROTO_TOOLCHAIN_ATTR: attr.label(
default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java_lite"),
),
}),
fragments = ["java"],
required_providers = [ProtoInfo],
provides = [JavaInfo, JavaProtoAspectInfo],
toolchains = ["@bazel_tools//tools/jdk:toolchain_type"] +
toolchains.use_toolchain(_JAVA_LITE_PROTO_TOOLCHAIN),
)
def _rule_impl(ctx):
"""Merges results of `java_proto_aspect` in `deps`.
`java_lite_proto_library` is identical to `java_proto_library` in every respect, except it
builds JavaLite protos.
Implementation of this rule is built on the implementation of `java_proto_library`.
Args:
ctx: (RuleContext) The rule context.
Returns:
([JavaInfo, DefaultInfo, OutputGroupInfo, ProguardSpecInfo])
"""
proto_toolchain_info = toolchains.find_toolchain(
ctx,
"_aspect_proto_toolchain_for_javalite",
_JAVA_LITE_PROTO_TOOLCHAIN,
)
for dep in ctx.attr.deps:
proto_common.check_collocated(ctx.label, dep[ProtoInfo], proto_toolchain_info)
runtime = proto_toolchain_info.runtime
if runtime:
proguard_provider_specs = runtime[ProguardSpecInfo]
else:
proguard_provider_specs = ProguardSpecInfo(specs = depset())
java_info = java_info_merge_for_protos([dep[JavaInfo] for dep in ctx.attr.deps], merge_java_outputs = False)
transitive_src_and_runtime_jars = depset(transitive = [dep[JavaProtoAspectInfo].jars for dep in ctx.attr.deps])
transitive_runtime_jars = depset(transitive = [java_info.transitive_runtime_jars])
if hasattr(java_common, "add_constraints"):
java_info = java_common.add_constraints(java_info, constraints = ["android"])
return [
java_info,
DefaultInfo(
files = transitive_src_and_runtime_jars,
runfiles = ctx.runfiles(transitive_files = transitive_runtime_jars),
),
OutputGroupInfo(default = depset()),
proguard_provider_specs,
]
java_lite_proto_library = rule(
implementation = _rule_impl,
doc = """
<p>
<code>java_lite_proto_library</code> generates Java code from <code>.proto</code> files.
</p>
<p>
<code>deps</code> must point to <a href="protocol-buffer.html#proto_library"><code>proto_library
</code></a> rules.
</p>
<p>
Example:
</p>
<pre class="code">
<code class="lang-starlark">
java_library(
name = "lib",
runtime_deps = [":foo"],
)
java_lite_proto_library(
name = "foo",
deps = [":bar"],
)
proto_library(
name = "bar",
)
</code>
</pre>
""",
attrs = {
"deps": attr.label_list(providers = [ProtoInfo], aspects = [_java_lite_proto_aspect], doc = """
The list of <a href="protocol-buffer.html#proto_library"><code>proto_library</code></a>
rules to generate Java code for.
"""),
} | toolchains.if_legacy_toolchain({
_PROTO_TOOLCHAIN_ATTR: attr.label(
default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java_lite"),
),
}),
provides = [JavaInfo],
toolchains = toolchains.use_toolchain(_JAVA_LITE_PROTO_TOOLCHAIN),
)

@ -0,0 +1,62 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Support for compiling protoc generated Java code."""
load("@rules_java//java/private:proto_support.bzl", "compile", "merge") # buildifier: disable=bzl-visibility
# The provider is used to collect source and runtime jars in the `proto_library` dependency graph.
JavaProtoAspectInfo = provider("JavaProtoAspectInfo", fields = ["jars"])
java_info_merge_for_protos = merge
def java_compile_for_protos(ctx, output_jar_suffix, source_jar = None, deps = [], exports = [], injecting_rule_kind = "java_proto_library"):
"""Compiles Java source jar returned by proto compiler.
Use this call for java_xxx_proto_library. It uses java_common.compile with
some checks disabled (via javacopts) and jspecify disabled, so that the
generated code passes.
It also takes care that the input source jar is not repackaged with a
different name.
When `source_jar` is `None`, the function only merges `deps` and `exports`.
Args:
ctx: (RuleContext) Used to call `java_common.compile`
output_jar_suffix: (str) How to name the output jar. For example: `-speed.jar`.
source_jar: (File) Input source jar (may be `None`).
deps: (list[JavaInfo]) `deps` of the `proto_library`.
exports: (list[JavaInfo]) `exports` of the `proto_library`.
injecting_rule_kind: (str) Rule kind requesting the compilation.
It's embedded into META-INF of the produced runtime jar, for debugging.
Returns:
((JavaInfo, list[File])) The JavaInfo of this target and a list containing the
source and runtime jars, when they are created.
"""
if source_jar != None:
path, sep, filename = ctx.label.name.rpartition("/")
output_jar = ctx.actions.declare_file(path + sep + "lib" + filename + output_jar_suffix)
java_toolchain = ctx.toolchains["@bazel_tools//tools/jdk:toolchain_type"].java
java_info = compile(
ctx = ctx,
output = output_jar,
java_toolchain = java_toolchain,
source_jars = [source_jar],
deps = deps,
exports = exports,
output_source_jar = source_jar,
injecting_rule_kind = injecting_rule_kind,
javac_opts = java_toolchain._compatible_javacopts.get("proto", depset()),
enable_jspecify = False,
include_compilation_info = False,
)
jars = [source_jar, output_jar]
else:
# If there are no proto sources just pass along the compilation dependencies.
java_info = merge(deps + exports, merge_java_outputs = False, merge_source_jars = False)
jars = []
return java_info, jars
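# Illustrative example: a target named "sub/foo" compiled with
# output_jar_suffix "-speed.jar" declares the output jar "sub/libfoo-speed.jar".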

@ -1,5 +1,3 @@
"""Renames toplevel symbols so they can be exported in Starlark under the same name"""
NativeProtoInfo = ProtoInfo
native_proto_common = proto_common_do_not_use

@ -0,0 +1,35 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""
A helper rule that reads a native boolean flag.
"""
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
def _impl(ctx):
return [BuildSettingInfo(value = ctx.attr.value)]
_native_bool_flag_rule = rule(
implementation = _impl,
attrs = {"value": attr.bool()},
)
def native_bool_flag(*, name, flag, match_value = "true", result = True, **kwargs):
_native_bool_flag_rule(
name = name,
value = select({
name + "_setting": result,
"//conditions:default": not result,
}),
**kwargs
)
native.config_setting(
name = name + "_setting",
values = {flag: match_value},
visibility = ["//visibility:private"],
)
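# Hypothetical BUILD usage sketch (the flag name is made up); wrapped in a
# function here only so the example doesn't run at load time:
def _example_native_bool_flag():
    native_bool_flag(
        name = "my_flag",
        flag = "my_flag",
        match_value = "true",
        visibility = ["//visibility:private"],
    )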

@ -0,0 +1,59 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Vendored version of bazel_features for protobuf, to keep a one-step setup"""
_PROTO_BAZEL_FEATURES = """bazel_features = struct(
cc = struct(
protobuf_on_allowlist = {protobuf_on_allowlist},
),
proto = struct(
starlark_proto_info = {starlark_proto_info},
),
globals = struct(
PackageSpecificationInfo = {PackageSpecificationInfo},
ProtoInfo = getattr(getattr(native, 'legacy_globals', None), 'ProtoInfo', {ProtoInfo})
),
)
"""
def _proto_bazel_features_impl(rctx):
# An empty string is treated as a "dev version", which is greater than anything.
bazel_version = native.bazel_version or "999999.999999.999999"
version_parts = bazel_version.split("-")[0].split(".")
if len(version_parts) != 3:
fail("invalid Bazel version '{}': got {} dot-separated segments, want 3".format(bazel_version, len(version_parts)))
major_version_int = int(version_parts[0])
minor_version_int = int(version_parts[1])
starlark_proto_info = major_version_int >= 7
PackageSpecificationInfo = major_version_int > 6 or (major_version_int == 6 and minor_version_int >= 4)
protobuf_on_allowlist = major_version_int > 7
ProtoInfo = "ProtoInfo" if major_version_int < 8 else "None"
rctx.file("BUILD.bazel", """
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
bzl_library(
name = "features",
srcs = ["features.bzl"],
visibility = ["//visibility:public"],
)
exports_files(["features.bzl"])
""")
rctx.file("features.bzl", _PROTO_BAZEL_FEATURES.format(
starlark_proto_info = repr(starlark_proto_info),
PackageSpecificationInfo = "PackageSpecificationInfo" if PackageSpecificationInfo else "None",
protobuf_on_allowlist = repr(protobuf_on_allowlist),
ProtoInfo = ProtoInfo,
))
proto_bazel_features = repository_rule(
implementation = _proto_bazel_features_impl,
# Force reruns on server restarts to keep native.bazel_version up-to-date.
local = True,
)
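# Hypothetical WORKSPACE usage sketch:
#   proto_bazel_features(name = "proto_bazel_features")
# after which loads of "@proto_bazel_features//:features.bzl" (as seen in the
# files above) resolve to the generated features.bzl.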

@ -0,0 +1,186 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""
Definition of ProtoInfo provider.
"""
_warning = """ Don't use this field. It's intended for internal use and will be changed or removed
without warning."""
def _uniq(iterable):
unique_elements = {element: None for element in iterable}
return list(unique_elements.keys())
def _join(*path):
return "/".join([p for p in path if p != ""])
def _empty_to_dot(path):
return path if path else "."
def _from_root(root, repo, relpath):
"""Constructs an exec path from root to relpath"""
if not root:
# `relpath` is a directory with an input source file, the exec path is one of:
# - when in main repo: `package/path`
# - when in an external repository: `external/repo/package/path`
# - with sibling layout: `../repo/package/path`
return _join(repo, relpath)
else:
# `relpath` is a directory with a generated file or an output directory:
# - when in main repo: `{root}/package/path`
# - when in an external repository: `{root}/external/repo/package/path`
# - with sibling layout: `{root}/package/path`
return _join(root, "" if repo.startswith("../") else repo, relpath)
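# Worked examples (illustrative):
#   _from_root("", "", "pkg/path")                          -> "pkg/path"
#   _from_root("", "../repo", "pkg/path")                   -> "../repo/pkg/path"
#   _from_root("bazel-out/k8-fastbuild/bin", "../repo", "pkg/path")
#       -> "bazel-out/k8-fastbuild/bin/pkg/path"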
def _create_proto_info(*, srcs, deps, descriptor_set, proto_path = "", workspace_root = "", bin_dir = None, allow_exports = None):
"""Constructs ProtoInfo.
Args:
srcs: ([File]) List of .proto files (possibly under _virtual path)
deps: ([ProtoInfo]) List of dependencies
descriptor_set: (File) Descriptor set for this Proto
proto_path: (str) Path that should be stripped from files in srcs. When
stripping is needed, the files should be symlinked into `_virtual_imports/target_name`
directory. Only such paths are accepted.
workspace_root: (str) Set to ctx.workspace_root if this is not the main repository.
bin_dir: (str) Set to ctx.bin_dir if _virtual_imports are used.
allow_exports: (Target) The packages where this proto_library can be exported.
Returns:
(ProtoInfo)
"""
# Validate parameters
src_prefix = _join(workspace_root.replace("external/", "../"), proto_path)
for src in srcs:
if type(src) != "File":
fail("srcs parameter expects a list of Files")
if src.owner.workspace_root != workspace_root:
fail("srcs parameter expects all files to have the same workspace_root: ", workspace_root)
if not src.short_path.startswith(src_prefix):
fail("srcs parameter expects all files start with %s" % src_prefix)
if type(descriptor_set) != "File":
fail("descriptor_set parameter expected to be a File")
if proto_path:
if "_virtual_imports/" not in proto_path:
fail("proto_path needs to contain '_virtual_imports' directory")
if proto_path.split("/")[-2] != "_virtual_imports":
fail("proto_path needs to be formed like '_virtual_imports/target_name'")
if not bin_dir:
fail("bin_dir parameter should be set when _virtual_imports are used")
direct_proto_sources = srcs
transitive_proto_sources = depset(
direct = direct_proto_sources,
transitive = [dep._transitive_proto_sources for dep in deps],
order = "preorder",
)
transitive_sources = depset(
direct = srcs,
transitive = [dep.transitive_sources for dep in deps],
order = "preorder",
)
# There can be more than one direct proto_path, for example when there are
# generated and non-generated .proto files in srcs
root_paths = _uniq([src.root.path for src in srcs])
transitive_proto_path = depset(
direct = [_empty_to_dot(_from_root(root, workspace_root, proto_path)) for root in root_paths],
transitive = [dep.transitive_proto_path for dep in deps],
)
if srcs:
check_deps_sources = depset(direct = srcs)
else:
check_deps_sources = depset(transitive = [dep.check_deps_sources for dep in deps])
transitive_descriptor_sets = depset(
direct = [descriptor_set],
transitive = [dep.transitive_descriptor_sets for dep in deps],
)
# Layering checks.
if srcs:
exported_sources = depset(direct = direct_proto_sources)
else:
exported_sources = depset(transitive = [dep._exported_sources for dep in deps])
if "_virtual_imports/" in proto_path:
#TODO: remove bin_dir from proto_source_root (when users assuming it's there are migrated)
proto_source_root = _empty_to_dot(_from_root(bin_dir, workspace_root, proto_path))
elif workspace_root.startswith("../"):
proto_source_root = proto_path
else:
proto_source_root = _empty_to_dot(_join(workspace_root, proto_path))
proto_info = dict(
direct_sources = srcs,
transitive_sources = transitive_sources,
direct_descriptor_set = descriptor_set,
transitive_descriptor_sets = transitive_descriptor_sets,
proto_source_root = proto_source_root,
transitive_proto_path = transitive_proto_path,
check_deps_sources = check_deps_sources,
transitive_imports = transitive_sources,
_direct_proto_sources = direct_proto_sources,
_transitive_proto_sources = transitive_proto_sources,
_exported_sources = exported_sources,
)
if allow_exports:
proto_info["allow_exports"] = allow_exports
return proto_info
ProtoInfo, _ = provider(
doc = "Encapsulates information provided by a `proto_library.`",
fields = {
"direct_sources": "(list[File]) The `.proto` source files from the `srcs` attribute.",
"transitive_sources": """(depset[File]) The `.proto` source files from this rule and all
its dependent protocol buffer rules.""",
"direct_descriptor_set": """(File) The descriptor set of the direct sources. If no srcs,
contains an empty file.""",
"transitive_descriptor_sets": """(depset[File]) A set of descriptor set files of all
dependent `proto_library` rules, and this one's. This is not the same as passing
--include_imports to proto-compiler. Will be empty if no dependencies.""",
"proto_source_root": """(str) The directory relative to which the `.proto` files defined in
the `proto_library` are defined. For example, if this is `a/b` and the rule has the
file `a/b/c/d.proto` as a source, that source file would be imported as
`import c/d.proto`
In principle, the `proto_source_root` directory itself should always
be relative to the output directory (`ctx.bin_dir`).
This is at the moment not true for `proto_libraries` using (additional and/or strip)
import prefixes. `proto_source_root` is in this case prefixed with the output
directory. For example, the value is similar to
`bazel-out/k8-fastbuild/bin/a/_virtual_includes/b` for an input file in
`a/_virtual_includes/b/c.proto` that should be imported as `c.proto`.
When using the value please account for both cases in a general way.
That is assume the value is either prefixed with the output directory or not.
This will make it possible to fix `proto_library` in the future.
""",
"transitive_proto_path": """(depset(str) A set of `proto_source_root`s collected from the
transitive closure of this rule.""",
"check_deps_sources": """(depset[File]) The `.proto` sources from the 'srcs' attribute.
If the library is a proxy library that has no sources, it contains the
`check_deps_sources` from this library's direct deps.""",
"allow_exports": """(Target) The packages where this proto_library can be exported.""",
# Deprecated fields:
"transitive_imports": """(depset[File]) Deprecated: use `transitive_sources` instead.""",
# Internal fields:
"_direct_proto_sources": """(list[File]) The `ProtoSourceInfo`s from the `srcs`
attribute.""" + _warning,
"_transitive_proto_sources": """(depset[File]) The `ProtoSourceInfo`s from this
rule and all its dependent protocol buffer rules.""" + _warning,
"_exported_sources": """(depset[File]) A set of `ProtoSourceInfo`s that may be
imported by another `proto_library` depending on this one.""" + _warning,
},
init = _create_proto_info,
)
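# Editor's illustrative sketch (not part of this change); `_strip_output_prefix`
# is a hypothetical helper name. Because `proto_source_root` may or may not be
# prefixed with the output directory (see the field docs above), consumers
# should normalize it before comparing or joining paths:
def _strip_output_prefix(proto_source_root, bin_dir):
    # e.g. "bazel-out/k8-fastbuild/bin/a/_virtual_imports/b" -> "a/_virtual_imports/b"
    if proto_source_root.startswith(bin_dir + "/"):
        return proto_source_root[len(bin_dir) + 1:]
    return proto_source_root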

@ -0,0 +1,155 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Implementation of the proto_lang_toolchain rule."""
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
def _rule_impl(ctx):
provided_proto_sources = depset(transitive = [bp[ProtoInfo]._transitive_proto_sources for bp in ctx.attr.blacklisted_protos]).to_list()
flag = ctx.attr.command_line
if "$(PLUGIN_OUT)" in flag:
fail("in attribute 'command_line': Placeholder '$(PLUGIN_OUT)' is not supported.")
flag = flag.replace("$(OUT)", "%s")
plugin = None
if ctx.attr.plugin != None:
plugin = ctx.attr.plugin[DefaultInfo].files_to_run
if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
proto_compiler = ctx.toolchains[toolchains.PROTO_TOOLCHAIN].proto.proto_compiler
protoc_opts = ctx.toolchains[toolchains.PROTO_TOOLCHAIN].proto.protoc_opts
else:
proto_compiler = ctx.attr._proto_compiler.files_to_run
protoc_opts = ctx.fragments.proto.experimental_protoc_opts
if ctx.attr.protoc_minimal_do_not_use:
proto_compiler = ctx.attr.protoc_minimal_do_not_use.files_to_run
proto_lang_toolchain_info = ProtoLangToolchainInfo(
out_replacement_format_flag = flag,
output_files = ctx.attr.output_files,
plugin_format_flag = ctx.attr.plugin_format_flag,
plugin = plugin,
runtime = ctx.attr.runtime,
provided_proto_sources = provided_proto_sources,
proto_compiler = proto_compiler,
protoc_opts = protoc_opts,
progress_message = ctx.attr.progress_message,
mnemonic = ctx.attr.mnemonic,
allowlist_different_package = ctx.attr.allowlist_different_package,
toolchain_type = ctx.attr.toolchain_type.label if ctx.attr.toolchain_type else None,
)
return [
DefaultInfo(files = depset(), runfiles = ctx.runfiles()),
platform_common.ToolchainInfo(proto = proto_lang_toolchain_info),
# TODO: remove when --incompatible_enable_proto_toolchains is flipped and removed
proto_lang_toolchain_info,
]
proto_lang_toolchain = rule(
_rule_impl,
doc = """
<p>If using Bazel, please load the rule from <a href="https://github.com/bazelbuild/rules_proto">
https://github.com/bazelbuild/rules_proto</a>.
<p>Specifies how a LANG_proto_library rule (e.g., <code>java_proto_library</code>) should invoke the
proto-compiler.
Some LANG_proto_library rules allow specifying which toolchain to use using command-line flags;
consult their documentation.
<p>Normally you should not write these kinds of rules unless you want to
tune your Java compiler.
<p>There's no compiler attribute. The proto-compiler is taken from the proto_library rule we
attach to, and is passed to Blaze as a command-line flag.
Several features require a proto-compiler to be invoked on the proto_library rule itself.
It's beneficial to enforce that the compiler LANG_proto_library uses is the same as the one
<code>proto_library</code> does.
<h4>Examples</h4>
<p>A simple example would be:
<pre><code class="lang-starlark">
proto_lang_toolchain(
name = "javalite_toolchain",
command_line = "--javalite_out=shared,immutable:$(OUT)",
plugin = ":javalite_plugin",
runtime = ":protobuf_lite",
)
</code></pre>
""",
attrs = {
"progress_message": attr.string(default = "Generating proto_library %{label}", doc = """
This value will be set as the progress message on protoc action."""),
"mnemonic": attr.string(default = "GenProto", doc = """
This value will be set as the mnemonic on protoc action."""),
"command_line": attr.string(mandatory = True, doc = """
This value will be passed to the proto-compiler to generate the code. Only include the parts
specific to this code-generator/plugin (e.g., do not include -I parameters).
<ul>
<li><code>$(OUT)</code> is LANG_proto_library-specific. The rules are expected to define
how they interpret this variable. For Java, for example, $(OUT) will be replaced with
the src-jar filename to create.</li>
</ul>"""),
"output_files": attr.string(values = ["single", "multiple", "legacy"], default = "legacy", doc = """
Controls how <code>$(OUT)</code> in <code>command_line</code> is formatted, either by
a path to a single file or output directory in case of multiple files.
Possible values are: "single", "multiple"."""),
"plugin_format_flag": attr.string(doc = """
If provided, this value will be passed to the proto-compiler to use the plugin.
The value must contain a single %s, which is replaced with the plugin executable:
<code>--plugin=protoc-gen-PLUGIN=&lt;executable&gt;</code>."""),
"plugin": attr.label(
executable = True,
cfg = "exec",
doc = """
If provided, it will be made available to the action that calls the proto-compiler, and will be
passed to the proto-compiler:
<code>--plugin=protoc-gen-PLUGIN=&lt;executable&gt;</code>.""",
),
"runtime": attr.label(doc = """
A language-specific library that the generated code is compiled against.
The exact behavior is LANG_proto_library-specific.
Java, for example, should compile against the runtime."""),
"blacklisted_protos": attr.label_list(
providers = [ProtoInfo],
doc = """
No code will be generated for files in the <code>srcs</code> attribute of
<code>blacklisted_protos</code>.
This is used for .proto files that are already linked into proto runtimes, such as
<code>any.proto</code>.""",
),
# TODO: add doc
"allowlist_different_package": attr.label(
cfg = "exec",
providers = [bazel_features.globals.PackageSpecificationInfo] if bazel_features.globals.PackageSpecificationInfo else [],
),
# TODO: add doc
"toolchain_type": attr.label(),
# DO NOT USE. For Protobuf incremental changes only: b/305068148.
"protoc_minimal_do_not_use": attr.label(
cfg = "exec",
executable = True,
),
} | ({} if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION else {
"_proto_compiler": attr.label(
cfg = "exec",
executable = True,
allow_files = True,
default = configuration_field("proto", "proto_compiler"),
),
}),
provides = [ProtoLangToolchainInfo],
fragments = ["proto"],
toolchains = toolchains.use_toolchain(toolchains.PROTO_TOOLCHAIN), # Used to obtain protoc
)
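# Editor's illustrative note (not part of this change): for the
# `javalite_toolchain` example above, `$(OUT)` in `command_line` is substituted
# according to `output_files`. With "single" the expanded flag would name a
# single file, e.g. (hypothetical path):
#   --javalite_out=shared,immutable:bazel-out/k8-fastbuild/bin/foo/libfoo-lite-src.jar
# With "multiple" it would name the output directory instead.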

@ -1,13 +1,17 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""A Starlark implementation of the proto_toolchain rule."""
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
def _impl(ctx):
kwargs = {}
if getattr(proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False):
kwargs["toolchain_type"] = "@rules_proto//proto:toolchain_type"
return [
DefaultInfo(
files = depset(),
@ -23,7 +27,7 @@ def _impl(ctx):
protoc_opts = ctx.fragments.proto.experimental_protoc_opts,
progress_message = ctx.attr.progress_message,
mnemonic = ctx.attr.mnemonic,
**kwargs
**(dict(toolchain_type = toolchains.PROTO_TOOLCHAIN) if proto_common.INCOMPATIBLE_PASS_TOOLCHAIN_TYPE else {})
),
),
]

@ -0,0 +1,49 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""
Toolchain helpers.
The helpers here should be used during the migration to toolchains in the proto rules.
Anybody who needs them in another repository should copy them, because
the helpers can be removed once the migration is finished.
"""
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:native.bzl", "native_proto_common")
_incompatible_toolchain_resolution = getattr(native_proto_common, "INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION", False)
def _find_toolchain(ctx, legacy_attr, toolchain_type):
if _incompatible_toolchain_resolution:
toolchain = ctx.toolchains[toolchain_type]
if not toolchain:
fail("No toolchains registered for '%s'." % toolchain_type)
return toolchain.proto
else:
return getattr(ctx.attr, legacy_attr)[ProtoLangToolchainInfo]
def _use_toolchain(toolchain_type):
if _incompatible_toolchain_resolution:
return [config_common.toolchain_type(toolchain_type, mandatory = False)]
else:
return []
def _if_legacy_toolchain(legacy_attr_dict):
if _incompatible_toolchain_resolution:
return {}
else:
return legacy_attr_dict
toolchains = struct(
use_toolchain = _use_toolchain,
find_toolchain = _find_toolchain,
if_legacy_toolchain = _if_legacy_toolchain,
INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = _incompatible_toolchain_resolution,
PROTO_TOOLCHAIN = "//bazel/private:proto_toolchain_type",
)
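# Editor's illustrative sketch (not part of this change); `_EXAMPLE_TOOLCHAIN`,
# `_example_aspect_impl`, and the attribute name are hypothetical. A rule or
# aspect can support both resolution modes by combining the three helpers,
# mirroring how py_proto_library uses them later in this change:
#
#     _EXAMPLE_TOOLCHAIN = "//bazel/private:python_toolchain_type"
#
#     def _example_aspect_impl(target, ctx):
#         # Resolves via toolchain resolution when enabled, else via the legacy attribute.
#         toolchain_info = toolchains.find_toolchain(ctx, "_legacy_toolchain", _EXAMPLE_TOOLCHAIN)
#         return []
#
#     example_aspect = aspect(
#         implementation = _example_aspect_impl,
#         attrs = toolchains.if_legacy_toolchain({
#             "_legacy_toolchain": attr.label(default = "//python:python_toolchain"),
#         }),
#         toolchains = toolchains.use_toolchain(_EXAMPLE_TOOLCHAIN),
#     )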

@ -0,0 +1,74 @@
load("//bazel/toolchains:proto_lang_toolchain.bzl", "proto_lang_toolchain")
load("//bazel/toolchains:proto_toolchain.bzl", "proto_toolchain")
# Keep this file as small as possible and free of any unnecessary loads.
# It is loaded by every use of the protobuf repository, and loads here can force
# fetching of additional external repositories.
# It's also intentionally using toolchain instead of proto_lang_toolchain,
# because the former does not resolve its dependencies until toolchain resolution
# needs them.
proto_toolchain(
name = "protoc_sources",
exec_compatible_with = [],
proto_compiler = "//:protoc",
)
toolchain(
name = "cc_source_toolchain",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//:cc_toolchain",
toolchain_type = "//bazel/private:cc_toolchain_type",
)
toolchain(
name = "java_source_toolchain",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//java/core:toolchain",
toolchain_type = "//bazel/private:java_toolchain_type",
)
toolchain(
name = "javalite_source_toolchain",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//java/lite:toolchain",
toolchain_type = "//bazel/private:javalite_toolchain_type",
)
toolchain(
name = "python_source_toolchain",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//python:python_toolchain",
toolchain_type = "//bazel/private:python_toolchain_type",
)
# Following toolchain registrations are for builtin Bazel 7 rules
# which defined them in other repositories.
toolchain(
name = "cc_source_toolchain_bazel7",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//:cc_toolchain",
toolchain_type = "@rules_cc//cc/proto:toolchain_type",
)
toolchain(
name = "java_source_toolchain_bazel7",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//java/core:toolchain",
toolchain_type = "@rules_java//java/proto:toolchain_type",
)
toolchain(
name = "javalite_source_toolchain_bazel7",
exec_compatible_with = [],
target_compatible_with = [],
toolchain = "//java/lite:toolchain",
toolchain_type = "@rules_java//java/proto:lite_toolchain_type",
)

@ -6,20 +6,13 @@ load("//bazel/common:proto_info.bzl", "ProtoInfo")
load(":upb_proto_library_internal/cc_library_func.bzl", "cc_library_func")
load(":upb_proto_library_internal/copts.bzl", "UpbProtoLibraryCoptsInfo")
# begin:github_only
_is_google3 = False
# end:github_only
# begin:google_only
# _is_google3 = True
# end:google_only
GeneratedSrcsInfo = provider(
"Provides generated headers and sources",
fields = {
"srcs": "list of srcs",
"hdrs": "list of hdrs",
"thunks": "Experimental, do not use. List of srcs defining C API. Incompatible with hdrs.",
},
)
@ -53,7 +46,6 @@ def _merge_generated_srcs(srcs):
return GeneratedSrcsInfo(
srcs = _concat_lists([s.srcs for s in srcs]),
hdrs = _concat_lists([s.hdrs for s in srcs]),
thunks = _concat_lists([s.thunks for s in srcs]),
)
def _get_implicit_weak_field_sources(ctx, proto_info):
@ -102,11 +94,10 @@ def _get_feature_configuration(ctx, cc_toolchain, proto_info):
def _generate_srcs_list(ctx, generator, proto_info):
if len(proto_info.direct_sources) == 0:
return GeneratedSrcsInfo(srcs = [], hdrs = [], thunks = [], includes = [])
return GeneratedSrcsInfo(srcs = [], hdrs = [], includes = [])
ext = "." + generator
srcs = []
thunks = []
hdrs = proto_common.declare_generated_files(
ctx.actions,
extension = ext + ".h",
@ -121,27 +112,10 @@ def _generate_srcs_list(ctx, generator, proto_info):
extension = ext + ".c",
proto_info = proto_info,
)
if generator == "upb":
thunks = proto_common.declare_generated_files(
ctx.actions,
extension = ext + ".thunks.c",
proto_info = proto_info,
)
ctx.actions.run_shell(
inputs = hdrs,
outputs = thunks,
command = " && ".join([
"sed 's/UPB_INLINE //' {} > {}".format(hdr.path, thunk.path)
for (hdr, thunk) in zip(hdrs, thunks)
]),
progress_message = "Generating thunks for upb protos API for: " + ctx.label.name,
mnemonic = "GenUpbProtosThunks",
)
return GeneratedSrcsInfo(
srcs = srcs,
hdrs = hdrs,
thunks = thunks,
)
def _generate_upb_protos(ctx, generator, proto_info, feature_configuration):
@ -168,9 +142,7 @@ def _generate_upb_protos(ctx, generator, proto_info, feature_configuration):
return srcs
def _generate_name(ctx, generator, thunks = False):
if thunks:
return ctx.rule.attr.name + "." + generator + ".thunks"
def _generate_name(ctx, generator):
return ctx.rule.attr.name + "." + generator
def _get_dep_cc_infos(target, ctx, generator, cc_provider, dep_cc_provider):
@ -201,24 +173,9 @@ def _compile_upb_protos(ctx, files, generator, dep_ccinfos, cc_provider, proto_i
dep_ccinfos = dep_ccinfos,
)
if files.thunks:
cc_info_with_thunks = cc_library_func(
ctx = ctx,
name = _generate_name(ctx, generator, files.thunks),
hdrs = [],
srcs = files.thunks,
includes = [output_dir(ctx, proto_info)],
copts = ctx.attr._copts[UpbProtoLibraryCoptsInfo].copts,
dep_ccinfos = dep_ccinfos + [cc_info],
)
return cc_provider(
cc_info = cc_info,
cc_info_with_thunks = cc_info_with_thunks,
)
else:
return cc_provider(
cc_info = cc_info,
)
return cc_provider(
cc_info = cc_info,
)
_GENERATORS = ["upb", "upbdefs", "upb_minitable"]
@ -229,7 +186,6 @@ def _get_hint_providers(ctx, generator):
possible_owners = []
for generator in _GENERATORS:
possible_owners.append(ctx.label.relative(_generate_name(ctx, generator)))
possible_owners.append(ctx.label.relative(_generate_name(ctx, generator, thunks = True)))
if hasattr(cc_common, "CcSharedLibraryHintInfo"):
return [cc_common.CcSharedLibraryHintInfo(owners = possible_owners)]
@ -258,7 +214,7 @@ def upb_proto_aspect_impl(
`cc_info` field. The aspect will ensure that each compilation action can compile and link
against this provider's cc_info for all proto_library() deps.
dep_cc_provider: For aspects that depend on other aspects, this is the provider of the aspect
that we depend on. The aspect wil be able to include the header files from this provider.
that we depend on. The aspect will be able to include the header files from this provider.
file_provider: A provider that this aspect will attach to the target to expose the source
files generated by this aspect. These files are primarily useful for returning in
DefaultInfo(), so users who build the upb_*proto_library() rule directly can view the

@ -2,25 +2,9 @@
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain", "use_cpp_toolchain")
# begin:google_only
#
# def upb_use_cpp_toolchain():
# # TODO: We shouldn't need to add this to the result of use_cpp_toolchain().
# return [
# config_common.toolchain_type(
# "@bazel_tools//tools/cpp:cc_runtimes_toolchain_type",
# mandatory = False,
# ),
# ] + use_cpp_toolchain()
#
# end:google_only
# begin:github_only
def upb_use_cpp_toolchain():
return use_cpp_toolchain()
# end:github_only
def cc_library_func(ctx, name, hdrs, srcs, copts, includes, dep_ccinfos):
"""Like cc_library(), but callable from rules.
@ -37,16 +21,6 @@ def cc_library_func(ctx, name, hdrs, srcs, copts, includes, dep_ccinfos):
CcInfo provider for this compilation.
"""
# begin:google_only
# cc_runtimes_toolchain = ctx.toolchains["@bazel_tools//tools/cpp:cc_runtimes_toolchain_type"]
# if cc_runtimes_toolchain:
# dep_ccinfos += [
# target[CcInfo]
# for target in cc_runtimes_toolchain.cc_runtimes_info.runtimes
# ]
#
# end:google_only
compilation_contexts = [info.compilation_context for info in dep_ccinfos]
linking_contexts = [info.linking_context for info in dep_ccinfos]
toolchain = find_cpp_toolchain(ctx)
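# Editor's illustrative usage (not part of this change); variable names are
# hypothetical. From a rule or aspect implementation one would call:
#
#     cc_info = cc_library_func(
#         ctx = ctx,
#         name = ctx.label.name + ".example",
#         hdrs = generated_hdrs,
#         srcs = generated_srcs,
#         copts = [],
#         includes = [],
#         dep_ccinfos = [dep[CcInfo] for dep in ctx.attr.deps],
#     )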

@ -1,3 +1,20 @@
"""proto_library rule"""
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""
Macro wrapping the proto_library rule.
"""
proto_library = native.proto_library
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/private:bazel_proto_library_rule.bzl", _proto_library = "proto_library")
def proto_library(**kwattrs):
# This condition causes Starlark rules to be used only on Bazel >=7.0.0
if bazel_features.proto.starlark_proto_info:
_proto_library(**kwattrs)
else:
# On older Bazel versions, keep using native rules so that a mismatch in ProtoInfo doesn't happen
native.proto_library(**kwattrs)

@ -3,8 +3,9 @@
load("@rules_python//python:py_info.bzl", "PyInfo")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
PY_PROTO_TOOLCHAIN = "@rules_python//python/proto:toolchain_type"
_PY_PROTO_TOOLCHAIN = Label("//bazel/private:python_toolchain_type")
_PyProtoInfo = provider(
doc = "Encapsulates information needed by the Python proto rules.",
@ -22,9 +23,6 @@ _PyProtoInfo = provider(
def _filter_provider(provider, *attrs):
return [dep[provider] for attr in attrs for dep in attr if provider in dep]
def _incompatible_toolchains_enabled():
return getattr(proto_common, "INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION", False)
def _py_proto_aspect_impl(target, ctx):
"""Generates and compiles Python code for a proto_library.
@ -51,10 +49,10 @@ def _py_proto_aspect_impl(target, ctx):
proto.path,
))
if _incompatible_toolchains_enabled():
toolchain = ctx.toolchains[PY_PROTO_TOOLCHAIN]
if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
toolchain = ctx.toolchains[_PY_PROTO_TOOLCHAIN]
if not toolchain:
fail("No toolchains registered for '%s'." % PY_PROTO_TOOLCHAIN)
fail("No toolchains registered for '%s'." % _PY_PROTO_TOOLCHAIN)
proto_lang_toolchain_info = toolchain.proto
else:
proto_lang_toolchain_info = getattr(ctx.attr, "_aspect_proto_toolchain")[proto_common.ProtoLangToolchainInfo]
@ -120,15 +118,15 @@ def _py_proto_aspect_impl(target, ctx):
_py_proto_aspect = aspect(
implementation = _py_proto_aspect_impl,
attrs = {} if _incompatible_toolchains_enabled() else {
attrs = toolchains.if_legacy_toolchain({
"_aspect_proto_toolchain": attr.label(
default = "//python:python_toolchain",
),
},
}),
attr_aspects = ["deps"],
required_providers = [ProtoInfo],
provides = [_PyProtoInfo],
toolchains = [PY_PROTO_TOOLCHAIN] if _incompatible_toolchains_enabled() else [],
toolchains = toolchains.use_toolchain(_PY_PROTO_TOOLCHAIN),
)
def _py_proto_library_rule(ctx):

@ -0,0 +1,5 @@
load(":proto_common_compile_tests.bzl", "proto_common_compile_test_suite")
package(default_applicable_licenses = ["//:license"])
proto_common_compile_test_suite(name = "proto_common_compile_test_suite")

@ -0,0 +1,368 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Tests for `proto_common.compile` function."""
load("@rules_testing//lib:analysis_test.bzl", "analysis_test", "test_suite")
load("@rules_testing//lib:truth.bzl", "matching")
load("@rules_testing//lib:util.bzl", "util")
load("//bazel:proto_library.bzl", "proto_library")
load("//bazel/tests/testdata:compile_rule.bzl", "compile_rule")
protocol_compiler = "/protoc"
def proto_common_compile_test_suite(name):
util.helper_target(
proto_library,
name = "simple_proto",
srcs = ["A.proto"],
)
test_suite(
name = name,
tests = [
_test_compile_basic,
_test_compile_noplugin,
_test_compile_with_plugin_output,
_test_compile_with_directory_plugin_output,
_test_compile_additional_args,
_test_compile_additional_tools,
_test_compile_additional_tools_no_plugin,
_test_compile_additional_inputs,
_test_compile_resource_set,
_test_compile_protoc_opts,
_test_compile_direct_generated_protos,
_test_compile_indirect_generated_protos,
],
)
# Verifies basic usage of `proto_common.compile`.
def _test_compile_basic(name):
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = ":simple_proto",
)
analysis_test(
name = name,
target = name + "_compile",
impl = _test_compile_basic_impl,
)
def _test_compile_basic_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic")
action.argv().contains_exactly_predicates(
[
matching.str_endswith(protocol_compiler),
matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
matching.equals_wrapper("-I."),
matching.str_endswith("/A.proto"),
],
)
action.mnemonic().equals("MyMnemonic")
# Verifies usage of `proto_common.compile` with no plugin specified by the toolchain.
def _test_compile_noplugin(name):
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = ":simple_proto",
toolchain = "//bazel/tests/testdata:toolchain_noplugin",
)
analysis_test(
name = name,
target = name + "_compile",
impl = _test_compile_noplugin_impl,
)
def _test_compile_noplugin_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic")
action.argv().contains_exactly_predicates(
[
matching.str_endswith(protocol_compiler),
matching.equals_wrapper("-I."),
matching.str_endswith("/A.proto"),
],
)
# Verifies usage of `proto_common.compile` with `plugin_output` parameter set to a file.
def _test_compile_with_plugin_output(name):
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = ":simple_proto",
plugin_output = "single",
)
analysis_test(
name = name,
target = name + "_compile",
impl = _test_compile_with_plugin_output_impl,
)
def _test_compile_with_plugin_output_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic")
action.argv().contains_exactly_predicates(
[
matching.str_endswith(protocol_compiler),
matching.str_matches("--java_out=param1,param2:b*-out/*/test_compile_with_plugin_output_compile"),
matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
matching.equals_wrapper("-I."),
matching.str_endswith("/A.proto"),
],
)
# Verifies usage of `proto_common.compile` with `plugin_output` parameter set to a directory.
def _test_compile_with_directory_plugin_output(name):
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = ":simple_proto",
plugin_output = "multiple",
)
analysis_test(
name = name,
target = name + "_compile",
impl = _test_compile_with_directory_plugin_output_impl,
)
def _test_compile_with_directory_plugin_output_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic")
action.argv().contains_exactly_predicates(
[
matching.str_endswith(protocol_compiler),
matching.str_matches("--java_out=param1,param2:b*-out/*/bin"),
matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
matching.equals_wrapper("-I."),
matching.str_endswith("/A.proto"),
],
)
# Verifies usage of `proto_common.compile` with `additional_args` parameter
def _test_compile_additional_args(name):
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = ":simple_proto",
additional_args = ["--a", "--b"],
)
analysis_test(
name = name,
target = name + "_compile",
impl = _test_compile_additional_args_impl,
)
def _test_compile_additional_args_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic")
action.argv().contains_exactly_predicates(
[
matching.str_endswith(protocol_compiler),
matching.equals_wrapper("--a"),
matching.equals_wrapper("--b"),
matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
matching.equals_wrapper("-I."),
matching.str_endswith("/A.proto"),
],
)
# Verifies usage of `proto_common.compile` with `additional_tools` parameter
def _test_compile_additional_tools(name):
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = ":simple_proto",
additional_tools = [
"//bazel/tests/testdata:_tool1",
"//bazel/tests/testdata:_tool2",
],
)
analysis_test(
name = name,
target = name + "_compile",
impl = _test_compile_additional_tools_impl,
)
def _test_compile_additional_tools_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic")
action.inputs().contains_at_least_predicates(
[
matching.file_basename_equals("_tool1"),
matching.file_basename_equals("_tool2"),
matching.file_basename_equals("plugin"),
],
)
# Verifies usage of `proto_common.compile` with `additional_tools` parameter and no plugin on the toolchain.
def _test_compile_additional_tools_no_plugin(name):
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = ":simple_proto",
additional_tools = [
"//bazel/tests/testdata:_tool1",
"//bazel/tests/testdata:_tool2",
],
toolchain = "//bazel/tests/testdata:toolchain_noplugin",
)
analysis_test(
name = name,
target = name + "_compile",
impl = _test_compile_additional_tools_no_plugin_impl,
)
def _test_compile_additional_tools_no_plugin_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic")
action.inputs().contains_at_least_predicates(
[
matching.file_basename_equals("_tool1"),
matching.file_basename_equals("_tool2"),
],
)
action.inputs().not_contains_predicate(matching.file_basename_equals("plugin"))
# Verifies usage of `proto_common.compile` with `additional_inputs` parameter.
def _test_compile_additional_inputs(name):
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = ":simple_proto",
additional_inputs = ["input1.txt", "input2.txt"],
)
analysis_test(
name = name,
target = name + "_compile",
impl = _test_compile_additional_inputs_impl,
)
def _test_compile_additional_inputs_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic")
action.inputs().contains_at_least_predicates(
[
matching.file_basename_equals("input1.txt"),
matching.file_basename_equals("input2.txt"),
],
)
# Verifies usage of `proto_common.compile` with the `resource_set` parameter.
def _test_compile_resource_set(name):
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = ":simple_proto",
use_resource_set = True,
)
analysis_test(
name = name,
target = name + "_compile",
impl = _test_compile_resource_set_impl,
)
def _test_compile_resource_set_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic") # @unused
# We can't check the specification of the resource set, but we at least verify that analysis passes.
# Verifies `--protocopt` flags are passed through to the command line.
def _test_compile_protoc_opts(name):
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = ":simple_proto",
)
analysis_test(
name = name,
target = name + "_compile",
config_settings = {"//command_line_option:protocopt": ["--foo", "--bar"]},
impl = _test_compile_protoc_opts_impl,
)
def _test_compile_protoc_opts_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic")
action.argv().contains_exactly_predicates(
[
matching.str_endswith(protocol_compiler),
matching.equals_wrapper("--foo"),
matching.equals_wrapper("--bar"),
matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
matching.equals_wrapper("-I."),
matching.str_endswith("/A.proto"),
],
)
# Verifies `proto_common.compile` correctly handles directly generated `.proto` files.
def _test_compile_direct_generated_protos(name):
util.helper_target(native.genrule, name = name + "_generate_G", cmd = "", outs = ["G.proto"])
util.helper_target(
proto_library,
name = name + "_directly_generated_proto",
srcs = ["A.proto", "G.proto"],
)
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = name + "_directly_generated_proto",
)
analysis_test(
name = name,
target = name + "_compile",
impl = _test_compile_direct_generated_protos_impl,
)
def _test_compile_direct_generated_protos_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic")
action.argv().contains_exactly_predicates(
[
matching.str_endswith(protocol_compiler),
matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
matching.str_matches("-Ib*-out/*/*"),
matching.equals_wrapper("-I."),
matching.str_endswith("/A.proto"),
matching.str_matches("*-out/*/*/*/G.proto"),
],
)
# Verifies `proto_common.compile` correctly handles indirectly generated `.proto` files.
def _test_compile_indirect_generated_protos(name):
util.helper_target(native.genrule, name = "_generate_h", srcs = ["A.txt"], cmd = "", outs = ["H.proto"])
util.helper_target(proto_library, name = "_generated_proto", srcs = ["H.proto"])
util.helper_target(
proto_library,
name = name + "_indirectly_generated_proto",
srcs = ["A.proto"],
deps = [":_generated_proto"],
)
util.helper_target(
compile_rule,
name = name + "_compile",
proto_dep = name + "_indirectly_generated_proto",
)
analysis_test(
name = name,
target = name + "_compile",
impl = _test_compile_indirect_generated_protos_impl,
)
def _test_compile_indirect_generated_protos_impl(env, target):
action = env.expect.that_target(target).action_named("MyMnemonic")
action.argv().contains_exactly_predicates(
[
matching.str_endswith(protocol_compiler),
matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
matching.str_matches("-Ib*-out/*/*"),
matching.equals_wrapper("-I."),
matching.str_endswith("/A.proto"),
],
)

@ -0,0 +1,135 @@
load("//bazel/toolchains:proto_lang_toolchain.bzl", "proto_lang_toolchain")
package(
default_applicable_licenses = ["//:license"],
default_visibility = ["//visibility:public"],
)
proto_lang_toolchain(
name = "toolchain",
blacklisted_protos = [":denied"],
command_line = "--java_out=param1,param2:$(OUT)",
mnemonic = "MyMnemonic",
plugin = ":plugin",
plugin_format_flag = "--plugin=%s",
progress_message = "Progress Message %{label}",
runtime = ":runtime",
tags = [
"manual",
"nobuilder",
"notap",
],
)
proto_lang_toolchain(
name = "toolchain_noplugin",
blacklisted_protos = [":denied"],
command_line = "--java_out=param1,param2:$(OUT)",
mnemonic = "MyMnemonic",
progress_message = "Progress Message %{label}",
runtime = ":runtime",
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_binary(
name = "plugin",
srcs = ["plugin.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_library(
name = "runtime",
srcs = ["runtime.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
filegroup(
name = "descriptors",
srcs = [
"descriptor.proto",
"metadata.proto",
],
tags = [
"manual",
"nobuilder",
"notap",
],
)
filegroup(
name = "any",
srcs = ["any.proto"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
filegroup(
name = "something",
srcs = ["something.proto"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
proto_library(
name = "mixed",
srcs = [
":descriptors",
":something",
],
tags = [
"manual",
"nobuilder",
"notap",
],
)
proto_library(
name = "denied",
srcs = [
":any",
":descriptors",
],
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_binary(
name = "_tool1",
srcs = ["tool1.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_binary(
name = "_tool2",
srcs = ["tool2.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)

@ -0,0 +1,57 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Testing function for proto_common module"""
load("//bazel/common:proto_common.bzl", "proto_common")
def _resource_set_callback(_os, inputs_size):
return {"memory": 25 + 0.15 * inputs_size, "cpu": 1}
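# Editor's illustrative note (not part of this change): a `resource_set`
# callback like the one above receives the exec-platform OS name and the
# number of action inputs, and returns a dict of resource hints; with the
# formula above, 250 inputs yield {"memory": 62.5, "cpu": 1}.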
def _impl(ctx):
outfile = ctx.actions.declare_file(ctx.attr.name)
kwargs = {}
if ctx.attr.plugin_output == "single":
kwargs["plugin_output"] = outfile.path
elif ctx.attr.plugin_output == "multiple":
kwargs["plugin_output"] = ctx.bin_dir.path
elif ctx.attr.plugin_output == "wrong":
kwargs["plugin_output"] = ctx.bin_dir.path + "///"
if ctx.attr.additional_args:
additional_args = ctx.actions.args()
additional_args.add_all(ctx.attr.additional_args)
kwargs["additional_args"] = additional_args
if ctx.files.additional_tools:
kwargs["additional_tools"] = ctx.files.additional_tools
if ctx.files.additional_inputs:
kwargs["additional_inputs"] = depset(ctx.files.additional_inputs)
if ctx.attr.use_resource_set:
kwargs["resource_set"] = _resource_set_callback
if ctx.attr.progress_message:
kwargs["experimental_progress_message"] = ctx.attr.progress_message
proto_common.compile(
ctx.actions,
ctx.attr.proto_dep[ProtoInfo],
ctx.attr.toolchain[proto_common.ProtoLangToolchainInfo],
[outfile],
**kwargs
)
return [DefaultInfo(files = depset([outfile]))]
compile_rule = rule(
_impl,
attrs = {
"proto_dep": attr.label(),
"plugin_output": attr.string(),
"toolchain": attr.label(default = ":toolchain"),
"additional_args": attr.string_list(),
"additional_tools": attr.label_list(cfg = "exec"),
"additional_inputs": attr.label_list(allow_files = True),
"use_resource_set": attr.bool(),
"progress_message": attr.string(),
},
)

@ -1,5 +1,7 @@
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
package(default_applicable_licenses = ["//:license"])
bzl_library(
name = "proto_toolchain_bzl",
srcs = [
@ -8,6 +10,7 @@ bzl_library(
visibility = ["//visibility:public"],
deps = [
"//bazel/private:proto_toolchain_rule_bzl",
"//bazel/private:toolchain_helpers_bzl",
],
)
@ -19,5 +22,15 @@ bzl_library(
visibility = ["//visibility:public"],
deps = [
"//bazel/common:proto_common_bzl",
"//bazel/private:proto_lang_toolchain_rule_bzl",
"@proto_bazel_features//:features",
],
)
filegroup(
name = "bazel_osx_p4deps",
srcs = glob(["**"]),
visibility = [
"//bazel:__pkg__",
],
)

@ -1,6 +1,15 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""proto_lang_toolchain rule"""
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/private:proto_lang_toolchain_rule.bzl", _proto_lang_toolchain_rule = "proto_lang_toolchain")
def proto_lang_toolchain(*, name, toolchain_type = None, exec_compatible_with = [], target_compatible_with = [], **attrs):
"""Creates a proto_lang_toolchain and corresponding toolchain target.
@ -21,8 +30,12 @@ def proto_lang_toolchain(*, name, toolchain_type = None, exec_compatible_with =
if getattr(proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False):
attrs["toolchain_type"] = toolchain_type
# buildifier: disable=native-proto
native.proto_lang_toolchain(name = name, **attrs)
# This condition causes Starlark rules to be used only on Bazel >=7.0.0
if bazel_features.proto.starlark_proto_info:
_proto_lang_toolchain_rule(name = name, **attrs)
else:
# On older Bazel versions, keep using native rules so that a mismatch in ProtoInfo doesn't happen
native.proto_lang_toolchain(name = name, **attrs)
if toolchain_type:
native.toolchain(

@ -1,9 +1,17 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Macro wrapping the proto_toolchain implementation.
The macro additionally creates a toolchain target when toolchain_type is given.
"""
load("//bazel/private:proto_toolchain_rule.bzl", _proto_toolchain_rule = "proto_toolchain")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
def proto_toolchain(*, name, proto_compiler, exec_compatible_with = []):
"""Creates a proto_toolchain and toolchain target for proto_library.
@ -19,7 +27,7 @@ def proto_toolchain(*, name, proto_compiler, exec_compatible_with = []):
native.toolchain(
name = name + "_toolchain",
toolchain_type = "@rules_proto//proto:toolchain_type",
toolchain_type = toolchains.PROTO_TOOLCHAIN,
exec_compatible_with = exec_compatible_with,
target_compatible_with = [],
toolchain = name,

@ -8,7 +8,7 @@ load("//bazel/private:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl
UpbWrappedCcInfo = provider(
"Provider for cc_info for protos",
fields = ["cc_info", "cc_info_with_thunks"],
fields = ["cc_info"],
)
_UpbWrappedGeneratedSrcsInfo = provider(
@ -33,7 +33,7 @@ upb_c_proto_library_aspect = aspect(
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use",
),
"_upb_toolchain": attr.label(
default = Label("//upb_generator:protoc-gen-upb_toolchain"),
default = Label("//upb_generator/c:toolchain"),
),
"_cc_toolchain": attr.label(
default = "@bazel_tools//tools/cpp:current_cc_toolchain",

@ -45,7 +45,7 @@ upb_minitable_proto_library_aspect = aspect(
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use",
),
"_upb_minitable_toolchain": attr.label(
default = Label("//upb_generator:protoc-gen-upb_minitable_toolchain"),
default = Label("//upb_generator/minitable:toolchain"),
),
"_cc_toolchain": attr.label(
default = "@bazel_tools//tools/cpp:current_cc_toolchain",

@ -31,7 +31,7 @@ _upb_proto_reflection_library_aspect = aspect(
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use",
),
"_upbdefs_toolchain": attr.label(
default = Label("//upb_generator:protoc-gen-upbdefs_toolchain"),
default = Label("//upb_generator/reflection:toolchain"),
cfg = getattr(proto_common, "proto_lang_toolchain_cfg", "target"),
),
"_cc_toolchain": attr.label(

@ -20,9 +20,7 @@ load(
"tmpl_cc_binary",
)
# begin:google_only
# package(default_applicable_licenses = ["//upb:license"])
# end:google_only
package(default_applicable_licenses = ["//:license"])
licenses(["notice"])
@ -74,7 +72,6 @@ cc_test(
"//:protobuf",
"//src/google/protobuf/json",
"//upb:base",
"//upb:descriptor_upb_proto",
"//upb:json",
"//upb:mem",
"//upb:reflection",
@ -238,7 +235,7 @@ genrule(
),
outs = ["size_data.txt"],
# We want --format=GNU which counts rodata with data, not text.
cmd = "size $$($$OSTYPE == 'linux-gnu' ? '--format=GNU -d' : '') $(SRCS) > $@",
cmd = "size $$([ $$OSTYPE == 'linux-gnu' ] && echo '--format=GNU -d' || echo '') $(SRCS) > $@",
# "size" sometimes isn't available remotely.
local = 1,
tags = ["no-remote-exec"],

@ -8,13 +8,7 @@
load("//bazel:cc_proto_library.bzl", "cc_proto_library")
load("//bazel:proto_library.bzl", "proto_library")
# begin:google_only
# _is_google3 = True
# end:google_only
# begin:github_only
_is_google3 = False
# end:github_only
def tmpl_cc_binary(name, gen, args, replacements = [], **kwargs):
srcs = [name + ".cc"]

@ -15,14 +15,12 @@ COPTS = select({
"/wd4506", # no definition for inline function 'function'
"/wd4800", # 'type' : forcing value to bool 'true' or 'false' (performance warning)
"/wd4996", # The compiler encountered a deprecated declaration.
"/utf-8", # Set source and execution character sets to UTF-8
],
"//conditions:default": [
"-DHAVE_ZLIB",
"-Woverloaded-virtual",
"-Wno-sign-compare",
"-Wno-nonnull",
"-Werror",
],
})

@ -5,7 +5,7 @@ load("@rules_jvm_external//:defs.bzl", "java_export")
load("//:protobuf_version.bzl", "PROTOBUF_JAVA_VERSION")
load("//java/osgi:osgi.bzl", "osgi_java_library")
JAVA_OPTS = [
JAVA_RELEASE_OPTS = [
"-source 8",
"-target 8",
"-Xep:Java8ApiChecker:ERROR",
@ -16,13 +16,21 @@ BUNDLE_LICENSE = "https://opensource.org/licenses/BSD-3-Clause"
def protobuf_java_export(**kwargs):
java_export(
javacopts = JAVA_OPTS,
javacopts = JAVA_RELEASE_OPTS,
# https://github.com/bazelbuild/rules_jvm_external/issues/1245
javadocopts = [
"-notimestamp",
"-use",
"-quiet",
"-Xdoclint:-missing",
"-encoding",
"UTF8",
],
**kwargs
)
def protobuf_java_library(**kwargs):
java_library(
javacopts = JAVA_OPTS,
**kwargs
)
@ -68,7 +76,7 @@ def protobuf_versioned_java_library(
java_library target.
"""
osgi_java_library(
javacopts = JAVA_OPTS,
javacopts = JAVA_RELEASE_OPTS,
automatic_module_name = automatic_module_name,
bundle_doc_url = BUNDLE_DOC_URL,
bundle_license = BUNDLE_LICENSE,

@ -1,3 +1,4 @@
import common.bazelrc
build --cxxopt=-std=c++14 --host_cxxopt=-std=c++14
build --copt="-Werror" --copt="-Wno-sign-compare" --copt="-Wno-sign-conversion" --copt="-Wno-error=sign-conversion" --copt="-Wno-deprecated-declarations"

@ -1,3 +1,7 @@
# Needed for java_lite_proto_library, which uses ProguardSpecProvider
# TODO: Once the provider is ported to Starlark the flag may be removed.
common --experimental_google_legacy_api
build:dbg --compilation_mode=dbg
build:opt --compilation_mode=opt
@ -27,6 +31,8 @@ build:ubsan --action_env=UBSAN_OPTIONS=halt_on_error=1:print_stacktrace=1
# Workaround for the fact that Bazel links with $CC, not $CXX
# https://github.com/bazelbuild/bazel/issues/11122#issuecomment-613746748
build:ubsan --copt=-fno-sanitize=function --copt=-fno-sanitize=vptr
# Abseil passes nullptr to memcmp with 0 size
build:ubsan --copt=-fno-sanitize=nonnull-attribute
# Workaround Bazel 7 remote cache issues.
# See https://github.com/bazelbuild/bazel/issues/20161

@ -1,5 +1,6 @@
import common.bazelrc
build --cxxopt=-std=c++14 --host_cxxopt=-std=c++14
build --copt="-Werror" --copt="-Wno-sign-compare" --copt="-Wno-sign-conversion" --copt="-Wno-error=sign-conversion" --copt="-Wno-deprecated-declarations"
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1
common --xcode_version_config=@com_google_protobuf//.github:host_xcodes

@ -93,6 +93,8 @@ add_executable(conformance_test_runner
${protobuf_SOURCE_DIR}/conformance/conformance_test_main.cc
${protobuf_SOURCE_DIR}/conformance/text_format_conformance_suite.cc
${protobuf_SOURCE_DIR}/conformance/text_format_conformance_suite.h
${protobuf_SOURCE_DIR}/conformance/failure_list_trie_node.cc
${protobuf_SOURCE_DIR}/conformance/failure_list_trie_node.h
)
add_executable(conformance_cpp

@ -24,6 +24,10 @@ configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/protobuf.pc.cmake
${CMAKE_CURRENT_BINARY_DIR}/protobuf.pc @ONLY)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/protobuf-lite.pc.cmake
${CMAKE_CURRENT_BINARY_DIR}/protobuf-lite.pc @ONLY)
if (protobuf_BUILD_LIBUPB)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/upb.pc.cmake
${CMAKE_CURRENT_BINARY_DIR}/upb.pc @ONLY)
endif ()
set(_protobuf_libraries libprotobuf-lite libprotobuf)
if (protobuf_BUILD_LIBPROTOC)
@ -72,6 +76,9 @@ if (protobuf_BUILD_PROTOC_BINARIES)
endif (protobuf_BUILD_PROTOC_BINARIES)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/protobuf.pc ${CMAKE_CURRENT_BINARY_DIR}/protobuf-lite.pc DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
if (protobuf_BUILD_LIBUPB)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/upb.pc DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
endif ()
include(${protobuf_SOURCE_DIR}/src/file_lists.cmake)
set(protobuf_HEADERS

@ -2,7 +2,7 @@ function(protobuf_generate)
include(CMakeParseArguments)
set(_options APPEND_PATH)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR PLUGIN PLUGIN_OPTIONS DEPENDENCIES)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR PLUGIN PLUGIN_OPTIONS DEPENDENCIES PROTOC_EXE)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
@ -83,6 +83,11 @@ function(protobuf_generate)
endforeach()
endif()
if(NOT protobuf_generate_PROTOC_EXE)
# Default to using the protobuf::protoc CMake target
set(protobuf_generate_PROTOC_EXE protobuf::protoc)
endif()
foreach(DIR ${protobuf_generate_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
@ -143,7 +148,7 @@ function(protobuf_generate)
add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
COMMAND ${protobuf_generate_PROTOC_EXE}
ARGS ${protobuf_generate_PROTOC_OPTIONS} --${protobuf_generate_LANGUAGE}_out ${_plugin_options}:${protobuf_generate_PROTOC_OUT_DIR} ${_plugin} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} ${protobuf_PROTOC_EXE} ${protobuf_generate_DEPENDENCIES}
COMMENT ${_comment}
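# Editor's illustrative usage (not part of this change); target and path names
# are hypothetical. The new PROTOC_EXE argument lets a caller substitute a
# pre-built protoc, e.g. when cross-compiling:
#
#   protobuf_generate(
#     TARGET my_proto_lib
#     LANGUAGE cpp
#     PROTOC_EXE /opt/host-tools/bin/protoc
#   )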

@ -0,0 +1,10 @@
prefix=@CMAKE_INSTALL_PREFIX@
exec_prefix=@CMAKE_INSTALL_PREFIX@
libdir=@CMAKE_INSTALL_FULL_LIBDIR@
includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@
Name: Protocol Buffers
Description: Google's Data Interchange Format
Version: @protobuf_VERSION@
Libs: -L${libdir} -lupb @CMAKE_THREAD_LIBS_INIT@
Cflags: -I${includedir}

@ -16,7 +16,6 @@ foreach(generator upb upbdefs upb_minitable)
)
target_include_directories(protoc-gen-${generator} PRIVATE ${bootstrap_cmake_dir})
target_link_libraries(protoc-gen-${generator}
${protobuf_LIB_PROTOBUF}
${protobuf_LIB_UPB}
${protobuf_ABSL_USED_TARGETS}
)

@ -1,10 +1,31 @@
load("@rules_buf//buf:defs.bzl", "buf_breaking_test")
load("//compatibility:runtime_conformance.bzl", "java_runtime_conformance")
# Simple build tests for compatibility of gencode from previous major versions
# with the current runtime.
#
# To add more test cases in Java, use java_runtime_conformance as below, and add
# the corresponding http_archive in the WORKSPACE file for the version.
load("//compatibility:runtime_conformance.bzl", "java_runtime_conformance")
java_library(
name = "v25_test_protos_srcjar",
testonly = True,
srcs = glob([
"v3.25.0/*.srcjar",
]),
visibility = ["//java/core:__pkg__"],
deps = ["//java/core"],
)
java_library(
name = "v25_test_protos_jar",
testonly = True,
srcs = glob([
"v3.25.0/*.srcjar",
]),
visibility = ["//java/core:__pkg__"],
deps = ["@com_google_protobuf_v25.0//java/core"],
)
# main gencode builds with main runtime as a proof of concept.
java_runtime_conformance(
@ -12,8 +33,121 @@ java_runtime_conformance(
gencode_version = "main",
)
# Generates a build_test named "conformance_v3.25.0"
java_runtime_conformance(
name = "java_conformance_v3.25.0",
gencode_version = "3.25.0",
# Breaking change detection for well-known types and descriptor.proto.
buf_breaking_test(
name = "any_proto_breaking",
against = "@com_google_protobuf_previous_release//:any_proto",
config = ":buf.yaml",
targets = ["//:any_proto"],
)
buf_breaking_test(
name = "api_proto_breaking",
against = "@com_google_protobuf_previous_release//:api_proto",
config = ":buf.yaml",
targets = ["//:api_proto"],
)
buf_breaking_test(
name = "descriptor_proto_breaking",
against = "@com_google_protobuf_previous_release//:descriptor_proto",
config = ":buf.yaml",
targets = ["//:descriptor_proto"],
)
buf_breaking_test(
name = "duration_proto_breaking",
against = "@com_google_protobuf_previous_release//:duration_proto",
config = ":buf.yaml",
targets = ["//:duration_proto"],
)
buf_breaking_test(
name = "empty_proto_breaking",
against = "@com_google_protobuf_previous_release//:empty_proto",
config = ":buf.yaml",
targets = ["//:empty_proto"],
)
buf_breaking_test(
name = "field_mask_proto_breaking",
against = "@com_google_protobuf_previous_release//:field_mask_proto",
config = ":buf.yaml",
targets = ["//:field_mask_proto"],
)
buf_breaking_test(
name = "source_context_proto_breaking",
against = "@com_google_protobuf_previous_release//:source_context_proto",
config = ":buf.yaml",
targets = ["//:source_context_proto"],
)
buf_breaking_test(
name = "struct_proto_breaking",
against = "@com_google_protobuf_previous_release//:struct_proto",
config = ":buf.yaml",
targets = ["//:struct_proto"],
)
buf_breaking_test(
name = "timestamp_proto_breaking",
against = "@com_google_protobuf_previous_release//:timestamp_proto",
config = ":buf.yaml",
targets = ["//:timestamp_proto"],
)
buf_breaking_test(
name = "type_proto_breaking",
against = "@com_google_protobuf_previous_release//:type_proto",
config = ":buf.yaml",
targets = ["//:type_proto"],
)
buf_breaking_test(
name = "wrappers_proto_breaking",
against = "@com_google_protobuf_previous_release//:wrappers_proto",
config = ":buf.yaml",
targets = ["//:wrappers_proto"],
)
buf_breaking_test(
name = "compiler_plugin_proto_breaking",
against = "@com_google_protobuf_previous_release//:compiler_plugin_proto",
config = ":buf.yaml",
targets = ["//:compiler_plugin_proto"],
)
buf_breaking_test(
name = "cpp_features_proto_breaking",
against = "@com_google_protobuf_previous_release//:cpp_features_proto",
config = ":buf.yaml",
targets = ["//:cpp_features_proto"],
)
buf_breaking_test(
name = "java_features_proto_breaking",
against = "@com_google_protobuf_previous_release//:java_features_proto",
config = ":buf.yaml",
targets = ["//:java_features_proto"],
)
test_suite(
name = "proto_breaking",
tests = [
"any_proto_breaking",
"api_proto_breaking",
"compiler_plugin_proto_breaking",
"cpp_features_proto_breaking",
"descriptor_proto_breaking",
"duration_proto_breaking",
"empty_proto_breaking",
"field_mask_proto_breaking",
"java_features_proto_breaking",
"source_context_proto_breaking",
"struct_proto_breaking",
"timestamp_proto_breaking",
"type_proto_breaking",
"wrappers_proto_breaking",
],
)

@ -0,0 +1 @@
version: v1

@ -10,6 +10,9 @@ load(
load("@rules_ruby//ruby:defs.bzl", "ruby_binary")
load("//:protobuf.bzl", "internal_csharp_proto_library", "internal_objc_proto_library", "internal_php_proto_library", "internal_py_proto_library", "internal_ruby_proto_library")
load("//bazel:cc_proto_library.bzl", "cc_proto_library")
load("//bazel:java_lite_proto_library.bzl", "java_lite_proto_library")
load("//bazel:java_proto_library.bzl", "java_proto_library")
load("//bazel:proto_library.bzl", "proto_library")
load("//build_defs:internal_shell.bzl", "inline_sh_binary")
load("//ruby:defs.bzl", "internal_ruby_proto_library")
@ -134,15 +137,18 @@ cc_library(
srcs = [
"conformance_test.cc",
"conformance_test_runner.cc",
"failure_list_trie_node.cc",
],
hdrs = [
"conformance_test.h",
"failure_list_trie_node.h",
],
includes = ["."],
deps = [
":conformance_cc_proto",
"//src/google/protobuf",
"//src/google/protobuf:descriptor_legacy",
"//src/google/protobuf:endian",
"//src/google/protobuf:protobuf_lite",
"//src/google/protobuf/util:differencer",
"//src/google/protobuf/util:json_util",
@ -151,11 +157,33 @@ cc_library(
"@com_google_absl//absl/container:flat_hash_set",
"@com_google_absl//absl/log:absl_check",
"@com_google_absl//absl/log:absl_log",
"@com_google_absl//absl/status",
"@com_google_absl//absl/strings",
"@com_google_absl//absl/strings:str_format",
"@com_google_absl//absl/types:optional",
],
)
cc_test(
name = "failure_list_trie_node_test",
srcs = ["failure_list_trie_node_test.cc"],
deps = [
":conformance_test",
"@com_google_absl//absl/status",
"@com_google_absl//absl/status:statusor",
"@com_google_absl//absl/strings",
"@com_google_absl//absl/types:optional",
"@com_google_googletest//:gtest",
"@com_google_googletest//:gtest_main",
],
)
# Add more meta-testing here. This is not to be confused with a conformance test itself.
test_suite(
name = "conformance_framework_tests",
tests = ["failure_list_trie_node_test"],
)
cc_library(
name = "binary_json_conformance_suite",
testonly = 1,
@ -232,6 +260,7 @@ cc_binary(
"//src/google/protobuf",
"//src/google/protobuf:port",
"//src/google/protobuf:protobuf_lite",
"//src/google/protobuf/json",
"//src/google/protobuf/stubs",
"//src/google/protobuf/util:json_util",
"//src/google/protobuf/util:type_resolver",
@ -304,6 +333,11 @@ py_binary(
],
)
py_binary(
name = "update_failure_list",
srcs = ["update_failure_list.py"],
)
inline_sh_binary(
name = "conformance_php",
testonly = 1,
@ -312,7 +346,7 @@ inline_sh_binary(
"conformance_php.php",
],
cmd = """
php -d include_path=conformance:src/google/protobuf \\
php -d include_path=php/generated:conformance:src/google/protobuf:editions/golden \\
-d auto_prepend_file=$(rootpath autoload.php) \\
$(rootpath conformance_php.php)
""",
@ -320,6 +354,7 @@ inline_sh_binary(
deps = [
":conformance_php_proto",
"//:test_messages_proto3_php_proto",
"//editions:test_messages_proto3_editions_php_proto",
"//php:source_files",
],
)

@ -70,7 +70,7 @@ class ConformanceJava {
}
private enum BinaryDecoderType {
BTYE_STRING_DECODER,
BYTE_STRING_DECODER,
BYTE_ARRAY_DECODER,
ARRAY_BYTE_BUFFER_DECODER,
READONLY_ARRAY_BYTE_BUFFER_DECODER,
@ -84,7 +84,7 @@ class ConformanceJava {
ByteString bytes, BinaryDecoderType type, Parser<T> parser, ExtensionRegistry extensions)
throws InvalidProtocolBufferException {
switch (type) {
case BTYE_STRING_DECODER:
case BYTE_STRING_DECODER:
case BYTE_ARRAY_DECODER:
return parser.parseFrom(bytes, extensions);
case ARRAY_BYTE_BUFFER_DECODER:

@ -67,7 +67,7 @@ class ConformanceJavaLite {
}
private enum BinaryDecoderType {
BTYE_STRING_DECODER,
BYTE_STRING_DECODER,
BYTE_ARRAY_DECODER,
ARRAY_BYTE_BUFFER_DECODER,
READONLY_ARRAY_BYTE_BUFFER_DECODER,
@ -84,7 +84,7 @@ class ConformanceJavaLite {
ExtensionRegistryLite extensions)
throws InvalidProtocolBufferException {
switch (type) {
case BTYE_STRING_DECODER:
case BYTE_STRING_DECODER:
case BYTE_ARRAY_DECODER:
return parser.parseFrom(bytes, extensions);
case ARRAY_BYTE_BUFFER_DECODER:

@ -7,8 +7,12 @@ define("GOOGLE_GPBMETADATA_NAMESPACE", "GPBMetadata\\Google\\Protobuf\\");
function protobuf_autoloader_impl($class, $prefix) {
$length = strlen($prefix);
if ((substr($class, 0, $length) === $prefix)) {
$path = 'php/src/' . implode('/', array_map('ucwords', explode('\\', $class))) . '.php';
include_once $path;
$path = 'src/' . implode('/', array_map('ucwords', explode('\\', $class))) . '.php';
if (file_exists('php/' . $path)) {
include_once 'php/' . $path;
} else {
include_once 'php/generated/' . $path;
}
}
}

@ -14,6 +14,7 @@
#include <cstring>
#include <memory>
#include <string>
#include <type_traits>
#include <utility>
#include <vector>
@ -25,6 +26,9 @@
#include "absl/strings/str_format.h"
#include "absl/strings/string_view.h"
#include "absl/strings/substitute.h"
#include "json/config.h"
#include "json/reader.h"
#include "json/value.h"
#include "conformance/conformance.pb.h"
#include "conformance_test.h"
#include "conformance/test_protos/test_messages_edition2023.pb.h"
@ -33,8 +37,6 @@
#include "google/protobuf/endian.h"
#include "google/protobuf/json/json.h"
#include "google/protobuf/test_messages_proto2.pb.h"
#include "google/protobuf/test_messages_proto2.pb.h"
#include "google/protobuf/test_messages_proto3.pb.h"
#include "google/protobuf/test_messages_proto3.pb.h"
#include "google/protobuf/text_format.h"
#include "google/protobuf/unknown_field_set.h"
@ -43,6 +45,7 @@
using conformance::ConformanceRequest;
using conformance::ConformanceResponse;
using conformance::TestStatus;
using conformance::WireFormat;
using google::protobuf::Descriptor;
using google::protobuf::FieldDescriptor;
@ -302,19 +305,22 @@ bool BinaryAndJsonConformanceSuite::ParseResponse(
const std::string& test_name = setting.GetTestName();
ConformanceLevel level = setting.GetLevel();
TestStatus test;
test.set_name(test_name);
switch (response.result_case()) {
case ConformanceResponse::kProtobufPayload: {
if (requested_output != conformance::PROTOBUF) {
ReportFailure(test_name, level, request, response,
absl::StrCat("Test was asked for ",
WireFormatToString(requested_output),
" output but provided PROTOBUF instead."));
test.set_failure_message(absl::StrCat(
"Test was asked for ", WireFormatToString(requested_output),
" output but provided PROTOBUF instead."));
ReportFailure(test, level, request, response);
return false;
}
if (!test_message->ParseFromString(response.protobuf_payload())) {
ReportFailure(test_name, level, request, response,
"Protobuf output we received from test was unparseable.");
test.set_failure_message(
"Protobuf output we received from test was unparseable.");
ReportFailure(test, level, request, response);
return false;
}
@ -323,16 +329,17 @@ bool BinaryAndJsonConformanceSuite::ParseResponse(
case ConformanceResponse::kJsonPayload: {
if (requested_output != conformance::JSON) {
ReportFailure(test_name, level, request, response,
absl::StrCat("Test was asked for ",
WireFormatToString(requested_output),
" output but provided JSON instead."));
test.set_failure_message(absl::StrCat(
"Test was asked for ", WireFormatToString(requested_output),
" output but provided JSON instead."));
ReportFailure(test, level, request, response);
return false;
}
if (!ParseJsonResponse(response, test_message)) {
ReportFailure(test_name, level, request, response,
"JSON output we received from test was unparseable.");
test.set_failure_message(
"JSON output we received from test was unparseable.");
ReportFailure(test, level, request, response);
return false;
}
@ -437,14 +444,19 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::
absl::StrCat(setting.ConformanceLevelToString(level), ".",
setting.GetSyntaxIdentifier(), ".ProtobufInput.", test_name);
suite_.RunTest(effective_test_name, request, &response);
if (!suite_.RunTest(effective_test_name, request, &response)) {
return;
}
TestStatus test;
test.set_name(effective_test_name);
if (response.result_case() == ConformanceResponse::kParseError) {
suite_.ReportSuccess(effective_test_name);
suite_.ReportSuccess(test);
} else if (response.result_case() == ConformanceResponse::kSkipped) {
suite_.ReportSkip(effective_test_name, request, response);
suite_.ReportSkip(test, request, response);
} else {
suite_.ReportFailure(effective_test_name, level, request, response,
"Should have failed to parse, but didn't.");
test.set_failure_message("Should have failed to parse, but didn't.");
suite_.ReportFailure(test, level, request, response);
}
}
@ -631,34 +643,42 @@ void BinaryAndJsonConformanceSuiteImpl<
setting.ConformanceLevelToString(level), ".",
setting.GetSyntaxIdentifier(), ".JsonInput.", test_name, ".Validator");
suite_.RunTest(effective_test_name, request, &response);
if (!suite_.RunTest(effective_test_name, request, &response)) {
return;
}
TestStatus test;
test.set_name(effective_test_name);
if (response.result_case() == ConformanceResponse::kSkipped) {
suite_.ReportSkip(effective_test_name, request, response);
suite_.ReportSkip(test, request, response);
return;
}
if (response.result_case() != ConformanceResponse::kJsonPayload) {
suite_.ReportFailure(effective_test_name, level, request, response,
absl::StrCat("Expected JSON payload but got type ",
response.result_case()));
test.set_failure_message(absl::StrCat("Expected JSON payload but got type ",
response.result_case()));
suite_.ReportFailure(test, level, request, response);
return;
}
Json::Reader reader;
Json::CharReaderBuilder builder;
Json::Value value;
if (!reader.parse(response.json_payload(), value)) {
suite_.ReportFailure(
effective_test_name, level, request, response,
absl::StrCat("JSON payload cannot be parsed as valid JSON: ",
reader.getFormattedErrorMessages()));
Json::String err;
const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
if (!reader->parse(
response.json_payload().c_str(),
response.json_payload().c_str() + response.json_payload().length(),
&value, &err)) {
test.set_failure_message(
absl::StrCat("JSON payload cannot be parsed as valid JSON: ", err));
suite_.ReportFailure(test, level, request, response);
return;
}
if (!validator(value)) {
suite_.ReportFailure(effective_test_name, level, request, response,
"JSON payload validation failed.");
test.set_failure_message("JSON payload validation failed.");
suite_.ReportFailure(test, level, request, response);
return;
}
suite_.ReportSuccess(effective_test_name);
suite_.ReportSuccess(test);
}
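For context, a minimal self-contained sketch of the jsoncpp pattern this hunk migrates to; Json::Reader is deprecated in jsoncpp in favor of Json::CharReaderBuilder. The helper name ParseJsonPayload is illustrative; the jsoncpp calls are the same ones used above.
#include <memory>
#include <string>
#include "json/config.h"
#include "json/reader.h"
#include "json/value.h"
// Returns true and fills *value on success; on failure *err receives
// jsoncpp's formatted error messages.
static bool ParseJsonPayload(const std::string& payload, Json::Value* value,
                             Json::String* err) {
  Json::CharReaderBuilder builder;
  const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
  // Unlike the old Json::Reader, CharReader::parse takes a [begin, end)
  // character range rather than a std::string.
  return reader->parse(payload.c_str(), payload.c_str() + payload.size(),
                       value, err);
}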
template <typename MessageType>
@ -677,14 +697,19 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::ExpectParseFailureForJson(
absl::StrCat(setting.ConformanceLevelToString(level), ".",
SyntaxIdentifier(), ".JsonInput.", test_name);
suite_.RunTest(effective_test_name, request, &response);
if (!suite_.RunTest(effective_test_name, request, &response)) {
return;
}
TestStatus test;
test.set_name(effective_test_name);
if (response.result_case() == ConformanceResponse::kParseError) {
suite_.ReportSuccess(effective_test_name);
suite_.ReportSuccess(test);
} else if (response.result_case() == ConformanceResponse::kSkipped) {
suite_.ReportSkip(effective_test_name, request, response);
suite_.ReportSkip(test, request, response);
} else {
suite_.ReportFailure(effective_test_name, level, request, response,
"Should have failed to parse, but didn't.");
test.set_failure_message("Should have failed to parse, but didn't.");
suite_.ReportFailure(test, level, request, response);
}
}
@ -707,14 +732,19 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::
absl::StrCat(setting.ConformanceLevelToString(level), ".",
SyntaxIdentifier(), ".", test_name, ".JsonOutput");
suite_.RunTest(effective_test_name, request, &response);
if (!suite_.RunTest(effective_test_name, request, &response)) {
return;
}
TestStatus test;
test.set_name(effective_test_name);
if (response.result_case() == ConformanceResponse::kSerializeError) {
suite_.ReportSuccess(effective_test_name);
suite_.ReportSuccess(test);
} else if (response.result_case() == ConformanceResponse::kSkipped) {
suite_.ReportSkip(effective_test_name, request, response);
suite_.ReportSkip(test, request, response);
} else {
suite_.ReportFailure(effective_test_name, level, request, response,
"Should have failed to serialize, but didn't.");
test.set_failure_message("Should have failed to serialize, but didn't.");
suite_.ReportFailure(test, level, request, response);
}
}
@ -1207,7 +1237,7 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::TestValidDataForOneofType(
{
// Tests oneof with default value.
const std::string proto = default_value;
const std::string& proto = default_value;
MessageType test_message;
test_message.MergeFromString(proto);
std::string text;
@ -1223,7 +1253,7 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::TestValidDataForOneofType(
{
// Tests oneof with non-default value.
const std::string proto = non_default_value;
const std::string& proto = non_default_value;
MessageType test_message;
test_message.MergeFromString(proto);
std::string text;
@ -1240,7 +1270,7 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::TestValidDataForOneofType(
{
// Tests oneof with multiple values of the same field.
const std::string proto = absl::StrCat(default_value, non_default_value);
const std::string expected_proto = non_default_value;
const std::string& expected_proto = non_default_value;
MessageType test_message;
test_message.MergeFromString(expected_proto);
std::string text;
@ -1266,7 +1296,7 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::TestValidDataForOneofType(
GetDefaultValue(other_type));
const std::string proto = absl::StrCat(other_value, non_default_value);
const std::string expected_proto = non_default_value;
const std::string& expected_proto = non_default_value;
MessageType test_message;
test_message.MergeFromString(expected_proto);
std::string text;
@ -1420,12 +1450,18 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::TestUnknownOrdering() {
conformance::BINARY_TEST, prototype, "UnknownOrdering", serialized);
const ConformanceRequest& request = setting.GetRequest();
ConformanceResponse response;
suite_.RunTest(setting.GetTestName(), request, &response);
if (!suite_.RunTest(setting.GetTestName(), request, &response)) {
return;
}
MessageType response_message;
TestStatus test;
test.set_name(setting.GetTestName());
if (response.result_case() == ConformanceResponse::kSkipped) {
suite_.ReportSkip(setting.GetTestName(), request, response);
suite_.ReportSkip(test, request, response);
return;
}
suite_.ParseResponse(response, setting, &response_message);
const UnknownFieldSet& ufs = response_message.unknown_fields();
@ -1441,10 +1477,10 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::TestUnknownOrdering() {
ufs.field(1).varint() != 123 ||
ufs.field(2).length_delimited() != "def" ||
ufs.field(3).varint() != 456) {
suite_.ReportFailure(setting.GetTestName(), setting.GetLevel(), request,
response, "Unknown field mismatch");
test.set_failure_message("Unknown field mismatch");
suite_.ReportFailure(test, setting.GetLevel(), request, response);
} else {
suite_.ReportSuccess(setting.GetTestName());
suite_.ReportSuccess(test);
}
}
@ -1810,7 +1846,7 @@ void BinaryAndJsonConformanceSuiteImpl<MessageType>::
const std::string type_name =
UpperCase(absl::StrCat(".", FieldDescriptor::TypeName(type)));
const FieldDescriptor* field = GetFieldForType(type, true, Packed::kFalse);
std::string field_name = field->name();
const absl::string_view field_name = field->name();
std::string message_field =
absl::StrCat("\"", field_name, "\": [", field_value, "]");
@ -2267,7 +2303,8 @@ void BinaryAndJsonConformanceSuiteImpl<
R"({"optionalInt64": "-9223372036854775809"})");
ExpectParseFailureForJson("Uint64FieldTooLarge", REQUIRED,
R"({"optionalUint64": "18446744073709551616"})");
// Parsers reject non-integer numeric values as well.
// Parsers reject non-integer numeric values.
ExpectParseFailureForJson("Int32FieldNotInteger", REQUIRED,
R"({"optionalInt32": 0.5})");
ExpectParseFailureForJson("Uint32FieldNotInteger", REQUIRED,
@ -2277,6 +2314,16 @@ void BinaryAndJsonConformanceSuiteImpl<
ExpectParseFailureForJson("Uint64FieldNotInteger", REQUIRED,
R"({"optionalUint64": "0.5"})");
// Parsers reject empty string values.
ExpectParseFailureForJson("Int32FieldEmptyString", REQUIRED,
R"({"optionalInt32": ""})");
ExpectParseFailureForJson("Uint32FieldEmptyString", REQUIRED,
R"({"optionalUint32": ""})");
ExpectParseFailureForJson("Int64FieldEmptyString", REQUIRED,
R"({"optionalInt64": ""})");
ExpectParseFailureForJson("Uint64FieldEmptyString", REQUIRED,
R"({"optionalUint64": ""})");
// Integers but represented as float values are accepted.
RunValidJsonTest("Int32FieldFloatTrailingZero", REQUIRED,
R"({"optionalInt32": 100000.000})",
@ -2400,12 +2447,17 @@ void BinaryAndJsonConformanceSuiteImpl<
R"({"optionalFloat": Infinity})");
ExpectParseFailureForJson("FloatFieldNegativeInfinityNotQuoted", RECOMMENDED,
R"({"optionalFloat": -Infinity})");
// Parsers should reject out-of-bound values.
ExpectParseFailureForJson("FloatFieldTooSmall", REQUIRED,
R"({"optionalFloat": -3.502823e+38})");
ExpectParseFailureForJson("FloatFieldTooLarge", REQUIRED,
R"({"optionalFloat": 3.502823e+38})");
// Parsers should reject empty string values.
ExpectParseFailureForJson("FloatFieldEmptyString", REQUIRED,
R"({"optionalFloat": ""})");
// Double fields.
RunValidJsonTest("DoubleFieldMinPositiveValue", REQUIRED,
R"({"optionalDouble": 2.22507e-308})",
@ -2458,6 +2510,10 @@ void BinaryAndJsonConformanceSuiteImpl<
ExpectParseFailureForJson("DoubleFieldTooLarge", REQUIRED,
R"({"optionalDouble": +1.89769e+308})");
// Parsers should reject empty string values.
ExpectParseFailureForJson("DoubleFieldEmptyString", REQUIRED,
R"({"optionalDouble": ""})");
// Enum fields.
RunValidJsonTest("EnumField", REQUIRED, R"({"optionalNestedEnum": "FOO"})",
"optional_nested_enum: FOO");
@ -3470,12 +3526,11 @@ BinaryAndJsonConformanceSuiteImpl<MessageType>::GetFieldForOneofType(
template <typename MessageType>
std::string BinaryAndJsonConformanceSuiteImpl<MessageType>::SyntaxIdentifier()
const {
if constexpr (std::is_same<MessageType, TestAllTypesProto2>::value) {
if (std::is_same<MessageType, TestAllTypesProto2>::value) {
return "Proto2";
} else if constexpr (std::is_same<MessageType, TestAllTypesProto3>::value) {
} else if (std::is_same<MessageType, TestAllTypesProto3>::value) {
return "Proto3";
} else if constexpr (std::is_same<MessageType,
TestAllTypesProto2Editions>::value) {
} else if (std::is_same<MessageType, TestAllTypesProto2Editions>::value) {
return "Editions_Proto2";
} else {
return "Editions_Proto3";

@ -57,11 +57,23 @@ enum TestCategory {
TEXT_FORMAT_TEST = 5;
}
// Meant to encapsulate all types of tests: successes, skips, failures, etc.
// Therefore, this may or may not have a failure message. Failure messages
// may be truncated for our failure lists.
message TestStatus {
string name = 1;
string failure_message = 2;
// The failure-list entry that an actual test name matched. Can be wildcarded
// or an exact match without wildcards.
string matched_name = 3;
}
// The conformance runner will request a list of failures as the first request.
// This will be known by message_type == "conformance.FailureSet"; a conformance
// test should return a serialized FailureSet in protobuf_payload.
message FailureSet {
repeated string failure = 1;
repeated TestStatus test = 2;
reserved 1;
}
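As a sketch of the new shape (the response variable and surrounding harness are hypothetical; the test name is taken from the Python failure list later in this diff), a testee answering the initial "conformance.FailureSet" request now populates the repeated TestStatus field instead of the removed repeated string:
conformance::FailureSet failure_set;
conformance::TestStatus* t = failure_set.add_test();
t->set_name("Required.Proto2.ProtobufInput.PrematureEofInPackedField.UINT64");
// failure_message may be left empty; the runner accepts empty messages.
response.set_protobuf_payload(failure_set.SerializeAsString());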
// Represents a single test case's input. The testee should:

@ -7,32 +7,33 @@
#include <errno.h>
#include <stdarg.h>
#include <stdlib.h>
#include <unistd.h>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <memory>
#include <string>
#include "google/protobuf/util/json_util.h"
#include "google/protobuf/util/type_resolver_util.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
#include "absl/status/status.h"
#include "absl/status/statusor.h"
#include "absl/strings/str_cat.h"
#include "conformance/conformance.pb.h"
#include "conformance/conformance.pb.h"
#include "conformance/test_protos/test_messages_edition2023.pb.h"
#include "editions/golden/test_messages_proto2_editions.pb.h"
#include "editions/golden/test_messages_proto3_editions.pb.h"
#include "google/protobuf/endian.h"
#include "google/protobuf/json/json.h"
#include "google/protobuf/message.h"
#include "google/protobuf/test_messages_proto2.pb.h"
#include "google/protobuf/test_messages_proto3.pb.h"
#include "google/protobuf/test_messages_proto3.pb.h"
#include "google/protobuf/text_format.h"
#include "google/protobuf/util/json_util.h"
#include "google/protobuf/util/type_resolver.h"
#include "google/protobuf/util/type_resolver_util.h"
#include "google/protobuf/stubs/status_macros.h"
// Must be included last.
@ -241,8 +242,9 @@ absl::StatusOr<bool> Harness::ServeConformanceRequest() {
serialized_output.size()));
if (verbose_) {
ABSL_LOG(INFO) << "conformance-cpp: request=" << request.ShortDebugString()
<< ", response=" << response->ShortDebugString();
ABSL_LOG(INFO) << "conformance-cpp: request="
<< google::protobuf::ShortFormat(request)
<< ", response=" << google::protobuf::ShortFormat(*response);
}
return false;
}

@ -86,6 +86,7 @@ function doTest($request)
case 'protobuf_test_messages.editions.proto3.TestAllTypesProto3':
$test_message = new TestAllTypesProto3Editions();
break;
case 'protobuf_test_messages.proto2.TestAllTypesProto2':
case 'protobuf_test_messages.editions.proto2.TestAllTypesProto2':
$response->setSkipped('PHP doesn\'t support proto2');
return $response;

@ -98,36 +98,38 @@ def do_test(request):
"Required.Proto2.ProtobufInput.PrematureEofInPackedField.UINT64",
]
for x in failures:
failure_set.failure.append(x)
failure_set.test.append(conformance_pb2.TestStatus(name=x))
response.protobuf_payload = failure_set.SerializeToString()
return response
isJson = (request.WhichOneof('payload') == 'json_payload')
isJson = request.WhichOneof("payload") == "json_payload"
test_message = _create_test_message(request.message_type)
if (not isJson) and (test_message is None):
raise ProtocolError("Protobuf request doesn't have specific payload type")
try:
if request.WhichOneof('payload') == 'protobuf_payload':
if request.WhichOneof("payload") == "protobuf_payload":
try:
test_message.ParseFromString(request.protobuf_payload)
except message.DecodeError as e:
response.parse_error = str(e)
return response
elif request.WhichOneof('payload') == 'json_payload':
elif request.WhichOneof("payload") == "json_payload":
try:
ignore_unknown_fields = \
request.test_category == \
conformance_pb2.JSON_IGNORE_UNKNOWN_PARSING_TEST
json_format.Parse(request.json_payload, test_message,
ignore_unknown_fields)
ignore_unknown_fields = (
request.test_category
== conformance_pb2.JSON_IGNORE_UNKNOWN_PARSING_TEST
)
json_format.Parse(
request.json_payload, test_message, ignore_unknown_fields
)
except Exception as e:
response.parse_error = str(e)
return response
elif request.WhichOneof('payload') == 'text_payload':
elif request.WhichOneof("payload") == "text_payload":
try:
text_format.Parse(request.text_payload, test_message)
except Exception as e:
@ -152,7 +154,8 @@ def do_test(request):
elif request.requested_output_format == conformance_pb2.TEXT_FORMAT:
response.text_payload = text_format.MessageToString(
test_message, print_unknown_fields=request.print_unknown_fields)
test_message, print_unknown_fields=request.print_unknown_fields
)
except Exception as e:
response.runtime_error = str(e)
@ -163,7 +166,7 @@ def do_test(request):
def do_test_io():
length_bytes = sys.stdin.buffer.read(4)
if len(length_bytes) == 0:
return False # EOF
return False # EOF
elif len(length_bytes) != 4:
raise IOError("I/O error")
@ -183,17 +186,24 @@ def do_test_io():
sys.stdout.buffer.flush()
if verbose:
sys.stderr.write("conformance_python: request=%s, response=%s\n" % (
request.ShortDebugString().c_str(),
response.ShortDebugString().c_str()))
sys.stderr.write(
"conformance_python: request=%s, response=%s\n"
% (
request.ShortDebugString().c_str(),
response.ShortDebugString().c_str(),
)
)
global test_count
test_count += 1
return True
while True:
if not do_test_io():
sys.stderr.write("conformance_python: received EOF from test runner " +
"after %s tests, exiting\n" % (test_count))
sys.stderr.write(
"conformance_python: received EOF from test runner "
+ "after %s tests, exiting\n" % (test_count,)
)
sys.exit(0)

@ -6,13 +6,9 @@
use conformance_rust_proto::{ConformanceRequest, ConformanceResponse, WireFormat};
#[cfg(cpp_kernel)]
use protobuf_cpp as kernel;
#[cfg(upb_kernel)]
use protobuf_upb as kernel;
use kernel::Optional::{Set, Unset};
use protobuf::prelude::*;
use protobuf::Optional::{Set, Unset};
use protobuf::ParseError;
use std::io::{self, ErrorKind, Read, Write};
use test_messages_edition2023_rust_proto::TestAllTypesEdition2023;
@ -73,52 +69,39 @@ fn do_test(req: &ConformanceRequest) -> ConformanceResponse {
Set(bytes) => bytes,
};
fn roundtrip<T: Message>(bytes: &[u8]) -> Result<Vec<u8>, ParseError> {
T::parse(bytes).map(|msg| msg.serialize().unwrap())
}
let serialized = match message_type.as_bytes() {
b"protobuf_test_messages.proto2.TestAllTypesProto2" => {
if let Ok(msg) = TestAllTypesProto2::parse(bytes) {
msg.serialize().unwrap()
} else {
resp.set_parse_error("failed to parse bytes");
return resp;
}
roundtrip::<TestAllTypesProto2>(bytes)
}
b"protobuf_test_messages.proto3.TestAllTypesProto3" => {
if let Ok(msg) = TestAllTypesProto3::parse(bytes) {
msg.serialize().unwrap()
} else {
resp.set_parse_error("failed to parse bytes");
return resp;
}
roundtrip::<TestAllTypesProto3>(bytes)
}
b"protobuf_test_messages.editions.TestAllTypesEdition2023" => {
if let Ok(msg) = TestAllTypesEdition2023::parse(bytes) {
msg.serialize().unwrap()
} else {
resp.set_parse_error("failed to parse bytes");
return resp;
}
roundtrip::<TestAllTypesEdition2023>(bytes)
}
b"protobuf_test_messages.editions.proto2.TestAllTypesProto2" => {
if let Ok(msg) = EditionsTestAllTypesProto2::parse(bytes) {
msg.serialize().unwrap()
} else {
resp.set_parse_error("failed to parse bytes");
return resp;
}
roundtrip::<EditionsTestAllTypesProto2>(bytes)
}
b"protobuf_test_messages.editions.proto3.TestAllTypesProto3" => {
if let Ok(msg) = EditionsTestAllTypesProto3::parse(bytes) {
msg.serialize().unwrap()
} else {
resp.set_parse_error("failed to parse bytes");
return resp;
}
roundtrip::<EditionsTestAllTypesProto3>(bytes)
}
_ => panic!("unexpected msg type {message_type}"),
};
resp.set_protobuf_payload(serialized);
return resp;
match serialized {
Ok(serialized) => {
resp.set_protobuf_payload(serialized);
}
Err(_) => {
resp.set_parse_error("failed to parse bytes");
}
}
resp
}
fn main() {

@ -9,27 +9,35 @@
#include <stdarg.h>
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <fstream>
#include <memory>
#include <string>
#include <utility>
#include "google/protobuf/util/field_comparator.h"
#include "google/protobuf/util/message_differencer.h"
#include "absl/container/btree_map.h"
#include "absl/container/flat_hash_set.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
#include "absl/status/status.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "absl/strings/string_view.h"
#include "conformance/conformance.pb.h"
#include "conformance/conformance.pb.h"
#include "failure_list_trie_node.h"
#include "google/protobuf/descriptor_legacy.h"
#include "google/protobuf/endian.h"
#include "google/protobuf/message.h"
#include "google/protobuf/text_format.h"
using conformance::ConformanceRequest;
using conformance::ConformanceResponse;
using conformance::TestStatus;
using conformance::WireFormat;
using google::protobuf::util::DefaultFieldComparator;
using google::protobuf::util::MessageDifferencer;
@ -37,6 +45,15 @@ using std::string;
namespace {
static void ReplaceAll(std::string& input, std::string replace_word,
std::string replace_by) {
size_t pos = input.find(replace_word);
while (pos != std::string::npos) {
input.replace(pos, replace_word.length(), replace_by);
pos = input.find(replace_word, pos + replace_by.length());
}
}
static std::string ToOctString(const std::string& binary_string) {
std::string oct_string;
for (size_t i = 0; i < binary_string.size(); i++) {
@ -52,16 +69,83 @@ static std::string ToOctString(const std::string& binary_string) {
return oct_string;
}
template <typename SetT>
bool CheckSetEmpty(const SetT& set_to_check, absl::string_view write_to_file,
absl::string_view msg, absl::string_view output_dir,
std::string* output) {
// Returns the full path of the written .txt file if successful.
static std::string ProduceOctalSerialized(const std::string& request,
uint32_t len) {
char* len_split_bytes = static_cast<char*>(static_cast<void*>(&len));
std::string out;
std::string hex_repr;
for (int i = 0; i < 4; i++) {
auto conversion = (unsigned int)static_cast<uint8_t>(len_split_bytes[i]);
std::string hex = absl::StrFormat("\\x%x", conversion);
absl::StrAppend(&hex_repr, hex);
}
absl::StrAppend(&out, hex_repr);
absl::StrAppend(&out, ToOctString(request));
return out;
}
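As a worked example (assuming ToOctString above emits three-digit octal escapes, which is what the printf pipeline in the debug instructions below consumes): a two-byte request "\x08\x01" with len = 2 produces the header \x2\x0\x0\x0 (the four little-endian length bytes, %x-formatted without zero padding) followed by \010\001.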
static std::string WriteToFile(const std::string& octal_serialized,
const std::string& output_dir,
const std::string& test_name) {
std::string test_name_txt = test_name;
ReplaceAll(test_name_txt, ".", "_");
absl::StrAppend(&test_name_txt, ".txt");
std::string full_filename;
if (!output_dir.empty()) {
full_filename = output_dir;
if (*output_dir.rbegin() != '/') {
full_filename.push_back('/');
}
absl::StrAppend(&full_filename, test_name_txt);
}
std::ofstream os{std::string(full_filename)};
if (os) {
os << octal_serialized;
return full_filename;
} else {
ABSL_LOG(INFO) << "Failed to open file for debugging: " << full_filename
<< "\n";
return "";
}
}
// Removes all newlines.
static void Normalize(std::string& input) {
input.erase(std::remove(input.begin(), input.end(), '\n'), input.end());
}
// Sets up a failure message properly for our failure lists.
static TestStatus FormatFailureMessage(const TestStatus& input) {
// Make copy just this once, as we need to modify it for our failure lists.
std::string formatted_failure_message = input.failure_message();
// Remove newlines
Normalize(formatted_failure_message);
// Truncate failure message if needed
if (formatted_failure_message.length() > 128) {
formatted_failure_message = formatted_failure_message.substr(0, 128);
}
TestStatus properly_formatted;
properly_formatted.set_name(input.name());
properly_formatted.set_failure_message(formatted_failure_message);
return properly_formatted;
}
bool CheckSetEmpty(const absl::btree_map<std::string, TestStatus>& set_to_check,
absl::string_view write_to_file, absl::string_view msg,
absl::string_view output_dir, std::string* output) {
if (set_to_check.empty()) return true;
absl::StrAppendFormat(output, "\n");
absl::StrAppendFormat(output, "%s\n\n", msg);
for (absl::string_view v : set_to_check) {
absl::StrAppendFormat(output, " %s\n", v);
for (const auto& pair : set_to_check) {
absl::StrAppendFormat(output, " %s # %s\n", pair.first,
pair.second.failure_message());
}
absl::StrAppendFormat(output, "\n");
@ -70,19 +154,23 @@ bool CheckSetEmpty(const SetT& set_to_check, absl::string_view write_to_file,
absl::string_view filename = write_to_file;
if (!output_dir.empty()) {
full_filename = std::string(output_dir);
if (*output_dir.rbegin() != '/') {
full_filename.push_back('/');
}
absl::StrAppend(&full_filename, write_to_file);
filename = full_filename;
}
std::ofstream os{std::string(filename)};
if (os) {
for (absl::string_view v : set_to_check) {
os << v << "\n";
for (const auto& pair : set_to_check) {
// Additions will not have a 'matched_name' while removals will.
string potential_add_or_removal = pair.second.matched_name().empty()
? pair.first
: pair.second.matched_name();
os << potential_add_or_removal << " # " << pair.second.failure_message()
<< "\n";
}
} else {
absl::StrAppendFormat(output, "Failed to open file: %s\n", filename);
absl::StrAppendFormat(output,
"Failed to open file: %s\n",
filename);
}
}
@ -94,6 +182,8 @@ bool CheckSetEmpty(const SetT& set_to_check, absl::string_view write_to_file,
namespace google {
namespace protobuf {
constexpr int kMaximumWildcardExpansions = 5;
ConformanceTestSuite::ConformanceRequestSetting::ConformanceRequestSetting(
ConformanceLevel level, conformance::WireFormat input_format,
conformance::WireFormat output_format,
@ -265,47 +355,75 @@ ConformanceResponse ConformanceTestSuite::TruncateResponse(
return debug_response;
}
void ConformanceTestSuite::ReportSuccess(const std::string& test_name) {
if (expected_to_fail_.erase(test_name) != 0) {
absl::StrAppendFormat(
&output_,
"ERROR: test %s is in the failure list, but test succeeded. "
"Remove it from the failure list.\n",
test_name);
unexpected_succeeding_tests_.insert(test_name);
void ConformanceTestSuite::ReportSuccess(const TestStatus& test) {
if (expected_to_fail_.contains(test.name())) {
absl::StrAppendFormat(&output_,
"ERROR: test %s (matched to %s) is in the failure "
"list, but test succeeded. "
"Remove its match from the failure list.\n",
test.name(),
expected_to_fail_[test.name()].matched_name());
unexpected_succeeding_tests_[test.name()] = expected_to_fail_[test.name()];
}
expected_to_fail_.erase(test.name());
successes_++;
}
void ConformanceTestSuite::ReportFailure(const std::string& test_name,
void ConformanceTestSuite::ReportFailure(TestStatus& test,
ConformanceLevel level,
const ConformanceRequest& request,
const ConformanceResponse& response,
absl::string_view message) {
if (expected_to_fail_.erase(test_name) == 1) {
expected_failures_++;
const ConformanceResponse& response) {
if (expected_to_fail_.contains(test.name())) {
// Make copy just this once, as we need to modify them for comparison.
// Failure message from the failure list.
string expected_failure_message =
expected_to_fail_[test.name()].failure_message();
// Actual failure message from the test run.
std::string actual_failure_message = test.failure_message();
Normalize(actual_failure_message);
if (actual_failure_message.rfind(expected_failure_message, 0) == 0) {
// Our failure messages match.
expected_failures_++;
} else {
// We want to add the test to the failure list with its correct failure
// message.
unexpected_failure_messages_[test.name()] = FormatFailureMessage(test);
// We want to remove the test from the failure list. The removal entry
// must carry the same failure message that was in the list.
TestStatus incorrect_failure_message;
incorrect_failure_message.set_name(test.name());
incorrect_failure_message.set_failure_message(expected_failure_message);
incorrect_failure_message.set_matched_name(
expected_to_fail_[test.name()].matched_name());
expected_failure_messages_[test.name()] = incorrect_failure_message;
}
expected_to_fail_.erase(test.name());
if (!verbose_) return;
} else if (level == RECOMMENDED && !enforce_recommended_) {
absl::StrAppendFormat(&output_, "WARNING, test=%s: ", test_name);
absl::StrAppendFormat(&output_, "WARNING, test=%s: ", test.name());
} else {
absl::StrAppendFormat(&output_, "ERROR, test=%s: ", test_name);
unexpected_failing_tests_.insert(test_name);
absl::StrAppendFormat(&output_, "ERROR, test=%s: ", test.name());
unexpected_failing_tests_[test.name()] = FormatFailureMessage(test);
}
absl::StrAppendFormat(&output_, "%s, request=%s, response=%s\n", message,
absl::StrAppendFormat(&output_, "%s, request=%s, response=%s\n",
test.failure_message(),
TruncateRequest(request).ShortDebugString(),
TruncateResponse(response).ShortDebugString());
}
void ConformanceTestSuite::ReportSkip(const std::string& test_name,
void ConformanceTestSuite::ReportSkip(const TestStatus& test,
const ConformanceRequest& request,
const ConformanceResponse& response) {
if (verbose_) {
absl::StrAppendFormat(
&output_, "SKIPPED, test=%s request=%s, response=%s\n", test_name,
&output_, "SKIPPED, test=%s request=%s, response=%s\n", test.name(),
request.ShortDebugString(), response.ShortDebugString());
}
skipped_.insert(test_name);
skipped_[test.name()] = test;
}
void ConformanceTestSuite::RunValidInputTest(
@ -326,7 +444,10 @@ void ConformanceTestSuite::RunValidBinaryInputTest(
const std::string& equivalent_wire_format, bool require_same_wire_format) {
const ConformanceRequest& request = setting.GetRequest();
ConformanceResponse response;
RunTest(setting.GetTestName(), request, &response);
if (!RunTest(setting.GetTestName(), request, &response)) {
return;
}
VerifyResponse(setting, equivalent_wire_format, response, true,
require_same_wire_format);
}
@ -345,22 +466,26 @@ void ConformanceTestSuite::VerifyResponse(
ABSL_CHECK(reference_message->ParseFromString(equivalent_wire_format))
<< "Failed to parse wire data for test case: " << test_name;
TestStatus test;
test.set_name(test_name);
switch (response.result_case()) {
case ConformanceResponse::RESULT_NOT_SET:
ReportFailure(test_name, level, request, response,
"Response didn't have any field in the Response.");
test.set_failure_message(
"Response didn't have any field in the Response.");
ReportFailure(test, level, request, response);
return;
case ConformanceResponse::kParseError:
case ConformanceResponse::kTimeoutError:
case ConformanceResponse::kRuntimeError:
case ConformanceResponse::kSerializeError:
ReportFailure(test_name, level, request, response,
"Failed to parse input or produce output.");
test.set_failure_message("Failed to parse input or produce output.");
ReportFailure(test, level, request, response);
return;
case ConformanceResponse::kSkipped:
ReportSkip(test_name, request, response);
ReportSkip(test, request, response);
return;
default:
@ -386,31 +511,93 @@ void ConformanceTestSuite::VerifyResponse(
} else {
check = differencer.Compare(*reference_message, *test_message);
}
if (check) {
if (need_report_success) {
ReportSuccess(test_name);
ReportSuccess(test);
}
} else {
ReportFailure(
test_name, level, request, response,
absl::StrCat("Output was not equivalent to reference message: ",
differences));
test.set_failure_message(absl::StrCat(
"Output was not equivalent to reference message: ", differences));
ReportFailure(test, level, request, response);
}
}
void ConformanceTestSuite::RunTest(const std::string& test_name,
bool ConformanceTestSuite::RunTest(const std::string& test_name,
const ConformanceRequest& request,
ConformanceResponse* response) {
if (test_names_.insert(test_name).second == false) {
if (test_names_ran_.insert(test_name).second == false) {
ABSL_LOG(FATAL) << "Duplicated test name: " << test_name;
}
// Find the failure-list entry this test name matches: either a wildcarded
// entry that expands to it, or a direct match (without wildcards).
auto result = failure_list_root_.WalkDownMatch(test_name);
if (result.has_value()) {
string matched_equivalent = result.value();
unmatched_.erase(matched_equivalent);
TestStatus expansion;
expansion.set_name(test_name);
expansion.set_matched_name(matched_equivalent);
expansion.set_failure_message(saved_failure_messages_[matched_equivalent]);
expected_to_fail_[test_name] = expansion;
if (number_of_matches_.contains(matched_equivalent)) {
if (number_of_matches_[matched_equivalent] > kMaximumWildcardExpansions &&
!exceeded_max_matches_.contains(matched_equivalent)) {
exceeded_max_matches_[matched_equivalent] = expansion;
}
number_of_matches_[matched_equivalent]++;
} else {
number_of_matches_[matched_equivalent] = 1;
}
}
std::string serialized_request;
std::string serialized_response;
request.SerializeToString(&serialized_request);
runner_->RunTest(test_name, serialized_request, &serialized_response);
uint32_t len = internal::little_endian::FromHost(
static_cast<uint32_t>(serialized_request.size()));
if (isolated_) {
if (names_to_test_.erase(test_name) ==
0) { // Tests were asked to be run in isolated mode, but this test was
// not asked to be run.
expected_to_fail_.erase(test_name);
return false;
}
if (debug_) {
std::string octal = ProduceOctalSerialized(serialized_request, len);
std::string full_filename = WriteToFile(octal, output_dir_, test_name);
if (!full_filename.empty()) {
absl::StrAppendFormat(
&output_, "Produced octal serialized request file for test %s\n",
test_name);
absl::StrAppendFormat(
&output_,
" To pipe the "
"serialized request directly to "
"the "
"testee run from the root of your workspace:\n printf $("
"<\"%s\") | %s\n\n",
full_filename, testee_);
absl::StrAppendFormat(
&output_,
" To inspect the wire format of the serialized request with "
"protoscope run "
"(Disclaimer: This may not work properly on non-Linux "
"platforms):\n "
" "
"contents=$(<\"%s\"); sub=$(cut -d \\\\ -f 6- <<< "
"$contents) ; printf \"\\\\${sub}\" | protoscope \n\n\n",
full_filename);
}
}
}
response->set_protobuf_payload(serialized_request);
runner_->RunTest(test_name, len, serialized_request, &serialized_response);
if (!response->ParseFromString(serialized_response)) {
response->Clear();
@ -423,6 +610,7 @@ void ConformanceTestSuite::RunTest(const std::string& test_name,
test_name, TruncateRequest(request).ShortDebugString(),
TruncateResponse(*response).ShortDebugString());
}
return true;
}
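For example (wildcard syntax illustrative, per the trie's wildcard support): a failure-list entry such as Recommended.*.JsonInput.FieldNameNotQuoted would match both Recommended.Proto3.JsonInput.FieldNameNotQuoted and Recommended.Editions_Proto3.JsonInput.FieldNameNotQuoted. Each expansion increments number_of_matches_ for that entry, and once an entry exceeds kMaximumWildcardExpansions (5) matches it is recorded in exceeded_max_matches_ for removal from the list.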
std::string ConformanceTestSuite::WireFormatToString(WireFormat wire_format) {
@ -443,8 +631,17 @@ std::string ConformanceTestSuite::WireFormatToString(WireFormat wire_format) {
return "";
}
void ConformanceTestSuite::AddExpectedFailedTest(const std::string& test_name) {
expected_to_fail_.insert(test_name);
bool ConformanceTestSuite::AddExpectedFailedTest(
const TestStatus& expected_failure) {
absl::Status attempt = failure_list_root_.Insert(expected_failure.name());
if (!attempt.ok()) {
absl::StrAppend(&output_, attempt.message(), "\n\n");
return false;
}
unmatched_[expected_failure.name()] = expected_failure;
saved_failure_messages_[expected_failure.name()] =
expected_failure.failure_message();
return true;
}
bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
@ -452,53 +649,122 @@ bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
const std::string& filename,
conformance::FailureSet* failure_list) {
runner_ = runner;
failure_list_root_ = FailureListTrieNode("root");
successes_ = 0;
expected_failures_ = 0;
skipped_.clear();
test_names_.clear();
test_names_ran_.clear();
unexpected_failing_tests_.clear();
unexpected_succeeding_tests_.clear();
output_ = "\nCONFORMANCE TEST BEGIN ====================================\n\n";
std::string mode = debug_ ? "DEBUG" : "TEST";
absl::StrAppendFormat(
&output_, "CONFORMANCE %s BEGIN ====================================\n\n",
mode);
failure_list_filename_ = filename;
expected_to_fail_.clear();
for (const std::string& failure : failure_list->failure()) {
AddExpectedFailedTest(failure);
for (const TestStatus& expected_failure : failure_list->test()) {
if (!AddExpectedFailedTest(expected_failure)) {
output->assign(output_);
return false;
}
}
RunSuiteImpl();
if (!output_dir_.empty() && *output_dir_.rbegin() != '/') {
output_dir_.push_back('/');
}
bool ok = true;
if (!CheckSetEmpty(
expected_to_fail_, "nonexistent_tests.txt",
absl::StrCat("These tests were listed in the failure list, but they "
"don't exist. Remove them from the failure list by "
"running:\n"
" ./update_failure_list.py ",
failure_list_filename_,
" --remove nonexistent_tests.txt"),
unmatched_, "unmatched.txt",
absl::StrCat(
"These test names were listed in the failure list, but they "
"didn't match any actual test name. Remove them from the "
"failure list by running from the root of your workspace:\n"
" bazel run "
"//google/protobuf/conformance:update_failure_list -- ",
failure_list_filename_, " --remove ", output_dir_,
"unmatched.txt"),
output_dir_, &output_)) {
ok = false;
}
if (!CheckSetEmpty(
unexpected_failing_tests_, "failing_tests.txt",
absl::StrCat("These tests failed. If they can't be fixed right now, "
"you can add them to the failure list so the overall "
"suite can succeed. Add them to the failure list by "
"running:\n"
" ./update_failure_list.py ",
failure_list_filename_, " --add failing_tests.txt"),
expected_failure_messages_, "expected_failure_messages.txt",
absl::StrCat(
"These tests (either expanded from wildcard(s) or direct "
"matches) were listed in the failure list, but their "
"failure messages do not match. Remove their match from the "
"failure list by running from the root of your workspace:\n"
" bazel run ",
"//google/protobuf/conformance:update_failure_list -- ",
failure_list_filename_, " --remove ", output_dir_,
"expected_failure_messages.txt"),
output_dir_, &output_)) {
ok = false;
}
if (!CheckSetEmpty(
unexpected_succeeding_tests_, "succeeding_tests.txt",
absl::StrCat("These tests succeeded, even though they were listed in "
"the failure list. Remove them from the failure list "
"by running:\n"
" ./update_failure_list.py ",
failure_list_filename_,
" --remove succeeding_tests.txt"),
absl::StrCat(
"These tests succeeded, even though they were listed in "
"the failure list (expanded from wildcard(s) or direct matches). "
" Remove their match from the failure list by "
"running from the root of your workspace:\n"
" bazel run "
"//google/protobuf/conformance:update_failure_list -- ",
failure_list_filename_, " --remove ", output_dir_,
"succeeding_tests.txt"),
output_dir_, &output_)) {
ok = false;
}
if (!CheckSetEmpty(
exceeded_max_matches_, "exceeded_max_matches.txt",
absl::StrFormat(
"These failure list entries served as matches to too many test "
"names exceeding the max amount of %d. "
"Remove them from the failure list by running from the root of "
"your workspace:\n"
" bazel run "
"//google/protobuf/conformance:update_failure_list -- %s "
"--remove %sexceeded_max_matches.txt",
kMaximumWildcardExpansions, failure_list_filename_, output_dir_),
output_dir_, &output_)) {
ok = false;
}
if (!CheckSetEmpty(
unexpected_failure_messages_, "unexpected_failure_messages.txt",
absl::StrCat(
"These tests (expanded from wildcard(s) or direct matches from "
"the failure list) failed because their failure messages did "
"not match. If they can't be fixed right now, "
"you can add them to the failure list so the overall "
"suite can succeed. Add them to the failure list by "
"running from the root of your workspace:\n"
" bazel run "
"//google/protobuf/conformance:update_failure_list -- ",
failure_list_filename_, " --add ", output_dir_,
"unexpected_failure_messages.txt"),
output_dir_, &output_)) {
ok = false;
}
if (!CheckSetEmpty(
unexpected_failing_tests_, "failing_tests.txt",
absl::StrCat(
"These tests failed. If they can't be fixed right now, "
"you can add them to the failure list so the overall "
"suite can succeed. Add them to the failure list by "
"running from the root of your workspace:\n"
" bazel run "
"//google/protobuf/conformance:update_failure_list -- ",
failure_list_filename_, " --add ", output_dir_,
"failing_tests.txt"),
output_dir_, &output_)) {
ok = false;
}

@ -15,18 +15,18 @@
#define CONFORMANCE_CONFORMANCE_TEST_H
#include <cstddef>
#include <cstdint>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/util/type_resolver.h"
#include "absl/container/btree_set.h"
#include "absl/container/btree_map.h"
#include "absl/container/flat_hash_set.h"
#include "absl/strings/string_view.h"
#include "conformance/conformance.pb.h"
#include "failure_list_trie_node.h"
#include "google/protobuf/descriptor.h"
#include "google/protobuf/wire_format_lite.h"
namespace conformance {
class ConformanceRequest;
@ -46,17 +46,18 @@ class ConformanceTestSuite;
class ConformanceTestRunner {
public:
virtual ~ConformanceTestRunner() {}
virtual ~ConformanceTestRunner() = default;
// Call to run a single conformance test.
//
// "len" is the byte length of a serialized conformance.ConformanceRequest.
// "input" is a serialized conformance.ConformanceRequest.
// "output" should be set to a serialized conformance.ConformanceResponse.
//
// If there is any error in running the test itself, set "runtime_error" in
// the response.
virtual void RunTest(const std::string& test_name, const std::string& input,
std::string* output) = 0;
virtual void RunTest(const std::string& test_name, uint32_t len,
const std::string& input, std::string* output) = 0;
};
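A minimal sketch of a custom runner against the new signature (the class and its always-skip behavior are hypothetical; only the interface comes from this header):
#include <cstdint>
#include <string>
#include "conformance/conformance.pb.h"
#include "conformance_test.h"
class SkippingRunner : public google::protobuf::ConformanceTestRunner {
 public:
  void RunTest(const std::string& test_name, uint32_t len,
               const std::string& input, std::string* output) override {
    // "len" is the precomputed little-endian length of "input"; a pipe-based
    // runner would write it as the 4-byte frame header, as ForkPipeRunner
    // now does instead of recomputing it.
    conformance::ConformanceResponse response;
    response.set_skipped("sketch runner: test not executed");
    response.SerializeToString(output);
  }
};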
// Test runner that spawns the process being tested and communicates with it
@ -78,10 +79,10 @@ class ForkPipeRunner : public ConformanceTestRunner {
explicit ForkPipeRunner(const std::string& executable)
: child_pid_(-1), executable_(executable) {}
virtual ~ForkPipeRunner() {}
~ForkPipeRunner() override = default;
void RunTest(const std::string& test_name, const std::string& request,
std::string* response);
void RunTest(const std::string& test_name, uint32_t len,
const std::string& request, std::string* response) override;
private:
void SpawnTestProgram();
@ -128,13 +129,8 @@ class ForkPipeRunner : public ConformanceTestRunner {
//
class ConformanceTestSuite {
public:
ConformanceTestSuite()
: verbose_(false),
performance_(false),
enforce_recommended_(false),
maximum_edition_(Edition::EDITION_PROTO3),
failure_list_flag_name_("--failure_list") {}
virtual ~ConformanceTestSuite() {}
ConformanceTestSuite() = default;
virtual ~ConformanceTestSuite() = default;
void SetPerformance(bool performance) { performance_ = performance; }
void SetVerbose(bool verbose) { verbose_ = verbose; }
@ -161,7 +157,25 @@ class ConformanceTestSuite {
}
// Sets the path of the output directory.
void SetOutputDir(const char* output_dir) { output_dir_ = output_dir; }
void SetOutputDir(const std::string& output_dir) { output_dir_ = output_dir; }
// Sets if we are running the test in debug mode.
void SetDebug(bool debug) { debug_ = debug; }
// Sets if we are running ONLY the tests provided in the 'names_to_test_' set.
void SetIsolated(bool isolated) { isolated_ = isolated; }
// Sets the file path of the testee.
void SetTestee(const std::string& testee) { testee_ = testee; }
// Sets the names of tests to ONLY be run isolated from all the others.
void SetNamesToTest(absl::flat_hash_set<std::string> names_to_test) {
names_to_test_ = std::move(names_to_test);
}
absl::flat_hash_set<std::string> GetExpectedTestsNotRun() {
return names_to_test_;
}
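Taken together, a minimal sketch of wiring these knobs from a driver (mirroring ForkPipeRunner::Run later in this diff; suite stands in for a concrete ConformanceTestSuite subclass and the test name is illustrative):
suite.SetOutputDir("/tmp/conformance");  // receives debug/report files
suite.SetTestee("./conformance_cpp");    // echoed in debug instructions
suite.SetNamesToTest({"Required.Proto3.JsonInput.Int32FieldEmptyString"});
suite.SetIsolated(true);  // run ONLY the names given above
suite.SetDebug(true);     // write octal-serialized requests for those names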
// Run all the conformance tests against the given test runner.
// Test output will be stored in "output".
@ -170,6 +184,7 @@ class ConformanceTestSuite {
// failure list.
// The filename here is *only* used to create/format useful error messages for
// how to update the failure list. We do NOT read this file at all.
bool RunSuite(ConformanceTestRunner* runner, std::string* output,
const std::string& filename,
conformance::FailureSet* failure_list);
@ -201,7 +216,7 @@ class ConformanceTestSuite {
const Message& prototype_message,
const std::string& test_name,
const std::string& input);
virtual ~ConformanceRequestSetting() {}
virtual ~ConformanceRequestSetting() = default;
std::unique_ptr<Message> NewTestMessage() const;
@ -259,12 +274,11 @@ class ConformanceTestSuite {
conformance::ConformanceResponse TruncateResponse(
const conformance::ConformanceResponse& response);
void ReportSuccess(const std::string& test_name);
void ReportFailure(const std::string& test_name, ConformanceLevel level,
void ReportSuccess(const conformance::TestStatus& test);
void ReportFailure(conformance::TestStatus& test, ConformanceLevel level,
const conformance::ConformanceRequest& request,
const conformance::ConformanceResponse& response,
absl::string_view message);
void ReportSkip(const std::string& test_name,
const conformance::ConformanceResponse& response);
void ReportSkip(const conformance::TestStatus& test,
const conformance::ConformanceRequest& request,
const conformance::ConformanceResponse& response);
@ -274,42 +288,83 @@ class ConformanceTestSuite {
const std::string& equivalent_wire_format,
bool require_same_wire_format = false);
void RunTest(const std::string& test_name,
// Returns true if our runner_ ran the test and false if it did not.
bool RunTest(const std::string& test_name,
const conformance::ConformanceRequest& request,
conformance::ConformanceResponse* response);
void AddExpectedFailedTest(const std::string& test_name);
// Returns false if an entry from the failure list was a duplicate of one
// already added to the trie, or if it contained invalid wildcards;
// otherwise, returns true.
bool AddExpectedFailedTest(const conformance::TestStatus& failure);
virtual void RunSuiteImpl() = 0;
ConformanceTestRunner* runner_;
FailureListTrieNode failure_list_root_;
std::string testee_;
int successes_;
int expected_failures_;
bool verbose_;
bool performance_;
bool enforce_recommended_;
Edition maximum_edition_;
bool verbose_ = false;
bool performance_ = false;
bool enforce_recommended_ = false;
Edition maximum_edition_ = Edition::EDITION_PROTO3;
std::string output_;
std::string output_dir_;
std::string failure_list_flag_name_;
std::string failure_list_flag_name_ = "--failure_list";
std::string failure_list_filename_;
// The set of test names that are expected to fail in this run, but haven't
// failed yet.
absl::btree_set<std::string> expected_to_fail_;
absl::flat_hash_set<std::string> names_to_test_;
bool debug_ = false;
// If names were given for names_to_test_, only those tests
// will be run and this bool will be set to true.
bool isolated_ = false;
// The set of test names (expanded from wildcard(s) and non-expanded) that are
// expected to fail in this run, but haven't failed yet.
absl::btree_map<std::string, conformance::TestStatus> expected_to_fail_;
// The set of tests that failed because their failure message did not match
// the actual failure message. These are failure messages that may need to be
// removed from our failure lists.
absl::btree_map<std::string, conformance::TestStatus>
expected_failure_messages_;
// The set of test names that have been run. Used to ensure that there are no
// duplicate names in the suite.
absl::flat_hash_set<std::string> test_names_;
absl::flat_hash_set<std::string> test_names_ran_;
// The set of tests that failed, but weren't expected to: They weren't
// present in our failure lists.
absl::btree_map<std::string, conformance::TestStatus>
unexpected_failing_tests_;
// The set of tests that succeeded, but weren't expected to: They were present
// in our failure lists, but managed to succeed.
absl::btree_map<std::string, conformance::TestStatus>
unexpected_succeeding_tests_;
// The set of tests that failed, but weren't expected to.
absl::btree_set<std::string> unexpected_failing_tests_;
// The set of tests that failed because their failure message did not match
// the actual failure message. These are failure messages that may need to be
// added to our failure lists.
absl::btree_map<std::string, conformance::TestStatus>
unexpected_failure_messages_;
// The set of tests that succeeded, but weren't expected to.
absl::btree_set<std::string> unexpected_succeeding_tests_;
// The set of test names (wildcarded or not) from the failure list that did
// not match any actual test name.
absl::btree_map<std::string, conformance::TestStatus> unmatched_;
// The set of tests that the testee opted out of.
absl::btree_set<std::string> skipped_;
absl::btree_map<std::string, conformance::TestStatus> skipped_;
// Allows us to remove from unmatched_.
absl::btree_map<std::string, std::string> saved_failure_messages_;
// If a failure list entry served as a match for more than 'max_matches_',
// those will be added here for removal.
absl::btree_map<std::string, conformance::TestStatus> exceeded_max_matches_;
// Keeps track of how many tests matched to each failure list entry.
absl::btree_map<std::string, int> number_of_matches_;
};
} // namespace protobuf

@ -32,26 +32,32 @@
#include <errno.h>
#include <signal.h>
#include <stdio.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>
#include <algorithm>
#include <cctype>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <fstream>
#include <future>
#include <memory>
#include <string>
#include <vector>
#include "absl/container/flat_hash_set.h"
#include "absl/log/absl_log.h"
#include "absl/strings/ascii.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "conformance/conformance.pb.h"
#include "conformance_test.h"
#include "google/protobuf/endian.h"
using conformance::ConformanceResponse;
using google::protobuf::ConformanceTestSuite;
using std::string;
using std::vector;
@ -76,15 +82,34 @@ void ParseFailureList(const char *filename,
exit(1);
}
for (string line; getline(infile, line);) {
// Remove whitespace.
line.erase(std::remove_if(line.begin(), line.end(), ::isspace), line.end());
for (string line; std::getline(infile, line);) {
// Remove comments.
line = line.substr(0, line.find("#"));
string test_name = line.substr(0, line.find('#'));
test_name.erase(
std::remove_if(test_name.begin(), test_name.end(), ::isspace),
test_name.end());
if (test_name.empty()) { // Skip empty lines.
continue;
}
if (!line.empty()) {
failure_list->add_failure(line);
// If stripping leading whitespace leaves '#' as the first character, the
// whole line is a comment.
if (test_name[0] != '#') {
// Find our failure message if it exists. Will be set to an empty string
// if no message is found. Empty failure messages also pass our tests.
size_t check_message = line.find('#');
string message;
if (check_message != std::string::npos) {
message = line.substr(check_message + 1); // +1 to skip the delimiter
// If we had only whitespace after the delimiter, we will have an empty
// failure message and the test will still pass.
message = std::string(absl::StripAsciiWhitespace(message));
}
conformance::TestStatus *test = failure_list->add_test();
test->set_name(test_name);
test->set_failure_message(message);
}
}
}
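For reference, a small hypothetical failure-list file in the format this parser accepts (test names taken from elsewhere in this diff; the message is illustrative). A line without a '#' message parses with an empty failure message, which the suite also accepts:
# Comment lines and blank lines are skipped.
Required.Proto2.ProtobufInput.PrematureEofInPackedField.UINT64 # Should have failed to parse, but didn't.
Recommended.Proto3.JsonInput.FieldNameNotQuoted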
@ -100,7 +125,7 @@ void UsageError() {
fprintf(stderr,
" should contain one test name per\n");
fprintf(stderr,
" line. Use '#' for comments.\n");
" line. Use '#' for comments.\n\n");
fprintf(stderr,
" --text_format_failure_list <filename> Use to specify list \n");
fprintf(stderr,
@ -111,7 +136,7 @@ void UsageError() {
fprintf(stderr,
" File should contain one test name \n");
fprintf(stderr,
" per line. Use '#' for comments.\n");
" per line. Use '#' for comments.\n\n");
fprintf(stderr,
" --enforce_recommended Enforce that recommended test\n");
@ -121,19 +146,32 @@ void UsageError() {
" this flag if you want to be\n");
fprintf(stderr,
" strictly conforming to protobuf\n");
fprintf(stderr, " spec.\n");
fprintf(stderr, " spec.\n\n");
fprintf(stderr,
" --maximum_edition <edition> Only run conformance tests up\n");
" --maximum_edition <edition> Only run conformance tests up to\n");
fprintf(stderr,
" to and including the specified\n");
fprintf(stderr, " edition.\n");
" and including the specified\n");
fprintf(stderr, " edition.\n\n");
fprintf(stderr,
" --output_dir <dirname> Directory to write\n"
" output files.\n");
" output files.\n\n");
fprintf(stderr, " --test <test_name> Only run\n");
fprintf(stderr,
" the specified test. Multiple tests\n"
" can be specified by repeating the \n"
" flag.\n\n");
fprintf(stderr,
" --debug Enable debug mode\n"
" to produce octal serialized\n"
" ConformanceRequest for the tests\n"
" passed to --test (required)\n\n");
fprintf(stderr, " --performance Boolean option\n");
fprintf(stderr, " for enabling run of\n");
fprintf(stderr, " performance tests.\n");
exit(1);
}
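Putting the flags together, a hypothetical invocation (binary names and paths illustrative) that runs a single test in debug mode:
./conformance_test_runner --failure_list failure_list_cpp.txt \
    --output_dir /tmp/conformance --debug \
    --test Required.Proto2.ProtobufInput.PrematureEofInPackedField.UINT64 \
    ./conformance_cpp
Note that --debug requires at least one --test name, per the check in ForkPipeRunner::Run below.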
void ForkPipeRunner::RunTest(const std::string &test_name,
void ForkPipeRunner::RunTest(const std::string &test_name, uint32_t len,
const std::string &request,
std::string *response) {
if (child_pid_ < 0) {
@ -141,8 +179,6 @@ void ForkPipeRunner::RunTest(const std::string &test_name,
}
current_test_name_ = test_name;
uint32_t len =
internal::little_endian::FromHost(static_cast<uint32_t>(request.size()));
CheckedWrite(write_fd_, &len, sizeof(uint32_t));
CheckedWrite(write_fd_, request.c_str(), request.size());
@ -188,57 +224,93 @@ int ForkPipeRunner::Run(int argc, char *argv[],
fprintf(stderr, "No test suites found.\n");
return EXIT_FAILURE;
}
string program;
string testee;
std::vector<string> program_args;
bool performance = false;
bool debug = false;
absl::flat_hash_set<string> names_to_test;
bool enforce_recommended = false;
Edition maximum_edition = EDITION_UNKNOWN;
std::string output_dir;
bool verbose = false;
bool isolated = false;
for (int arg = 1; arg < argc; ++arg) {
if (strcmp(argv[arg], "--performance") == 0) {
performance = true;
} else if (strcmp(argv[arg], "--debug") == 0) {
debug = true;
} else if (strcmp(argv[arg], "--verbose") == 0) {
verbose = true;
} else if (strcmp(argv[arg], "--enforce_recommended") == 0) {
enforce_recommended = true;
} else if (strcmp(argv[arg], "--maximum_edition") == 0) {
if (++arg == argc) UsageError();
Edition edition = EDITION_UNKNOWN;
if (!Edition_Parse(absl::StrCat("EDITION_", argv[arg]), &edition)) {
fprintf(stderr, "Unknown edition: %s\n", argv[arg]);
UsageError();
}
maximum_edition = edition;
} else if (strcmp(argv[arg], "--output_dir") == 0) {
if (++arg == argc) UsageError();
output_dir = argv[arg];
} else if (strcmp(argv[arg], "--test") == 0) {
if (++arg == argc) UsageError();
names_to_test.insert(argv[arg]);
} else if (argv[arg][0] == '-') {
bool recognized_flag = false;
for (ConformanceTestSuite *suite : suites) {
if (strcmp(argv[arg], suite->GetFailureListFlagName().c_str()) == 0) {
if (++arg == argc) UsageError();
recognized_flag = true;
}
}
if (!recognized_flag) {
fprintf(stderr, "Unknown option: %s\n", argv[arg]);
UsageError();
}
} else {
program += argv[arg++];
while (arg < argc) {
program_args.push_back(argv[arg]);
arg++;
}
}
}
if (debug && names_to_test.empty()) {
UsageError();
}
if (!names_to_test.empty()) {
isolated = true;
}
bool all_ok = true;
for (ConformanceTestSuite *suite : suites) {
string program;
std::vector<string> program_args;
string failure_list_filename;
conformance::FailureSet failure_list;
bool performance = false;
for (int arg = 1; arg < argc; ++arg) {
if (strcmp(argv[arg], suite->GetFailureListFlagName().c_str()) == 0) {
if (++arg == argc) UsageError();
failure_list_filename = argv[arg];
ParseFailureList(argv[arg], &failure_list);
} else if (strcmp(argv[arg], "--performance") == 0) {
performance = true;
suite->SetPerformance(true);
} else if (strcmp(argv[arg], "--verbose") == 0) {
suite->SetVerbose(true);
} else if (strcmp(argv[arg], "--enforce_recommended") == 0) {
suite->SetEnforceRecommended(true);
} else if (strcmp(argv[arg], "--maximum_edition") == 0) {
if (++arg == argc) UsageError();
Edition edition = EDITION_UNKNOWN;
if (!Edition_Parse(absl::StrCat("EDITION_", argv[arg]), &edition)) {
fprintf(stderr, "Unknown edition: %s\n", argv[arg]);
UsageError();
}
suite->SetMaximumEdition(edition);
} else if (strcmp(argv[arg], "--output_dir") == 0) {
if (++arg == argc) UsageError();
suite->SetOutputDir(argv[arg]);
} else if (argv[arg][0] == '-') {
bool recognized_flag = false;
for (ConformanceTestSuite *suite : suites) {
if (strcmp(argv[arg], suite->GetFailureListFlagName().c_str()) == 0) {
if (++arg == argc) UsageError();
recognized_flag = true;
}
}
if (!recognized_flag) {
fprintf(stderr, "Unknown option: %s\n", argv[arg]);
UsageError();
}
} else {
program += argv[arg++];
while (arg < argc) {
program_args.push_back(argv[arg]);
arg++;
}
}
}
suite->SetPerformance(performance);
suite->SetVerbose(verbose);
suite->SetEnforceRecommended(enforce_recommended);
suite->SetMaximumEdition(maximum_edition);
suite->SetOutputDir(output_dir);
suite->SetDebug(debug);
suite->SetNamesToTest(names_to_test);
suite->SetTestee(program);
suite->SetIsolated(isolated);
ForkPipeRunner runner(program, program_args, performance);
@ -246,8 +318,19 @@ int ForkPipeRunner::Run(int argc, char *argv[],
all_ok = all_ok && suite->RunSuite(&runner, &output, failure_list_filename,
&failure_list);
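    // Carry forward any requested tests this suite did not run so that
    // unknown names can be reported once all suites have finished.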
names_to_test = suite->GetExpectedTestsNotRun();
fwrite(output.c_str(), 1, output.size(), stderr);
}
if (!names_to_test.empty()) {
    fprintf(stderr,
            "These tests were requested to be run in isolation, but they do "
            "not exist. Revise the test names:\n\n");
for (const string &test_name : names_to_test) {
fprintf(stderr, " %s\n", test_name.c_str());
}
fprintf(stderr, "\n\n");
}
return all_ok ? EXIT_SUCCESS : EXIT_FAILURE;
}
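For reference, an invocation of the runner that exercises the flags parsed above might look like the following; the testee binary ./conformance_cpp and the --failure_list flag name are illustrative, not taken from this diff:

    ./conformance_test_runner --enforce_recommended --maximum_edition 2023 \
        --failure_list failure_list_cpp.txt \
        --test Required.Proto3.JsonInput.TrailingCommaInAnObject \
        --debug --output_dir /tmp/conformance ./conformance_cpp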

@@ -7,105 +7,31 @@
# TODO: insert links to corresponding bugs tracking the issue.
# Should we use GitHub issues or the Google-internal bug tracker?
Recommended.*.JsonInput.BoolFieldDoubleQuotedFalse # Should have failed to parse, but didn't.
Recommended.*.JsonInput.BoolFieldDoubleQuotedTrue # Should have failed to parse, but didn't.
Recommended.*.JsonInput.FieldNameDuplicate # Should have failed to parse, but didn't.
Recommended.*.JsonInput.FieldNameDuplicateDifferentCasing1 # Should have failed to parse, but didn't.
Recommended.*.JsonInput.FieldNameDuplicateDifferentCasing2 # Should have failed to parse, but didn't.
Recommended.*.JsonInput.FieldNameExtension.Validator # Expected JSON payload but got type 1
Recommended.*.JsonInput.FieldNameNotQuoted # Should have failed to parse, but didn't.
Recommended.*.JsonInput.IgnoreUnknownEnumStringValueInMapPart.ProtobufOutput # Output was not equivalent to reference message: added: map_string_nested_enum[key2]: FOO
Recommended.*.JsonInput.IgnoreUnknownEnumStringValueInMapValue.ProtobufOutput # Output was not equivalent to reference message: added: map_string_nested_enum[key]: FOO
Recommended.*.JsonInput.MapFieldValueIsNull # Should have failed to parse, but didn't.
Recommended.*.JsonInput.RepeatedFieldMessageElementIsNull # Should have failed to parse, but didn't.
Recommended.*.JsonInput.RepeatedFieldPrimitiveElementIsNull # Should have failed to parse, but didn't.
Recommended.*.JsonInput.RepeatedFieldTrailingComma # Should have failed to parse, but didn't.
Recommended.*.JsonInput.RepeatedFieldTrailingCommaWithNewlines # Should have failed to parse, but didn't.
Recommended.*.JsonInput.RepeatedFieldTrailingCommaWithSpace # Should have failed to parse, but didn't.
Recommended.*.JsonInput.RepeatedFieldTrailingCommaWithSpaceCommaSpace # Should have failed to parse, but didn't.
Recommended.*.JsonInput.StringFieldSingleQuoteBoth # Should have failed to parse, but didn't.
Recommended.*.JsonInput.StringFieldSingleQuoteKey # Should have failed to parse, but didn't.
Recommended.*.JsonInput.StringFieldSingleQuoteValue # Should have failed to parse, but didn't.
Recommended.*.JsonInput.StringFieldUppercaseEscapeLetter # Should have failed to parse, but didn't.
Recommended.*.JsonInput.TrailingCommaInAnObject # Should have failed to parse, but didn't.
Recommended.*.JsonInput.TrailingCommaInAnObjectWithNewlines # Should have failed to parse, but didn't.
Recommended.*.JsonInput.TrailingCommaInAnObjectWithSpace # Should have failed to parse, but didn't.
Recommended.*.JsonInput.TrailingCommaInAnObjectWithSpaceCommaSpace # Should have failed to parse, but didn't.
Recommended.*.FieldMaskNumbersDontRoundTrip.JsonOutput # Should have failed to serialize, but didn't.
Recommended.*.FieldMaskPathsDontRoundTrip.JsonOutput # Should have failed to serialize, but didn't.
Recommended.*.FieldMaskTooManyUnderscore.JsonOutput # Should have failed to serialize, but didn't.
Recommended.*.JsonInput.FieldMaskInvalidCharacter # Should have failed to parse, but didn't.
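The wildcarded entries above consolidate what were previously per-edition duplicates (Proto2, Proto3, Editions_Proto2, Editions_Proto3), with a trailing comment recording why each test is expected to fail. A minimal sketch of the matching rule this implies, assuming `*` matches exactly one dot-delimited component of a test name (an illustration, not the runner's actual matcher):

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Split a test name or failure-list pattern on '.' into its components.
static std::vector<std::string> SplitOnDots(const std::string &s) {
  std::vector<std::string> parts;
  std::stringstream ss(s);
  std::string part;
  while (std::getline(ss, part, '.')) parts.push_back(part);
  return parts;
}

// A pattern matches a name when both have the same number of components
// and each pattern component is either "*" or equal to the name's.
static bool MatchesFailureEntry(const std::string &pattern,
                                const std::string &name) {
  std::vector<std::string> p = SplitOnDots(pattern);
  std::vector<std::string> n = SplitOnDots(name);
  if (p.size() != n.size()) return false;
  for (size_t i = 0; i < p.size(); ++i) {
    if (p[i] != "*" && p[i] != n[i]) return false;
  }
  return true;
}

int main() {
  // Prints 1: the wildcard spans the edition component.
  std::cout << MatchesFailureEntry(
                   "Recommended.*.JsonInput.TrailingCommaInAnObject",
                   "Recommended.Editions_Proto3.JsonInput."
                   "TrailingCommaInAnObject")
            << "\n";
}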
