Merge to main

gha-test
Deanna Garcia 5 months ago
commit c19d666e96
Changed files (lines changed per file):
  .bazelrc | 23
  .bcr/README.md | 8
  .bcr/metadata.template.json | 23
  .bcr/presubmit.yml | 36
  .bcr/source.template.json | 5
  .clang-format | 3
  .github/BUILD.bazel | 54
  .github/CODEOWNERS | 1
  .github/dependabot.yml | 8
  .github/mergeable.yml | 15
  .github/workflows/README.md | 216
  .github/workflows/clear_caches.yml | 35
  .github/workflows/codespell.yml | 18
  .github/workflows/forked_pr_workflow_check.yml | 30
  .github/workflows/janitor.yml | 78
  .github/workflows/objc_cocoapods.yml | 40
  .github/workflows/scorecard.yml | 60
  .github/workflows/staleness_check.yml | 63
  .github/workflows/staleness_refresh.yml | 10
  .github/workflows/test_bazel.yml | 54
  .github/workflows/test_cpp.yml | 482
  .github/workflows/test_csharp.yml | 118
  .github/workflows/test_java.yml | 120
  .github/workflows/test_objectivec.yml | 136
  .github/workflows/test_php.yml | 205
  .github/workflows/test_php_ext.yml | 57
  .github/workflows/test_python.yml | 92
  .github/workflows/test_ruby.yml | 188
  .github/workflows/test_runner.yml | 119
  .github/workflows/test_rust.yml | 32
  .github/workflows/test_upb.yml | 283
  .github/workflows/update_php_repo.yml | 13
  .gitignore | 8
  .gitmodules | 2
  .readthedocs.yml | 2
  BUILD.bazel | 246
  CHANGES.txt | 75
  CMakeLists.txt | 130
  Cargo.bazel.lock | 933
  Cargo.lock | 137
  MODULE.bazel | 72
  PrivacyInfo.xcprivacy | 14
  Protobuf-C++.podspec | 20
  Protobuf.podspec | 12
  README.md | 40
  WORKSPACE | 196
  appveyor.bat | 4
  bazel/BUILD.bazel | 57
  bazel/cc_proto_library.bzl | 3
  bazel/common/BUILD.bazel | 34
  bazel/common/proto_common.bzl | 5
  bazel/common/proto_info.bzl | 5
  bazel/common/proto_lang_toolchain_info.bzl | 5
  bazel/java_lite_proto_library.bzl | 3
  bazel/java_proto_library.bzl | 3
  bazel/private/BUILD.bazel | 42
  bazel/private/native.bzl | 5
  bazel/private/proto_toolchain_rule.bzl | 47
  bazel/private/upb_proto_library_internal/aspect.bzl | 303
  bazel/private/upb_proto_library_internal/cc_library_func.bzl | 86
  bazel/private/upb_proto_library_internal/copts.bzl | 16
  bazel/private/upb_proto_library_internal/rule.bzl | 39
  bazel/proto_library.bzl | 3
  bazel/py_proto_library.bzl | 211
  bazel/system_python.bzl | 17
  bazel/toolchains/BUILD.bazel | 23
  bazel/toolchains/proto_lang_toolchain.bzl | 34
  bazel/toolchains/proto_toolchain.bzl | 26
  bazel/upb_c_proto_library.bzl | 69
  bazel/upb_minitable_proto_library.bzl | 77
  bazel/upb_proto_library.bzl | 35
  bazel/upb_proto_reflection_library.bzl | 67
  benchmarks/BUILD | 245
  benchmarks/BUILD.googleapis | 59
  benchmarks/benchmark.cc | 471
  benchmarks/build_defs.bzl | 63
  benchmarks/compare.py | 123
  benchmarks/descriptor.proto | 865
  benchmarks/descriptor_sv.proto | 867
  benchmarks/empty.proto | 12
  benchmarks/gen_protobuf_binary_cc.py | 69
  benchmarks/gen_synthetic_protos.py | 123
  benchmarks/gen_upb_binary_c.py | 70
  build_defs/BUILD.bazel | 55
  build_defs/cpp_opts.bzl | 15
  build_defs/internal_shell.bzl | 4
  build_defs/java_opts.bzl | 82
  build_defs/upb.patch | 11
  ci/Linux.bazelrc | 3
  ci/README.md | 17
  ci/Windows.bazelrc | 5
  ci/clang_wrapper | 3
  ci/clang_wrapper++ | 3
  ci/common.bazelrc | 80
  ci/macOS.bazelrc | 5
  ci/push_auto_update.sh | 5
  cmake/CMakeLists.txt | 9
  cmake/README.md | 33
  cmake/abseil-cpp.cmake | 20
  cmake/conformance.cmake | 141
Some files were not shown because too many files have changed in this diff.

@@ -1,4 +1,4 @@
-build --cxxopt=-std=c++14 --host_cxxopt=-std=c++14
build --cxxopt=-std=c++17 --host_cxxopt=-std=c++17
build:dbg --compilation_mode=dbg
@@ -7,7 +7,6 @@ build:opt --compilation_mode=opt
build:san-common --config=dbg --strip=never --copt=-O0 --copt=-fno-omit-frame-pointer
build:asan --config=san-common --copt=-fsanitize=address --linkopt=-fsanitize=address
build:asan --copt=-DADDRESS_SANITIZER=1
# ASAN hits ODR violations with shared linkage due to rules_proto.
build:asan --dynamic_mode=off
@@ -15,21 +14,21 @@ build:msan --config=san-common --copt=-fsanitize=memory --linkopt=-fsanitize=mem
build:msan --copt=-fsanitize-memory-track-origins
build:msan --copt=-fsanitize-memory-use-after-dtor
build:msan --action_env=MSAN_OPTIONS=poison_in_dtor=1
build:msan --copt=-DMEMORY_SANITIZER=1
# Use our instrumented LLVM libc++ in Kokoro.
build:kokoro-msan --config=msan
build:kokoro-msan --linkopt=-L/opt/libcxx_msan/lib
build:kokoro-msan --linkopt=-Wl,-rpath,/opt/libcxx_msan/lib
build:kokoro-msan --cxxopt=-stdlib=libc++ --linkopt=-stdlib=libc++
build:tsan --config=san-common --copt=-fsanitize=thread --linkopt=-fsanitize=thread
build:tsan --copt=-DTHREAD_SANITIZER=1
build:ubsan --config=san-common --copt=-fsanitize=undefined --linkopt=-fsanitize=undefined
build:ubsan --action_env=UBSAN_OPTIONS=halt_on_error=1:print_stacktrace=1
build:ubsan --copt=-DUNDEFINED_SANITIZER=1
# Workaround for the fact that Bazel links with $CC, not $CXX
# https://github.com/bazelbuild/bazel/issues/11122#issuecomment-613746748
build:ubsan --copt=-fno-sanitize=function --copt=-fno-sanitize=vptr
# TODO: migrate all dependencies from WORKSPACE to MODULE.bazel
# https://github.com/protocolbuffers/protobuf/issues/14313
common --noenable_bzlmod
# Important: this flag ensures that we remain compliant with the C++ layering
# check.
build --features=layering_check
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1

@@ -0,0 +1,8 @@
# Bazel Central Registry
When protobuf is released, we want it to be published to the Bazel Central
Registry automatically: <https://registry.bazel.build>
This folder contains configuration files to automate the publish step. See
<https://github.com/bazel-contrib/publish-to-bcr/blob/main/templates/README.md>
for authoritative documentation about these files.

@@ -0,0 +1,23 @@
{
"homepage": "https://github.com/protocolbuffers/protobuf",
"maintainers": [
{
"email": "sandyzhang@google.com",
"github": "zhangskz",
"name": "Sandy Zhang"
},
{
"email": "mkruskal@google.com",
"github": "mkruskal-google",
"name": "Mike Kruskal"
},
{
"email": "gberg@google.com",
"github": "googleberg",
"name": "Jerry Berg"
}
],
"repository": ["github:protocolbuffers/protobuf"],
"versions": [],
"yanked_versions": {}
}

@@ -0,0 +1,36 @@
matrix:
platform: ["debian10", "macos", "ubuntu2004", "windows"]
bazel: [6.x, 7.x]
tasks:
verify_targets:
name: "Verify build targets"
platform: ${{ platform }}
bazel: ${{ bazel }}
build_flags:
- '--host_cxxopt=-std=c++14'
- '--cxxopt=-std=c++14'
build_targets:
- '@protobuf//:protobuf'
- '@protobuf//:protobuf_lite'
- '@protobuf//:protobuf_python'
- '@protobuf//:protobuf_rust'
- '@protobuf//:protoc'
- '@protobuf//:test_messages_proto2_cc_proto'
- '@protobuf//:test_messages_proto3_cc_proto'
bcr_test_module:
module_path: "examples"
matrix:
platform: ["debian10", "macos", "ubuntu2004", "windows"]
bazel: [6.x, 7.x]
tasks:
run_test_module:
name: "Run test module"
platform: ${{ platform }}
bazel: ${{ bazel }}
build_flags:
- '--host_cxxopt=-std=c++14'
- '--cxxopt=-std=c++14'
build_targets:
- "//..."

@@ -0,0 +1,5 @@
{
"integrity": "**leave this alone**",
"strip_prefix": "{REPO}-{VERSION}",
"url": "https://github.com/{OWNER}/{REPO}/releases/download/{TAG}/{REPO}-{VERSION}.zip"
}

@@ -0,0 +1,3 @@
BasedOnStyle: Google
DerivePointerAlignment: false
PointerAlignment: Left

@@ -0,0 +1,54 @@
# This information is extracted from the MacOS runner specs located at:
# https://github.com/actions/runner-images/blob/main/images/macos/macos-12-Readme.md
#
# When updating, also ensure the "xcode_destination" entries in
# `.github/workflows/test_objectivec.yml` are supported for the given versions
# of Xcode.
xcode_version(
name = "version15_2_15C500b",
aliases = [
"15C500b",
"15.2",
],
default_ios_sdk_version = "17.2",
default_macos_sdk_version = "14.2",
default_tvos_sdk_version = "17.2",
default_watchos_sdk_version = "10.2",
version = "15.2.0.15C500b",
)
xcode_version(
name = "version14_2_14C18",
aliases = [
"14C18",
"14.2",
],
default_ios_sdk_version = "16.2",
default_macos_sdk_version = "13.1",
default_tvos_sdk_version = "16.1",
default_watchos_sdk_version = "9.1",
version = "14.2.0.14C18",
)
xcode_version(
name = "version14_1_0_14B47b",
aliases = [
"14B47b",
"14.1",
],
default_ios_sdk_version = "16.1",
default_macos_sdk_version = "13.0",
default_tvos_sdk_version = "16.1",
default_watchos_sdk_version = "9.1",
version = "14.1.0.14B47b",
)
xcode_config(
name = "host_xcodes",
default = ":version14_2_14C18",
versions = [
":version15_2_15C500b",
":version14_2_14C18",
":version14_1_0_14B47b",
],
)

@@ -34,3 +34,4 @@
/kokoro/ @protocolbuffers/protobuf-btr
/third_party/ @protocolbuffers/protobuf-btr
*.bazel @protocolbuffers/protobuf-btr
/.github/ @protocolbuffers/protobuf-btr

@@ -0,0 +1,8 @@
version: 2
updates:
- package-ecosystem: "github-actions" # Necessary to update action hashes
directory: "/"
schedule:
interval: "weekly"
# Allow up to 3 opened pull requests for github-actions versions
open-pull-requests-limit: 3

@ -1,15 +0,0 @@
mergeable:
pull_requests:
label:
and:
- must_exclude:
regex: '^disposition/DO NOT MERGE'
message: 'Pull request marked not mergeable'
- must_include:
regex: 'mergeable:force-allow'
message: 'Pull requests should not be merged directly and should instead
be handled by Copybara.
To enable Github merges, add the `mergeable:force-allow` label and get a second
approval. This should almost never be used except for releases or as a break glass
measure after discussing with the team.'

@@ -0,0 +1,216 @@
This directory contains all of our automatically triggered workflows.
# Test runner
Our top level `test_runner.yml` is responsible for kicking off all tests, which
are represented as reusable workflows. This is carefully constructed to satisfy
the design laid out in go/protobuf-gha-protected-resources (see below), and
duplicating it across every workflow file would be difficult to maintain. As an
added bonus, we can manually dispatch our full test suite with a single button
and monitor the progress of all of them simultaneously in GitHub's actions UI.
There are five ways our test suite can be triggered:
- **Post-submit tests** (`push`): These are run over newly submitted code
that we can assume has been thoroughly reviewed. There are no additional
security concerns here and these jobs can be given highly privileged access to
our internal resources and caches.
- **Pre-submit tests from a branch** (`pull_request`): These are run over
every PR as changes are made. Since they are coming from branches in our
repository, they have secret access by default and can also be given highly
privileged access. However, we expect *many* of these events per change,
and likely many from abandoned/exploratory changes. Given the much higher
frequency, we restrict the ability to *write* to our more expensive caches.
- **Pre-submit tests from a fork** (`pull_request_target`): These are run
over every PR from a forked repository as changes are made. These have much
more restricted access, since they could be coming from anywhere. To protect
our secret keys and our resources, tests will not run until a commit has been
labeled `safe to submit`. Further commits will require further approvals to
run our test suite. Once marked as safe, we will provide read-only access to
our caches and Docker images, but will generally disallow any writes to shared
resources.
- **Continuous tests** (`schedule`): These are run on a fixed schedule. We
currently have them set up to run daily, and can help identify non-hermetic
issues in tests that don't get run often (such as due to test caching) or during
slow periods like weekends and holidays. Similar to post-submit tests, these
are run over submitted code and are highly privileged in the resources they
can use.
- **Manual testing** (`workflow_dispatch`): Our test runner can be triggered
manually over any branch. This is treated similarly to pre-submit tests, and
it can be highly privileged because manual runs can only be triggered by the
protobuf team.
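As a concrete sketch of the manual path (assuming an authenticated `gh` CLI;
the workflow file name matches this repository):

```bash
# Manually dispatch the full test suite over an arbitrary branch.
gh workflow run test_runner.yml --repo protocolbuffers/protobuf --ref my-feature-branch

# Follow the dispatched run from the terminal instead of the Actions UI.
gh run watch
```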
# Staleness handling
While Bazel handles code generation seamlessly, we do support build systems that
don't. There are a handful of cases where we need to check in generated files
that can become stale over time. In order to provide a good developer
experience, we've implemented a system to make this more manageable.
- Stale files should have a corresponding `staleness_test` Bazel target. This
should be marked `manual` to avoid getting picked up in CI, but will fail if
files become stale. It also provides a `--fix` flag to update the stale files.
- Bazel tests will never depend on the checked-in versions, and will generate
new ones on-the-fly during build.
- Non-Bazel tests will always regenerate necessary files before starting. This
is done using our `bash` and `docker` actions, which should be used for any
non-Bazel tests. This way, no tests will fail due to stale files.
- A post-submit job will immediately regenerate any stale files and commit them
if they've changed.
- A scheduled job will run late at night every day to make sure the post-submit
is working as expected (that is, it will run all the staleness tests).
The `regenerate_stale_files.sh` script is the central script responsible for all
the re-generation of stale files.
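Putting those pieces together, the flow for one generated file looks roughly
like this (the target name below is hypothetical; the `--fix` flag and the
central script are the mechanisms described above):

```bash
# Fail if the checked-in generated files have drifted from what Bazel would
# produce (hypothetical staleness_test target name).
bazel test //src:cmake_lists_staleness_test

# The same target exposes a --fix flag that rewrites the stale files in place.
bazel run //src:cmake_lists_staleness_test -- --fix

# Or regenerate every stale file at once with the central script.
./regenerate_stale_files.sh
```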
# Forked PRs
Because we need secret access to run our tests, we use the `pull_request_target`
event for PRs coming from forked repositories. We do checkout the code from the
PR's head, but the workflow files themselves are always fetched from the *base*
branch (that is, the branch we're merging to). Therefore, any changes to these
files won't be tested, so we explicitly ban PRs that touch these files.
# Caches
We have a number of different caching strategies to help speed up tests. These
live either in GCP buckets or in our GitHub repository cache. The former has
a lot of resources available and we don't have to worry as much about bloat.
On the other hand, the GitHub repository cache is limited to 10GB, and will
start pruning old caches when it exceeds that threshold. Therefore, we need
to be very careful about the size and quantity of our caches in order to
maximize the gains.
## Bazel remote cache
As described in https://bazel.build/remote/caching, remote caching allows us to
offload a lot of our build steps to a remote server that holds a cache of
previous builds. We use our GCP project for this storage, and configure
*every* Bazel call to use it. This provides substantial performance
improvements at minimal cost.
We do not allow forked PRs to upload updates to our Bazel caches, but they
do use them. Every other event is given read/write access to the caches.
Because Bazel behaves poorly under certain environment changes (such as a
different toolchain or operating system), we use fine-grained caches: each job
typically has its own cache to avoid cross-pollution.
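In flag form, the read/write split boils down to something like the following
(the bucket URL and cache key are placeholders, not our real values):

```bash
# Trusted events: read from and write to the job's own cache silo.
bazel test //src/... \
  --remote_cache=https://storage.googleapis.com/example-bucket/cpp_linux_opt \
  --google_default_credentials

# Forked PRs: same cache, but uploads disabled so it stays read-only.
bazel test //src/... \
  --remote_cache=https://storage.googleapis.com/example-bucket/cpp_linux_opt \
  --noremote_upload_local_results
```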
## Bazel repository cache
When Bazel starts up, it downloads all the external dependencies for a given
build and stores them in the repository cache. This cache is *separate* from
the remote cache, and only exists locally. Because we have so many Bazel
dependencies, this can be a source of frequent flakes due to network issues.
To avoid this, we keep a cached version of the repository cache in GitHub's
action cache. Our full set of repository dependencies ends up being ~300MB,
which is fairly expensive given our 10GB maximum. The most expensive ones seem
to come from Java, which has some very large downstream dependencies.
Given the cost, we take a more conservative approach for this cache. Only push
events will ever write to this cache, but all events can read from them.
Additionally, we only store three caches for any given commit, one per platform.
This means that multiple jobs are trying to update the same cache, leading to a
race. GitHub rejects all but one of these updates, so we designed the system so
that caches are only updated if they've actually changed. That way, over time
(and multiple pushes) the repository caches will incrementally grow to encompass
all of our dependencies. A scheduled job will run monthly to clear these caches
to prevent unbounded growth as our dependencies evolve.
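The Bazel side of this is a single flag; the change detection is sketched
below under the assumption that a content hash serves as the cache key (our
actions may implement this differently):

```bash
# Every Bazel invocation points at the same local directory, which the
# workflow restores from (and, on push events, saves back to) the action cache.
bazel test //src/... --repository_cache=.repository-cache

# Cheap change detection: hash the cache contents and only re-upload when the
# hash differs from the restored version.
find .repository-cache -type f -print0 | sort -z | xargs -0 sha256sum | sha256sum
```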
## ccache
In order to speed up non-Bazel builds to be on par with Bazel, we make use of
[ccache](https://ccache.dev/). This intercepts all calls to the compiler, and
caches the result. Subsequent calls with a cache-hit will very quickly
short-circuit and return the already computed result. This has minimal effect
on any *single* job, since we typically only run a single build. However, by
caching the ccache results in GitHub's action cache we can substantially
decrease the build time of subsequent runs.
One useful feature of ccache is that you can set a maximum cache size, and it
will automatically prune older results to keep below that limit. On Linux and
Mac cmake builds, we generally get 30MB caches and set a 100MB cache limit. On
Windows, with debug symbol stripping we get ~70MB and set a 200MB cache limit.
Because CMake builds tend to be our slowest, bottlenecking the entire CI process,
we use a fairly expensive strategy with ccache. All events will cache their
ccache directory, keyed by the commit and the branch. This means that each
PR and each branch will write its own set of caches. When looking up which
cache to use initially, each job will first look for a recent cache in its
current branch. If it can't find one, it will accept a cache from the base
branch (for example, PRs will initially use the latest cache from their target
branch).
While the ccache caches quickly overrun our GitHub action cache, they also
quickly become useless. Since GitHub prunes caches based on when they were
last used, this just means that we'll see quicker turnover.
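A minimal version of that setup, with the size limits quoted above (the
compiler names are illustrative):

```bash
# Route compiler invocations through ccache.
export CC="ccache clang" CXX="ccache clang++"

ccache -M 100M   # max cache size; we use ~200M on Windows builds
ccache -z        # zero statistics so the end-of-job report covers this run only

cmake . && cmake --build . --parallel 20

ccache -s        # report hit/miss statistics for this run
```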
## sccache
An alternative to ccache is [sccache](https://github.com/mozilla/sccache). The
two tools are very similar in function, but sccache requires (and allows) much
less configuration and supports GCS storage right out of the box. By hooking
this up to our project that we already use for Bazel caching, we're able to get
even bigger CMake wins in CI because we're no longer constrained by GitHub's
10GB cache limit.
Similar to the Bazel remote cache, we give read access to every CI run, but
disallow writing in PRs from forks.
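Hooked up to GCS, the setup looks roughly like this (the bucket name is a
placeholder; `SCCACHE_GCS_RW_MODE` is how writes are disabled for forked PRs):

```bash
# Point sccache at the shared GCS bucket.
export SCCACHE_GCS_BUCKET=example-sccache-bucket
export SCCACHE_GCS_RW_MODE=READ_WRITE   # READ_ONLY for PRs from forks

# Wrap the compilers via CMake's launcher variables and build as usual.
cmake . -DCMAKE_C_COMPILER_LAUNCHER=sccache -DCMAKE_CXX_COMPILER_LAUNCHER=sccache
cmake --build . --parallel 20

sccache --show-stats
```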
## Bazelisk
Bazelisk will automatically download a pinned version of Bazel on first use.
This can lead to flakes, and to avoid that we cache the result keyed on the
Bazel version. Only push events will write to this cache, but it's unlikely
to change very often.
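A sketch of the idea (the path is Bazelisk's Linux default; the exact cache
key our workflows use may differ):

```bash
# Bazelisk reads the pinned version from .bazelversion and only downloads
# Bazel when that version isn't already present locally.
BAZEL_VERSION=$(cat .bazelversion)

# Key the action cache on the pinned version so it invalidates on bumps.
echo "cache key: bazelisk-${BAZEL_VERSION}"

# Directory worth caching (Linux default): ~/.cache/bazelisk
bazelisk version
```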
## Docker images
Instead of downloading a fresh Docker image for every test run, we can save it
as a tar and cache it using `docker image save` and later restore using
`docker image load`. This can decrease download times and also reduce flakes.
Note that `docker image load` can actually be significantly slower than a pull in certain
situations. Therefore, we should reserve this strategy for only Docker images
that are causing noticeable flakes.
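The save/restore pair from the text, spelled out (the image name is a
placeholder):

```bash
IMAGE=us-docker.pkg.dev/example-project/test/linux:latest  # placeholder

# Cache miss: pull once, then serialize the image for the action cache.
docker pull "$IMAGE"
docker image save "$IMAGE" -o image.tar

# Cache hit: restore the image from the tarball, no network needed.
docker image load -i image.tar
```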
## Pip dependencies
The actions/setup-python action we use for Python supports automated caching
of pip dependencies. We enable this to avoid having to download these
dependencies on every run, which can lead to flakes.
# Custom actions
We've defined a number of custom actions to abstract out shared pieces of our
workflows.
- **Bazel** use this for running all Bazel tests. It can take either a single
Bazel command or a more general bash command. In the latter case, it provides
environment variables for running Bazel with all our standardized settings.
- **Bazel-Docker** nearly identical to the **Bazel** action, this additionally
runs everything in a specified Docker image.
- **Bash** use this for running non-Bazel tests. It takes a bash command and
runs it verbatim. It also handles the regeneration of stale files (which does
use Bazel), which non-Bazel tests might depend on.
- **Docker** nearly identical to the **Bash** action, this additionally runs
everything in a specified Docker image.
- **ccache** this sets up a ccache environment, and initializes some
environment variables for standardized usage of ccache.
- **Cross-compile protoc** this abstracts out the compilation of protoc using
our cross-compilation infrastructure. It will set a `PROTOC` environment
variable that gets automatically picked up by a lot of our infrastructure.
This is most useful in conjunction with the **Bash** action with non-Bazel
tests.

@@ -0,0 +1,35 @@
name: Clear expensive caches to prevent unbounded growth
on:
schedule:
# Run every 4 months at 10 AM UTC (2 AM PDT)
- cron: 0 10 5 */4 *
# manual
workflow_dispatch:
permissions:
contents: read
jobs:
bazel-repository-cache:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Clear Bazel repository cache ${{ matrix.os }}
runs-on: ${{ matrix.os }}
permissions:
actions: write # permission is required to delete caches
contents: read
steps:
- uses: actions/cache@627f0f41f6904a5b1efbaed9f96d9eb58e92e920 # v3.2.4
with:
path: ${{ github.workspace }}/${{ steps.output.outputs.repository-cache }}
key: repository-cache-${{ github.ref_name }}-${{ runner.os }}-reset-${{ github.sha }}
- name: Create an empty cache with a single file
run: |
rm -rf .repository-cache
mkdir -p .repository-cache
touch .repository-cache/reset_file

@ -1,18 +0,0 @@
# GitHub Action to automate the identification of common misspellings in text files.
# https://github.com/codespell-project/actions-codespell
# https://github.com/codespell-project/codespell
name: codespell
on: [push, pull_request]
permissions:
contents: read # to fetch code (actions/checkout)
jobs:
codespell:
name: Check for spelling errors
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: codespell-project/actions-codespell@master
with:
check_filenames: true
skip: ./.git,./third_party,./conformance/third_party,*.snk,*.pb,*.pb.cc,*.pb.h,./src/google/protobuf/testdata,./objectivec/Tests,./python/compatibility_tests/v2.5.0/tests/google/protobuf/internal,./.github/workflows/codespell.yml
ignore_words_list: "alow,alse,atleast,ba,chec,cleare,copyable,cloneable,dedup,dur,errorprone,falsy,files',fo,fundementals,hel,importd,inout,leapyear,nd,nin,ois,ons,parseable,process',ro,te,testof,ue,unparseable,wasn,wee,gae,keyserver,objext,od,optin,streem,sur,falsy"

@@ -0,0 +1,30 @@
name: Forked PR workflow check
# This workflow prevents modifications to our workflow files in PRs from forked
# repositories. Since tests in these PRs always use the workflows in the
# *target* branch, modifications to these files can't be properly tested.
on:
# safe presubmit
pull_request:
branches:
- main
- '[0-9]+.x'
# The 21.x branch still uses Kokoro
- '!21.x'
# For testing purposes so we can stage this on the `gha` branch.
- gha
paths:
- '.github/workflows/**'
permissions:
contents: read
jobs:
check:
name: Check PR source
runs-on: ubuntu-latest
steps:
- run: >
${{ github.event.pull_request.head.repo.full_name == 'protocolbuffers/protobuf' }} ||
(echo "This pull request is from an unsafe fork (${{ github.event.pull_request.head.repo.full_name }}) and isn't allowed to modify workflow files!" && exit 1)

@@ -0,0 +1,78 @@
name: Protobuf Janitor
on:
schedule:
# Run daily at 10 AM UTC (2 AM PDT)
- cron: 0 10 * * *
workflow_dispatch:
permissions: {}
jobs:
stale-prs:
name: Close Stale Copybara PRs
runs-on: ubuntu-latest
permissions:
contents: write # to allow deleting branches
pull-requests: write # to allow closing the PR
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_REPO: ${{ github.repository }}
steps:
- run: |
set -ex
STALE_PRS=$(gh pr list --author "app/copybara-service" --limit 500 \
--json "number" --search "updated:<=$(date --date="-7 day" +%F)" \
| jq ".[].number")
for pr in $STALE_PRS; do
echo "Closing #$pr..."
gh pr close --comment "Auto-closing Copybara pull request" --delete-branch "$pr"
done
stale-others:
name: Close stale non-copybara PRs and issues
runs-on: ubuntu-latest
permissions:
issues: write # allow the action to comment on, add labels to, and close issues
pull-requests: write # allow the action to comment on, add labels to, and close PRs
steps:
- uses: actions/stale@b69b346013879cedbf50c69f572cd85439a41936
with:
stale-issue-message: >
We triage inactive PRs and issues in order to make it easier to find
active work. If this issue should remain active or becomes active
again, please add a comment.
This issue is labeled `inactive` because the last activity was over
90 days ago.
close-issue-message: >
We triage inactive PRs and issues in order to make it easier to find
active work. If this issue should remain active or becomes active
again, please reopen it.
This issue was closed and archived because there has been no new
activity in the 14 days since the `inactive` label was added.
stale-pr-message: >
We triage inactive PRs and issues in order to make it easier to find
active work. If this PR should remain active, please add a comment.
This PR is labeled `inactive` because the last activity was over 90
days ago. This PR will be closed and archived after 14 additional
days without activity.
close-pr-message: >
We triage inactive PRs and issues in order to make it easier to find
active work. If this PR should remain active or becomes active
again, please reopen it.
This PR was closed and archived because there has been no new
activity in the 14 days since the `inactive` label was added.
stale-issue-label: 'inactive'
stale-pr-label: 'inactive'
exempt-issue-labels: 'help wanted'
days-before-stale: 90
days-before-close: 14
operations-per-run: 100

@ -1,40 +0,0 @@
name: 'ObjC CocoaPods'
on:
push:
paths:
- '.github/workflows/objc_cocoapods.yml'
- 'Protobuf.podspec'
- 'objectivec/**'
- '!objectivec/DevTools/**'
- '!objectivec/ProtocolBuffers_*.xcodeproj/**'
- '!objectivec/Tests/**'
pull_request:
paths:
- '.github/workflows/objc_cocoapods.yml'
- 'Protobuf.podspec'
- 'objectivec/**'
- '!objectivec/DevTools/**'
- '!objectivec/ProtocolBuffers_*.xcodeproj/**'
- '!objectivec/Tests/**'
permissions:
contents: read # to fetch code (actions/checkout)
jobs:
pod-lib-lint:
runs-on: macos-latest
strategy:
fail-fast: false
matrix:
# Add back 'watchos'. See CocoaPods/CocoaPods#11558
PLATFORM: ["ios", "macos", "tvos"]
CONFIGURATION: ["Debug", "Release"]
steps:
- uses: actions/checkout@v3
- name: Pod lib lint
run: |
pod lib lint --verbose \
--configuration=${{ matrix.CONFIGURATION }} \
--platforms=${{ matrix.PLATFORM }} \
Protobuf.podspec

@@ -0,0 +1,60 @@
# This workflow uses actions that are not certified by GitHub. They are provided
# by a third-party and are governed by separate terms of service, privacy
# policy, and support documentation.
name: Scorecard supply-chain security
on:
# For Branch-Protection check. Only the default branch is supported. See
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
branch_protection_rule:
# To guarantee Maintained check is occasionally updated. See
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
schedule:
- cron: '20 5 * * 2'
push:
branches: [ "main" ]
# Declare default permissions as read only.
permissions: read-all
jobs:
analysis:
name: Scorecard analysis
runs-on: ubuntu-latest
permissions:
security-events: write # to upload the results to code-scanning dashboard
id-token: write # to publish results and get a badge
steps:
- name: "Checkout code"
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
persist-credentials: false
- name: "Run analysis"
uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
with:
results_file: results.sarif
results_format: sarif
# (Optional) "write" PAT token. Uncomment the `repo_token` line below if
# you want to enable the Branch-Protection check on a *public* repository.
# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-fine-grained-pat-optional.
# repo_token: ${{ secrets.SCORECARD_TOKEN }}
# Allows the repository to include the Scorecard badge.
publish_results: true
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
with:
name: SARIF file
path: results.sarif
retention-days: 5
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4
with:
sarif_file: results.sarif

@@ -0,0 +1,63 @@
name: Staleness tests
on:
schedule:
# Run daily at 10 AM UTC (2 AM PDT)
- cron: 0 10 * * *
workflow_call:
inputs:
safe-checkout:
required: false
description: "The SHA key for the commit we want to run over"
type: string
workflow_dispatch:
permissions: {}
jobs:
test:
strategy:
fail-fast: false
matrix:
branch: [main, 25.x, 27.x]
os: [{ name: Linux, value: ubuntu-latest}]
name: Test staleness ${{ matrix.os.name }} ${{ github.head_ref && 'PR' || matrix.branch }}
runs-on: ${{ matrix.os.value }}
if: ${{ github.event.repository.full_name == 'protocolbuffers/protobuf' }}
steps:
- name: Checkout ${{ github.head_ref && 'PR' || matrix.branch }}
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout || github.head_ref || matrix.branch }}
- name: Mark runs associated with commits
if: ${{ github.event_name != 'schedule' && github.event_name != 'workflow_dispatch' }}
run: echo "COMMIT_TRIGGERED_RUN=1" >> $GITHUB_ENV
- name: Mark runs from the main branch
if: ${{ github.base_ref == 'main' || github.ref == 'refs/heads/main' }}
run: echo "MAIN_RUN=1" >> $GITHUB_ENV
- name: Run all staleness tests
# Run all tests if either of the following is true, otherwise simply run the query to make
# sure it continues to work:
# 1) If this is not a commit-based run it means it's scheduled or manually dispatched. In
# this case we want to make sure there are no stale files.
# 2) Release branches don't work with automated commits (see b/287117570). Until this is
# fixed, we want to run the tests to force manual regeneration when necessary.
#
# In branches where automatic updates work as post-submits, we don't want to run staleness
# tests along with user changes. Any stale files will be automatically fixed in a follow-up
# commit.
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: staleness
bash: >
set -ex;
echo "Please run ./regenerate_stale_files.sh to regenerate stale files";
if [[ -z $COMMIT_TRIGGERED_RUN || -z $MAIN_RUN ]]; then
bazel query 'attr(tags, "staleness_test", //...)' | xargs bazel test $BAZEL_FLAGS;
else
bazel query 'attr(tags, "staleness_test", //...)';
fi

@@ -1,4 +1,4 @@
-name: Auto-generate checked-in files
name: Auto-generate stale checked-in files
on:
push:
@@ -8,21 +8,23 @@ on:
# The 21.x branch predates support for auto-generation, so we make sure
# to exclude it.
- '!21.x'
workflow_dispatch:
permissions: {}
jobs:
-  cmake:
run:
permissions:
contents: write # for git push
if: github.repository == 'protocolbuffers/protobuf'
runs-on: ubuntu-latest
name: Refresh stale files
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
steps:
-      - uses: actions/checkout@v3
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
# Note: this token has an expiration date, so if the workflow starts
# failing then you may need to generate a fresh token.
@@ -30,4 +32,4 @@ jobs:
- name: Configure name and email address in Git
run: cd ${{ github.workspace }} && git config user.name "Protobuf Team Bot" && git config user.email "protobuf-team-bot@google.com"
- name: Commit and push update
-        run: cd ${{ github.workspace }} && ./push_auto_update.sh
run: cd ${{ github.workspace }} && ./ci/push_auto_update.sh

@@ -0,0 +1,54 @@
name: Bazel Tests
on:
workflow_call:
inputs:
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read
jobs:
examples:
strategy:
fail-fast: false
matrix:
runner: [ ubuntu, windows, macos ]
bazelversion: [ '7.1.2' ]
bzlmod: [true, false ]
include:
- runner: ubuntu
bazelversion: '6.4.0'
bzlmod: true
- runner: ubuntu
bazelversion: '6.4.0'
bzlmod: false
runs-on: ${{ matrix.runner }}-latest
name: Examples ${{ matrix.runner }} ${{ matrix.bazelversion }}${{ matrix.bzlmod && ' (bzlmod)' || '' }}
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Windows startup flags
if: runner.os == 'Windows'
working-directory: examples
shell: bash
run: echo "startup --output_user_root=C:/ --windows_enable_symlinks" >> .bazelrc
- name: Configure Bazel version
working-directory: examples
shell: bash
run: echo "${{ matrix.bazelversion }}" > .bazelversion
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: examples
version: ${{ matrix.bazelversion }}
bash: cd examples && bazel build //... $BAZEL_FLAGS --enable_bzlmod=${{ matrix.bzlmod }}

@@ -8,6 +8,9 @@ on:
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read
jobs:
linux:
strategy:
@@ -16,31 +19,468 @@ jobs:
config:
- { name: Optimized, flags: --config=opt }
- { name: Debug, flags: --config=dbg }
-        - { name: ASAN, flags: --config=asan }
-        - { name: MSAN, flags: --config=kokoro-msan }
-        - { name: TSAN, flags: --config=tsan }
- { name: ASAN, flags: --config=asan, runner: ubuntu-20-large }
- { name: MSAN, flags: --config=docker-msan, runner: ubuntu-20-large }
- { name: TSAN, flags: --config=tsan, runner: ubuntu-20-large }
- { name: UBSAN, flags: --config=ubsan }
- { name: No-RTTI, flags: --cxxopt=-fno-rtti }
include:
# Set defaults
-        - image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize@sha256:dbd2f15fb69734d72c3fd10cb819bbe2ce4890acf49e9a2f9403983fe48e8807
-        - targets: //pkg/... //src/... @com_google_protobuf_examples//...
- image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize@sha256:3d959f731dc5c54af4865c31ee2bd581ec40028adcdf4c038f3122581f595191
- targets: //pkg/... //src/... @com_google_protobuf_examples//... //third_party/utf8_range/...
# Override cases with custom images
- config: { name: "Bazel7" }
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.2-cf84e92285ca133b9c8104ad7b14d70e953cbb8e"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "TCMalloc" }
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/tcmalloc@sha256:9d975616c3fd44d5a091aeb60ee94f37e22fb367d471d258fc18cb4a2387c943"
targets: "//src/..."
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/tcmalloc@sha256:1c5133455481f4d1bb8afa477029604f41f1a3c46cebe4d9958cf1af95b5c87c"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "aarch64" }
targets: "//src/... //src/google/protobuf/compiler:protoc_aarch64_test"
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-e863f8ec6b1dfe41f7dc573bac9c8072a0a68b1b"
- config: { name: "Bazel4" }
targets: "//src/..."
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:4.2.3-20a3cc217eaa012bb120ea7cfa4c2234827d790b"
name: matrix${{ matrix.none }}
uses: ./.github/workflows/tool_docker.yml
with:
name: Linux ${{ matrix.config.name }}
safe-checkout: ${{ inputs.safe-checkout }}
image: ${{ matrix.image }}
bazel: test ${{ matrix.targets }} ${{ matrix.config.flags }} --distinct_host_configuration=false
bazel-cache: cpp_bazel/${{ matrix.config.name }}
secrets: inherit
targets: "//src/... //src/google/protobuf/compiler:protoc_aarch64_test //third_party/utf8_range/..."
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:6.3.0-aarch64-68e662b3a56b881804dc4e9d45f949791cbc4b94"
name: Linux ${{ matrix.config.name }}
runs-on: ${{ matrix.config.runner || 'ubuntu-latest' }}
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: cpp_linux/${{ matrix.config.name }}
bazel: test ${{ matrix.targets }} ${{ matrix.config.flags }}
linux-gcc:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
version: ['9.5', '13.1']
name: Linux GCC ${{ matrix.version }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:${{ matrix.version }}-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: cpp_linux/gcc-${{ matrix.version }}
bazel: test //pkg/... //src/... @com_google_protobuf_examples//... //third_party/utf8_range/...
linux-release:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
arch: [x86_64, aarch64]
name: Linux Release ${{ matrix.arch}}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Cross compile protoc for ${{ matrix.arch }}
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-${{ matrix.arch }}
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-release-${{ matrix.arch }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:${{ matrix.arch }}-384d5abe83a791c6b1ce04f5d7bc0b1f84a30d38
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
entrypoint: bash
command: >
-c "set -ex;
sccache -z;
cmake . -DWITH_PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }}
-Dprotobuf_BUILD_LIBUPB=OFF -Dprotobuf_BUILD_CONFORMANCE=ON -DCMAKE_CXX_STANDARD=14
-Dprotobuf_WITH_ZLIB=OFF ${{ env.SCCACHE_CMAKE_FLAGS }};
cmake --build . --parallel 20;
ctest --parallel 20;
sccache -s"
linux-cmake:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
- flags: -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=14
- name: Ninja
flags: -G Ninja -DCMAKE_CXX_STANDARD=14
- name: Shared
flags: -Dprotobuf_BUILD_SHARED_LIBS=ON -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=14
- name: C++17
flags: -DCMAKE_CXX_STANDARD=17
# TODO Re-enable this.
#- name: C++20
# flags: -DCMAKE_CXX_STANDARD=20
name: Linux CMake ${{ matrix.name}}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/test.sh ${{ matrix.flags}} ${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_BUILD_TESTS=ON -Dprotobuf_USE_EXTERNAL_GTEST=ON
-Dprotobuf_ABSL_PROVIDER=package
linux-cmake-install:
name: Linux CMake Install
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake-install
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/install.sh -DCMAKE_CXX_STANDARD=14 ${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_USE_EXTERNAL_GTEST=ON -Dprotobuf_ABSL_PROVIDER=package
-Dprotobuf_BUILD_SHARED_LIBS=ON \&\&
/test.sh
${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_REMOVE_INSTALLED_HEADERS=ON
-Dprotobuf_BUILD_PROTOBUF_BINARIES=OFF
-Dprotobuf_BUILD_CONFORMANCE=ON
-DCMAKE_CXX_STANDARD=14
-Dprotobuf_USE_EXTERNAL_GTEST=ON -Dprotobuf_ABSL_PROVIDER=package
linux-cmake-examples:
name: Linux CMake Examples
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake-examples
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/install.sh -DCMAKE_CXX_STANDARD=14 ${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_USE_EXTERNAL_GTEST=ON -Dprotobuf_ABSL_PROVIDER=package
-Dprotobuf_BUILD_EXAMPLES=OFF \&\&
mkdir examples/build \&\&
cd examples/build \&\&
cmake .. -DCMAKE_CXX_STANDARD=14 \&\&
cmake --build .
linux-cmake-gcc:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
- name: C++14
flags: -DCMAKE_CXX_STANDARD=14
- name: C++17
flags: -DCMAKE_CXX_STANDARD=17
- name: C++20
flags: -DCMAKE_CXX_STANDARD=20
name: Linux CMake GCC ${{ matrix.name }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake-gcc
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:12.2-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
entrypoint: bash
command: >-
-c 'set -ex;
cd /workspace;
sccache -z;
cmake . ${{ matrix.flags }} ${{ env.SCCACHE_CMAKE_FLAGS }};
cmake --build . --parallel 20;
ctest --verbose --parallel 20;
sccache -s'
linux-cmake-submodules:
name: Linux CMake Submodules
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake-submodules
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/test.sh ${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_BUILD_CONFORMANCE=ON -Dprotobuf_BUILD_EXAMPLES=ON -DCMAKE_CXX_STANDARD=14
linux-cmake-32-bit:
name: Linux CMake 32-bit
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: linux-cmake-32-bit
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:8275360dc5d676f3470872d79087901c0e4153453976bea908a92c82e8d209ea
platform: linux/386
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/bin/bash -cex '
cd /workspace;
sccache -z;
cmake . -DCMAKE_CXX_STANDARD=14 ${{ env.SCCACHE_CMAKE_FLAGS }};
cmake --build . --parallel 20;
ctest --verbose --parallel 20;
sccache -s'
non-linux:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
- name: MacOS Bazel
os: macos-12
cache_key: macos-12
bazel: test //src/... //third_party/utf8_range/...
- name: MacOS Bazel 7
os: macos-12
cache_key: macos-12-bazel7
bazel: test //src/... //third_party/utf8_range/...
bazel_version: '7.1.2'
- name: MacOS Apple Silicon (build only) Bazel
os: macos-12
cache_key: macos-12-arm
# Current github runners are all Intel based, so just build/compile
# for Apple Silicon to detect issues there.
bazel: build --cpu=darwin_arm64 //src/... //third_party/utf8_range/...
- name: Windows Bazel
os: windows-2022
cache_key: windows-2022
bazel: test //src/... @com_google_protobuf_examples//... --test_tag_filters=-conformance --build_tag_filters=-conformance
- name: Windows Bazel 7
os: windows-2022
cache_key: windows-2022-bazel7
bazel: test //src/... @com_google_protobuf_examples//... --test_tag_filters=-conformance --build_tag_filters=-conformance
bazel_version: '7.1.2'
name: ${{ matrix.name }}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel: ${{ matrix.bazel }}
bazel-cache: cpp_${{ matrix.cache_key }}
version: ${{ matrix.bazel_version || '6.3.0' }}
non-linux-cmake:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
# TODO: investigate and fix
# - name: MacOS CMake
# os: macos-12
# flags: -DCMAKE_CXX_STANDARD=14
# cache-prefix: macos-cmake
- name: Windows CMake
os: windows-2022
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
-Dprotobuf_BUILD_SHARED_LIBS=OFF
-Dprotobuf_BUILD_EXAMPLES=ON
vsversion: '2022'
cache-prefix: windows-2022-cmake
- name: Windows CMake 2019
os: windows-2019
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
-Dprotobuf_BUILD_SHARED_LIBS=OFF
-Dprotobuf_BUILD_EXAMPLES=ON
vsversion: '2019'
cache-prefix: windows-2019-cmake
# windows-2019 has python3.7 installed, which is incompatible with the latest gcloud
python-version: '3.9'
- name: Windows CMake 32-bit
os: windows-2022
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
vsversion: '2022'
windows-arch: 'win32'
cache-prefix: windows-2022-win32-cmake
- name: Windows CMake Shared
os: windows-2022
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
-Dprotobuf_BUILD_SHARED_LIBS=ON
vsversion: '2022'
cache-prefix: windows-2022-cmake
- name: Windows CMake Install
os: windows-2022
install-flags: -G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF -Dprotobuf_BUILD_TESTS=OFF
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
-Dprotobuf_REMOVE_INSTALLED_HEADERS=ON
-Dprotobuf_BUILD_PROTOBUF_BINARIES=OFF
vsversion: '2022'
cache-prefix: windows-2022-cmake
name: ${{ matrix.name }}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup MSVC
if: ${{ runner.os == 'Windows' }}
uses: ilammy/msvc-dev-cmd@cec98b9d092141f74527d0afa6feb2af698cfe89 # v1.12.1
with:
arch: ${{ matrix.windows-arch || 'x64' }}
vsversion: ${{ matrix.vsversion }}
# Workaround for incompatibility between gcloud and windows-2019 runners.
- name: Install Python
if: ${{ matrix.python-version }}
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: ${{ matrix.python-version }}
- name: Use custom python for gcloud
if: ${{ matrix.python-version }}
run: echo "CLOUDSDK_PYTHON=${Python3_ROOT_DIR}\\python3" >> $GITHUB_ENV
shell: bash
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v3
with:
cache-prefix: ${{ matrix.cache-prefix }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
# Install phase.
- name: Configure CMake for install
if: matrix.install-flags
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: cmake . ${{ matrix.install-flags }} ${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_ALLOW_CCACHE=ON
- name: Build for install
if: matrix.install-flags
shell: bash
run: VERBOSE=1 cmake --build . --parallel 20
- name: Install
if: matrix.install-flags
shell: bash
run: cmake --build . --target install
- name: Report and clear sccache stats
if: matrix.install-flags
shell: bash
run: sccache -s && sccache -z
- name: Clear CMake cache
if: matrix.install-flags
shell: bash
run: cmake --build . --target clean && rm CMakeCache.txt
- name: Configure CMake
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: cmake . ${{ matrix.flags }} ${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_ALLOW_CCACHE=ON
- name: Build
shell: bash
run: VERBOSE=1 cmake --build . --parallel 20
- name: Test
shell: bash
run: ctest --verbose --parallel 20 -C Debug
- name: Report sccache stats
shell: bash
run: sccache -s

@@ -0,0 +1,118 @@
name: C# Tests
on:
workflow_call:
inputs:
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read
jobs:
linux:
name: Linux
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
# TODO Run this with Bazel once codegen is handled properly.
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:3.1.415-6.0.100-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
entrypoint: /bin/bash
command: >-
-c "
cd csharp &&
dotnet restore src/Google.Protobuf.sln &&
dotnet build -c Release src/Google.Protobuf.sln &&
dotnet test -c Release -f net6.0 src/Google.Protobuf.Test/Google.Protobuf.Test.csproj"
- name: Clear bazel between docker instances
run: sudo rm -rf _build .repository-cache
- name: Run conformance tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:3.1.415-6.0.100-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: csharp_linux
bazel: test //csharp:conformance_test --action_env=DOTNET_CLI_TELEMETRY_OPTOUT=1 --test_env=DOTNET_CLI_HOME=/home/bazel
windows:
name: Windows
runs-on: windows-2019
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup dotnet
uses: actions/setup-dotnet@3447fd6a9f9e57506b15f895c5b76d3b197dc7c2 # v3.2.0
with:
dotnet-version: '6.0.x'
# Workaround for incompatibility between gcloud and windows-2019 runners.
- name: Install Python
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: '3.9'
- name: Use custom python for gcloud
run: echo "CLOUDSDK_PYTHON=${Python3_ROOT_DIR}\\python3" >> $GITHUB_ENV
shell: bash
- name: Run tests
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: |
dotnet build csharp/src/Google.Protobuf.sln
dotnet test csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj
linux-aarch64:
name: Linux aarch64
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Build protobuf C# tests under x86_64 docker image
# Tests are built "dotnet publish" because we want all the dependencies to the copied to the destination directory
# (we want to avoid references to ~/.nuget that won't be available in the subsequent docker run)
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: mcr.microsoft.com/dotnet/sdk:6.0.100-bullseye-slim
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/bin/bash -cex '
DOTNET_CLI_TELEMETRY_OPTOUT=true
DOTNET_SKIP_FIRST_TIME_EXPERIENCE=true
dotnet publish -c Release -f net6.0 /workspace/csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj'
- name: Use an actual aarch64 docker image to run protobuf C# tests with an emulator
# "dotnet vstest" allows running tests from a pre-built project.
# * mount the protobuf root as /work to be able to access the crosscompiled files
# * to avoid running the process inside docker as root (which can pollute the workspace with files owned by root), we force
# running under the current user's UID and GID. To be able to do that, we need to provide a home directory for the user,
# otherwise the UID would be homeless under the docker container and pip install wouldn't work. For simplicity,
# we just map the user's home to a throwaway temporary directory
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: mcr.microsoft.com/dotnet/sdk:6.0.100-bullseye-slim-arm64v8
skip-staleness-check: true
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/bin/bash -cex '
DOTNET_CLI_TELEMETRY_OPTOUT=true
DOTNET_SKIP_FIRST_TIME_EXPERIENCE=true
dotnet vstest /workspace/csharp/src/Google.Protobuf.Test/bin/Release/net6.0/publish/Google.Protobuf.Test.dll'

@@ -0,0 +1,120 @@
name: Java Tests
on:
workflow_call:
inputs:
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read
jobs:
linux:
strategy:
fail-fast: false
matrix:
include:
- name: OpenJDK 8
version: '8'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:8-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
# TODO: b/318555165 - enable the layering check. Currently it does
# not work correctly with the toolchain in this Docker image.
targets: //java/... //java/internal:java_version --features=-layering_check
- name: OpenJDK 11
version: '11'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:11-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
targets: //java/... //java/internal:java_version
- name: OpenJDK 17
version: '17'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:17-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
targets: //java/... //java/internal:java_version
- name: aarch64
version: 'aarch64'
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-63dd26c0c7a808d92673a3e52e848189d4ab0f17
targets: //java/... //src/google/protobuf/compiler:protoc_aarch64_test
name: Linux ${{ matrix.name }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: java_linux/${{ matrix.version }}
bazel: test ${{ matrix.targets }} --test_env=KOKORO_JAVA_VERSION
# TODO restore this test (or a better one) when gRPC has rebuilt with 26.x
# linkage-monitor:
# name: Linux Linkage Monitor
# runs-on: ubuntu-latest
# steps:
# - name: Checkout pending changes
# uses: protocolbuffers/protobuf-ci/checkout@v3
# with:
# ref: ${{ inputs.safe-checkout }}
# - name: Run Linkage Monitor test
# uses: protocolbuffers/protobuf-ci/bazel-docker@v3
# with:
# image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:8-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
# credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
# bazel-cache: java_linux/8
# # TODO: b/318555165 - enable the layering check. Currently it does
# # not work correctly with the toolchain in this Docker image.
# bazel: test --test_output=all //java:linkage_monitor --spawn_strategy=standalone --features=-layering_check
protobuf-bom:
name: Protobuf Maven BOM
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Build protoc
id: build-protoc
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-x86_64
- name: Move protoc into place and clean up
run: |
mv ${{ steps.build-protoc.outputs.protoc }} protoc
sudo rm -rf _build
- name: Install snapshot version locally (not using generated pom.xml)
run: |
mvn -e -B -Dhttps.protocols=TLSv1.2 install -Dmaven.test.skip=true
working-directory: java
- name: Generate pom.xml files from the template
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:11-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: java_linux/11
          # protobuf-java and protobuf-java-util are the members of the BOM
bash: |
bazel build //java/core:core_mvn-pom //java/util:util_mvn-pom
cp bazel-bin/java/core/core_mvn-pom.xml .
cp bazel-bin/java/util/util_mvn-pom.xml .
- name: Copy the generated pom.xml files to the local Maven repository
shell: bash
run: |
LOCAL_MAVEN_GROUP_DIR="${HOME}/.m2/repository/com/google/protobuf"
VERSION=$(grep "<version>" core_mvn-pom.xml | sed "s/<version>\(.*\)<\/version>/\1/" | xargs)
cp core_mvn-pom.xml ${LOCAL_MAVEN_GROUP_DIR}/protobuf-java/${VERSION}/protobuf-java-${VERSION}.pom
cp util_mvn-pom.xml ${LOCAL_MAVEN_GROUP_DIR}/protobuf-java-util/${VERSION}/protobuf-java-util-${VERSION}.pom
- name: Clean up
run: |
sudo rm -rf _build
- name: Validate Protobuf BOM
uses: googleapis/java-cloud-bom/tests/validate-bom@fd56f04bb0bc581776a74031591f0b3bc5e7920a # v26.13.0
with:
bom-path: java/bom/pom.xml
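
The VERSION extraction in the "Copy the generated pom.xml files" step above is a plain grep/sed/xargs pipeline. With an illustrative version value, and assuming the artifact's own <version> tag is the first match grep finds, it behaves like this:

    # core_mvn-pom.xml contains, e.g.:   <version>4.26.0</version>
    grep "<version>" core_mvn-pom.xml            # -> "  <version>4.26.0</version>"
    # | sed "s/<version>\(.*\)<\/version>/\1/"   # -> "  4.26.0" (tags stripped)
    # | xargs                                    # -> "4.26.0" (whitespace trimmed)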

@@ -0,0 +1,136 @@
name: Objective-C Tests
on:
workflow_call:
inputs:
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read
jobs:
xcode:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
platform: ["macOS", "iOS"]
xc_config: ["Debug", "Release"]
# The "destination" entries need to match what is available for the selected Xcode.
# See `.github/BUILD.bazel` for the Xcode info.
include:
- platform: "macOS"
destination: "platform=macOS"
xc_project: "ProtocolBuffers_OSX.xcodeproj"
- platform: "iOS"
destination: "platform=iOS Simulator,name=iPhone 13,OS=latest"
xc_project: "ProtocolBuffers_iOS.xcodeproj"
    name: Xcode ${{ matrix.platform }} ${{ matrix.xc_config }}
runs-on: macos-12
env:
DEVELOPER_DIR: /Applications/Xcode_14.1.app/Contents/Developer
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup ccache
uses: protocolbuffers/protobuf-ci/ccache@v3
with:
cache-prefix: objectivec_${{ matrix.platform }}_${{ matrix.xc_config }}
support-modules: true
- name: Run tests
uses: protocolbuffers/protobuf-ci/bash@v3
env:
CC: ${{ github.workspace }}/ci/clang_wrapper
CXX: ${{ github.workspace }}/ci/clang_wrapper++
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: |
xcodebuild \
-project "objectivec/${{ matrix.xc_project }}" \
-scheme ProtocolBuffers \
-configuration ${{ matrix.xc_config }} \
-destination "${{ matrix.destination }}" \
test \
| xcpretty
- name: Report ccache stats
shell: bash
run: ccache -s -v
pod-lib-lint:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
PLATFORM: ["ios", "macos", "tvos", "watchos", "visionos"]
CONFIGURATION: ["Debug", "Release"]
include:
- OS: macos-12
XCODE: "14.1"
- OS: macos-14
PLATFORM: "visionos"
XCODE: "15.2"
name: CocoaPods ${{ matrix.PLATFORM }} ${{ matrix.CONFIGURATION }}
runs-on: ${{ matrix.OS }}
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Xcode version
run: sudo xcode-select -switch /Applications/Xcode_${{ matrix.XCODE }}.app
- name: Pod lib lint
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: cocoapods/${{ matrix.XCODE }}
bash: |
./regenerate_stale_files.sh $BAZEL_FLAGS --xcode_version="${{ matrix.XCODE }}"
pod lib lint --verbose \
--configuration=${{ matrix.CONFIGURATION }} \
--platforms=${{ matrix.PLATFORM }} \
Protobuf.podspec
bazel:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
config:
- name: Optimized
flags: --config=opt
bazel_action: test
- name: Debug
flags: --config=dbg
bazel_action: test
          # Current GitHub runners are all Intel-based, so just build/compile
# for Apple Silicon to detect issues there.
- name: Apple_Silicon_Optimized
flags: --config=opt --cpu=darwin_arm64
bazel_action: build
- name: Apple_Silicon_Debug
flags: --config=dbg --cpu=darwin_arm64
bazel_action: build
      # TODO: Could add iOS to at least build the objc_library targets for that.
platform: ["macOS"]
include:
- platform: "macOS"
bazel_targets: //objectivec/...
name: Bazel ${{ matrix.platform }} ${{ matrix.config.name }}
runs-on: macos-12
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: bazel ${{ matrix.config.bazel_action }}
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel: ${{ matrix.config.bazel_action }} ${{ matrix.config.flags }} ${{ matrix.bazel_targets }}
bazel-cache: objc_${{ matrix.platform }}_${{ matrix.config.name }}
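
A note on the pod-lib-lint matrix above: include entries merge according to GitHub's matrix rules. The bare OS/XCODE entry names no matrix key, so it is folded into every PLATFORM/CONFIGURATION cell, while the visionos entry matches only the visionos cells and overrides both values. The Debug column therefore resolves to rows like this sketch:

    - { PLATFORM: ios,      CONFIGURATION: Debug, OS: macos-12, XCODE: "14.1" }
    - { PLATFORM: macos,    CONFIGURATION: Debug, OS: macos-12, XCODE: "14.1" }
    - { PLATFORM: visionos, CONFIGURATION: Debug, OS: macos-14, XCODE: "15.2" }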

@@ -0,0 +1,205 @@
name: PHP Tests
on:
workflow_call:
inputs:
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read
jobs:
linux:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
- name: 8.1 Optimized
version: "8.1.14"
version-short: "8.1"
command: composer test \&\& composer test_c
- name: 8.1 Debug
version: 8.1.14-dbg
version-short: "8.1"
command: composer test \&\& composer test_c
- name: 8.1 Memory Leak
version: 8.1.14-dbg
version-short: "8.1"
# Run specialized memory leak & multirequest tests.
command: composer test_c \&\& tests/multirequest.sh \&\& tests/memory_leak_test.sh
- name: 8.1 Valgrind
version: 8.1.14-dbg
version-short: "8.1"
command: composer test_valgrind
- name: 8.3 Optimized
version: "8.3.1"
version-short: "8.3"
command: composer test \&\& composer test_c
    name: Linux ${{ matrix.name }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup composer
uses: protocolbuffers/protobuf-ci/composer-setup@v3
with:
cache-prefix: php-${{ matrix.version-short }}
directory: php
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php:${{ matrix.version }}-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
extra-flags: -e COMPOSER_HOME=/workspace/composer-cache
command: ${{ matrix.command }}
linux-32bit:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
version: ['8.1']
suffix: [ '', '-zts']
test: ['test', 'test_c']
exclude:
- suffix: '-zts'
test: 'test'
include:
- suffix: '-zts'
suffix_name: ' Thread Safe'
- test: 'test_c'
test_name: ' Extension'
    name: Linux 32-bit ${{ matrix.version }}${{ matrix.suffix_name }}${{ matrix.test_name }}
runs-on: ubuntu-latest
env:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:836f2cedcfe351d9a30055076630408e61994fc7d783e8333a99570968990eeb
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Cross compile protoc for i386
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-i386
- name: Setup composer
uses: protocolbuffers/protobuf-ci/composer-setup@v3
with:
cache-prefix: php-${{ matrix.version }}
directory: php
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: ${{ env.image }}
platform: linux/386
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
extra-flags: -e COMPOSER_HOME=/workspace/composer-cache -e PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }}
command: >-
/bin/bash -cex '
PATH="/usr/local/php-${{ matrix.version }}${{matrix.suffix}}/bin:$PATH";
cd php && php -v && php -m;
composer update --ignore-platform-reqs;
composer ${{ matrix.test }}'
linux-aarch64:
name: Linux aarch64
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Cross compile protoc for aarch64
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-aarch64
- name: Setup composer
uses: protocolbuffers/protobuf-ci/composer-setup@v3
with:
cache-prefix: php-8.1
directory: php
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php-aarch64@sha256:77ff9fdec867bbfb290ee0b10d8b7a3e5e434155daa5ec93de7341c7592b858d
platform: linux/arm64
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
extra-flags: -e COMPOSER_HOME=/workspace/composer-cache -e PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }}
command: >-
-c '
cd php;
composer update --ignore-platform-reqs;
composer test;
composer test_c'
macos:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
version: ['8.2', '8.3']
name: MacOS PHP ${{ matrix.version }}
runs-on: macos-12
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Uninstall problematic libgd
run: brew uninstall --ignore-dependencies gd
- name: Install dependencies
run: brew install coreutils gd
- name: Pin PHP version
uses: shivammathur/setup-php@8872c784b04a1420e81191df5d64fbd59d3d3033 # 2.30.2
with:
php-version: ${{ matrix.version }}
- name: Check PHP version
run: php --version | grep ${{ matrix.version }} || (echo "Invalid PHP version - $(php --version)" && exit 1)
- name: Setup composer
uses: protocolbuffers/protobuf-ci/composer-setup@v3
with:
cache-prefix: php-${{ matrix.version }}
directory: php
- name: Run tests
uses: protocolbuffers/protobuf-ci/bash@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: |
pushd php
php -v
php -m
composer update
composer test_c
popd
- name: Run conformance tests
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: php_macos/${{ matrix.version }}
bazel: test //php:conformance_test_c --action_env=PATH --test_env=PATH
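
The 32-bit and aarch64 jobs above share one pattern: protoc is cross-compiled on the host, and the emulated container only ever sees it through the PROTOC variable injected via extra-flags. With a hypothetical output path, the plumbing resolves to:

    # steps.cross-compile.outputs.protoc, e.g.: bazel-bin/protoc  (hypothetical)
    # The checkout is mounted at /workspace, so the docker flag becomes
    #   -e PROTOC=/workspace/bazel-bin/protoc
    # and the test suite picks up that binary instead of rebuilding protoc
    # inside the slow emulated container, roughly:
    PROTOC=/workspace/bazel-bin/protoc composer test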

@@ -1,57 +0,0 @@
name: PHP Extension Tests
on:
workflow_call:
inputs:
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read # to fetch code (actions/checkout)
jobs:
build-php:
name: Build
runs-on: ubuntu-latest
container: ${{ matrix.php-image }}
strategy:
matrix:
php-image:
- php:7.4-cli
- php:8.1-cli
# TODO(b/266868629) Dockerize these instead of installing all the
# dependencies on each run.
steps:
- name: Install python3
run: |
apt-get update -q
apt-get install -qy python3
- name: Install bazel
run: |
apt-get install -qy wget
mkdir $HOME/bin
wget -O $HOME/bin/bazel https://github.com/bazelbuild/bazel/releases/download/5.3.2/bazel-5.3.2-linux-x86_64
chmod a+x $HOME/bin/bazel
- name: Install git
run: |
apt-get install -qy --no-install-recommends git
- name: Checkout
uses: actions/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Create package
run: |
cd $GITHUB_WORKSPACE
rm -rf bazel-bin/php/protobuf-*.tgz
$HOME/bin/bazel build php:release
- name: Compile extension
run: |
cd /tmp
MAKE="make -j$(nproc)" pecl install $GITHUB_WORKSPACE/bazel-bin/php/protobuf-*.tgz
- name: Enable extension
run: docker-php-ext-enable protobuf
- name: Inspect extension
run: php --ri protobuf
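
Although this workflow is removed by the commit, the pecl flow it used is still handy for local sanity checks; the built module can be force-loaded without touching any INI file. A minimal sketch:

    php -d extension=protobuf.so --ri protobuf         # force-load and inspect
    php -r 'var_dump(extension_loaded("protobuf"));'   # expect bool(true)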

@@ -8,32 +8,94 @@ on:
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read
jobs:
linux:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
type: [ Pure, C++]
version: [ "3.7", "3.8", "3.9", "3.10" ]
# TODO: b/309627662 - Add coverage for Python 3.12.
version: ["3.8", "3.9", "3.10", "3.11"]
include:
- type: Pure
targets: //python/... @upb//python/... //python:python_version
targets: //python/... //python:python_version_test
flags: --define=use_fast_cpp_protos=false
- type: C++
targets: //python/... //python:python_version
targets: //python/... //python:python_version_test
flags: --define=use_fast_cpp_protos=true
- type: C++
version: aarch64
targets: //python/... //python:aarch64_test
# TODO(b/262628111) Enable this once conformance tests are fixed.
          # TODO: Enable this once conformance tests are fixed.
flags: --define=use_fast_cpp_protos=true --test_tag_filters=-conformance
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-e863f8ec6b1dfe41f7dc573bac9c8072a0a68b1b
name: matrix${{ matrix.none }}
uses: ./.github/workflows/tool_docker.yml
with:
name: Linux ${{ matrix.type }} ${{matrix.version}}
safe-checkout: ${{ inputs.safe-checkout }}
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/python:{0}-65526ea124d1034eac33e7c37cc6d65c5bef054f', matrix.version) }}
bazel: test ${{ matrix.targets }} ${{ matrix.flags }} --test_env=KOKORO_PYTHON_VERSION
bazel-cache: python_bazel/${{ matrix.type }}_${{ matrix.version }}
secrets: inherit
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-63dd26c0c7a808d92673a3e52e848189d4ab0f17
name: Linux ${{ matrix.type }} ${{ matrix.version }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/python:{0}-63dd26c0c7a808d92673a3e52e848189d4ab0f17', matrix.version) }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: python_linux/${{ matrix.type }}_${{ matrix.version }}
bazel: test ${{ matrix.targets }} ${{ matrix.flags }} --test_env=KOKORO_PYTHON_VERSION
exclude-targets: -//python/pb_unit_tests/...
macos:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
type: [ Pure, C++]
        # TODO: Consider expanding this set of versions.
version: [ "3.12" ]
include:
- type: Pure
targets: //python/... //python:python_version_test
- type: C++
targets: //python/... //python:python_version_test
flags: --define=use_fast_cpp_protos=true
name: MacOS ${{ matrix.type }} ${{ matrix.version }}
runs-on: macos-12
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Pin Python version
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: ${{ matrix.version }}
cache: pip
cache-dependency-path: 'python/requirements.txt'
- name: Validate version
run: python3 --version | grep ${{ matrix.version }} || (echo "Invalid Python version - $(python3 --version)" && exit 1)
- name: Create and start virtual environment
run: |
python3 -m venv venv
source venv/bin/activate
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel@v3
env:
KOKORO_PYTHON_VERSION: ${{ matrix.version }}
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: python_macos/${{ matrix.type }}_${{ matrix.version }}
bazel: >-
test ${{ matrix.targets }} ${{ matrix.flags }}
--test_env=KOKORO_PYTHON_VERSION=${{ matrix.version }}
--macos_minimum_os=10.9
exclude-targets: -//python/pb_unit_tests/...
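
The image fallback in the Linux job above relies on || short-circuiting: only the aarch64 row defines matrix.image, so every other row falls through to format(), which substitutes the Python version for {0}. The C++ 3.10 row, for instance, resolves to:

    image: us-docker.pkg.dev/protobuf-build/containers/test/linux/python:3.10-63dd26c0c7a808d92673a3e52e848189d4ab0f17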

@ -0,0 +1,188 @@
name: Ruby Tests
on:
workflow_call:
inputs:
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read
jobs:
linux:
strategy:
fail-fast: false
matrix:
include:
# Test both FFI and Native implementations on the highest and lowest
# Ruby versions for CRuby and JRuby, but only on Bazel 5.x.
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: NATIVE }
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: FFI }
- { name: Ruby 3.1, ruby: ruby-3.1.0 }
- { name: Ruby 3.2, ruby: ruby-3.2.0 }
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: NATIVE }
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: FFI }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: NATIVE }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: FFI }
name: Linux ${{ matrix.name }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:{0}-6.3.0-9848710ff1370795ee7517570a20b81e140112ec', matrix.ruby) }}
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: ruby_linux/${{ matrix.ruby }}_${{ matrix.bazel }}
bazel: test //ruby/... //ruby/tests:ruby_version --test_env=KOKORO_RUBY_VERSION --test_env=BAZEL=true ${{ matrix.ffi == 'FFI' && '--//ruby:ffi=enabled --test_env=PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI' || '' }}
- name: Archive log artifacts
uses: actions/upload-artifact@v4
with:
name: test-logs-${{ matrix.ruby }}_${{ matrix.ffi || 'NATIVE' }}
path: logs
linux-32bit:
name: Linux 32-bit
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Cross compile protoc for i386
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-i386
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: i386/ruby:3.0.2-buster
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/bin/bash -cex '
gem install bundler -v 2.5.6;
cd /workspace/ruby;
bundle;
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake;
rake clobber_package gem;
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake test'
linux-aarch64:
name: Linux aarch64
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Cross compile protoc for aarch64
id: cross-compile
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
architecture: linux-aarch64
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v3
with:
image: arm64v8/ruby:3.0.2-buster
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/bin/bash -cex '
gem install bundler -v 2.5.6;
cd /workspace/ruby;
bundle;
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake;
rake clobber_package gem;
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake test'
macos:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
# Test both FFI and Native implementations on the highest and lowest
# Ruby versions for CRuby, but only on Bazel 5.x.
          # Quote version numbers, otherwise 3.0 will render as 3.
- { version: "3.0", ffi: NATIVE }
- { version: "3.0", ffi: FFI }
- { version: "3.1" }
- { version: "3.2" }
- { version: "3.3", ffi: NATIVE }
- { version: "3.3", ffi: FFI }
name: MacOS Ruby ${{ matrix.version }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
runs-on: macos-12
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Pin Ruby version
uses: ruby/setup-ruby@961f85197f92e4842e3cb92a4f97bd8e010cdbaf # v1.165.0
with:
ruby-version: ${{ matrix.version }}
- name: Validate version
run: ruby --version | grep ${{ matrix.version }} || (echo "Invalid Ruby version - $(ruby --version)" && exit 1)
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: ruby_macos/${{ matrix.version }}
bazel: test //ruby/... --test_env=KOKORO_RUBY_VERSION=${{ matrix.version }} --test_env=BAZEL=true ${{ matrix.ffi == 'FFI' && '--//ruby:ffi=enabled --test_env=PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI' || '' }}
test_ruby_gems:
strategy:
fail-fast: false
matrix:
include:
# Test both FFI and Native implementations on the highest and lowest
# Ruby versions for CRuby and JRuby, but only on Bazel 5.x.
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: NATIVE}
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: FFI}
- { name: Ruby 3.1, ruby: ruby-3.1.0}
- { name: Ruby 3.2, ruby: ruby-3.2.0}
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: NATIVE }
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: FFI }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: NATIVE }
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: FFI }
name: Install ${{ matrix.name }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }}
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:${{ matrix.ruby }}-6.3.0-9848710ff1370795ee7517570a20b81e140112ec
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: ruby_install/${{ matrix.ruby }}_${{ matrix.bazel }}
bash: >
bazel --version;
ruby --version;
./regenerate_stale_files.sh $BAZEL_FLAGS;
bazel build //ruby:release //:protoc ${{ matrix.ffi == 'FFI' && '--//ruby:ffi=enabled' || '' }} $BAZEL_FLAGS;
gem install bazel-bin/ruby/google-protobuf-*;
bazel-bin/protoc --proto_path=src --proto_path=ruby/tests --proto_path=ruby --ruby_out=ruby tests/test_import_proto2.proto;
bazel-bin/protoc --proto_path=src --proto_path=ruby/tests --proto_path=ruby --ruby_out=ruby tests/basic_test.proto;
${{ matrix.ffi == 'FFI' && 'PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI' || '' }} ruby ruby/tests/basic.rb;
${{ matrix.ffi == 'FFI' && 'PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI' || '' }} ruby ruby/tests/implementation.rb
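
Because GitHub expressions have no ternary operator, the Ruby jobs above lean on the `cond && a || b` idiom (with the usual caveat that it falls through to b whenever a is falsy). For an FFI row, the Linux test step effectively expands to:

    bazel: test //ruby/... //ruby/tests:ruby_version --test_env=KOKORO_RUBY_VERSION --test_env=BAZEL=true --//ruby:ffi=enabled --test_env=PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI
    # ...and for a NATIVE (or unset) row the trailing FFI flags are simply omitted.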

@@ -12,14 +12,16 @@ name: Tests
on:
# continuous
schedule:
# Run daily at 10 AM UTC (2 AM PDT)
- cron: 0 10 * * *
# Run every hour
- cron: "0 * * * *"
# postsubmit
push:
branches:
- main
- '[0-9]+.x'
# The 21.x and 22.x branches still use Kokoro
- '!2[12].x'
# For testing purposes so we can stage this on the `gha` branch.
- gha
@@ -28,6 +30,8 @@ on:
branches:
- main
- '[0-9]+.x'
# The 21.x and 22.x branches still use Kokoro
- '!2[12].x'
# For testing purposes so we can stage this on the `gha` branch.
- gha
@@ -36,12 +40,21 @@ on:
branches:
- main
- '[0-9]+.x'
      # The 21.x branch still uses Kokoro
- '!21.x'
# For testing purposes so we can stage this on the `gha` branch.
- gha
types: [labeled, opened, reopened, synchronize]
# manual
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.event_name }}-${{ github.workflow }}-${{ github.head_ref || github.ref }}
cancel-in-progress: ${{ contains(fromJSON('["pull_request", "pull_request_target", "workflow_dispatch"]'), github.event_name) }}
jobs:
check-tag:
@@ -51,11 +64,13 @@ jobs:
# repository, it's safe and we can use `pull_request`. Otherwise, we should
# use `pull_request_target`.
if: |
github.event_name == 'push' ||
(github.event_name == 'pull_request' &&
github.event.pull_request.head.repo.full_name == 'protocolbuffers/protobuf') ||
(github.event_name == 'pull_request_target' &&
github.event.pull_request.head.repo.full_name != 'protocolbuffers/protobuf')
(github.event_name != 'pull_request' &&
github.event_name != 'pull_request_target' &&
github.event.repository.full_name == 'protocolbuffers/protobuf') ||
(github.event_name == 'pull_request' &&
github.event.pull_request.head.repo.full_name == 'protocolbuffers/protobuf') ||
(github.event_name == 'pull_request_target' &&
github.event.pull_request.head.repo.full_name != 'protocolbuffers/protobuf')
runs-on: ubuntu-latest
outputs:
@@ -67,8 +82,10 @@ jobs:
# Trivially pass for safe PRs, and explicitly error for unsafe ones
# unless this is specifically an event for adding the safe label.
run: >
${{ github.event_name != 'pull_request_target' || github.event.label.name == 'safe for tests' }} ||
(echo "This pull request is from an unsafe fork and hasn't been approved to run tests!" && exit 1)
${{ github.event_name != 'pull_request_target' || github.event.label.name == ':a: safe for tests' }} ||
(echo "This pull request is from an unsafe fork and hasn't been approved to run tests." &&
echo "A protobuf team member will need to review the PR and add the 'safe for tests' tag." &&
exit 1)
- name: Cache safe commit
id: safe-checkout
@@ -77,18 +94,30 @@
echo "sha=${{ github.event.pull_request.head.sha }}" >> $GITHUB_OUTPUT
remove-tag:
name: Remove safety tag
needs: [check-tag]
if: github.event.action == 'labeled'
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: actions-ecosystem/action-remove-labels@v1
- uses: actions-ecosystem/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0 # v1.3.0
with:
labels: safe for tests
fail_on_error: true
labels: ':a: safe for tests'
# Note: this pattern of passing the head sha is vulnerable to PWN requests for
# pull_request_target events. We carefully limit those workflows to require a
# human stamp before continuing.
cpp-bazel:
bazel:
name: Bazel
needs: [check-tag]
uses: ./.github/workflows/test_bazel.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
cpp:
name: C++
needs: [check-tag]
uses: ./.github/workflows/test_cpp.yml
@@ -96,7 +125,15 @@ jobs:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
python-bazel:
java:
name: Java
needs: [check-tag]
uses: ./.github/workflows/test_java.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
python:
name: Python
needs: [check-tag]
uses: ./.github/workflows/test_python.yml
@@ -104,10 +141,18 @@ jobs:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
ruby-install:
name: Ruby Install
ruby:
name: Ruby
needs: [check-tag]
uses: ./.github/workflows/test_ruby_install.yml
uses: ./.github/workflows/test_ruby.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
php:
name: PHP
needs: [check-tag]
uses: ./.github/workflows/test_php.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
@@ -119,3 +164,45 @@ jobs:
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
csharp:
name: C#
needs: [check-tag]
uses: ./.github/workflows/test_csharp.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
objectivec:
name: Objective-C
needs: [check-tag]
uses: ./.github/workflows/test_objectivec.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
rust:
name: Rust
needs: [check-tag]
uses: ./.github/workflows/test_rust.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
upb:
name: μpb
needs: [check-tag]
uses: ./.github/workflows/test_upb.yml
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
staleness:
name: Staleness
needs: [check-tag]
uses: ./.github/workflows/staleness_check.yml
# Staleness tests have scheduled runs during off-hours to avoid race conditions.
if: ${{ github.event_name != 'schedule' }}
with:
safe-checkout: ${{ needs.check-tag.outputs.checkout-sha }}
secrets: inherit
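
For concreteness, the concurrency stanza near the top of this file keys off event, workflow name, and branch. With the workflow named Tests, a pull request from a (hypothetical) branch my-feature and a push to main resolve to:

    # pull_request: github.head_ref is set
    group: pull_request-Tests-my-feature
    cancel-in-progress: true     # pull_request is in the cancellable set
    # push: head_ref is empty, so github.ref is used instead
    group: push-Tests-refs/heads/main
    cancel-in-progress: false    # pushes and scheduled runs are never cancelled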

@@ -0,0 +1,32 @@
name: Rust Tests
on:
workflow_call:
inputs:
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read
jobs:
linux:
name: Linux
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.1-97f82260fd504923d8af642d567afb2d83a1959d"
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: rust_linux
bazel: >-
test //rust:protobuf_upb_test //rust:protobuf_cpp_test
//rust/test/rust_proto_library_unit_test:rust_upb_aspect_test
//src/google/protobuf/compiler/rust/...

@@ -0,0 +1,283 @@
name: μpb Tests
on:
workflow_call:
inputs:
safe-checkout:
required: true
description: "The SHA key for the commit we want to run over"
type: string
permissions:
contents: read
jobs:
linux-clang:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
config:
- { name: "Bazel 7", bazel_version: "7.1.1" }
- { name: "Fastbuild" }
- { name: "Optimized", flags: "-c opt" }
- { name: "ASAN", flags: "--config=asan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/...", runner: ubuntu-20-large }
- { name: "UBSAN", flags: "--config=ubsan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/... -//lua/..." }
- { name: "32-bit", flags: "--copt=-m32 --linkopt=-m32", exclude-targets: "-//benchmarks:benchmark -//python/..." }
# TODO: Add 32-bit ASAN test
# TODO: Restore the FastTable tests
name: ${{ matrix.config.name }}
runs-on: ${{ matrix.config.runner || 'ubuntu-latest' }}
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize:${{ matrix.config.bazel_version || '6.3.0' }}-75f2a85ece6526cc3d54087018c0f1097d78d42b
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: upb-bazel
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //bazel/... //benchmarks/... //lua/... //protos/... //hpb_generator/... //python/... //upb/... //upb_generator/... ${{ matrix.config.flags }}
exclude-targets: ${{ matrix.config.exclude-targets }}
linux-gcc:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
name: GCC Optimized
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:12.2-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17"
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-gcc"
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 -c opt //bazel/... //benchmarks/... //lua/... //protos/... //hpb_generator/... //python/... //upb/... //upb_generator/...
windows:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
name: Windows
runs-on: windows-2022
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
cache: pip
cache-dependency-path: 'python/requirements.txt'
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-windows"
bazel: test --cxxopt=/std:c++17 --host_cxxopt=/std:c++17 //upb/... //upb_generator/... //python/... //protos/... //hpb_generator/...
version: 6.3.0
exclude-targets: -//python:conformance_test -//upb/reflection:def_builder_test
macos:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
config:
- { name: "macOS", bazel-command: "test" }
- { name: "macOS ARM (build only)", bazel-command: "build", flags: "--cpu=darwin_arm64" }
name: ${{ matrix.config.name }}
runs-on: macos-12
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
cache: pip
cache-dependency-path: 'python/requirements.txt'
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel@v3
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-macos"
bazel: ${{ matrix.config.bazel-command }} --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 ${{ matrix.config.flags }} //bazel/... //benchmarks/... //lua/... //protos/... //hpb_generator/... //python/... //upb/... //upb_generator/...
version: 6.3.0
no-python:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
name: No System Python
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-no-python"
bash: >-
which python3 &&
mv `which python3` /tmp &&
! which python3 &&
bazel test $BAZEL_FLAGS --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //python/... -- -//python/dist:source_wheel
build_wheels:
name: Build Wheels
runs-on: ubuntu-latest
if: ${{ github.event_name != 'pull_request_target' }}
steps:
- name: Checkout pending changes
uses: protocolbuffers/protobuf-ci/checkout@v3
with:
ref: ${{ inputs.safe-checkout }}
- name: Build Wheels
uses: protocolbuffers/protobuf-ci/bazel-docker@v3
with:
image: us-docker.pkg.dev/protobuf-build/release-containers/linux/apple:6.3.0-53225851b051e66f8543e972c143f35be757a181
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: upb-bazel-python
bazel: build --crosstool_top=//toolchain:clang_suite --//toolchain:release=true --symlink_prefix=/ -c dbg //python/dist //python/dist:test_wheel //python/dist:source_wheel
- name: Move Wheels
run: mkdir wheels && find _build/out \( -name 'protobuf*.whl' -o -name 'protobuf-*.tar.gz' \) -exec mv '{}' wheels ';'
- uses: actions/upload-artifact@v3
with:
name: python-wheels
path: wheels/
- uses: actions/upload-artifact@v3
with:
name: requirements
          # Tests shouldn't have access to the whole upb repo, so upload just the one file we need
path: python/requirements.txt
test_wheels:
name: Test Wheels
needs: build_wheels
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
# Linux and Mac use the limited API, so all Python versions will use
# a single wheel. As a result we can just test the oldest and newest
# supported Python versions and assume this gives us sufficient test
# coverage.
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: macos-11, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'binary' }
- { os: macos-12, python-version: "3.12", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'source' }
- { os: macos-11, python-version: "3.8", architecture: x64, type: 'source' }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'source' }
- { os: macos-12, python-version: "3.12", architecture: x64, type: 'source' }
# Windows uses the full API up until Python 3.10.
- { os: windows-2019, python-version: "3.8", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.9", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.10", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.11", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.12", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.9", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.10", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.11", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.12", architecture: x64, type: 'binary' }
runs-on: ${{ matrix.os }}
if: ${{ github.event_name != 'pull_request_target' }}
defaults:
run:
shell: bash
steps:
- name: Download Wheels
uses: actions/download-artifact@v3
with:
name: python-wheels
path: wheels
- name: Download Requirements
uses: actions/download-artifact@v3
with:
name: requirements
path: requirements
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: ${{ matrix.python-version }}
architecture: ${{ matrix.architecture }}
- name: Setup Python venv
run: |
python -m pip install --upgrade pip
python -m venv env
# Windows uses 'Scripts' instead of 'bin'
source env/bin/activate || source env/Scripts/activate
echo "VIRTUAL ENV:" $VIRTUAL_ENV
- name: Install tzdata
run: pip install tzdata
        # Only needed on Windows; Linux ships with tzdata.
if: ${{ contains(matrix.os, 'windows') }}
- name: Install requirements
run: pip install -r requirements/requirements.txt
- name: Install Protobuf Binary Wheel
run: pip install -vvv --no-index --find-links wheels protobuf
if: ${{ matrix.type == 'binary' }}
- name: Install Protobuf Source Wheel
run: |
cd wheels
tar -xzvf *.tar.gz
cd protobuf-*/
pip install .
if: ${{ matrix.type == 'source' }}
- name: Test that module is importable
run: python -v -c 'from google._upb import _message; assert "google._upb._message.MessageMeta" in str(_message.MessageMeta)'
- name: Install Protobuf Test Wheel
run: pip install -vvv --no-index --find-links wheels protobuftests
- name: Run the unit tests
run: |
TESTS=$(pip show -f protobuftests | grep pb_unit_tests.*py$ | sed 's,/,.,g' | sed 's,\\,.,g' | sed -E 's,.py$,,g')
for test in $TESTS; do
python -m unittest -v $test
done
test_pure_python_wheels:
name: Test Pure Python Wheels
needs: build_wheels
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
python-version: ["3.8", "3.12"]
runs-on: ubuntu-latest
if: ${{ github.event_name != 'pull_request_target' }}
steps:
- name: Download Wheels
uses: actions/download-artifact@v3
with:
name: python-wheels
path: wheels
- name: Delete Binary Wheels
run: find wheels -type f | grep -v none-any | xargs rm
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: ${{ matrix.python-version }}
- name: Setup Python venv
run: |
python -m pip install --upgrade pip
python -m venv env
source env/bin/activate
echo "VIRTUAL ENV:" $VIRTUAL_ENV
- name: Install numpy
run: pip install numpy
- name: Install Protobuf Wheels
run: pip install -vvv --no-index --find-links wheels protobuf protobuftests
- name: Run the unit tests
run: |
TESTS=$(pip show -f protobuftests | grep _test.py | grep --invert-match _pybind11_test.py | sed 's,/,.,g' | sed -E 's,.py$,,g')
for test in $TESTS; do
python -m unittest -v $test
done
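
The test-discovery pipelines in both wheel jobs above simply rewrite installed file paths into dotted module names for python -m unittest. With a hypothetical installed file, the steps look like:

    # pip show -f protobuftests lists paths such as:
    #   pb_unit_tests/descriptor_test.py      (hypothetical entry)
    # sed 's,/,.,g' plus stripping the .py suffix yields:
    #   pb_unit_tests.descriptor_test
    # which the loop then runs as:
    python -m unittest -v pb_unit_tests.descriptor_test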

@@ -15,12 +15,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout protobuf-php
uses: actions/checkout@v3
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
repository: protocolbuffers/protobuf-php
token: ${{ secrets.BOT_ACCESS_TOKEN }}
- name: Clone protobuf
uses: actions/checkout@v3
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
path: protobuf
- name: Configure Git Bot
@@ -29,7 +29,7 @@ jobs:
git config user.email "protobuf-team-bot@google.com"
- name: Get PHP Version
run: |
unformatted_version=$( cat protobuf/version.json | jq -r '.main.languages.php' )
unformatted_version=$( cat protobuf/version.json | jq -r '.[].languages.php' )
version=${unformatted_version/-rc/RC}
version_tag=v$version
echo "VERSION=$version" >> $GITHUB_ENV
@@ -42,7 +42,8 @@ jobs:
rm -rf protobuf
- name: Push Changes
run: |
git commit -a -m "$VERSION sync"
git add --all
git commit --allow-empty -m "${{ env.VERSION }} sync"
git push --force origin master
git tag -a $VERSION_TAG -m "Tag release $VERSION_TAG"
git push origin $VERSION_TAG
git tag -a ${{ env.VERSION_TAG }} -m "Tag release ${{ env.VERSION_TAG }}"
git push origin ${{ env.VERSION_TAG }}
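
The jq change above (.main.languages.php to .[].languages.php) makes the lookup independent of the top-level key: .[] iterates every top-level value, so the query works whether version.json is keyed by main or by a release branch. With hypothetical contents:

    # version.json:  { "25.x": { "languages": { "php": "4.25.0-rc1" } } }
    jq -r '.[].languages.php' protobuf/version.json    # -> 4.25.0-rc1
    # The shell substitution then maps it to PHP's tag convention:
    #   ${unformatted_version/-rc/RC}  ->  4.25.0RC1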

8 .gitignore vendored

@@ -58,6 +58,7 @@ src/js_embed
# vim generated
*.swp
*~
# Generated test scaffolding
src/no_warning_test.cc
@@ -172,6 +173,7 @@ ruby/tests/test_import_pb.rb
ruby/tests/test_ruby_package_pb.rb
ruby/tests/generated_code_proto2_pb.rb
ruby/tests/multi_level_nesting_test_pb.rb
ruby/tests/service_test_pb.rb
ruby/tests/test_import_proto2_pb.rb
ruby/tests/test_ruby_package_proto2_pb.rb
ruby/compatibility_tests/v3.0.0/protoc
@@ -208,3 +210,9 @@ BenchmarkDotNet.Artifacts/
# Clangd uses these common ephemeral files
.cache
compile_commands.json
# Ignore GHA NodeJS files
.github/**/node_modules/
# Ignore Bzlmod lock file until it is more stable
MODULE.bazel.lock

2 .gitmodules vendored

@@ -5,7 +5,7 @@
[submodule "third_party/abseil-cpp"]
path = third_party/abseil-cpp
url = https://github.com/abseil/abseil-cpp.git
branch = lts_2023_01_24
branch = lts_2023_08_02
[submodule "third_party/jsoncpp"]
path = third_party/jsoncpp
url = https://github.com/open-source-parsers/jsoncpp.git

@@ -16,7 +16,7 @@ conda:
environment: python/docs/environment.yml
python:
version: 3.7
version: 3.8
install:
- method: setuptools
path: python

@@ -1,11 +1,14 @@
# Bazel (https://bazel.build/) BUILD file for Protobuf.
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_proto_library")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("@rules_proto//proto:defs.bzl", "proto_lang_toolchain", "proto_library")
load("@rules_java//java:defs.bzl", "java_lite_proto_library", "java_proto_library")
load("//build_defs:cpp_opts.bzl", "COPTS", "LINK_OPTS", "PROTOC_LINK_OPTS")
load(":protobuf.bzl", "internal_objc_proto_library", "internal_php_proto_library", "internal_py_proto_library", "internal_ruby_proto_library")
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library")
load("@rules_pkg//pkg:mappings.bzl", "pkg_files", "strip_prefix")
load("//bazel:cc_proto_library.bzl", "cc_proto_library")
load("//bazel:java_lite_proto_library.bzl", "java_lite_proto_library")
load("//bazel:java_proto_library.bzl", "java_proto_library")
load("//bazel:proto_library.bzl", "proto_library")
load("//bazel/toolchains:proto_lang_toolchain.bzl", "proto_lang_toolchain")
load("//build_defs:cpp_opts.bzl", "COPTS", "LINK_OPTS")
load(":protobuf.bzl", "internal_objc_proto_library", "internal_php_proto_library", "internal_py_proto_library")
licenses(["notice"])
@@ -21,7 +24,7 @@ exports_files(["LICENSE"])
#
# java_proto_library(
# name = "any_java_proto",
# deps = ["@com_google_protobuf//:any_proto"],
# deps = ["@protobuf//:any_proto"],
# )
################################################################################
@@ -85,6 +88,68 @@ alias(
visibility = ["//visibility:public"],
)
# C++ targets for the well-known types
alias(
name = "any_cc_proto",
actual = "//src/google/protobuf:any_cc_proto",
visibility = ["//visibility:public"],
)
alias(
name = "api_cc_proto",
actual = "//src/google/protobuf:api_cc_proto",
visibility = ["//visibility:public"],
)
alias(
name = "duration_cc_proto",
actual = "//src/google/protobuf:duration_cc_proto",
visibility = ["//visibility:public"],
)
alias(
name = "empty_cc_proto",
actual = "//src/google/protobuf:empty_cc_proto",
visibility = ["//visibility:public"],
)
alias(
name = "field_mask_cc_proto",
actual = "//src/google/protobuf:field_mask_cc_proto",
visibility = ["//visibility:public"],
)
alias(
name = "source_context_cc_proto",
actual = "//src/google/protobuf:source_context_cc_proto",
visibility = ["//visibility:public"],
)
alias(
name = "struct_cc_proto",
actual = "//src/google/protobuf:struct_cc_proto",
visibility = ["//visibility:public"],
)
alias(
name = "timestamp_cc_proto",
actual = "//src/google/protobuf:timestamp_cc_proto",
visibility = ["//visibility:public"],
)
alias(
name = "type_cc_proto",
actual = "//src/google/protobuf:type_cc_proto",
visibility = ["//visibility:public"],
)
alias(
name = "wrappers_cc_proto",
actual = "//src/google/protobuf:wrappers_cc_proto",
visibility = ["//visibility:public"],
)
# Source files: these are aliases to a filegroup, not a `proto_library`.
#
# (This is _probably_ not what you want.)
@@ -144,17 +209,6 @@ filegroup(
visibility = ["//visibility:public"],
)
internal_ruby_proto_library(
name = "well_known_ruby_protos",
srcs = [":well_known_protos"],
includes = ["src"],
default_runtime = "",
visibility = [
"//conformance:__pkg__",
"//ruby:__subpackages__",
],
)
################################################################################
# Protocol Buffers Compiler
################################################################################
@@ -168,7 +222,22 @@ alias(
cc_binary(
name = "protoc",
copts = COPTS,
linkopts = LINK_OPTS + PROTOC_LINK_OPTS,
linkopts = LINK_OPTS,
visibility = ["//visibility:public"],
deps = ["//src/google/protobuf/compiler:protoc_lib"],
)
cc_binary(
name = "protoc_static",
copts = COPTS,
features = select({
# This isn't possible on mac because there is no static library for lcrt0.o
"@platforms//os:osx": [],
"//build_defs:config_osx": [],
# When cross-compiling we need to statically link all C++ libraries.
"//conditions:default": ["fully_static_link"],
}),
linkopts = LINK_OPTS,
visibility = ["//visibility:public"],
deps = ["//src/google/protobuf/compiler:protoc_lib"],
)
@@ -177,6 +246,21 @@ cc_binary(
# C++ runtime
################################################################################
# Expose the runtime for the proto_lang_toolchain so that it can also be used in
# a user-defined proto_lang_toolchain.
alias(
name = "protobuf",
actual = "//src/google/protobuf:protobuf_layering_check_legacy",
visibility = ["//visibility:public"],
)
alias(
name = "protobuf_nowkt",
actual = "//src/google/protobuf:protobuf_layering_check_legacy",
deprecation = "Use //:protobuf instead",
visibility = ["//visibility:public"],
)
# The "lite" runtime works for .proto files that specify the option:
# optimize_for = LITE_RUNTIME;
#
@@ -192,32 +276,10 @@ alias(
visibility = ["//visibility:public"],
)
cc_library(
name = "protobuf",
hdrs = glob([
"src/**/*.h",
"src/**/*.inc",
]),
copts = COPTS,
include_prefix = "google/protobuf/io",
linkopts = LINK_OPTS,
visibility = ["//visibility:public"],
deps = [
"//src/google/protobuf",
"//src/google/protobuf/compiler:importer",
"//src/google/protobuf/util:delimited_message_util",
"//src/google/protobuf/util:differencer",
"//src/google/protobuf/util:field_mask_util",
"//src/google/protobuf/util:json_util",
"//src/google/protobuf/util:time_util",
"//src/google/protobuf/util:type_resolver_util",
],
)
# This provides just the header files for use in projects that need to build
# shared libraries for dynamic loading. This target is available until Bazel
# adds native support for such use cases.
# TODO(keveman): Remove this target once the support gets added to Bazel.
# TODO: Remove this target once the support gets added to Bazel.
alias(
name = "protobuf_headers",
actual = "//src/google/protobuf:protobuf_headers",
@@ -230,6 +292,48 @@ alias(
visibility = ["//visibility:public"],
)
alias(
name = "delimited_message_util",
actual = "//src/google/protobuf/util:delimited_message_util",
visibility = ["//visibility:public"],
)
alias(
name = "differencer",
actual = "//src/google/protobuf/util:differencer",
visibility = ["//visibility:public"],
)
alias(
name = "field_mask_util",
actual = "//src/google/protobuf/util:field_mask_util",
visibility = ["//visibility:public"],
)
alias(
name = "json_util",
actual = "//src/google/protobuf/util:json_util",
visibility = ["//visibility:public"],
)
alias(
name = "time_util",
actual = "//src/google/protobuf/util:time_util",
visibility = ["//visibility:public"],
)
alias(
name = "type_resolver",
actual = "//src/google/protobuf/util:type_resolver",
visibility = ["//visibility:public"],
)
alias(
name = "cpp_features_proto",
actual = "//src/google/protobuf:cpp_features_proto", # proto_library
visibility = ["//visibility:public"],
)
################################################################################
# Java support
################################################################################
@@ -264,6 +368,12 @@ alias(
visibility = ["//visibility:public"],
)
alias(
name = "java_features_proto",
actual = "//java/core:java_features_proto", # proto_library
visibility = ["//visibility:public"],
)
################################################################################
# Python support
################################################################################
@@ -277,13 +387,13 @@ alias(
alias(
name = "python_srcs",
actual = "//python:python_srcs",
visibility = ["@upb//:__subpackages__"],
visibility = ["//python:__subpackages__"],
)
alias(
name = "python_test_srcs",
actual = "//python:python_test_srcs",
visibility = ["@upb//:__subpackages__"],
visibility = ["//python:__subpackages__"],
)
alias(
@@ -308,10 +418,11 @@ proto_lang_toolchain(
name = "cc_toolchain",
blacklisted_protos = [
"//:compiler_plugin_proto",
"//:cpp_features_proto",
"//:descriptor_proto",
],
command_line = "--cpp_out=$(OUT)",
runtime = ":protobuf",
runtime = "//src/google/protobuf",
visibility = ["//visibility:public"],
)
@@ -333,6 +444,16 @@ alias(
visibility = ["//visibility:public"],
)
################################################################################
# Rust support
################################################################################
alias(
name = "protobuf_rust",
actual = "//rust:protobuf",
visibility = ["//visibility:public"],
)
################################################################################
# Test protos
################################################################################
@@ -355,6 +476,12 @@ alias(
visibility = ["//:__subpackages__"],
)
alias(
name = "test_proto_editions_srcs",
actual = "//src/google/protobuf:test_proto_editions_srcs", # filegroup
visibility = ["//:__subpackages__"],
)
alias(
name = "test_protos",
actual = "//src/google/protobuf:test_protos", # proto_library
@@ -500,33 +627,6 @@ internal_php_proto_library(
],
)
internal_ruby_proto_library(
name = "test_messages_proto2_ruby_proto",
testonly = 1,
srcs = ["//src/google/protobuf:test_messages_proto2.proto"],
includes = ["src/google/protobuf"],
visibility = [
"//conformance:__pkg__",
"//ruby:__subpackages__",
],
)
internal_ruby_proto_library(
name = "test_messages_proto3_ruby_proto",
testonly = 1,
srcs = ["//src/google/protobuf:test_messages_proto3.proto"],
includes = [
"src/google/protobuf",
# The above must come first.
"src",
],
deps = [":well_known_ruby_protos"],
visibility = [
"//conformance:__pkg__",
"//ruby:__subpackages__",
],
)
filegroup(
name = "bzl_srcs",
srcs = glob(["**/*.bzl"]),
@@ -550,13 +650,11 @@ pkg_files(
allow_empty = True,
) + [
"BUILD.bazel",
"CHANGES.txt",
"CMakeLists.txt",
"CONTRIBUTORS.txt",
"LICENSE",
"README.md",
"WORKSPACE",
"cmake/CMakeLists.txt",
"cmake/README.md",
"generate_descriptor_proto.sh",
"maven_install.json",

@@ -1,75 +0,0 @@
2022-07-01 Unreleased version
# C++
* cpp_generated_lib_linked support is removed in protoc
* Reduced .pb.o object file size slightly by explicitly instantiating
InternalMetadata templates in the runtime.
* Breaking change: Add C++20 reserved keywords.
* Fixed crash in ThreadLocalStorage for pre-C++17 compilers on 32-bit ARM.
* Clarified that JSON API non-OK statuses are not a stable API.
* Added a default implementation of MessageDifferencer::Reporter methods.
* proto2::MapPair is now an alias to std::pair.
* Hide C++ RepeatedField::UnsafeArenaSwap
* Use table-driven parser for reflection based objects.
* Update Map's InternalSwap() to take a pointer to the other Map.
* Add ARM-optimized Varint decoding functions.
* Minor optimization for parsing groups
* Declare ReflectiveProtoHook class
* Reduce size of VarintParse code in protocol buffers, by calling the shared
routine after handling just one-byte varint encoding inline, rather than
handling one-byte and two-byte varints inline.
* Avoid inlining some large heavily duplicated routines in repeated_ptr_field.h
* Add ReflectiveProtoHook to Reflection.
* Turns on table-driven parser for reflection based objects.
* Save code space by avoiding inlining of large-in-aggregate code-space MessageLite::~MessageLite destructor.
* Undefine the macro `linux` when compiling protobuf
* Reduce memory consumption of MessageSet parsing.
* Save code space by avoiding inlining of large-in-aggregate code-space MessageLite::~MessageLite destructor.
* Breaking change: Delete Arena::Init
* Make a PROTOBUF_POISON/UNPOISON to reduce noise in the source
* Put alignment functions in "arena_align.h"
* Split off `cleanup` arena functions into "arena_cleanup.h"
* Fix signed / unsigned match in CHECK_EQ
* Kill Atomic<>. It's not pulling its weight.
* Move AllocationPolicy out of arena_impl, and unify arena_config for bazel
* Fix failure case in table-driven parser.
* Add a new JSON parser.
* Removed old JSON parsing code.
* Introduce the Printer::{SetRedactDebugString,SetRandomizeDebugString} private flags.
* Introduce global flags to control Printer::{SetRedactDebugString, SetRandomizeDebugString}.
* proto3 string fields no longer trigger clang-tidy warning bugprone-branch-clone.
* Fix the API of DescriptorUpgrader::set_allow_unknown_dependencies to set to True always, and to populate into the DescriptorPool as well.
* Report line numbers consistently in text-format deprecated-field warnings.
* Reserve C++20 keywords
* Fixed C++ code generation for protos that use int32_t, uint32_t, int64_t, uint64_t, size_t as field names.
* Annotate generated C++ public aliases for enum types.
* Change default arena max block size from 8K to 32K.
# Kotlin
* Suppress deprecation warnings in Kotlin generated code.
* Kotlin generated code comments now use kdoc format instead of javadoc.
* Escape keywords in package names in proto generated code
* Add Kotlin enum int value getters and setters
# Java
* Performance improvement for repeated use of FieldMaskUtil#merge by caching
constructed FieldMaskTrees.
* Optimized Java proto serialization gencode for protos having many extension ranges with few fields in between.
* More thoroughly annotate public generated code in Java lite protocol buffers.
* Fixed bug in proto3 Java lite repeated enum fields: copyOnWrite was not called before modifying a previously built message, causing modification of already "built" messages that should be immutable.
* Fix Java reflection serialization of empty packed fields.
* Refactoring java full runtime to reuse sub-message builders and prepare to migrate parsing logic from parse constructor to builder.
* Move proto wireformat parsing functionality from the private "parsing constructor" to the Builder class.
* Change the Lite runtime to prefer merging from the wireformat into mutable messages rather than building up a new immutable object before merging. This results in fewer allocations and copy operations.
* Make message-type extensions merge from wire-format instead of building up instances and merging afterwards. This has much better performance.
* Fix TextFormat parser to build up recurring (but supposedly not repeated) sub-messages directly from text rather than building a new sub-message and merging the fully formed message into the existing field.
* Fix bug in nested builder caching logic where cleared sub-field builders would remain dirty after a clear and build in a parent layer. https://github.com/protocolbuffers/protobuf/issues/10624
# Python
* Changes ordering of printed fields in .pyi files from lexicographic to the same ordering found in the proto descriptor.
* Adds GeneratedCodeInfo annotations to python proto .pyi outputs as a base64 encoded docstring in the last line of the .pyi file for code analysis tools.
* Fix message factory's behavior in python cpp extension to return same message classes for same descriptor, even if the factories are different.
* Add type annotation for enum value fields in enum classes.
# Compiler
* Print full path name of source .proto file on error
* Include proto message type in the annotation comments.

@@ -1,32 +1,15 @@
# Minimum CMake required
cmake_minimum_required(VERSION 3.5)
# Minimum CMake required. If available, accept the policy-controlled behavior up
# to 3.26.
cmake_minimum_required(VERSION 3.10...3.26)
if(protobuf_VERBOSE)
message(STATUS "Protocol Buffers Configuring...")
# Revert to old behavior for MSVC debug symbols.
if(POLICY CMP0141)
cmake_policy(SET CMP0141 OLD)
endif()
# CMake policies
cmake_policy(SET CMP0022 NEW)
# On MacOS use @rpath/ for target's install name prefix path
if (POLICY CMP0042)
cmake_policy(SET CMP0042 NEW)
endif ()
# Clear VERSION variables when no VERSION is given to project()
if(POLICY CMP0048)
cmake_policy(SET CMP0048 NEW)
endif()
# MSVC runtime library flags are selected by an abstraction.
if(POLICY CMP0091)
cmake_policy(SET CMP0091 NEW)
endif()
# Honor visibility properties for all target types.
if(POLICY CMP0063)
cmake_policy(SET CMP0063 NEW)
if(protobuf_VERBOSE)
message(STATUS "Protocol Buffers Configuring...")
endif()
# option() honor variables
if (POLICY CMP0077)
cmake_policy(SET CMP0077 NEW)
endif (POLICY CMP0077)
# Project
project(protobuf C CXX)
@ -40,27 +23,18 @@ if(protobuf_DEPRECATED_CMAKE_SUBDIRECTORY_USAGE)
get_filename_component(protobuf_SOURCE_DIR ${protobuf_SOURCE_DIR} DIRECTORY)
endif()
# The Intel compiler isn't able to deal with noinline member functions of
# template classes defined in headers. As such it spams the output with
# warning #2196: routine is both "inline" and "noinline"
# This silences that warning.
if (CMAKE_CXX_COMPILER_ID MATCHES Intel)
string(APPEND CMAKE_CXX_FLAGS " -diag-disable=2196")
endif()
# Options
option(protobuf_INSTALL "Install protobuf binaries and files" ON)
if(WITH_PROTOC)
set(protobuf_PROTOC_EXE ${WITH_PROTOC} CACHE FILEPATH "Protocol Buffer Compiler executable" FORCE)
endif()
option(protobuf_BUILD_TESTS "Build tests" ON)
option(protobuf_BUILD_CONFORMANCE "Build conformance tests" OFF)
option(protobuf_BUILD_EXAMPLES "Build examples" OFF)
option(protobuf_BUILD_PROTOBUF_BINARIES "Build protobuf libraries and protoc compiler" ON)
option(protobuf_BUILD_PROTOC_BINARIES "Build libprotoc and protoc compiler" ON)
option(protobuf_BUILD_LIBPROTOC "Build libprotoc" OFF)
option(protobuf_BUILD_LIBUPB "Build libupb" ON)
option(protobuf_DISABLE_RTTI "Remove runtime type information in the binaries" OFF)
option(protobuf_TEST_XML_OUTDIR "Output directory for XML logs from tests." "")
option(protobuf_ALLOW_CCACHE "Adjust build flags to allow for ccache support." OFF)
if (BUILD_SHARED_LIBS)
set(protobuf_BUILD_SHARED_LIBS_DEFAULT ON)
else (BUILD_SHARED_LIBS)
@ -75,6 +49,15 @@ option(protobuf_WITH_ZLIB "Build with zlib support" ${protobuf_WITH_ZLIB_DEFAULT
set(protobuf_DEBUG_POSTFIX "d"
CACHE STRING "Default debug postfix")
mark_as_advanced(protobuf_DEBUG_POSTFIX)
if(WITH_PROTOC)
set(protobuf_PROTOC_EXE protoc)
set(protobuf_BUILD_PROTOC_BINARIES OFF)
add_executable(protoc IMPORTED GLOBAL)
add_executable(protobuf::protoc ALIAS protoc)
set_property(TARGET protoc PROPERTY IMPORTED_LOCATION ${WITH_PROTOC})
endif()
# User options
include(${protobuf_SOURCE_DIR}/cmake/protobuf-options.cmake)
@ -98,7 +81,7 @@ if (protobuf_BUILD_SHARED_LIBS)
endif ()
# Version metadata
set(protobuf_VERSION_STRING "3.21.4")
set(protobuf_VERSION_STRING "5.28.0")
set(protobuf_DESCRIPTION "Protocol Buffers")
set(protobuf_CONTACT "protobuf@googlegroups.com")
@ -124,7 +107,7 @@ message(STATUS "${protobuf_VERSION_PRERELEASE}")
# Package version
set(protobuf_VERSION
"${protobuf_VERSION_MAJOR}.${protobuf_VERSION_MINOR}.${protobuf_VERSION_PATCH}")
"${protobuf_VERSION_MINOR}.${protobuf_VERSION_PATCH}")
if(protobuf_VERSION_PRERELEASE)
set(protobuf_VERSION "${protobuf_VERSION}.${protobuf_VERSION_PRERELEASE}")
@ -141,12 +124,6 @@ if(protobuf_VERBOSE)
message(STATUS "]")
endif()
add_definitions(-DGOOGLE_PROTOBUF_CMAKE_BUILD)
if (protobuf_DISABLE_RTTI)
add_definitions(-DGOOGLE_PROTOBUF_NO_RTTI=1)
endif()
file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/cmaketest.map
"{
global:
@ -203,10 +180,6 @@ if (protobuf_WITH_ZLIB)
endif (ZLIB_FOUND)
endif (protobuf_WITH_ZLIB)
if (HAVE_ZLIB)
add_definitions(-DHAVE_ZLIB)
endif (HAVE_ZLIB)
# We need to link with libatomic on systems that do not have builtin atomics, or
# don't have builtin support for 8 byte atomics
set(protobuf_LINK_LIBATOMIC false)
@ -262,34 +235,28 @@ endif (protobuf_BUILD_SHARED_LIBS)
SET(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS ON)
if (MSVC)
if (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# Build with multiple processes
add_compile_options(/MP)
endif()
# Set source file and execution character sets to UTF-8
add_compile_options(/utf-8)
# MSVC warning suppressions
add_compile_options(
/wd4065 # switch statement contains 'default' but no 'case' labels
/wd4146 # unary minus operator applied to unsigned type
/wd4244 # 'conversion' conversion from 'type1' to 'type2', possible loss of data
/wd4251 # 'identifier' : class 'type' needs to have dll-interface to be used by clients of class 'type2'
/wd4267 # 'var' : conversion from 'size_t' to 'type', possible loss of data
/wd4305 # 'identifier' : truncation from 'type1' to 'type2'
/wd4307 # 'operator' : integral constant overflow
/wd4309 # 'conversion' : truncation of constant value
/wd4334 # 'operator' : result of 32-bit shift implicitly converted to 64 bits (was 64-bit shift intended?)
/wd4355 # 'this' : used in base member initializer list
/wd4506 # no definition for inline function 'function'
/wd4800 # 'type' : forcing value to bool 'true' or 'false' (performance warning)
/wd4996 # The compiler encountered a deprecated declaration.
)
# Allow big object
add_compile_options(/bigobj)
string(REPLACE "/" "\\" PROTOBUF_SOURCE_WIN32_PATH ${protobuf_SOURCE_DIR})
string(REPLACE "/" "\\" PROTOBUF_BINARY_WIN32_PATH ${protobuf_BINARY_DIR})
string(REPLACE "." "," protobuf_RC_FILEVERSION "${protobuf_VERSION}")
if (protobuf_ALLOW_CCACHE)
# In order to support ccache, we need to remove the /Zi option because it
# puts debug symbols into separate pdb files (which is incompatible with
# ccache). /Zi could be replaced with /Z7, which preserves debug symbols by
# embedding them in the object files instead of a separate pdb file, but that
# bloats the ccache size by roughly 2x, making it very expensive in CI.
# Instead, we strip debug symbols to reduce this overhead.
foreach(v
CMAKE_C_FLAGS_DEBUG
CMAKE_CXX_FLAGS_DEBUG
CMAKE_C_FLAGS_RELWITHDEBINFO
CMAKE_CXX_FLAGS_RELWITHDEBINFO
)
string(REGEX REPLACE "[-/]Z[iI7]" "/DEBUG:NONE" ${v} "${${v}}")
endforeach()
endif()
# Suppress linker warnings about files with no symbols defined.
string(APPEND CMAKE_STATIC_LINKER_FLAGS " /ignore:4221")
@ -314,15 +281,17 @@ endif (MSVC)
include_directories(
${ZLIB_INCLUDE_DIRECTORIES}
${protobuf_BINARY_DIR}
# Support #include-ing other top-level directories, e.g. upb_generator.
${protobuf_SOURCE_DIR}
${protobuf_BINARY_DIR}/src
${protobuf_SOURCE_DIR}/src)
if (protobuf_UNICODE)
add_definitions(-DUNICODE -D_UNICODE)
endif (protobuf_UNICODE)
set(protobuf_ABSL_PROVIDER "module" CACHE STRING "Provider of absl library")
set_property(CACHE protobuf_ABSL_PROVIDER PROPERTY STRINGS "module" "package")
set(protobuf_JSONCPP_PROVIDER "module" CACHE STRING "Provider of jsoncpp library")
set_property(CACHE protobuf_JSONCPP_PROVIDER PROPERTY STRINGS "module" "package")
if (protobuf_BUILD_TESTS)
include(${protobuf_SOURCE_DIR}/cmake/gtest.cmake)
endif (protobuf_BUILD_TESTS)
@ -345,6 +314,13 @@ if (protobuf_BUILD_PROTOBUF_BINARIES)
set(protobuf_LIB_PROTOC libprotoc)
endif ()
endif ()
if (protobuf_BUILD_LIBUPB)
include(${protobuf_SOURCE_DIR}/cmake/libupb.cmake)
if (NOT DEFINED protobuf_LIB_UPB)
set(protobuf_LIB_UPB libupb)
endif ()
include(${protobuf_SOURCE_DIR}/cmake/upb_generators.cmake)
endif ()
if (protobuf_BUILD_PROTOC_BINARIES)
include(${protobuf_SOURCE_DIR}/cmake/protoc.cmake)
if (NOT DEFINED protobuf_PROTOC_EXE)
@ -358,6 +334,7 @@ else ()
set(protobuf_LIB_PROTOC protobuf::libprotoc)
set(protobuf_LIB_PROTOBUF protobuf::libprotobuf)
set(protobuf_LIB_PROTOBUF_LITE protobuf::libprotobuf-lite)
set(protobuf_LIB_UPB protobuf::libupb)
message(STATUS "CMake installation of Protobuf found.")
endif ()
endif ()
@ -373,6 +350,7 @@ if (protobuf_BUILD_TESTS OR protobuf_BUILD_CONFORMANCE OR protobuf_BUILD_EXAMPLE
message(STATUS "Using libprotobuf : ${protobuf_LIB_PROTOBUF}")
message(STATUS "Using libprotobuf-lite : ${protobuf_LIB_PROTOBUF_LITE}")
message(STATUS "Using libprotoc : ${protobuf_LIB_PROTOC}")
message(STATUS "Using libupb : ${protobuf_LIB_UPB}")
endif(protobuf_VERBOSE)
endif ()

@ -0,0 +1,933 @@
{
"checksum": "8863e5b8f3da7cf4502f68bea0d455dec4834bf25ff070caaa58a8e1c5ea1a3d",
"crates": {
"aho-corasick 1.1.2": {
"name": "aho-corasick",
"version": "1.1.2",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/aho-corasick/1.1.2/download",
"sha256": "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
}
},
"targets": [
{
"Library": {
"crate_name": "aho_corasick",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "aho_corasick",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"default",
"perf-literal",
"std"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "memchr 2.6.4",
"target": "memchr"
}
],
"selects": {}
},
"edition": "2021",
"version": "1.1.2"
},
"license": "Unlicense OR MIT"
},
"autocfg 1.1.0": {
"name": "autocfg",
"version": "1.1.0",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/autocfg/1.1.0/download",
"sha256": "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
}
},
"targets": [
{
"Library": {
"crate_name": "autocfg",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "autocfg",
"common_attrs": {
"compile_data_glob": [
"**"
],
"edition": "2015",
"version": "1.1.0"
},
"license": "Apache-2.0 OR MIT"
},
"direct-cargo-bazel-deps 0.0.1": {
"name": "direct-cargo-bazel-deps",
"version": "0.0.1",
"repository": null,
"targets": [
{
"Library": {
"crate_name": "direct_cargo_bazel_deps",
"crate_root": ".direct_cargo_bazel_deps.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "direct_cargo_bazel_deps",
"common_attrs": {
"compile_data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "googletest 0.11.0",
"target": "googletest"
}
],
"selects": {}
},
"edition": "2018",
"proc_macro_deps": {
"common": [
{
"id": "paste 1.0.14",
"target": "paste"
}
],
"selects": {}
},
"version": "0.0.1"
},
"license": null
},
"googletest 0.11.0": {
"name": "googletest",
"version": "0.11.0",
"repository": {
"Git": {
"remote": "https://github.com/google/googletest-rust",
"commitish": {
"Rev": "471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f"
},
"strip_prefix": "googletest"
}
},
"targets": [
{
"Library": {
"crate_name": "googletest",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "googletest",
"common_attrs": {
"compile_data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "num-traits 0.2.17",
"target": "num_traits"
},
{
"id": "regex 1.10.0",
"target": "regex"
}
],
"selects": {}
},
"edition": "2021",
"proc_macro_deps": {
"common": [
{
"id": "googletest_macro 0.11.0",
"target": "googletest_macro"
},
{
"id": "rustversion 1.0.14",
"target": "rustversion"
}
],
"selects": {}
},
"version": "0.11.0"
},
"license": "Apache-2.0"
},
"googletest_macro 0.11.0": {
"name": "googletest_macro",
"version": "0.11.0",
"repository": {
"Git": {
"remote": "https://github.com/google/googletest-rust",
"commitish": {
"Rev": "471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f"
},
"strip_prefix": "googletest_macro"
}
},
"targets": [
{
"ProcMacro": {
"crate_name": "googletest_macro",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "googletest_macro",
"common_attrs": {
"compile_data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "quote 1.0.33",
"target": "quote"
},
{
"id": "syn 2.0.43",
"target": "syn"
}
],
"selects": {}
},
"edition": "2021",
"version": "0.11.0"
},
"license": "Apache-2.0"
},
"memchr 2.6.4": {
"name": "memchr",
"version": "2.6.4",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/memchr/2.6.4/download",
"sha256": "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
}
},
"targets": [
{
"Library": {
"crate_name": "memchr",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "memchr",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"alloc",
"default",
"std"
],
"selects": {}
},
"edition": "2021",
"version": "2.6.4"
},
"license": "Unlicense OR MIT"
},
"num-traits 0.2.17": {
"name": "num-traits",
"version": "0.2.17",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/num-traits/0.2.17/download",
"sha256": "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c"
}
},
"targets": [
{
"Library": {
"crate_name": "num_traits",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
},
{
"BuildScript": {
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "num_traits",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"default",
"std"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "num-traits 0.2.17",
"target": "build_script_build"
}
],
"selects": {}
},
"edition": "2018",
"version": "0.2.17"
},
"build_script_attrs": {
"data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "autocfg 1.1.0",
"target": "autocfg"
}
],
"selects": {}
}
},
"license": "MIT OR Apache-2.0"
},
"paste 1.0.14": {
"name": "paste",
"version": "1.0.14",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/paste/1.0.14/download",
"sha256": "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
}
},
"targets": [
{
"ProcMacro": {
"crate_name": "paste",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
},
{
"BuildScript": {
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "paste",
"common_attrs": {
"compile_data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "paste 1.0.14",
"target": "build_script_build"
}
],
"selects": {}
},
"edition": "2018",
"version": "1.0.14"
},
"build_script_attrs": {
"data_glob": [
"**"
]
},
"license": "MIT OR Apache-2.0"
},
"proc-macro2 1.0.69": {
"name": "proc-macro2",
"version": "1.0.69",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/proc-macro2/1.0.69/download",
"sha256": "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
}
},
"targets": [
{
"Library": {
"crate_name": "proc_macro2",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
},
{
"BuildScript": {
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "proc_macro2",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"proc-macro"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "proc-macro2 1.0.69",
"target": "build_script_build"
},
{
"id": "unicode-ident 1.0.12",
"target": "unicode_ident"
}
],
"selects": {}
},
"edition": "2021",
"version": "1.0.69"
},
"build_script_attrs": {
"data_glob": [
"**"
]
},
"license": "MIT OR Apache-2.0"
},
"quote 1.0.33": {
"name": "quote",
"version": "1.0.33",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/quote/1.0.33/download",
"sha256": "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
}
},
"targets": [
{
"Library": {
"crate_name": "quote",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "quote",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"default",
"proc-macro"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "proc-macro2 1.0.69",
"target": "proc_macro2"
}
],
"selects": {}
},
"edition": "2018",
"version": "1.0.33"
},
"license": "MIT OR Apache-2.0"
},
"regex 1.10.0": {
"name": "regex",
"version": "1.10.0",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/regex/1.10.0/download",
"sha256": "d119d7c7ca818f8a53c300863d4f87566aac09943aef5b355bb83969dae75d87"
}
},
"targets": [
{
"Library": {
"crate_name": "regex",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "regex",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"default",
"perf",
"perf-backtrack",
"perf-cache",
"perf-dfa",
"perf-inline",
"perf-literal",
"perf-onepass",
"std",
"unicode",
"unicode-age",
"unicode-bool",
"unicode-case",
"unicode-gencat",
"unicode-perl",
"unicode-script",
"unicode-segment"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "aho-corasick 1.1.2",
"target": "aho_corasick"
},
{
"id": "memchr 2.6.4",
"target": "memchr"
},
{
"id": "regex-automata 0.4.1",
"target": "regex_automata"
},
{
"id": "regex-syntax 0.8.1",
"target": "regex_syntax"
}
],
"selects": {}
},
"edition": "2021",
"version": "1.10.0"
},
"license": "MIT OR Apache-2.0"
},
"regex-automata 0.4.1": {
"name": "regex-automata",
"version": "0.4.1",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/regex-automata/0.4.1/download",
"sha256": "465c6fc0621e4abc4187a2bda0937bfd4f722c2730b29562e19689ea796c9a4b"
}
},
"targets": [
{
"Library": {
"crate_name": "regex_automata",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "regex_automata",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"alloc",
"dfa-onepass",
"hybrid",
"meta",
"nfa-backtrack",
"nfa-pikevm",
"nfa-thompson",
"perf-inline",
"perf-literal",
"perf-literal-multisubstring",
"perf-literal-substring",
"std",
"syntax",
"unicode",
"unicode-age",
"unicode-bool",
"unicode-case",
"unicode-gencat",
"unicode-perl",
"unicode-script",
"unicode-segment",
"unicode-word-boundary"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "aho-corasick 1.1.2",
"target": "aho_corasick"
},
{
"id": "memchr 2.6.4",
"target": "memchr"
},
{
"id": "regex-syntax 0.8.1",
"target": "regex_syntax"
}
],
"selects": {}
},
"edition": "2021",
"version": "0.4.1"
},
"license": "MIT OR Apache-2.0"
},
"regex-syntax 0.8.1": {
"name": "regex-syntax",
"version": "0.8.1",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/regex-syntax/0.8.1/download",
"sha256": "56d84fdd47036b038fc80dd333d10b6aab10d5d31f4a366e20014def75328d33"
}
},
"targets": [
{
"Library": {
"crate_name": "regex_syntax",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "regex_syntax",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"default",
"std",
"unicode",
"unicode-age",
"unicode-bool",
"unicode-case",
"unicode-gencat",
"unicode-perl",
"unicode-script",
"unicode-segment"
],
"selects": {}
},
"edition": "2021",
"version": "0.8.1"
},
"license": "MIT OR Apache-2.0"
},
"rustversion 1.0.14": {
"name": "rustversion",
"version": "1.0.14",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/rustversion/1.0.14/download",
"sha256": "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
}
},
"targets": [
{
"ProcMacro": {
"crate_name": "rustversion",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
},
{
"BuildScript": {
"crate_name": "build_script_build",
"crate_root": "build/build.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "rustversion",
"common_attrs": {
"compile_data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "rustversion 1.0.14",
"target": "build_script_build"
}
],
"selects": {}
},
"edition": "2018",
"version": "1.0.14"
},
"build_script_attrs": {
"data_glob": [
"**"
]
},
"license": "MIT OR Apache-2.0"
},
"syn 2.0.43": {
"name": "syn",
"version": "2.0.43",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/syn/2.0.43/download",
"sha256": "ee659fb5f3d355364e1f3e5bc10fb82068efbf824a1e9d1c9504244a6469ad53"
}
},
"targets": [
{
"Library": {
"crate_name": "syn",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "syn",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"clone-impls",
"default",
"derive",
"full",
"parsing",
"printing",
"proc-macro",
"quote"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "proc-macro2 1.0.69",
"target": "proc_macro2"
},
{
"id": "quote 1.0.33",
"target": "quote"
},
{
"id": "unicode-ident 1.0.12",
"target": "unicode_ident"
}
],
"selects": {}
},
"edition": "2021",
"version": "2.0.43"
},
"license": "MIT OR Apache-2.0"
},
"unicode-ident 1.0.12": {
"name": "unicode-ident",
"version": "1.0.12",
"repository": {
"Http": {
"url": "https://static.crates.io/crates/unicode-ident/1.0.12/download",
"sha256": "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
}
},
"targets": [
{
"Library": {
"crate_name": "unicode_ident",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "unicode_ident",
"common_attrs": {
"compile_data_glob": [
"**"
],
"edition": "2018",
"version": "1.0.12"
},
"license": "(MIT OR Apache-2.0) AND Unicode-DFS-2016"
}
},
"binary_crates": [],
"workspace_members": {
"direct-cargo-bazel-deps 0.0.1": ""
},
"conditions": {
"aarch64-apple-darwin": [
"aarch64-apple-darwin"
],
"aarch64-apple-ios": [
"aarch64-apple-ios"
],
"aarch64-apple-ios-sim": [
"aarch64-apple-ios-sim"
],
"aarch64-fuchsia": [
"aarch64-fuchsia"
],
"aarch64-linux-android": [
"aarch64-linux-android"
],
"aarch64-pc-windows-msvc": [
"aarch64-pc-windows-msvc"
],
"aarch64-unknown-linux-gnu": [
"aarch64-unknown-linux-gnu"
],
"arm-unknown-linux-gnueabi": [
"arm-unknown-linux-gnueabi"
],
"armv7-linux-androideabi": [
"armv7-linux-androideabi"
],
"armv7-unknown-linux-gnueabi": [
"armv7-unknown-linux-gnueabi"
],
"i686-apple-darwin": [
"i686-apple-darwin"
],
"i686-linux-android": [
"i686-linux-android"
],
"i686-pc-windows-msvc": [
"i686-pc-windows-msvc"
],
"i686-unknown-freebsd": [
"i686-unknown-freebsd"
],
"i686-unknown-linux-gnu": [
"i686-unknown-linux-gnu"
],
"powerpc-unknown-linux-gnu": [
"powerpc-unknown-linux-gnu"
],
"riscv32imc-unknown-none-elf": [
"riscv32imc-unknown-none-elf"
],
"riscv64gc-unknown-none-elf": [
"riscv64gc-unknown-none-elf"
],
"s390x-unknown-linux-gnu": [
"s390x-unknown-linux-gnu"
],
"thumbv7em-none-eabi": [
"thumbv7em-none-eabi"
],
"thumbv8m.main-none-eabi": [
"thumbv8m.main-none-eabi"
],
"wasm32-unknown-unknown": [
"wasm32-unknown-unknown"
],
"wasm32-wasi": [
"wasm32-wasi"
],
"x86_64-apple-darwin": [
"x86_64-apple-darwin"
],
"x86_64-apple-ios": [
"x86_64-apple-ios"
],
"x86_64-fuchsia": [
"x86_64-fuchsia"
],
"x86_64-linux-android": [
"x86_64-linux-android"
],
"x86_64-pc-windows-msvc": [
"x86_64-pc-windows-msvc"
],
"x86_64-unknown-freebsd": [
"x86_64-unknown-freebsd"
],
"x86_64-unknown-linux-gnu": [
"x86_64-unknown-linux-gnu"
],
"x86_64-unknown-none": [
"x86_64-unknown-none"
]
}
}


@ -0,0 +1,137 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
dependencies = [
"memchr",
]
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "direct-cargo-bazel-deps"
version = "0.0.1"
dependencies = [
"googletest",
"paste",
]
[[package]]
name = "googletest"
version = "0.11.0"
source = "git+https://github.com/google/googletest-rust?rev=471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f#471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f"
dependencies = [
"googletest_macro",
"num-traits",
"regex",
"rustversion",
]
[[package]]
name = "googletest_macro"
version = "0.11.0"
source = "git+https://github.com/google/googletest-rust?rev=471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f#471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "memchr"
version = "2.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
[[package]]
name = "num-traits"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c"
dependencies = [
"autocfg",
]
[[package]]
name = "paste"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
[[package]]
name = "proc-macro2"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d119d7c7ca818f8a53c300863d4f87566aac09943aef5b355bb83969dae75d87"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "465c6fc0621e4abc4187a2bda0937bfd4f722c2730b29562e19689ea796c9a4b"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56d84fdd47036b038fc80dd333d10b6aab10d5d31f4a366e20014def75328d33"
[[package]]
name = "rustversion"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
[[package]]
name = "syn"
version = "2.0.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ee659fb5f3d355364e1f3e5bc10fb82068efbf824a1e9d1c9504244a6469ad53"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"

@ -0,0 +1,72 @@
# TODO: migrate all dependencies from WORKSPACE to MODULE.bazel
# https://github.com/protocolbuffers/protobuf/issues/14313
module(
name = "protobuf",
version = "28.0-dev", # Automatically updated on release
compatibility_level = 1,
repo_name = "com_google_protobuf",
)
# LOWER BOUND dependency versions.
# Bzlmod follows MVS:
# https://bazel.build/versions/6.0.0/build/bzlmod#version-resolution
# Thus the highest version in the module graph is resolved.
bazel_dep(name = "abseil-cpp", version = "20230802.0.bcr.1", repo_name = "com_google_absl")
bazel_dep(name = "bazel_skylib", version = "1.4.1")
bazel_dep(name = "jsoncpp", version = "1.9.5")
bazel_dep(name = "rules_cc", version = "0.0.9")
bazel_dep(name = "rules_fuzzing", version = "0.5.2")
bazel_dep(name = "rules_java", version = "5.3.5")
bazel_dep(name = "rules_jvm_external", version = "5.1")
bazel_dep(name = "rules_pkg", version = "0.7.0")
bazel_dep(name = "rules_python", version = "0.28.0")
bazel_dep(name = "rules_rust", version = "0.45.1")
bazel_dep(name = "platforms", version = "0.0.8")
bazel_dep(name = "zlib", version = "1.2.11")
# TODO: remove after toolchain types are moved to protobuf
bazel_dep(name = "rules_proto", version = "4.0.0")
SUPPORTED_PYTHON_VERSIONS = [
"3.8",
"3.9",
"3.10",
"3.11",
"3.12",
]
python = use_extension("@rules_python//python/extensions:python.bzl", "python")
[
python.toolchain(
is_default = python_version == SUPPORTED_PYTHON_VERSIONS[-1],
python_version = python_version,
)
for python_version in SUPPORTED_PYTHON_VERSIONS
]
use_repo(python, system_python = "python_{}".format(SUPPORTED_PYTHON_VERSIONS[-1].replace(".", "_")))
pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
[
pip.parse(
hub_name = "pip_deps",
python_version = python_version,
requirements_lock = "//python:requirements.txt",
)
for python_version in SUPPORTED_PYTHON_VERSIONS
]
use_repo(pip, "pip_deps")
rust = use_extension("@rules_rust//rust:extensions.bzl", "rust")
rust.toolchain(edition = "2021")
use_repo(rust, "rust_toolchains")
register_toolchains("@rust_toolchains//:all")
crate = use_extension("@rules_rust//crate_universe:extension.bzl", "crate")
crate.spec(
package = "googletest",
version = ">0.0.0",
)
crate.spec(
package = "paste",
version = ">=1",
)
crate.from_specs()
use_repo(crate, crate_index = "crates")
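For orientation, the module() declaration above is what downstream Bazel projects resolve against under bzlmod. A minimal sketch of a consuming MODULE.bazel (the module name "my_app" and the version "28.0" are assumptions for illustration, not part of this change):

# Hypothetical downstream MODULE.bazel that depends on protobuf via bzlmod.
module(name = "my_app", version = "0.1.0")

# repo_name keeps WORKSPACE-era labels such as @com_google_protobuf//:protobuf
# working; under MVS the highest requested compatible version is resolved.
bazel_dep(name = "protobuf", version = "28.0", repo_name = "com_google_protobuf")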

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>NSPrivacyTracking</key>
<false/>
<key>NSPrivacyTrackingDomains</key>
<array/>
<key>NSPrivacyCollectedDataTypes</key>
<array/>
<key>NSPrivacyAccessedAPITypes</key>
<array/>
</dict>
</plist>

@ -1,11 +1,14 @@
Pod::Spec.new do |s|
s.name = 'Protobuf-C++'
s.version = '3.21.12'
s.version = '5.28.0'
s.summary = 'Protocol Buffers v3 runtime library for C++.'
s.homepage = 'https://github.com/google/protobuf'
s.license = 'BSD-3-Clause'
s.authors = { 'The Protocol Buffers contributors' => 'protobuf@googlegroups.com' }
s.cocoapods_version = '>= 1.0'
# Ensure developers won't hit CocoaPods/CocoaPods#11402 with the resource
# bundle for the privacy manifest.
s.cocoapods_version = '>= 1.12.0'
s.source = { :git => 'https://github.com/google/protobuf.git',
:tag => "v#{s.version}" }
@ -23,12 +26,17 @@ Pod::Spec.new do |s|
'src/google/protobuf/map_test_util*.{h,cc,inc}',
'src/google/protobuf/reflection_tester.{h,cc}'
s.resource_bundle = {
"Protobuf-C++_Privacy" => "PrivacyInfo.xcprivacy"
}
s.header_mappings_dir = 'src'
s.ios.deployment_target = '7.0'
s.osx.deployment_target = '10.9'
s.tvos.deployment_target = '9.0'
s.watchos.deployment_target = '2.0'
s.ios.deployment_target = '12.0'
s.osx.deployment_target = '10.13'
s.tvos.deployment_target = '12.0'
s.watchos.deployment_target = '6.0'
s.visionos.deployment_target = '1.0'
s.pod_target_xcconfig = {
# Do not let src/google/protobuf/stubs/time.h override system API

@ -5,12 +5,15 @@
# dependent projects use the :git notation to refer to the library.
Pod::Spec.new do |s|
s.name = 'Protobuf'
s.version = '3.21.12'
s.version = '3.28.0'
s.summary = 'Protocol Buffers v.3 runtime library for Objective-C.'
s.homepage = 'https://github.com/protocolbuffers/protobuf'
s.license = 'BSD-3-Clause'
s.authors = { 'The Protocol Buffers contributors' => 'protobuf@googlegroups.com' }
s.cocoapods_version = '>= 1.0'
# Ensure developers won't hit CocoaPods/CocoaPods#11402 with the resource
# bundle for the privacy manifest.
s.cocoapods_version = '>= 1.12.0'
s.source = { :git => 'https://github.com/protocolbuffers/protobuf.git',
:tag => "v#{s.version}" }
@ -30,6 +33,10 @@ Pod::Spec.new do |s|
# left out, as it's an umbrella implementation file.
s.exclude_files = 'objectivec/GPBProtocolBuffers.m'
s.resource_bundle = {
"Protobuf_Privacy" => "PrivacyInfo.xcprivacy"
}
# Set a CPP symbol so the code knows to use framework imports.
s.user_target_xcconfig = { 'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS=1' }
s.pod_target_xcconfig = { 'GCC_PREPROCESSOR_DEFINITIONS' => '$(inherited) GPB_USE_PROTOBUF_FRAMEWORK_IMPORTS=1' }
@ -38,6 +45,7 @@ Pod::Spec.new do |s|
s.osx.deployment_target = '10.12'
s.tvos.deployment_target = '12.0'
s.watchos.deployment_target = '6.0'
s.visionos.deployment_target = '1.0'
s.requires_arc = false
# The unittest need the generate sources from the testing related .proto

@ -1,25 +1,42 @@
Protocol Buffers - Google's data interchange format
===================================================
Copyright 2008 Google Inc.
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/protocolbuffers/protobuf/badge)](https://securityscorecards.dev/viewer/?uri=github.com/protocolbuffers/protobuf)
[Protocol Buffers documentation](https://developers.google.com/protocol-buffers/)
Copyright 2023 Google LLC
Overview
--------
Protocol Buffers (a.k.a., protobuf) are Google's language-neutral,
platform-neutral, extensible mechanism for serializing structured data. You
can find [protobuf's documentation on the Google Developers site](https://developers.google.com/protocol-buffers/).
can learn more about it in [protobuf's documentation](https://protobuf.dev).
This README file contains protobuf installation instructions. To install
protobuf, you need to install the protocol compiler (used to compile .proto
files) and the protobuf runtime for your chosen programming language.
Protocol Compiler Installation
Working With Protobuf Source Code
---------------------------------
Most users will find working from
[supported releases](https://github.com/protocolbuffers/protobuf/releases) to be
the easiest path.
If you choose to work from the head revision of the main branch, your build will
occasionally be broken by source-incompatible changes and insufficiently tested
(and therefore broken) behavior.
If you are using C++ or otherwise need to build protobuf from source as a part
of your project, you should pin to a release commit on a release branch.
This is because even release branches can experience some instability in between
release commits.
Protobuf Compiler Installation
------------------------------
The protocol compiler is written in C++. If you are using C++, please follow
The protobuf compiler is written in C++. If you are using C++, please follow
the [C++ Installation Instructions](src/README.md) to install protoc along
with the C++ runtime.
@ -58,13 +75,13 @@ how to install protobuf runtime for that specific language:
| Go | [protocolbuffers/protobuf-go](https://github.com/protocolbuffers/protobuf-go)|
| PHP | [php](php) |
| Dart | [dart-lang/protobuf](https://github.com/dart-lang/protobuf) |
| Javascript | [protocolbuffers/protobuf-javascript](https://github.com/protocolbuffers/protobuf-javascript)|
| JavaScript | [protocolbuffers/protobuf-javascript](https://github.com/protocolbuffers/protobuf-javascript)|
Quick Start
-----------
The best way to learn how to use protobuf is to follow the [tutorials in our
developer guide](https://developers.google.com/protocol-buffers/docs/tutorials).
developer guide](https://protobuf.dev/getting-started).
If you want to learn from code examples, take a look at the examples in the
[examples](examples) directory.
@ -72,11 +89,16 @@ If you want to learn from code examples, take a look at the examples in the
Documentation
-------------
The complete documentation is available via the [Protocol Buffers documentation](https://developers.google.com/protocol-buffers/).
The complete documentation is available at the [Protocol Buffers doc site](https://protobuf.dev).
Support Policy
--------------
Read about our [version support policy](https://protobuf.dev/version-support/)
to stay current on support timeframes for the language libraries.
Developer Community
-------------------
To be alerted to upcoming changes in Protocol Buffers and connect with protobuf developers and users,
[join the Google Group](https://groups.google.com/g/protobuf).

@ -1,5 +1,14 @@
workspace(name = "com_google_protobuf")
# An explicit self-reference to work around changes in Bazel 7.0
# See https://github.com/bazelbuild/bazel/issues/19973#issuecomment-1787814450
# buildifier: disable=duplicated-name
local_repository(name = "com_google_protobuf", path = ".")
# Second self-reference that makes it possible to load proto rules from @protobuf.
# buildifier: disable=duplicated-name
local_repository(name = "protobuf", path = ".")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
local_repository(
@ -7,33 +16,41 @@ local_repository(
path = "examples",
)
# Load common dependencies first to ensure we use the correct version
load("//:protobuf_deps.bzl", "PROTOBUF_MAVEN_ARTIFACTS", "protobuf_deps")
protobuf_deps()
load("@rules_python//python:repositories.bzl", "py_repositories")
py_repositories()
load("@rules_python//python/pip_install:repositories.bzl", "pip_install_dependencies")
pip_install_dependencies()
# Bazel platform rules.
http_archive(
name = "com_google_googletest",
sha256 = "ea54c9845568cb31c03f2eddc7a40f7f83912d04ab977ff50ec33278119548dd",
strip_prefix = "googletest-4c9a3bb62bf3ba1f1010bf96f9c8ed767b363774",
name = "platforms",
urls = [
"https://github.com/google/googletest/archive/4c9a3bb62bf3ba1f1010bf96f9c8ed767b363774.tar.gz",
"https://mirror.bazel.build/github.com/bazelbuild/platforms/releases/download/0.0.7/platforms-0.0.7.tar.gz",
"https://github.com/bazelbuild/platforms/releases/download/0.0.7/platforms-0.0.7.tar.gz",
],
sha256 = "3a561c99e7bdbe9173aa653fd579fe849f1d8d67395780ab4770b1f381431d51",
)
http_archive(
name = "com_googlesource_code_re2",
sha256 = "906d0df8ff48f8d3a00a808827f009a840190f404559f649cb8e4d7143255ef9",
strip_prefix = "re2-a276a8c738735a0fe45a6ee590fe2df69bcf4502",
urls = ["https://github.com/google/re2/archive/a276a8c738735a0fe45a6ee590fe2df69bcf4502.zip"], # 2022-04-08
name = "com_google_googletest",
sha256 = "730215d76eace9dd49bf74ce044e8daa065d175f1ac891cc1d6bb184ef94e565",
strip_prefix = "googletest-f53219cdcb7b084ef57414efea92ee5b71989558",
urls = [
"https://github.com/google/googletest/archive/f53219cdcb7b084ef57414efea92ee5b71989558.tar.gz" # 2023-03-16
],
)
# Bazel platform rules.
http_archive(
name = "platforms",
sha256 = "a879ea428c6d56ab0ec18224f976515948822451473a80d06c2e50af0bbe5121",
strip_prefix = "platforms-da5541f26b7de1dc8e04c075c99df5351742a4a2",
urls = ["https://github.com/bazelbuild/platforms/archive/da5541f26b7de1dc8e04c075c99df5351742a4a2.zip"], # 2022-05-27
)
load("@com_google_googletest//:googletest_deps.bzl", "googletest_deps")
# Load common dependencies.
load("//:protobuf_deps.bzl", "PROTOBUF_MAVEN_ARTIFACTS", "protobuf_deps")
protobuf_deps()
googletest_deps()
load("@rules_jvm_external//:repositories.bzl", "rules_jvm_external_deps")
@ -66,54 +83,165 @@ load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
bazel_skylib_workspace()
load("@rules_pkg//:deps.bzl", "rules_pkg_dependencies")
rules_pkg_dependencies()
load("@build_bazel_rules_apple//apple:repositories.bzl", "apple_rules_dependencies")
apple_rules_dependencies()
load("@build_bazel_apple_support//lib:repositories.bzl", "apple_support_dependencies")
apple_support_dependencies()
load("@rules_cc//cc:repositories.bzl", "rules_cc_dependencies")
rules_cc_dependencies()
# For `kt_jvm_library`
load("@io_bazel_rules_kotlin//kotlin:repositories.bzl", "kotlin_repositories")
kotlin_repositories()
load("@io_bazel_rules_kotlin//kotlin:core.bzl", "kt_register_toolchains")
kt_register_toolchains()
http_archive(
name = "rules_ruby",
urls = [
"https://github.com/protocolbuffers/rules_ruby/archive/b7f3e9756f3c45527be27bc38840d5a1ba690436.zip"
],
strip_prefix = "rules_ruby-b7f3e9756f3c45527be27bc38840d5a1ba690436",
sha256 = "347927fd8de6132099fcdc58e8f7eab7bde4eb2fd424546b9cd4f1c6f8f8bad8",
)
load("@rules_ruby//ruby:defs.bzl", "ruby_runtime")
ruby_runtime("system_ruby")
register_toolchains("@system_ruby//:toolchain")
# Uncomment pairs of ruby_runtime() + register_toolchains() calls below to
# enable local JRuby testing. Do not submit the changes (downloading the JRuby
# SDKs would slow down tests for non-JRuby builds).
#ruby_runtime("jruby-9.2")
#
#register_toolchains("@jruby-9.2//:toolchain")
#
#ruby_runtime("jruby-9.3")
#
#register_toolchains("@jruby-9.3//:toolchain")
load("@system_ruby//:bundle.bzl", "ruby_bundle")
ruby_bundle(
name = "protobuf_bundle",
srcs = ["//ruby:google-protobuf.gemspec"],
gemfile = "//ruby:Gemfile",
)
load("@upb//bazel:workspace_deps.bzl", "upb_deps")
upb_deps()
http_archive(
name = "lua",
build_file = "//python/dist:lua.BUILD",
sha256 = "b9e2e4aad6789b3b63a056d442f7b39f0ecfca3ae0f1fc0ae4e9614401b69f4b",
strip_prefix = "lua-5.2.4",
urls = [
"https://mirror.bazel.build/www.lua.org/ftp/lua-5.2.4.tar.gz",
"https://www.lua.org/ftp/lua-5.2.4.tar.gz",
],
)
load("@upb//bazel:system_python.bzl", "system_python")
system_python(
name = "system_python",
minimum_python_version = "3.7",
http_archive(
name = "com_github_google_benchmark",
urls = ["https://github.com/google/benchmark/archive/0baacde3618ca617da95375e0af13ce1baadea47.zip"],
strip_prefix = "benchmark-0baacde3618ca617da95375e0af13ce1baadea47",
sha256 = "62e2f2e6d8a744d67e4bbc212fcfd06647080de4253c97ad5c6749e09faf2cb0",
)
http_archive(
name = "com_google_googleapis",
urls = ["https://github.com/googleapis/googleapis/archive/d81d0b9e6993d6ab425dff4d7c3d05fb2e59fa57.zip"],
strip_prefix = "googleapis-d81d0b9e6993d6ab425dff4d7c3d05fb2e59fa57",
sha256 = "d986023c3d8d2e1b161e9361366669cac9fb97c2a07e656c2548aca389248bb4",
build_file = "//benchmarks:BUILD.googleapis",
patch_cmds = ["find google -type f -name BUILD.bazel -delete"],
)
load("@system_python//:pip.bzl", "pip_parse")
pip_parse(
name="pip_deps",
requirements = "@upb//python:requirements.txt",
requirements_overrides = {
"3.11": "@upb//python:requirements_311.txt",
},
name = "pip_deps",
requirements = "//python:requirements.txt",
)
load("@pip_deps//:requirements.bzl", "install_deps")
install_deps()
load("@utf8_range//:workspace_deps.bzl", "utf8_range_deps")
utf8_range_deps()
http_archive(
name = "rules_fuzzing",
sha256 = "77206c54b71f4dd5335123a6ff2a8ea688eca5378d34b4838114dff71652cf26",
strip_prefix = "rules_fuzzing-0.5.1",
urls = ["https://github.com/bazelbuild/rules_fuzzing/releases/download/v0.5.1/rules_fuzzing-0.5.1.zip"],
patches = ["//third_party:rules_fuzzing.patch"],
patch_args = ["-p1"],
)
load("@rules_fuzzing//fuzzing:repositories.bzl", "rules_fuzzing_dependencies")
rules_fuzzing_dependencies()
load("@rules_fuzzing//fuzzing:init.bzl", "rules_fuzzing_init")
rules_fuzzing_init()
load("@fuzzing_py_deps//:requirements.bzl", fuzzing_py_deps_install_deps = "install_deps")
fuzzing_py_deps_install_deps()
http_archive(
name = "rules_rust",
sha256 = "9ecd0f2144f0a24e6bc71ebcc50a1ee5128cedeceb32187004532c9710cb2334",
urls = ["https://github.com/bazelbuild/rules_rust/releases/download/0.29.1/rules_rust-v0.29.1.tar.gz"],
)
bind(
name = "python_headers",
actual = "@system_python//:python_headers",
load("@rules_rust//rust:repositories.bzl", "rules_rust_dependencies", "rust_register_toolchains")
rules_rust_dependencies()
rust_register_toolchains(edition = "2021")
load("@rules_rust//crate_universe:defs.bzl", "crate", "crates_repository")
# to repin, invoke `CARGO_BAZEL_REPIN=1 bazel sync --only=crate_index`
crates_repository(
name = "crate_index",
cargo_lockfile = "//:Cargo.lock",
lockfile = "//:Cargo.bazel.lock",
packages = {
"googletest": crate.spec(
git = "https://github.com/google/googletest-rust",
rev = "471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f",
),
"paste": crate.spec(
version = ">=1",
),
},
)
load("@crate_index//:defs.bzl", "crate_repositories")
crate_repositories()
# For testing runtime against old gencode from a previous major version.
http_archive(
name = "com_google_protobuf_v25.0",
strip_prefix = "protobuf-25.0",
url = "https://github.com/protocolbuffers/protobuf/releases/download/v25.0/protobuf-25.0.tar.gz",
)
# Needed as a dependency of @com_google_protobuf_v25.x, which was before
# utf8_range was merged in.
http_archive(
name = "utf8_range",
strip_prefix = "utf8_range-d863bc33e15cba6d873c878dcca9e6fe52b2f8cb",
url = "https://github.com/protocolbuffers/utf8_range/archive/d863bc33e15cba6d873c878dcca9e6fe52b2f8cb.zip",
)
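The crates_repository() above pins the Rust test dependencies against Cargo.lock and Cargo.bazel.lock. As a sketch of consumption, assuming crate_universe's usual @crate_index//:<crate> aliases (the target and file names below are illustrative, not taken from this change):

load("@rules_rust//rust:defs.bzl", "rust_test")

rust_test(
    name = "example_test",                       # hypothetical target
    srcs = ["example_test.rs"],                  # hypothetical source file
    deps = ["@crate_index//:googletest"],
    proc_macro_deps = ["@crate_index//:paste"],  # paste is a proc-macro crate
)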

@ -38,8 +38,8 @@ dotnet restore
dotnet build -c %configuration% || goto error
echo Testing C#
dotnet test -c %configuration% -f netcoreapp3.1 Google.Protobuf.Test\Google.Protobuf.Test.csproj || goto error
dotnet test -c %configuration% -f net451 Google.Protobuf.Test\Google.Protobuf.Test.csproj || goto error
dotnet test -c %configuration% -f net6.0 Google.Protobuf.Test\Google.Protobuf.Test.csproj || goto error
dotnet test -c %configuration% -f net462 Google.Protobuf.Test\Google.Protobuf.Test.csproj || goto error
goto :EOF

@ -0,0 +1,57 @@
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
licenses(["notice"])
bzl_library(
name = "proto_library_bzl",
srcs = ["proto_library.bzl"],
visibility = ["//visibility:public"],
)
bzl_library(
name = "cc_proto_library_bzl",
srcs = ["cc_proto_library.bzl"],
visibility = ["//visibility:public"],
)
bzl_library(
name = "java_proto_library_bzl",
srcs = ["java_proto_library.bzl"],
visibility = ["//visibility:public"],
)
bzl_library(
name = "java_lite_proto_library_bzl",
srcs = ["java_lite_proto_library.bzl"],
visibility = ["//visibility:public"],
)
bzl_library(
name = "py_proto_library_bzl",
srcs = ["py_proto_library.bzl"],
visibility = ["//visibility:public"],
deps = [
"//bazel/common:proto_common_bzl",
"//bazel/common:proto_info_bzl",
"@rules_python//python:py_info_bzl",
],
)
bzl_library(
name = "upb_proto_library_bzl",
srcs = [
"upb_c_proto_library.bzl",
"upb_minitable_proto_library.bzl",
"upb_proto_library.bzl",
"upb_proto_reflection_library.bzl",
],
visibility = ["//visibility:public"],
deps = ["//bazel/private:upb_proto_library_internal_bzl"],
)

@ -0,0 +1,3 @@
"""cc_proto_library rule"""
cc_proto_library = native.cc_proto_library
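This shim simply re-exports the native rule under a loadable Starlark name. A minimal BUILD sketch of how it would be used (target and file names are illustrative):

load("//bazel:cc_proto_library.bzl", "cc_proto_library")
load("//bazel:proto_library.bzl", "proto_library")

proto_library(
    name = "foo_proto",
    srcs = ["foo.proto"],  # hypothetical .proto file
)

cc_proto_library(
    name = "foo_cc_proto",
    deps = [":foo_proto"],
)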

@ -0,0 +1,34 @@
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
bzl_library(
name = "proto_common_bzl",
srcs = [
"proto_common.bzl",
],
visibility = ["//visibility:public"],
deps = [
"//bazel/private:native_bzl",
],
)
bzl_library(
name = "proto_info_bzl",
srcs = [
"proto_info.bzl",
],
visibility = ["//visibility:public"],
deps = [
"//bazel/private:native_bzl",
],
)
bzl_library(
name = "proto_lang_toolchain_info_bzl",
srcs = [
"proto_lang_toolchain_info.bzl",
],
visibility = ["//visibility:public"],
deps = [
":proto_common.bzl",
],
)

@ -0,0 +1,5 @@
"""proto_common"""
load("//bazel/private:native.bzl", "native_proto_common")
proto_common = native_proto_common

@ -0,0 +1,5 @@
"""ProtoInfo"""
load("//bazel/private:native.bzl", "NativeProtoInfo")
ProtoInfo = NativeProtoInfo

@ -0,0 +1,5 @@
"""ProtoLangToolchainInfo"""
load("//bazel/common:proto_common.bzl", "proto_common")
ProtoLangToolchainInfo = proto_common.ProtoLangToolchainInfo

@ -0,0 +1,3 @@
"""java_lite_proto_library rule"""
java_lite_proto_library = native.java_lite_proto_library

@ -0,0 +1,3 @@
"""java_proto_library rule"""
java_proto_library = native.java_proto_library

@ -0,0 +1,42 @@
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
licenses(["notice"])
bzl_library(
name = "upb_proto_library_internal_bzl",
srcs = [
"upb_proto_library_internal/aspect.bzl",
"upb_proto_library_internal/cc_library_func.bzl",
"upb_proto_library_internal/copts.bzl",
"upb_proto_library_internal/rule.bzl",
],
visibility = ["//bazel:__pkg__"],
deps = [
"//bazel/common:proto_common_bzl",
"@bazel_skylib//lib:paths",
"@bazel_tools//tools/cpp:toolchain_utils.bzl",
],
)
bzl_library(
name = "native_bzl",
srcs = [
"native.bzl",
],
visibility = ["//bazel:__subpackages__"],
)
bzl_library(
name = "proto_toolchain_rule_bzl",
srcs = [
"proto_toolchain_rule.bzl",
],
visibility = ["//bazel:__subpackages__"],
)

@ -0,0 +1,5 @@
"""Renames toplevel symbols so they can be exported in Starlark under the same name"""
NativeProtoInfo = ProtoInfo
native_proto_common = proto_common_do_not_use
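The rename exists because a .bzl file cannot load a symbol and re-export it under the same name; the indirection lets bazel/common/proto_info.bzl publish ProtoInfo. A small sketch of a consumer, with hypothetical rule and attribute names:

load("//bazel/common:proto_info.bzl", "ProtoInfo")

def _collect_sources_impl(ctx):
    # Surface the direct .proto sources of a proto_library dependency
    # (ctx.attr.dep is an assumed label attribute, not from this change).
    info = ctx.attr.dep[ProtoInfo]
    return [DefaultInfo(files = depset(info.direct_sources))]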

@ -0,0 +1,47 @@
"""A Starlark implementation of the proto_toolchain rule."""
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
def _impl(ctx):
kwargs = {}
if getattr(proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False):
kwargs["toolchain_type"] = "@rules_proto//proto:toolchain_type"
return [
DefaultInfo(
files = depset(),
runfiles = ctx.runfiles(),
),
platform_common.ToolchainInfo(
proto = ProtoLangToolchainInfo(
out_replacement_format_flag = ctx.attr.command_line,
output_files = ctx.attr.output_files,
plugin = None,
runtime = None,
proto_compiler = ctx.attr.proto_compiler.files_to_run,
protoc_opts = ctx.fragments.proto.experimental_protoc_opts,
progress_message = ctx.attr.progress_message,
mnemonic = ctx.attr.mnemonic,
**kwargs
),
),
]
proto_toolchain = rule(
_impl,
attrs =
{
"progress_message": attr.string(default = "Generating Descriptor Set proto_library %{label}"),
"mnemonic": attr.string(default = "GenProtoDescriptorSet"),
"command_line": attr.string(default = "--descriptor_set_out=%s"),
"output_files": attr.string(values = ["single", "multiple", "legacy"], default = "single"),
"proto_compiler": attr.label(
cfg = "exec",
executable = True,
allow_files = True, # Used by mocks in tests. Consider fixing tests and removing it.
),
},
provides = [platform_common.ToolchainInfo],
fragments = ["proto"],
)
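A sketch of how this rule might be instantiated in a BUILD file; the attribute defaults above supply the command line, mnemonic, and progress message, so only the compiler needs wiring (the //:protoc label is an assumption for illustration):

load("//bazel/private:proto_toolchain_rule.bzl", "proto_toolchain")

proto_toolchain(
    name = "protoc_sources",       # hypothetical target name
    proto_compiler = "//:protoc",  # assumed label of the protoc binary
)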

@ -0,0 +1,303 @@
"""Implementation of the aspect that powers the upb_*_proto_library() rules."""
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
load("//bazel/common:proto_common.bzl", "proto_common")
load(":upb_proto_library_internal/cc_library_func.bzl", "cc_library_func")
load(":upb_proto_library_internal/copts.bzl", "UpbProtoLibraryCoptsInfo")
# begin:github_only
_is_google3 = False
# end:github_only
# begin:google_only
# _is_google3 = True
# end:google_only
GeneratedSrcsInfo = provider(
"Provides generated headers and sources",
fields = {
"srcs": "list of srcs",
"hdrs": "list of hdrs",
"thunks": "Experimental, do not use. List of srcs defining C API. Incompatible with hdrs.",
},
)
def output_dir(ctx, proto_info):
"""Returns the output directory where generated proto files will be placed.
Args:
ctx: Rule context.
proto_info: ProtoInfo provider.
Returns:
A string specifying the output directory
"""
proto_root = proto_info.proto_source_root
if proto_root.startswith(ctx.bin_dir.path):
path = proto_root
else:
path = ctx.bin_dir.path + "/" + proto_root
if proto_root == ".":
path = ctx.bin_dir.path
return path
def _concat_lists(lists):
ret = []
for lst in lists:
ret = ret + lst
return ret
def _merge_generated_srcs(srcs):
return GeneratedSrcsInfo(
srcs = _concat_lists([s.srcs for s in srcs]),
hdrs = _concat_lists([s.hdrs for s in srcs]),
thunks = _concat_lists([s.thunks for s in srcs]),
)
def _get_implicit_weak_field_sources(ctx, proto_info):
# Creating one .cc file for each Message in a proto allows the linker to be more aggressive
# about removing unused classes. However, since the number of outputs won't be known at Blaze
# analysis time, all of the generated source files are put in a directory and a TreeArtifact is
# used to represent them.
proto_artifacts = []
for proto_source in proto_info.direct_sources:
# We can have slashes in the target name. For example, proto_source can be
# dir/a.proto. However, proto_source.basename will return a.proto, when in
# reality the BUILD file declares it as dir/a.proto, because the target name
# contains a slash. There is no good workaround for this.
# We use ctx.label.package to check whether the target name contains a slash,
# which is similar to what declare_directory does.
if not proto_source.short_path.startswith(ctx.label.package):
fail("This should never happen, proto source {} path does not start with {}.".format(
proto_source.short_path,
ctx.label.package,
))
proto_source_name = proto_source.short_path[len(ctx.label.package) + 1:]
last_dot = proto_source_name.rfind(".")
if last_dot != -1:
proto_source_name = proto_source_name[:last_dot]
proto_artifacts.append(ctx.actions.declare_directory(proto_source_name + ".upb_weak_minitables"))
return proto_artifacts
def _get_feature_configuration(ctx, cc_toolchain, proto_info):
requested_features = list(ctx.features)
# Disable the whole-archive behavior for protobuf generated code when the
# proto_one_output_per_message feature is enabled.
requested_features.append("disable_whole_archive_for_static_lib_if_proto_one_output_per_message")
unsupported_features = list(ctx.disabled_features)
if len(proto_info.direct_sources) != 0:
requested_features.append("header_modules")
else:
unsupported_features.append("header_modules")
return cc_common.configure_features(
ctx = ctx,
cc_toolchain = cc_toolchain,
requested_features = requested_features,
unsupported_features = unsupported_features,
)
def _generate_srcs_list(ctx, generator, proto_info):
if len(proto_info.direct_sources) == 0:
return GeneratedSrcsInfo(srcs = [], hdrs = [], thunks = [])
ext = "." + generator
srcs = []
thunks = []
hdrs = proto_common.declare_generated_files(
ctx.actions,
extension = ext + ".h",
proto_info = proto_info,
)
if not (generator == "upb" and _is_google3):
# TODO: The OSS build should also exclude this file for the upb generator,
# as it is empty and unnecessary. We only added it to make the OSS build happy on
# Windows and macOS.
srcs += proto_common.declare_generated_files(
ctx.actions,
extension = ext + ".c",
proto_info = proto_info,
)
if generator == "upb":
thunks = proto_common.declare_generated_files(
ctx.actions,
extension = ext + ".thunks.c",
proto_info = proto_info,
)
ctx.actions.run_shell(
inputs = hdrs,
outputs = thunks,
command = " && ".join([
"sed 's/UPB_INLINE //' {} > {}".format(hdr.path, thunk.path)
for (hdr, thunk) in zip(hdrs, thunks)
]),
progress_message = "Generating thunks for upb protos API for: " + ctx.label.name,
mnemonic = "GenUpbProtosThunks",
)
return GeneratedSrcsInfo(
srcs = srcs,
hdrs = hdrs,
thunks = thunks,
)
def _generate_upb_protos(ctx, generator, proto_info, feature_configuration):
implicit_weak = generator == "upb_minitable" and cc_common.is_enabled(
feature_configuration = feature_configuration,
feature_name = "proto_one_output_per_message",
)
srcs = _generate_srcs_list(ctx, generator, proto_info)
additional_args = ctx.actions.args()
if implicit_weak:
srcs.srcs.extend(_get_implicit_weak_field_sources(ctx, proto_info))
additional_args.add("--upb_minitable_opt=one_output_per_message")
proto_common.compile(
actions = ctx.actions,
proto_info = proto_info,
proto_lang_toolchain_info = _get_lang_toolchain(ctx, generator),
generated_files = srcs.srcs + srcs.hdrs,
experimental_exec_group = "proto_compiler",
additional_args = additional_args,
)
return srcs
def _generate_name(ctx, generator, thunks = False):
if thunks:
return ctx.rule.attr.name + "." + generator + ".thunks"
return ctx.rule.attr.name + "." + generator
def _get_dep_cc_infos(target, ctx, generator, cc_provider, dep_cc_provider):
rule_deps = ctx.rule.attr.deps
dep_ccinfos = [dep[cc_provider].cc_info for dep in rule_deps]
if dep_cc_provider:
# This gives access to our direct sibling, e.g. foo.upb.h can #include "foo.upb_minitable.h"
dep_ccinfos.append(target[dep_cc_provider].cc_info)
# This gives access to imports, e.g. foo.upb.h can #include "import1.upb_minitable.h",
# but not to transitive imports: foo.upb.h cannot #include "transitive_import1.upb_minitable.h"
dep_ccinfos += [dep[dep_cc_provider].cc_info for dep in rule_deps]
return dep_ccinfos
def _get_lang_toolchain(ctx, generator):
lang_toolchain_name = "_" + generator + "_toolchain"
return getattr(ctx.attr, lang_toolchain_name)[proto_common.ProtoLangToolchainInfo]
def _compile_upb_protos(ctx, files, generator, dep_ccinfos, cc_provider, proto_info):
cc_info = cc_library_func(
ctx = ctx,
name = _generate_name(ctx, generator),
hdrs = files.hdrs,
srcs = files.srcs,
includes = [output_dir(ctx, proto_info)],
copts = ctx.attr._copts[UpbProtoLibraryCoptsInfo].copts,
dep_ccinfos = dep_ccinfos,
)
if files.thunks:
cc_info_with_thunks = cc_library_func(
ctx = ctx,
name = _generate_name(ctx, generator, files.thunks),
hdrs = [],
srcs = files.thunks,
includes = [output_dir(ctx, proto_info)],
copts = ctx.attr._copts[UpbProtoLibraryCoptsInfo].copts,
dep_ccinfos = dep_ccinfos + [cc_info],
)
return cc_provider(
cc_info = cc_info,
cc_info_with_thunks = cc_info_with_thunks,
)
else:
return cc_provider(
cc_info = cc_info,
)
_GENERATORS = ["upb", "upbdefs", "upb_minitable"]
def _get_hint_providers(ctx, generator):
if generator not in _GENERATORS:
fail("Please add new generator '{}' to _GENERATORS list".format(generator))
possible_owners = []
for generator in _GENERATORS:
possible_owners.append(ctx.label.relative(_generate_name(ctx, generator)))
possible_owners.append(ctx.label.relative(_generate_name(ctx, generator, thunks = True)))
if hasattr(cc_common, "CcSharedLibraryHintInfo"):
return [cc_common.CcSharedLibraryHintInfo(owners = possible_owners)]
elif hasattr(cc_common, "CcSharedLibraryHintInfo_6_X_constructor_do_not_use"):
# This branch can be deleted once 6.X is not supported by upb rules
return [cc_common.CcSharedLibraryHintInfo_6_X_constructor_do_not_use(owners = possible_owners)]
return []
def upb_proto_aspect_impl(
target,
ctx,
generator,
cc_provider,
dep_cc_provider,
file_provider,
provide_cc_shared_library_hints = True):
"""A shared aspect implementation for upb_*proto_library() rules.
Args:
target: The `target` parameter from the aspect function.
ctx: The `ctx` parameter from the aspect function.
generator: A string describing which aspect we are generating. This triggers several special
behaviors, and ideally this will be refactored to be less magical.
cc_provider: The provider that this aspect will attach to the target. Should contain a
`cc_info` field. The aspect will ensure that each compilation action can compile and link
against this provider's cc_info for all proto_library() deps.
dep_cc_provider: For aspects that depend on other aspects, this is the provider of the aspect
that we depend on. The aspect will be able to include the header files from this provider.
file_provider: A provider that this aspect will attach to the target to expose the source
files generated by this aspect. These files are primarily useful for returning in
DefaultInfo(), so users who build the upb_*proto_library() rule directly can view the
generated sources.
provide_cc_shared_library_hints: Whether shared library hints should be provided.
Returns:
The `cc_provider` and `file_provider` providers as described above.
"""
dep_ccinfos = _get_dep_cc_infos(target, ctx, generator, cc_provider, dep_cc_provider)
if not getattr(ctx.rule.attr, "srcs", []):
# This target doesn't declare any sources; re-export all of its deps instead.
# This is known as an "alias library":
# https://bazel.build/reference/be/protocol-buffer#proto_library.srcs
files = _merge_generated_srcs([dep[file_provider].srcs for dep in ctx.rule.attr.deps])
wrapped_cc_info = cc_provider(
cc_info = cc_common.merge_cc_infos(direct_cc_infos = dep_ccinfos),
)
else:
proto_info = target[ProtoInfo]
cc_toolchain = find_cpp_toolchain(ctx)
feature_configuration = _get_feature_configuration(ctx, cc_toolchain, proto_info)
files = _generate_upb_protos(
ctx,
generator,
proto_info,
feature_configuration,
)
wrapped_cc_info = _compile_upb_protos(
ctx,
files,
generator,
dep_ccinfos + [_get_lang_toolchain(ctx, generator).runtime[CcInfo]],
cc_provider,
proto_info,
)
hints = _get_hint_providers(ctx, generator) if provide_cc_shared_library_hints else []
return hints + [
file_provider(srcs = files),
wrapped_cc_info,
]
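The concrete aspects later in this change ("upb", "upb_minitable", "upbdefs") all funnel through this shared implementation. As a minimal sketch, assuming a hypothetical generator name and providers, a new aspect implementation would be wired up as follows:

```starlark
# Sketch only: "upb_foo" and both providers are hypothetical names.
_UpbFooCcInfo = provider("Wrapped CcInfo", fields = ["cc_info"])
_UpbFooSrcsInfo = provider("Generated sources", fields = ["srcs"])

def _upb_foo_aspect_impl(target, ctx):
    # "upb_foo" would also need to be added to the _GENERATORS list above.
    return upb_proto_aspect_impl(
        target = target,
        ctx = ctx,
        generator = "upb_foo",
        cc_provider = _UpbFooCcInfo,
        dep_cc_provider = None,
        file_provider = _UpbFooSrcsInfo,
    )
```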

@@ -0,0 +1,86 @@
"""A function to compile C/C++ code, like cc_library() but from Starlark."""
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain", "use_cpp_toolchain")
# begin:google_only
#
# def upb_use_cpp_toolchain():
# # TODO: We shouldn't need to add this to the result of use_cpp_toolchain().
# return [
# config_common.toolchain_type(
# "@bazel_tools//tools/cpp:cc_runtimes_toolchain_type",
# mandatory = False,
# ),
# ] + use_cpp_toolchain()
#
# end:google_only
# begin:github_only
def upb_use_cpp_toolchain():
return use_cpp_toolchain()
# end:github_only
def cc_library_func(ctx, name, hdrs, srcs, copts, includes, dep_ccinfos):
"""Like cc_library(), but callable from rules.
Args:
ctx: Rule context.
name: Unique name used to generate output files.
hdrs: Public headers that can be #included from other rules.
srcs: C/C++ source files.
copts: Additional options for cc compilation.
includes: Additional include paths.
dep_ccinfos: CcInfo providers of dependencies we should build/link against.
Returns:
CcInfo provider for this compilation.
"""
# begin:google_only
# cc_runtimes_toolchain = ctx.toolchains["@bazel_tools//tools/cpp:cc_runtimes_toolchain_type"]
# if cc_runtimes_toolchain:
# dep_ccinfos += [
# target[CcInfo]
# for target in cc_runtimes_toolchain.cc_runtimes_info.runtimes
# ]
#
# end:google_only
compilation_contexts = [info.compilation_context for info in dep_ccinfos]
linking_contexts = [info.linking_context for info in dep_ccinfos]
toolchain = find_cpp_toolchain(ctx)
feature_configuration = cc_common.configure_features(
ctx = ctx,
cc_toolchain = toolchain,
requested_features = ctx.features,
unsupported_features = ctx.disabled_features,
)
(compilation_context, compilation_outputs) = cc_common.compile(
actions = ctx.actions,
feature_configuration = feature_configuration,
cc_toolchain = toolchain,
name = name,
srcs = srcs,
includes = includes,
public_hdrs = hdrs,
user_compile_flags = copts,
compilation_contexts = compilation_contexts,
)
# buildifier: disable=unused-variable
(linking_context, linking_outputs) = cc_common.create_linking_context_from_compilation_outputs(
actions = ctx.actions,
name = name,
feature_configuration = feature_configuration,
cc_toolchain = toolchain,
compilation_outputs = compilation_outputs,
linking_contexts = linking_contexts,
disallow_dynamic_library = cc_common.is_enabled(
feature_configuration = feature_configuration,
feature_name = "targets_windows",
) or not cc_common.is_enabled(
feature_configuration = feature_configuration,
feature_name = "supports_dynamic_linker",
),
)
return CcInfo(
compilation_context = compilation_context,
linking_context = linking_context,
)
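To illustrate the API above, here is a minimal, hypothetical rule implementation that compiles one generated source file with cc_library_func(). The rule, attribute, and file names are assumptions, not part of this change, and the calling rule would additionally need `fragments = ["cpp"]`, `toolchains = upb_use_cpp_toolchain()`, and a `_cc_toolchain` attribute, as the aspects in this change declare:

```starlark
# Hypothetical sketch: compile a single generated .c file via cc_library_func().
def _my_codegen_rule_impl(ctx):
    src = ctx.actions.declare_file(ctx.label.name + ".c")
    ctx.actions.write(src, "int the_answer(void) { return 42; }\n")
    cc_info = cc_library_func(
        ctx = ctx,
        name = ctx.label.name,
        hdrs = [],
        srcs = [src],
        copts = [],
        includes = [],
        dep_ccinfos = [dep[CcInfo] for dep in ctx.attr.deps],
    )
    return [cc_info]
```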

@@ -0,0 +1,16 @@
"""Dummy rule to expose select() copts to aspects."""
UpbProtoLibraryCoptsInfo = provider(
"Provides copts for upb proto targets",
fields = {
"copts": "copts for upb_proto_library()",
},
)
def upb_proto_library_copts_impl(ctx):
return UpbProtoLibraryCoptsInfo(copts = ctx.attr.copts)
upb_proto_library_copts = rule(
implementation = upb_proto_library_copts_impl,
attrs = {"copts": attr.string_list(default = [])},
)
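Aspect attribute defaults cannot carry a select(), which is why the resolved copts are passed through a provider instead. A hedged sketch of instantiating the rule in a BUILD file (the config_setting label and flag are illustrative only, though the target name matches the one the aspects in this change reference):

```starlark
upb_proto_library_copts(
    name = "upb_proto_library_copts__for_generated_code_only_do_not_use",
    copts = select({
        ":some_config_setting": ["-DSOME_FLAG"],  # illustrative only
        "//conditions:default": [],
    }),
)
```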

@@ -0,0 +1,39 @@
"""Internal rule implementation for upb_*_proto_library() rules."""
def _filter_none(elems):
out = []
for elem in elems:
if elem:
out.append(elem)
return out
def upb_proto_rule_impl(ctx, cc_info_provider, srcs_provider):
"""An implementation for upb_*proto_library() rules.
Args:
ctx: The rule `ctx` argument
cc_info_provider: The provider containing a wrapped CcInfo that will be exposed to users who
depend on this rule.
srcs_provider: The provider containing the generated source files. This will be used to make
the DefaultInfo return the source files.
Returns:
Providers for this rule.
"""
if len(ctx.attr.deps) != 1:
fail("only one deps dependency allowed.")
dep = ctx.attr.deps[0]
srcs = dep[srcs_provider].srcs
cc_info = dep[cc_info_provider].cc_info
lib = cc_info.linking_context.linker_inputs.to_list()[0].libraries[0]
files = _filter_none([
lib.static_library,
lib.pic_static_library,
lib.dynamic_library,
])
return [
DefaultInfo(files = depset(files + srcs.hdrs + srcs.srcs)),
srcs,
cc_info,
]

@@ -0,0 +1,3 @@
"""proto_library rule"""
proto_library = native.proto_library

@@ -0,0 +1,211 @@
"""The implementation of the `py_proto_library` rule and its aspect."""
load("@rules_python//python:py_info.bzl", "PyInfo")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
PY_PROTO_TOOLCHAIN = "@rules_python//python/proto:toolchain_type"
_PyProtoInfo = provider(
doc = "Encapsulates information needed by the Python proto rules.",
fields = {
"imports": """
(depset[str]) The field forwarding PyInfo.imports coming from
the proto language runtime dependency.""",
"runfiles_from_proto_deps": """
(depset[File]) Files from the transitive closure implicit proto
dependencies""",
"transitive_sources": """(depset[File]) The Python sources.""",
},
)
def _filter_provider(provider, *attrs):
return [dep[provider] for attr in attrs for dep in attr if provider in dep]
def _incompatible_toolchains_enabled():
return getattr(proto_common, "INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION", False)
def _py_proto_aspect_impl(target, ctx):
"""Generates and compiles Python code for a proto_library.
The function runs the protobuf compiler on the `proto_library` target,
generating a .py file for each .proto file.
Args:
target: (Target) A target providing `ProtoInfo`. Usually this means a
`proto_library` target, but not always; you must expect to visit
non-`proto_library` targets, too.
ctx: (RuleContext) The rule context.
Returns:
([_PyProtoInfo]) Providers collecting transitive information about
generated files.
"""
_proto_library = ctx.rule.attr
# Check Proto file names
for proto in target[ProtoInfo].direct_sources:
if proto.is_source and "-" in proto.dirname:
fail("Cannot generate Python code for a .proto whose path contains '-' ({}).".format(
proto.path,
))
if _incompatible_toolchains_enabled():
toolchain = ctx.toolchains[PY_PROTO_TOOLCHAIN]
if not toolchain:
fail("No toolchains registered for '%s'." % PY_PROTO_TOOLCHAIN)
proto_lang_toolchain_info = toolchain.proto
else:
proto_lang_toolchain_info = getattr(ctx.attr, "_aspect_proto_toolchain")[proto_common.ProtoLangToolchainInfo]
api_deps = [proto_lang_toolchain_info.runtime]
generated_sources = []
proto_info = target[ProtoInfo]
proto_root = proto_info.proto_source_root
if proto_info.direct_sources:
# Generate py files
generated_sources = proto_common.declare_generated_files(
actions = ctx.actions,
proto_info = proto_info,
extension = "_pb2.py",
name_mapper = lambda name: name.replace("-", "_").replace(".", "/"),
)
# Handles multiple repository and virtual import cases
if proto_root.startswith(ctx.bin_dir.path):
proto_root = proto_root[len(ctx.bin_dir.path) + 1:]
plugin_output = ctx.bin_dir.path + "/" + proto_root
proto_root = ctx.workspace_name + "/" + proto_root
proto_common.compile(
actions = ctx.actions,
proto_info = proto_info,
proto_lang_toolchain_info = proto_lang_toolchain_info,
generated_files = generated_sources,
plugin_output = plugin_output,
)
# Generated sources == Python sources
python_sources = generated_sources
deps = _filter_provider(_PyProtoInfo, getattr(_proto_library, "deps", []))
runfiles_from_proto_deps = depset(
transitive = [dep[DefaultInfo].default_runfiles.files for dep in api_deps] +
[dep.runfiles_from_proto_deps for dep in deps],
)
transitive_sources = depset(
direct = python_sources,
transitive = [dep.transitive_sources for dep in deps],
)
return [
_PyProtoInfo(
imports = depset(
# Adding to PYTHONPATH so the generated modules can be
# imported. This is necessary when there is a
# strip_import_prefix, because the Python modules are generated
# under _virtual_imports. But it's undesirable otherwise, because
# it will put the repo root at the top of the PYTHONPATH, ahead of
# directories added through `imports` attributes.
[proto_root] if "_virtual_imports" in proto_root else [],
transitive = [dep[PyInfo].imports for dep in api_deps] + [dep.imports for dep in deps],
),
runfiles_from_proto_deps = runfiles_from_proto_deps,
transitive_sources = transitive_sources,
),
]
_py_proto_aspect = aspect(
implementation = _py_proto_aspect_impl,
attrs = {} if _incompatible_toolchains_enabled() else {
"_aspect_proto_toolchain": attr.label(
default = "//python:python_toolchain",
),
},
attr_aspects = ["deps"],
required_providers = [ProtoInfo],
provides = [_PyProtoInfo],
toolchains = [PY_PROTO_TOOLCHAIN] if _incompatible_toolchains_enabled() else [],
)
def _py_proto_library_rule(ctx):
"""Merges results of `py_proto_aspect` in `deps`.
Args:
ctx: (RuleContext) The rule context.
Returns:
([PyInfo, DefaultInfo, OutputGroupInfo])
"""
if not ctx.attr.deps:
fail("'deps' attribute mustn't be empty.")
pyproto_infos = _filter_provider(_PyProtoInfo, ctx.attr.deps)
default_outputs = depset(
transitive = [info.transitive_sources for info in pyproto_infos],
)
return [
DefaultInfo(
files = default_outputs,
default_runfiles = ctx.runfiles(transitive_files = depset(
transitive =
[default_outputs] +
[info.runfiles_from_proto_deps for info in pyproto_infos],
)),
),
OutputGroupInfo(
default = depset(),
),
PyInfo(
transitive_sources = default_outputs,
imports = depset(transitive = [info.imports for info in pyproto_infos]),
# Proto always produces 2- and 3- compatible source files
has_py2_only_sources = False,
has_py3_only_sources = False,
),
]
py_proto_library = rule(
implementation = _py_proto_library_rule,
doc = """
Use `py_proto_library` to generate Python libraries from `.proto` files.
The convention is to name the `py_proto_library` rule `foo_py_pb2`
when it wraps the `proto_library` rule `foo_proto`.
`deps` must point to a `proto_library` rule.
Example:
```starlark
py_library(
name = "lib",
deps = [":foo_py_pb2"],
)
py_proto_library(
name = "foo_py_pb2",
deps = [":foo_proto"],
)
proto_library(
name = "foo_proto",
srcs = ["foo.proto"],
)
```""",
attrs = {
"deps": attr.label_list(
doc = """
The list of `proto_library` rules to generate Python libraries for.
Usually this is just the one target: the proto library of interest.
It can be any target providing `ProtoInfo`.""",
providers = [ProtoInfo],
aspects = [_py_proto_aspect],
),
},
provides = [PyInfo],
)

@@ -0,0 +1,17 @@
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Temporary alias to repository rule for using Python 3.x headers from the system."""
load(
"//python/dist:system_python.bzl",
_system_python = "system_python",
)
# TODO: Temporary alias. This is deprecated and to be removed in a future
# release. Users should now get system_python from protobuf_deps.bzl.
system_python = _system_python
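The alias simply forwards to //python/dist:system_python.bzl, so a WORKSPACE usage sketch loading it from the non-deprecated location could look like this (the parameter value is an assumption, not taken from this change):

```starlark
load("@com_google_protobuf//python/dist:system_python.bzl", "system_python")

system_python(
    name = "system_python",
    minimum_python_version = "3.7",  # assumed; adjust as needed
)
```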

@@ -0,0 +1,23 @@
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
bzl_library(
name = "proto_toolchain_bzl",
srcs = [
"proto_toolchain.bzl",
],
visibility = ["//visibility:public"],
deps = [
"//bazel/private:proto_toolchain_rule_bzl",
],
)
bzl_library(
name = "proto_lang_toolchain_bzl",
srcs = [
"proto_lang_toolchain.bzl",
],
visibility = ["//visibility:public"],
deps = [
"//bazel/common:proto_common_bzl",
],
)

@@ -0,0 +1,34 @@
"""proto_lang_toolchain rule"""
load("//bazel/common:proto_common.bzl", "proto_common")
def proto_lang_toolchain(*, name, toolchain_type = None, exec_compatible_with = [], target_compatible_with = [], **attrs):
"""Creates a proto_lang_toolchain and corresponding toolchain target.
A toolchain target is created only when toolchain_type is set.
https://docs.bazel.build/versions/master/be/protocol-buffer.html#proto_lang_toolchain
Args:
name: name of the toolchain
toolchain_type: The toolchain type
exec_compatible_with: ([constraints]) List of constraints the prebuilt binaries are compatible with.
target_compatible_with: ([constraints]) List of constraints the target libraries are compatible with.
**attrs: Rule attributes
"""
if getattr(proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False):
attrs["toolchain_type"] = toolchain_type
# buildifier: disable=native-proto
native.proto_lang_toolchain(name = name, **attrs)
if toolchain_type:
native.toolchain(
name = name + "_toolchain",
toolchain_type = toolchain_type,
exec_compatible_with = exec_compatible_with,
target_compatible_with = target_compatible_with,
toolchain = name,
)
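For example, a hedged sketch of declaring a Python toolchain with this macro; the attribute values are illustrative rather than taken from this change. Besides the native proto_lang_toolchain, this also defines a "python_toolchain_toolchain" target for toolchain resolution:

```starlark
proto_lang_toolchain(
    name = "python_toolchain",
    command_line = "--python_out=%s",
    progress_message = "Generating Python proto_library %{label}",
    runtime = "//python:protobuf_python",  # illustrative label
    toolchain_type = "@rules_python//python/proto:toolchain_type",
)
```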

@@ -0,0 +1,26 @@
"""Macro wrapping the proto_toolchain implementation.
The macro additionally creates a corresponding toolchain target.
"""
load("//bazel/private:proto_toolchain_rule.bzl", _proto_toolchain_rule = "proto_toolchain")
def proto_toolchain(*, name, proto_compiler, exec_compatible_with = []):
"""Creates a proto_toolchain and toolchain target for proto_library.
The toolchain target's name is suffixed with "_toolchain".
Args:
name: name of the toolchain
proto_compiler: (Label) of either proto compiler sources or prebuilt binaries
exec_compatible_with: ([constraints]) List of constraints the prebuilt binary is compatible with.
"""
_proto_toolchain_rule(name = name, proto_compiler = proto_compiler)
native.toolchain(
name = name + "_toolchain",
toolchain_type = "@rules_proto//proto:toolchain_type",
exec_compatible_with = exec_compatible_with,
target_compatible_with = [],
toolchain = name,
)
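A usage sketch, assuming "//:protoc" is the compiler target; this defines both ":protoc_sources" and a ":protoc_sources_toolchain" target for resolution:

```starlark
proto_toolchain(
    name = "protoc_sources",
    proto_compiler = "//:protoc",  # assumed compiler label
)
```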

@@ -0,0 +1,69 @@
"""upb_c_proto_library() exposes upb's generated C API for protobuf (foo.upb.h)"""
load("//bazel:upb_minitable_proto_library.bzl", "UpbMinitableCcInfo", "upb_minitable_proto_library_aspect")
load("//bazel/private:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//bazel/private:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//bazel/private:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")
UpbWrappedCcInfo = provider(
"Provider for cc_info for protos",
fields = ["cc_info", "cc_info_with_thunks"],
)
_UpbWrappedGeneratedSrcsInfo = provider(
"Provider for generated sources",
fields = ["srcs"],
)
def _upb_c_proto_library_aspect_impl(target, ctx):
return upb_proto_aspect_impl(
target = target,
ctx = ctx,
generator = "upb",
cc_provider = UpbWrappedCcInfo,
dep_cc_provider = UpbMinitableCcInfo,
file_provider = _UpbWrappedGeneratedSrcsInfo,
provide_cc_shared_library_hints = False,
)
upb_c_proto_library_aspect = aspect(
attrs = {
"_copts": attr.label(
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use",
),
"_upb_toolchain": attr.label(
default = Label("//upb_generator:protoc-gen-upb_toolchain"),
),
"_cc_toolchain": attr.label(
default = "@bazel_tools//tools/cpp:current_cc_toolchain",
),
},
implementation = _upb_c_proto_library_aspect_impl,
requires = [upb_minitable_proto_library_aspect],
required_aspect_providers = [UpbMinitableCcInfo],
provides = [
UpbWrappedCcInfo,
_UpbWrappedGeneratedSrcsInfo,
],
attr_aspects = ["deps"],
fragments = ["cpp"],
toolchains = upb_use_cpp_toolchain(),
exec_groups = {
"proto_compiler": exec_group(),
},
)
def _upb_c_proto_library_rule_impl(ctx):
return upb_proto_rule_impl(ctx, UpbWrappedCcInfo, _UpbWrappedGeneratedSrcsInfo)
upb_c_proto_library = rule(
implementation = _upb_c_proto_library_rule_impl,
attrs = {
"deps": attr.label_list(
aspects = [upb_c_proto_library_aspect],
allow_rules = ["proto_library"],
providers = [ProtoInfo],
),
},
provides = [CcInfo],
)
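A hedged usage sketch with hypothetical target names: given a proto_library, upb_c_proto_library generates and compiles the C API, and dependents can include the generated header:

```starlark
proto_library(
    name = "foo_proto",
    srcs = ["foo.proto"],
)

upb_c_proto_library(
    name = "foo_upb_proto",
    deps = [":foo_proto"],
)

cc_library(
    name = "client",
    srcs = ["client.c"],
    deps = [":foo_upb_proto"],  # client.c can #include "foo.upb.h"
)
```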

@@ -0,0 +1,77 @@
"""upb_minitable_proto_library() exposes upb's generated minitables (foo.upb_minitable.h)"""
load("//bazel/private:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//bazel/private:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//bazel/private:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")
UpbMinitableCcInfo = provider(
"Provider for cc_info for protos",
fields = ["cc_info"],
)
_UpbWrappedGeneratedSrcsInfo = provider(
"Provider for generated sources",
fields = ["srcs"],
)
def _upb_minitable_proto_library_aspect_impl(target, ctx):
return upb_proto_aspect_impl(
target = target,
ctx = ctx,
generator = "upb_minitable",
cc_provider = UpbMinitableCcInfo,
dep_cc_provider = None,
file_provider = _UpbWrappedGeneratedSrcsInfo,
)
def _get_upb_minitable_proto_library_aspect_provides():
provides = [
UpbMinitableCcInfo,
_UpbWrappedGeneratedSrcsInfo,
]
if hasattr(cc_common, "CcSharedLibraryHintInfo"):
provides.append(cc_common.CcSharedLibraryHintInfo)
elif hasattr(cc_common, "CcSharedLibraryHintInfo_6_X_getter_do_not_use"):
# This branch can be deleted once 6.X is not supported by upb rules
provides.append(cc_common.CcSharedLibraryHintInfo_6_X_getter_do_not_use)
return provides
upb_minitable_proto_library_aspect = aspect(
attrs = {
"_copts": attr.label(
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use",
),
"_upb_minitable_toolchain": attr.label(
default = Label("//upb_generator:protoc-gen-upb_minitable_toolchain"),
),
"_cc_toolchain": attr.label(
default = "@bazel_tools//tools/cpp:current_cc_toolchain",
),
"_fasttable_enabled": attr.label(default = "//upb:fasttable_enabled"),
},
implementation = _upb_minitable_proto_library_aspect_impl,
provides = _get_upb_minitable_proto_library_aspect_provides(),
attr_aspects = ["deps"],
fragments = ["cpp"],
toolchains = upb_use_cpp_toolchain(),
exec_groups = {
"proto_compiler": exec_group(),
},
)
def _upb_minitable_proto_library_rule_impl(ctx):
return upb_proto_rule_impl(ctx, UpbMinitableCcInfo, _UpbWrappedGeneratedSrcsInfo)
upb_minitable_proto_library = rule(
implementation = _upb_minitable_proto_library_rule_impl,
attrs = {
"deps": attr.label_list(
aspects = [upb_minitable_proto_library_aspect],
allow_rules = ["proto_library"],
providers = [ProtoInfo],
),
},
provides = [CcInfo],
)
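Usage mirrors upb_c_proto_library above; a minimal sketch with hypothetical names:

```starlark
upb_minitable_proto_library(
    name = "foo_upb_minitable_proto",
    deps = [":foo_proto"],  # dependents may then #include "foo.upb_minitable.h"
)
```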

@@ -0,0 +1,35 @@
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""Public rules for using upb protos:
- upb_proto_library()
- upb_proto_reflection_library()
"""
load(
"//bazel:upb_c_proto_library.bzl",
_UpbWrappedCcInfo = "UpbWrappedCcInfo",
_upb_c_proto_library = "upb_c_proto_library",
_upb_c_proto_library_aspect = "upb_c_proto_library_aspect",
)
load(
"//bazel:upb_proto_reflection_library.bzl",
_upb_proto_reflection_library = "upb_proto_reflection_library",
)
load(
"//bazel/private:upb_proto_library_internal/aspect.bzl",
_GeneratedSrcsInfo = "GeneratedSrcsInfo",
)
# Temporary alias, see b/291827469.
upb_proto_library = _upb_c_proto_library
upb_c_proto_library = _upb_c_proto_library
upb_proto_reflection_library = _upb_proto_reflection_library
GeneratedSrcsInfo = _GeneratedSrcsInfo
UpbWrappedCcInfo = _UpbWrappedCcInfo
upb_proto_library_aspect = _upb_c_proto_library_aspect

@@ -0,0 +1,67 @@
"""upb_c_proto_reflection_library() exposes upb reflection for protobuf (foo.upbdefs.h)"""
load("//bazel:upb_minitable_proto_library.bzl", "UpbMinitableCcInfo", "upb_minitable_proto_library_aspect")
load("//bazel/private:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//bazel/private:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//bazel/private:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")
_UpbDefsWrappedCcInfo = provider("Provider for cc_info for protos", fields = ["cc_info"])
_WrappedDefsGeneratedSrcsInfo = provider(
"Provider for generated reflective sources",
fields = ["srcs"],
)
def _upb_proto_reflection_library_aspect_impl(target, ctx):
return upb_proto_aspect_impl(
target = target,
ctx = ctx,
generator = "upbdefs",
cc_provider = _UpbDefsWrappedCcInfo,
dep_cc_provider = UpbMinitableCcInfo,
file_provider = _WrappedDefsGeneratedSrcsInfo,
provide_cc_shared_library_hints = False,
)
_upb_proto_reflection_library_aspect = aspect(
attrs = {
"_copts": attr.label(
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use",
),
"_upbdefs_toolchain": attr.label(
default = Label("//upb_generator:protoc-gen-upbdefs_toolchain"),
cfg = getattr(proto_common, "proto_lang_toolchain_cfg", "target"),
),
"_cc_toolchain": attr.label(
default = "@bazel_tools//tools/cpp:current_cc_toolchain",
),
},
implementation = _upb_proto_reflection_library_aspect_impl,
requires = [upb_minitable_proto_library_aspect],
required_aspect_providers = [UpbMinitableCcInfo],
provides = [
_UpbDefsWrappedCcInfo,
_WrappedDefsGeneratedSrcsInfo,
],
attr_aspects = ["deps"],
fragments = ["cpp"],
toolchains = upb_use_cpp_toolchain(),
exec_groups = {
"proto_compiler": exec_group(),
},
)
def _upb_proto_reflection_library_rule_impl(ctx):
return upb_proto_rule_impl(ctx, _UpbDefsWrappedCcInfo, _WrappedDefsGeneratedSrcsInfo)
upb_proto_reflection_library = rule(
implementation = _upb_proto_reflection_library_rule_impl,
attrs = {
"deps": attr.label_list(
aspects = [_upb_proto_reflection_library_aspect],
allow_rules = ["proto_library"],
providers = [ProtoInfo],
),
},
provides = [CcInfo],
)
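Usage follows the same pattern as the other upb rules; a minimal sketch with hypothetical names:

```starlark
upb_proto_reflection_library(
    name = "foo_upb_proto_reflection",
    deps = [":foo_proto"],  # dependents may then #include "foo.upbdefs.h"
)
```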

@@ -0,0 +1,245 @@
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
load("@rules_python//python:defs.bzl", "py_binary")
load("//bazel:cc_proto_library.bzl", "cc_proto_library")
load("//bazel:proto_library.bzl", "proto_library")
load(
"//bazel:upb_proto_library.bzl",
"upb_c_proto_library",
"upb_proto_reflection_library",
)
load(
":build_defs.bzl",
"cc_optimizefor_proto_library",
"expand_suffixes",
"tmpl_cc_binary",
)
# begin:google_only
# package(default_applicable_licenses = ["//upb:license"])
# end:google_only
licenses(["notice"])
proto_library(
name = "descriptor_proto",
srcs = ["descriptor.proto"],
)
upb_c_proto_library(
name = "benchmark_descriptor_upb_proto",
deps = [":descriptor_proto"],
)
upb_proto_reflection_library(
name = "benchmark_descriptor_upb_proto_reflection",
deps = [":descriptor_proto"],
)
upb_proto_reflection_library(
name = "ads_upb_proto_reflection",
deps = ["@com_google_googleapis//:ads_proto"],
)
cc_proto_library(
name = "benchmark_descriptor_cc_proto",
deps = [":descriptor_proto"],
)
proto_library(
name = "benchmark_descriptor_sv_proto",
srcs = ["descriptor_sv.proto"],
)
cc_proto_library(
name = "benchmark_descriptor_sv_cc_proto",
deps = [":benchmark_descriptor_sv_proto"],
)
cc_test(
name = "benchmark",
testonly = 1,
srcs = ["benchmark.cc"],
deps = [
":ads_upb_proto_reflection",
":benchmark_descriptor_cc_proto",
":benchmark_descriptor_sv_cc_proto",
":benchmark_descriptor_upb_proto",
":benchmark_descriptor_upb_proto_reflection",
"//:protobuf",
"//src/google/protobuf/json",
"//upb:base",
"//upb:descriptor_upb_proto",
"//upb:json",
"//upb:mem",
"//upb:reflection",
"//upb:wire",
"@com_github_google_benchmark//:benchmark_main",
"@com_google_absl//absl/container:flat_hash_set",
"@com_google_absl//absl/log:absl_check",
"@com_google_googletest//:gtest",
"@com_google_googletest//:gtest_main",
],
)
# Size benchmarks.
SIZE_BENCHMARKS = {
"empty": "Empty",
"descriptor": "FileDescriptorSet",
"100_msgs": "Message100",
"200_msgs": "Message200",
"100_fields": "Message",
"200_fields": "Message",
}
py_binary(
name = "gen_synthetic_protos",
srcs = ["gen_synthetic_protos.py"],
python_version = "PY3",
)
py_binary(
name = "gen_upb_binary_c",
srcs = ["gen_upb_binary_c.py"],
python_version = "PY3",
)
py_binary(
name = "gen_protobuf_binary_cc",
srcs = ["gen_protobuf_binary_cc.py"],
python_version = "PY3",
)
genrule(
name = "do_gen_synthetic_protos",
outs = [
"100_msgs.proto",
"200_msgs.proto",
"100_fields.proto",
"200_fields.proto",
],
cmd = "$(execpath :gen_synthetic_protos) $(RULEDIR)",
tools = [":gen_synthetic_protos"],
)
proto_library(
name = "100_msgs_proto",
srcs = ["100_msgs.proto"],
)
proto_library(
name = "200_msgs_proto",
srcs = ["200_msgs.proto"],
)
proto_library(
name = "100_fields_proto",
srcs = ["100_fields.proto"],
)
proto_library(
name = "200_fields_proto",
srcs = ["200_fields.proto"],
)
proto_library(
name = "empty_proto",
srcs = ["empty.proto"],
)
[(
upb_c_proto_library(
name = k + "_upb_proto",
deps = [":" + k + "_proto"],
),
cc_proto_library(
name = k + "_cc_proto",
deps = [":" + k + "_proto"],
),
tmpl_cc_binary(
name = k + "_upb_binary",
testonly = 1,
args = [
package_name() + "/" + k + ".upb.h",
"upb_benchmark_" + v,
],
gen = ":gen_upb_binary_c",
deps = [
":" + k + "_upb_proto",
],
),
tmpl_cc_binary(
name = k + "_protobuf_binary",
testonly = 1,
args = [
package_name() + "/" + k + ".pb.h",
"upb_benchmark::" + v,
],
gen = ":gen_protobuf_binary_cc",
deps = [
":" + k + "_cc_proto",
],
),
cc_optimizefor_proto_library(
name = k + "_cc_lite_proto",
srcs = [k + ".proto"],
outs = [k + "_lite.proto"],
optimize_for = "LITE_RUNTIME",
),
tmpl_cc_binary(
name = k + "_lite_protobuf_binary",
testonly = 1,
args = [
package_name() + "/" + k + "_lite.pb.h",
"upb_benchmark::" + v,
],
gen = ":gen_protobuf_binary_cc",
deps = [
":" + k + "_cc_lite_proto",
],
),
cc_optimizefor_proto_library(
name = k + "_cc_codesize_proto",
srcs = [k + ".proto"],
outs = [k + "_codesize.proto"],
optimize_for = "CODE_SIZE",
),
tmpl_cc_binary(
name = k + "_codesize_protobuf_binary",
testonly = 1,
args = [
package_name() + "/" + k + "_codesize.pb.h",
"upb_benchmark::" + v,
],
gen = ":gen_protobuf_binary_cc",
deps = [
":" + k + "_cc_codesize_proto",
],
),
) for k, v in SIZE_BENCHMARKS.items()]
genrule(
name = "size_data",
testonly = 1,
srcs = expand_suffixes(
SIZE_BENCHMARKS.keys(),
suffixes = [
"_upb_binary",
"_protobuf_binary",
"_lite_protobuf_binary",
"_codesize_protobuf_binary",
],
),
outs = ["size_data.txt"],
# We want --format=GNU which counts rodata with data, not text.
cmd = "size $$($$OSTYPE == 'linux-gnu' ? '--format=GNU -d' : '') $(SRCS) > $@",
# "size" sometimes isn't available remotely.
local = 1,
tags = ["no-remote-exec"],
)

@@ -0,0 +1,59 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2023 Google LLC. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google LLC nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
load(
"@protobuf//bazel:proto_library.bzl",
"proto_library",
)
proto_library(
name = "ads_proto",
srcs = glob([
"google/ads/googleads/v16/**/*.proto",
"google/api/**/*.proto",
"google/rpc/**/*.proto",
"google/longrunning/**/*.proto",
"google/logging/**/*.proto",
]),
#srcs = ["google/ads/googleads/v5/services/google_ads_service.proto"],
visibility = ["//visibility:public"],
deps = [
"@com_google_protobuf//:any_proto",
"@com_google_protobuf//:api_proto",
"@com_google_protobuf//:descriptor_proto",
"@com_google_protobuf//:duration_proto",
"@com_google_protobuf//:empty_proto",
"@com_google_protobuf//:field_mask_proto",
"@com_google_protobuf//:struct_proto",
"@com_google_protobuf//:timestamp_proto",
"@com_google_protobuf//:type_proto",
"@com_google_protobuf//:wrappers_proto",
],
)

@@ -0,0 +1,471 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2023 Google LLC. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include <benchmark/benchmark.h>
#include <math.h>
#include <stdint.h>
#include <string.h>
#include <string>
#include <vector>
#include "google/ads/googleads/v16/services/google_ads_service.upbdefs.h"
#include "google/protobuf/descriptor.pb.h"
#include "absl/container/flat_hash_set.h"
#include "absl/log/absl_check.h"
#include "google/protobuf/dynamic_message.h"
#include "google/protobuf/json/json.h"
#include "benchmarks/descriptor.pb.h"
#include "benchmarks/descriptor.upb.h"
#include "benchmarks/descriptor.upbdefs.h"
#include "benchmarks/descriptor_sv.pb.h"
#include "upb/base/string_view.h"
#include "upb/base/upcast.h"
#include "upb/json/decode.h"
#include "upb/json/encode.h"
#include "upb/mem/arena.h"
#include "upb/reflection/def.hpp"
#include "upb/wire/decode.h"
upb_StringView descriptor =
benchmarks_descriptor_proto_upbdefinit.descriptor;
namespace protobuf = ::google::protobuf;
// A buffer big enough to parse descriptor.proto without going to heap.
// We use 64-bit ints here to force alignment.
int64_t buf[8191];
void CollectFileDescriptors(
const _upb_DefPool_Init* file,
std::vector<upb_StringView>& serialized_files,
absl::flat_hash_set<const _upb_DefPool_Init*>& seen) {
if (!seen.insert(file).second) return;
for (_upb_DefPool_Init** deps = file->deps; *deps; deps++) {
CollectFileDescriptors(*deps, serialized_files, seen);
}
serialized_files.push_back(file->descriptor);
}
static void BM_ArenaOneAlloc(benchmark::State& state) {
for (auto _ : state) {
upb_Arena* arena = upb_Arena_New();
upb_Arena_Malloc(arena, 1);
upb_Arena_Free(arena);
}
}
BENCHMARK(BM_ArenaOneAlloc);
static void BM_ArenaInitialBlockOneAlloc(benchmark::State& state) {
for (auto _ : state) {
upb_Arena* arena = upb_Arena_Init(buf, sizeof(buf), nullptr);
upb_Arena_Malloc(arena, 1);
upb_Arena_Free(arena);
}
}
BENCHMARK(BM_ArenaInitialBlockOneAlloc);
static void BM_ArenaFuseUnbalanced(benchmark::State& state) {
std::vector<upb_Arena*> arenas(state.range(0));
size_t n = 0;
for (auto _ : state) {
for (auto& arena : arenas) {
arena = upb_Arena_New();
}
for (auto& arena : arenas) {
upb_Arena_Fuse(arenas[0], arena);
}
for (auto& arena : arenas) {
upb_Arena_Free(arena);
}
n += arenas.size();
}
state.SetItemsProcessed(n);
}
BENCHMARK(BM_ArenaFuseUnbalanced)->Range(2, 128);
static void BM_ArenaFuseBalanced(benchmark::State& state) {
std::vector<upb_Arena*> arenas(state.range(0));
size_t n = 0;
for (auto _ : state) {
for (auto& arena : arenas) {
arena = upb_Arena_New();
}
// Perform a series of fuses that keeps the halves balanced.
const size_t max = ceil(log2(double(arenas.size())));
for (size_t n = 0; n <= max; n++) {
size_t step = 1 << n;
for (size_t i = 0; i + step < arenas.size(); i += (step * 2)) {
upb_Arena_Fuse(arenas[i], arenas[i + step]);
}
}
for (auto& arena : arenas) {
upb_Arena_Free(arena);
}
n += arenas.size();
}
state.SetItemsProcessed(n);
}
BENCHMARK(BM_ArenaFuseBalanced)->Range(2, 128);
enum LoadDescriptorMode {
NoLayout,
WithLayout,
};
// This function is mostly copied from upb/def.c, but it is modified to avoid
// passing in the pre-generated mini-tables, in order to force upb to compute
// them dynamically. Generally you would never want to do this, but we want to
// simulate the cost we would pay if we were loading these types purely from
// descriptors, with no mini-tables available.
bool LoadDefInit_BuildLayout(upb_DefPool* s, const _upb_DefPool_Init* init,
size_t* bytes) {
_upb_DefPool_Init** deps = init->deps;
google_protobuf_FileDescriptorProto* file;
upb_Arena* arena;
upb_Status status;
upb_Status_Clear(&status);
if (upb_DefPool_FindFileByName(s, init->filename)) {
return true;
}
arena = upb_Arena_New();
for (; *deps; deps++) {
if (!LoadDefInit_BuildLayout(s, *deps, bytes)) goto err;
}
file = google_protobuf_FileDescriptorProto_parse_ex(
init->descriptor.data, init->descriptor.size, nullptr,
kUpb_DecodeOption_AliasString, arena);
*bytes += init->descriptor.size;
if (!file) {
upb_Status_SetErrorFormat(
&status,
"Failed to parse compiled-in descriptor for file '%s'. This should "
"never happen.",
init->filename);
goto err;
}
// KEY DIFFERENCE: Here we pass in only the descriptor, and not the
// pre-generated minitables.
if (!upb_DefPool_AddFile(s, file, &status)) {
goto err;
}
upb_Arena_Free(arena);
return true;
err:
fprintf(stderr,
"Error loading compiled-in descriptor for file '%s' (this should "
"never happen): %s\n",
init->filename, upb_Status_ErrorMessage(&status));
exit(1);
}
template <LoadDescriptorMode Mode>
static void BM_LoadAdsDescriptor_Upb(benchmark::State& state) {
size_t bytes_per_iter = 0;
for (auto _ : state) {
upb::DefPool defpool;
if (Mode == NoLayout) {
google_ads_googleads_v16_services_SearchGoogleAdsRequest_getmsgdef(
defpool.ptr());
bytes_per_iter = _upb_DefPool_BytesLoaded(defpool.ptr());
} else {
bytes_per_iter = 0;
LoadDefInit_BuildLayout(
defpool.ptr(),
&google_ads_googleads_v16_services_google_ads_service_proto_upbdefinit,
&bytes_per_iter);
}
}
state.SetBytesProcessed(state.iterations() * bytes_per_iter);
}
BENCHMARK_TEMPLATE(BM_LoadAdsDescriptor_Upb, NoLayout);
BENCHMARK_TEMPLATE(BM_LoadAdsDescriptor_Upb, WithLayout);
template <LoadDescriptorMode Mode>
static void BM_LoadAdsDescriptor_Proto2(benchmark::State& state) {
extern _upb_DefPool_Init
google_ads_googleads_v16_services_google_ads_service_proto_upbdefinit;
std::vector<upb_StringView> serialized_files;
absl::flat_hash_set<const _upb_DefPool_Init*> seen_files;
CollectFileDescriptors(
&google_ads_googleads_v16_services_google_ads_service_proto_upbdefinit,
serialized_files, seen_files);
size_t bytes_per_iter = 0;
for (auto _ : state) {
bytes_per_iter = 0;
protobuf::Arena arena;
protobuf::DescriptorPool pool;
for (auto file : serialized_files) {
absl::string_view input(file.data, file.size);
auto proto =
protobuf::Arena::Create<protobuf::FileDescriptorProto>(&arena);
bool ok = proto->ParseFrom<protobuf::MessageLite::kMergePartial>(input) &&
pool.BuildFile(*proto) != nullptr;
if (!ok) {
printf("Failed to add file.\n");
exit(1);
}
bytes_per_iter += input.size();
}
if (Mode == WithLayout) {
protobuf::DynamicMessageFactory factory;
const protobuf::Descriptor* d = pool.FindMessageTypeByName(
"google.ads.googleads.v16.services.SearchGoogleAdsResponse");
if (!d) {
printf("Failed to find descriptor.\n");
exit(1);
}
factory.GetPrototype(d);
}
}
state.SetBytesProcessed(state.iterations() * bytes_per_iter);
}
BENCHMARK_TEMPLATE(BM_LoadAdsDescriptor_Proto2, NoLayout);
BENCHMARK_TEMPLATE(BM_LoadAdsDescriptor_Proto2, WithLayout);
enum CopyStrings {
Copy,
Alias,
};
enum ArenaMode {
NoArena,
UseArena,
InitBlock,
};
template <ArenaMode AMode, CopyStrings Copy>
static void BM_Parse_Upb_FileDesc(benchmark::State& state) {
for (auto _ : state) {
upb_Arena* arena;
if (AMode == InitBlock) {
arena = upb_Arena_Init(buf, sizeof(buf), nullptr);
} else {
arena = upb_Arena_New();
}
upb_benchmark_FileDescriptorProto* set =
upb_benchmark_FileDescriptorProto_parse_ex(
descriptor.data, descriptor.size, nullptr,
Copy == Alias ? kUpb_DecodeOption_AliasString : 0, arena);
if (!set) {
printf("Failed to parse.\n");
exit(1);
}
upb_Arena_Free(arena);
}
state.SetBytesProcessed(state.iterations() * descriptor.size);
}
BENCHMARK_TEMPLATE(BM_Parse_Upb_FileDesc, UseArena, Copy);
BENCHMARK_TEMPLATE(BM_Parse_Upb_FileDesc, UseArena, Alias);
BENCHMARK_TEMPLATE(BM_Parse_Upb_FileDesc, InitBlock, Copy);
BENCHMARK_TEMPLATE(BM_Parse_Upb_FileDesc, InitBlock, Alias);
template <ArenaMode AMode, class P>
struct Proto2Factory;
template <class P>
struct Proto2Factory<NoArena, P> {
public:
P* GetProto() { return &proto; }
private:
P proto;
};
template <class P>
struct Proto2Factory<UseArena, P> {
public:
P* GetProto() { return protobuf::Arena::Create<P>(&arena); }
private:
protobuf::Arena arena;
};
template <class P>
struct Proto2Factory<InitBlock, P> {
public:
Proto2Factory() : arena(GetOptions()) {}
P* GetProto() { return protobuf::Arena::Create<P>(&arena); }
private:
protobuf::ArenaOptions GetOptions() {
protobuf::ArenaOptions opts;
opts.initial_block = (char*)buf;
opts.initial_block_size = sizeof(buf);
return opts;
}
protobuf::Arena arena;
};
using FileDesc = ::upb_benchmark::FileDescriptorProto;
using FileDescSV = ::upb_benchmark::sv::FileDescriptorProto;
template <class P, ArenaMode AMode, CopyStrings kCopy>
void BM_Parse_Proto2(benchmark::State& state) {
constexpr protobuf::MessageLite::ParseFlags kParseFlags =
kCopy == Copy
? protobuf::MessageLite::ParseFlags::kMergePartial
: protobuf::MessageLite::ParseFlags::kMergePartialWithAliasing;
for (auto _ : state) {
Proto2Factory<AMode, P> proto_factory;
auto proto = proto_factory.GetProto();
absl::string_view input(descriptor.data, descriptor.size);
bool ok = proto->template ParseFrom<kParseFlags>(input);
if (!ok) {
printf("Failed to parse.\n");
exit(1);
}
}
state.SetBytesProcessed(state.iterations() * descriptor.size);
}
BENCHMARK_TEMPLATE(BM_Parse_Proto2, FileDesc, NoArena, Copy);
BENCHMARK_TEMPLATE(BM_Parse_Proto2, FileDesc, UseArena, Copy);
BENCHMARK_TEMPLATE(BM_Parse_Proto2, FileDesc, InitBlock, Copy);
BENCHMARK_TEMPLATE(BM_Parse_Proto2, FileDescSV, InitBlock, Alias);
static void BM_SerializeDescriptor_Proto2(benchmark::State& state) {
upb_benchmark::FileDescriptorProto proto;
proto.ParseFromArray(descriptor.data, descriptor.size);
for (auto _ : state) {
proto.SerializePartialToArray(buf, sizeof(buf));
}
state.SetBytesProcessed(state.iterations() * descriptor.size);
}
BENCHMARK(BM_SerializeDescriptor_Proto2);
static upb_benchmark_FileDescriptorProto* UpbParseDescriptor(upb_Arena* arena) {
upb_benchmark_FileDescriptorProto* set =
upb_benchmark_FileDescriptorProto_parse(descriptor.data, descriptor.size,
arena);
if (!set) {
printf("Failed to parse.\n");
exit(1);
}
return set;
}
static void BM_SerializeDescriptor_Upb(benchmark::State& state) {
int64_t total = 0;
upb_Arena* arena = upb_Arena_New();
upb_benchmark_FileDescriptorProto* set = UpbParseDescriptor(arena);
for (auto _ : state) {
upb_Arena* enc_arena = upb_Arena_Init(buf, sizeof(buf), nullptr);
size_t size;
char* data =
upb_benchmark_FileDescriptorProto_serialize(set, enc_arena, &size);
if (!data) {
printf("Failed to serialize.\n");
exit(1);
}
total += size;
}
state.SetBytesProcessed(total);
}
BENCHMARK(BM_SerializeDescriptor_Upb);
static absl::string_view UpbJsonEncode(upb_benchmark_FileDescriptorProto* proto,
const upb_MessageDef* md,
upb_Arena* arena) {
size_t size =
upb_JsonEncode(UPB_UPCAST(proto), md, nullptr, 0, nullptr, 0, nullptr);
char* buf = reinterpret_cast<char*>(upb_Arena_Malloc(arena, size + 1));
upb_JsonEncode(UPB_UPCAST(proto), md, nullptr, 0, buf, size, nullptr);
return absl::string_view(buf, size);
}
static void BM_JsonParse_Upb(benchmark::State& state) {
upb_Arena* arena = upb_Arena_New();
upb_benchmark_FileDescriptorProto* set =
upb_benchmark_FileDescriptorProto_parse(descriptor.data, descriptor.size,
arena);
if (!set) {
printf("Failed to parse.\n");
exit(1);
}
upb::DefPool defpool;
const upb_MessageDef* md =
upb_benchmark_FileDescriptorProto_getmsgdef(defpool.ptr());
auto json = UpbJsonEncode(set, md, arena);
for (auto _ : state) {
upb_Arena* arena = upb_Arena_New();
upb_benchmark_FileDescriptorProto* proto =
upb_benchmark_FileDescriptorProto_new(arena);
upb_JsonDecode(json.data(), json.size(), UPB_UPCAST(proto), md,
defpool.ptr(), 0, arena, nullptr);
upb_Arena_Free(arena);
}
state.SetBytesProcessed(state.iterations() * json.size());
}
BENCHMARK(BM_JsonParse_Upb);
static void BM_JsonParse_Proto2(benchmark::State& state) {
protobuf::FileDescriptorProto proto;
absl::string_view input(descriptor.data, descriptor.size);
proto.ParseFromString(input);
std::string json;
ABSL_CHECK_OK(google::protobuf::json::MessageToJsonString(proto, &json));
for (auto _ : state) {
protobuf::FileDescriptorProto proto;
ABSL_CHECK_OK(google::protobuf::json::JsonStringToMessage(json, &proto));
}
state.SetBytesProcessed(state.iterations() * json.size());
}
BENCHMARK(BM_JsonParse_Proto2);
static void BM_JsonSerialize_Upb(benchmark::State& state) {
upb_Arena* arena = upb_Arena_New();
upb_benchmark_FileDescriptorProto* set =
upb_benchmark_FileDescriptorProto_parse(descriptor.data, descriptor.size,
arena);
ABSL_CHECK(set != nullptr);
upb::DefPool defpool;
const upb_MessageDef* md =
upb_benchmark_FileDescriptorProto_getmsgdef(defpool.ptr());
auto json = UpbJsonEncode(set, md, arena);
std::string json_str;
json_str.resize(json.size());
for (auto _ : state) {
// This isn't a fully fair comparison, as it assumes we already know the
// correct size of the buffer. In practice, we usually need to run the
// encoder twice: once to discover the size of the buffer, then again to encode.
upb_JsonEncode(UPB_UPCAST(set), md, nullptr, 0, json_str.data(),
json_str.size(), nullptr);
}
state.SetBytesProcessed(state.iterations() * json.size());
}
BENCHMARK(BM_JsonSerialize_Upb);
static void BM_JsonSerialize_Proto2(benchmark::State& state) {
protobuf::FileDescriptorProto proto;
absl::string_view input(descriptor.data, descriptor.size);
proto.ParseFromString(input);
std::string json;
for (auto _ : state) {
json.clear();
ABSL_CHECK_OK(google::protobuf::json::MessageToJsonString(proto, &json));
}
state.SetBytesProcessed(state.iterations() * json.size());
}
BENCHMARK(BM_JsonSerialize_Proto2);

@@ -0,0 +1,63 @@
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
load("//bazel:cc_proto_library.bzl", "cc_proto_library")
load("//bazel:proto_library.bzl", "proto_library")
# begin:google_only
# _is_google3 = True
# end:google_only
# begin:github_only
_is_google3 = False
# end:github_only
def tmpl_cc_binary(name, gen, args, replacements = [], **kwargs):
"""Generates name + ".cc" by running `gen` with `args`, then compiles it as a cc_binary."""
srcs = [name + ".cc"]
native.genrule(
name = name + "_gen_srcs",
tools = [gen],
outs = srcs,
cmd = "$(location " + gen + ") " + " ".join(args) + " > $@",
)
if _is_google3:
kwargs["malloc"] = "//base:system_malloc"
kwargs["features"] = ["-static_linking_mode"]
native.cc_binary(
name = name,
srcs = srcs,
**kwargs
)
def cc_optimizefor_proto_library(name, srcs, outs, optimize_for):
"""Copies the single .proto in `srcs`, appends an `optimize_for` option, and wraps the result in a cc_proto_library."""
if len(srcs) != 1:
fail("Currently srcs must have exactly 1 element")
native.genrule(
name = name + "_gen_proto",
srcs = srcs,
outs = outs,
cmd = "cp $< $@ && chmod a+w $@ && echo 'option optimize_for = " + optimize_for + ";' >> $@",
)
proto_library(
name = name + "_proto",
srcs = outs,
)
cc_proto_library(
name = name,
deps = [":" + name + "_proto"],
)
def expand_suffixes(vals, suffixes):
"""Returns [val + suffix] for every combination of `vals` and `suffixes`."""
ret = []
for val in vals:
for suffix in suffixes:
ret.append(val + suffix)
return ret

@@ -0,0 +1,123 @@
#!/usr/bin/python3
#
# Protocol Buffers - Google's data interchange format
# Copyright 2023 Google LLC. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google LLC nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Benchmarks the current working directory against a given baseline.
This script benchmarks both size and speed. Sample output:
"""
import contextlib
import json
import os
import re
import subprocess
import sys
import tempfile
@contextlib.contextmanager
def GitWorktree(commit):
tmpdir = tempfile.mkdtemp()
subprocess.run(['git', 'worktree', 'add', '-q', '-d', tmpdir, commit], check=True)
cwd = os.getcwd()
os.chdir(tmpdir)
try:
yield tmpdir
finally:
os.chdir(cwd)
subprocess.run(['git', 'worktree', 'remove', tmpdir], check=True)
def Run(cmd):
subprocess.check_call(cmd, shell=True)
def Benchmark(outbase, bench_cpu=True, runs=12, fasttable=False):
"""Builds and runs the benchmarks, writing results to <outbase>.txt and <outbase>.bin."""
tmpfile = "/tmp/bench-output.json"
Run("rm -rf {}".format(tmpfile))
#Run("CC=clang bazel test ...")
if fasttable:
extra_args = " --//:fasttable_enabled=true"
else:
extra_args = ""
if bench_cpu:
Run("CC=clang bazel build -c opt --copt=-march=native benchmarks:benchmark" + extra_args)
Run("./bazel-bin/benchmarks/benchmark --benchmark_out_format=json --benchmark_out={} --benchmark_repetitions={} --benchmark_min_time=0.05 --benchmark_enable_random_interleaving=true".format(tmpfile, runs))
with open(tmpfile) as f:
bench_json = json.load(f)
# Translate into the format expected by benchstat.
txt_filename = outbase + ".txt"
with open(txt_filename, "w") as f:
for run in bench_json["benchmarks"]:
if run["run_type"] == "aggregate":
continue
name = run["name"]
name = name.replace(" ", "")
name = re.sub(r'^BM_', 'Benchmark', name)
values = (name, run["iterations"], run["cpu_time"])
print("{} {} {} ns/op".format(*values), file=f)
Run("sort {} -o {} ".format(txt_filename, txt_filename))
Run("CC=clang bazel build -c opt --copt=-g --copt=-march=native :conformance_upb"
+ extra_args)
Run("cp -f bazel-bin/conformance_upb {}.bin".format(outbase))
baseline = "main"
bench_cpu = True
fasttable = False
if len(sys.argv) > 1:
baseline = sys.argv[1]
# Quickly verify that the baseline exists.
with GitWorktree(baseline):
pass
# Benchmark our current directory first, since it's more likely to be broken.
Benchmark("/tmp/new", bench_cpu, fasttable=fasttable)
# Benchmark the baseline.
with GitWorktree(baseline):
Benchmark("/tmp/old", bench_cpu, fasttable=fasttable)
print()
print()
if bench_cpu:
Run("~/go/bin/benchstat /tmp/old.txt /tmp/new.txt")
print()
print()
Run("objcopy --strip-debug /tmp/old.bin /tmp/old.bin.stripped")
Run("objcopy --strip-debug /tmp/new.bin /tmp/new.bin.stripped")
Run("~/code/bloaty/bloaty /tmp/new.bin.stripped -- /tmp/old.bin.stripped --debug-file=/tmp/old.bin --debug-file=/tmp/new.bin -d compileunits,symbols")

@@ -0,0 +1,865 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
// Author: kenton@google.com (Kenton Varda)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
//
// The messages in this file describe the definitions found in .proto files.
// A valid .proto file can be translated directly to a FileDescriptorProto
// without any other information (e.g. without reading its imports).
syntax = "proto2";
package upb_benchmark;
option go_package = "google.golang.org/protobuf/types/descriptorpb";
option java_package = "com.google.protobuf";
option java_outer_classname = "DescriptorProtos";
option csharp_namespace = "Google.Protobuf.Reflection";
option objc_class_prefix = "UPBB";
option cc_enable_arenas = true;
// The protocol compiler can output a FileDescriptorSet containing the .proto
// files it parses.
message FileDescriptorSet {
repeated FileDescriptorProto file = 1;
}
// Describes a complete .proto file.
message FileDescriptorProto {
optional string name = 1; // file name, relative to root of source tree
optional string package = 2; // e.g. "foo", "foo.bar", etc.
// Names of files imported by this file.
repeated string dependency = 3;
// Indexes of the public imported files in the dependency list above.
repeated int32 public_dependency = 10;
// Indexes of the weak imported files in the dependency list.
// For Google-internal migration only. Do not use.
repeated int32 weak_dependency = 11;
// All top-level definitions in this file.
repeated DescriptorProto message_type = 4;
repeated EnumDescriptorProto enum_type = 5;
repeated ServiceDescriptorProto service = 6;
repeated FieldDescriptorProto extension = 7;
optional FileOptions options = 8;
// This field contains optional information about the original source code.
// You may safely remove this entire field without harming runtime
// functionality of the descriptors -- the information is needed only by
// development tools.
optional SourceCodeInfo source_code_info = 9;
// The syntax of the proto file.
// The supported values are "proto2" and "proto3".
optional string syntax = 12;
}
// Describes a message type.
message DescriptorProto {
optional string name = 1;
repeated FieldDescriptorProto field = 2;
repeated FieldDescriptorProto extension = 6;
repeated DescriptorProto nested_type = 3;
repeated EnumDescriptorProto enum_type = 4;
message ExtensionRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Exclusive.
optional ExtensionRangeOptions options = 3;
}
repeated ExtensionRange extension_range = 5;
repeated OneofDescriptorProto oneof_decl = 8;
optional MessageOptions options = 7;
// Range of reserved tag numbers. Reserved tag numbers may not be used by
// fields or extension ranges in the same message. Reserved ranges may
// not overlap.
message ReservedRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Exclusive.
}
repeated ReservedRange reserved_range = 9;
// Reserved field names, which may not be used by fields in the same message.
// A given name may only be reserved once.
repeated string reserved_name = 10;
}
message ExtensionRangeOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
// Describes a field within a message.
message FieldDescriptorProto {
enum Type {
// 0 is reserved for errors.
// Order is weird for historical reasons.
TYPE_DOUBLE = 1;
TYPE_FLOAT = 2;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
// negative values are likely.
TYPE_INT64 = 3;
TYPE_UINT64 = 4;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
// negative values are likely.
TYPE_INT32 = 5;
TYPE_FIXED64 = 6;
TYPE_FIXED32 = 7;
TYPE_BOOL = 8;
TYPE_STRING = 9;
// Tag-delimited aggregate.
// Group type is deprecated and not supported in proto3. However, Proto3
// implementations should still be able to parse the group wire format and
// treat group fields as unknown fields.
TYPE_GROUP = 10;
TYPE_MESSAGE = 11; // Length-delimited aggregate.
// New in version 2.
TYPE_BYTES = 12;
TYPE_UINT32 = 13;
TYPE_ENUM = 14;
TYPE_SFIXED32 = 15;
TYPE_SFIXED64 = 16;
TYPE_SINT32 = 17; // Uses ZigZag encoding.
TYPE_SINT64 = 18; // Uses ZigZag encoding.
}
enum Label {
// 0 is reserved for errors
LABEL_OPTIONAL = 1;
LABEL_REQUIRED = 2;
LABEL_REPEATED = 3;
}
optional string name = 1;
optional int32 number = 3;
optional Label label = 4;
// If type_name is set, this need not be set. If both this and type_name
// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
optional Type type = 5;
// For message and enum types, this is the name of the type. If the name
// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
// rules are used to find the type (i.e. first the nested types within this
// message are searched, then within the parent, on up to the root
// namespace).
optional string type_name = 6;
// For extensions, this is the name of the type being extended. It is
// resolved in the same manner as type_name.
optional string extendee = 2;
// For numeric types, contains the original text representation of the value.
// For booleans, "true" or "false".
// For strings, contains the default text contents (not escaped in any way).
// For bytes, contains the C escaped value. All bytes >= 128 are escaped.
// TODO: Base-64 encode?
optional string default_value = 7;
// If set, gives the index of a oneof in the containing type's oneof_decl
// list. This field is a member of that oneof.
optional int32 oneof_index = 9;
// JSON name of this field. The value is set by protocol compiler. If the
// user has set a "json_name" option on this field, that option's value
// will be used. Otherwise, it's deduced from the field's name by converting
// it to camelCase.
optional string json_name = 10;
optional FieldOptions options = 8;
// If true, this is a proto3 "optional". When a proto3 field is optional, it
// tracks presence regardless of field type.
//
// When proto3_optional is true, this field must belong to a oneof to
// signal to old proto3 clients that presence is tracked for this field. This
// oneof is known as a "synthetic" oneof, and this field must be its sole
// member (each proto3 optional field gets its own synthetic oneof). Synthetic
// oneofs exist in the descriptor only, and do not generate any API. Synthetic
// oneofs must be ordered after all "real" oneofs.
//
// For message fields, proto3_optional doesn't create any semantic change,
// since non-repeated message fields always track presence. However it still
// indicates the semantic detail of whether the user wrote "optional" or not.
// This can be useful for round-tripping the .proto file. For consistency we
// give message fields a synthetic oneof also, even though it is not required
// to track presence. This is especially important because the parser can't
// tell if a field is a message or an enum, so it must always create a
// synthetic oneof.
//
// Proto2 optional fields do not set this flag, because they already indicate
// optional with `LABEL_OPTIONAL`.
optional bool proto3_optional = 17;
}
// Describes a oneof.
message OneofDescriptorProto {
optional string name = 1;
optional OneofOptions options = 2;
}
// Describes an enum type.
message EnumDescriptorProto {
optional string name = 1;
repeated EnumValueDescriptorProto value = 2;
optional EnumOptions options = 3;
// Range of reserved numeric values. Reserved values may not be used by
// entries in the same enum. Reserved ranges may not overlap.
//
// Note that this is distinct from DescriptorProto.ReservedRange in that it
// is inclusive such that it can appropriately represent the entire int32
// domain.
message EnumReservedRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Inclusive.
}
// Range of reserved numeric values. Reserved numeric values may not be used
// by enum values in the same enum declaration. Reserved ranges may not
// overlap.
repeated EnumReservedRange reserved_range = 4;
// Reserved enum value names, which may not be reused. A given name may only
// be reserved once.
repeated string reserved_name = 5;
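// For example (illustrative): an enum using both kinds of reservation:
//   enum Status {
//     reserved 2, 9 to 11, 40 to max;
//     reserved "FAILED";
//     OK = 0;
//   }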
}
// Describes a value within an enum.
message EnumValueDescriptorProto {
optional string name = 1;
optional int32 number = 2;
optional EnumValueOptions options = 3;
}
// Describes a service.
message ServiceDescriptorProto {
optional string name = 1;
repeated MethodDescriptorProto method = 2;
optional ServiceOptions options = 3;
}
// Describes a method of a service.
message MethodDescriptorProto {
optional string name = 1;
// Input and output type names. These are resolved in the same way as
// FieldDescriptorProto.type_name, but must refer to a message type.
optional string input_type = 2;
optional string output_type = 3;
optional MethodOptions options = 4;
// Identifies whether the client streams multiple client messages.
optional bool client_streaming = 5 [default = false];
// Identifies whether the server streams multiple server messages.
optional bool server_streaming = 6 [default = false];
}
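// For example (illustrative service, not part of this file): the four
// streaming combinations map onto these two flags:
//   service Chat {
//     rpc GetUser(UserRequest) returns (User);           // neither
//     rpc Watch(WatchRequest) returns (stream Event);    // server_streaming
//     rpc Upload(stream Chunk) returns (UploadStatus);   // client_streaming
//     rpc Talk(stream Message) returns (stream Message); // both
//   }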
// ===================================================================
// Options
// Each of the definitions above may have "options" attached. These are
// just annotations which may cause code to be generated slightly differently
// or may contain hints for code that manipulates protocol messages.
//
// Clients may define custom options as extensions of the *Options messages.
// These extensions may not yet be known at parsing time, so the parser cannot
// store the values in them. Instead it stores them in a field in the *Options
// message called uninterpreted_option. This field must have the same name
// across all *Options messages. We then use this field to populate the
// extensions when we build a descriptor, at which point all protos have been
// parsed and so all extensions are known.
//
// Extension numbers for custom options may be chosen as follows:
// * For options which will only be used within a single application or
// organization, or for experimental options, use field numbers 50000
// through 99999. It is up to you to ensure that you do not use the
// same number for multiple options.
// * For options which will be published and used publicly by multiple
// independent entities, e-mail protobuf-global-extension-registry@google.com
// to reserve extension numbers. Simply provide your project name (e.g.
// Objective-C plugin) and your project website (if available) -- there's no
// need to explain how you intend to use them. Usually you only need one
// extension number. You can declare multiple options with only one extension
// number by putting them in a sub-message. See the Custom Options section of
// the docs for examples:
// https://developers.google.com/protocol-buffers/docs/proto#options
// If this turns out to be popular, a web service will be set up
// to automatically assign option numbers.
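// For example, a custom option in the experimental 50000-99999 range might
// be declared and used like this (names here are illustrative):
//   import "google/protobuf/descriptor.proto";
//   extend google.protobuf.FileOptions {
//     optional string my_file_option = 50000;
//   }
//   option (my_file_option) = "Hello world!";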
message FileOptions {
// Sets the Java package where classes generated from this .proto will be
// placed. By default, the proto package is used, but this is often
// inappropriate because proto packages do not normally start with backwards
// domain names.
optional string java_package = 1;
// If set, all the classes from the .proto file are wrapped in a single
// outer class with the given name. This applies to both Proto1
// (equivalent to the old "--one_java_file" option) and Proto2 (where
// a .proto always translates to a single class, but you may want to
// explicitly choose the class name).
optional string java_outer_classname = 8;
// If set true, then the Java code generator will generate a separate .java
// file for each top-level message, enum, and service defined in the .proto
// file. Thus, these types will *not* be nested inside the outer class
// named by java_outer_classname. However, the outer class will still be
// generated to contain the file's getDescriptor() method as well as any
// top-level extensions defined in the file.
optional bool java_multiple_files = 10 [default = false];
// This option does nothing.
optional bool java_generate_equals_and_hash = 20 [deprecated = true];
// If set true, then the Java2 code generator will generate code that
// throws an exception whenever an attempt is made to assign a non-UTF-8
// byte sequence to a string field.
// Message reflection will do the same.
// However, an extension field still accepts non-UTF-8 byte sequences.
// This option has no effect when used with the lite runtime.
optional bool java_string_check_utf8 = 27 [default = false];
// Generated classes can be optimized for speed or code size.
enum OptimizeMode {
SPEED = 1; // Generate complete code for parsing, serialization,
// etc.
CODE_SIZE = 2; // Use ReflectionOps to implement these methods.
LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
}
optional OptimizeMode optimize_for = 9 [default = SPEED];
// Sets the Go package where structs generated from this .proto will be
// placed. If omitted, the Go package will be derived from the following:
// - The basename of the package import path, if provided.
// - Otherwise, the package statement in the .proto file, if present.
// - Otherwise, the basename of the .proto file, without extension.
optional string go_package = 11;
// Should generic services be generated in each language? "Generic" services
// are not specific to any particular RPC system. They are generated by the
// main code generators in each language (without additional plugins).
// Generic services were the only kind of service generation supported by
// early versions of google.protobuf.
//
// Generic services are now considered deprecated in favor of using plugins
// that generate code specific to your particular RPC system. Therefore,
// these default to false. Old code which depends on generic services should
// explicitly set them to true.
optional bool cc_generic_services = 16 [default = false];
optional bool java_generic_services = 17 [default = false];
optional bool py_generic_services = 18 [default = false];
optional bool php_generic_services = 42 [default = false];
// Is this file deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for everything in the file, or it will be completely ignored; at the very
// least, this is a formalization for deprecating files.
optional bool deprecated = 23 [default = false];
// Enables the use of arenas for the proto messages in this file. This applies
// only to generated classes for C++.
optional bool cc_enable_arenas = 31 [default = true];
// Sets the Objective-C class prefix, which is prepended to all Objective-C
// generated classes from this .proto. There is no default.
optional string objc_class_prefix = 36;
// Namespace for generated classes; defaults to the package.
optional string csharp_namespace = 37;
// By default Swift generators will take the proto package and CamelCase it
// replacing '.' with underscore and use that to prefix the types/symbols
// defined. When this option is provided, they will use this value instead
// to prefix the types/symbols defined.
optional string swift_prefix = 39;
// Sets the php class prefix which is prepended to all php generated classes
// from this .proto. Default is empty.
optional string php_class_prefix = 40;
// Use this option to change the namespace of php generated classes. Default
// is empty. When this option is empty, the package name will be used for
// determining the namespace.
optional string php_namespace = 41;
// Use this option to change the namespace of php generated metadata classes.
// Default is empty. When this option is empty, the proto file name will be
// used for determining the namespace.
optional string php_metadata_namespace = 44;
// Use this option to change the package of ruby generated classes. Default
// is empty. When this option is not set, the package name will be used for
// determining the ruby package.
optional string ruby_package = 45;
// The parser stores options it doesn't recognize here.
// See the documentation for the "Options" section above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message.
// See the documentation for the "Options" section above.
extensions 1000 to max;
reserved 38;
}
message MessageOptions {
// Set true to use the old proto1 MessageSet wire format for extensions.
// This is provided for backwards-compatibility with the MessageSet wire
// format. You should not use this for any other reason: It's less
// efficient, has fewer features, and is more complicated.
//
// The message must be defined exactly as follows:
// message Foo {
// option message_set_wire_format = true;
// extensions 4 to max;
// }
// Note that the message cannot have any defined fields; MessageSets only
// have extensions.
//
// All extensions of your type must be singular messages; e.g. they cannot
// be int32s, enums, or repeated messages.
//
// Because this is an option, the above two restrictions are not enforced by
// the protocol compiler.
optional bool message_set_wire_format = 1 [default = false];
// Disables the generation of the standard "descriptor()" accessor, which can
// conflict with a field of the same name. This is meant to make migration
// from proto1 easier; new code should avoid fields named "descriptor".
optional bool no_standard_descriptor_accessor = 2 [default = false];
// Is this message deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the message, or it will be completely ignored; at the very least,
// this is a formalization for deprecating messages.
optional bool deprecated = 3 [default = false];
// Whether the message is an automatically generated map entry type for a
// map field.
//
// For map fields:
// map<KeyType, ValueType> map_field = 1;
// The parsed descriptor looks like:
// message MapFieldEntry {
// option map_entry = true;
// optional KeyType key = 1;
// optional ValueType value = 2;
// }
// repeated MapFieldEntry map_field = 1;
//
// Implementations may choose not to generate the map_entry=true message, but
// use a native map in the target language to hold the keys and values.
// The reflection APIs in such implementations still need to work as
// if the field is a repeated message field.
//
// NOTE: Do not set the option in .proto files. Always use the maps syntax
// instead. The option should only be implicitly set by the proto compiler
// parser.
optional bool map_entry = 7;
reserved 8; // javalite_serializable
reserved 9; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message FieldOptions {
// The ctype option instructs the C++ code generator to use a different
// representation of the field than it normally would. See the specific
// options below. This option is not yet implemented in the open source
// release -- sorry, we'll try to include it in a future version!
optional CType ctype = 1 [default = STRING];
enum CType {
// Default mode.
STRING = 0;
CORD = 1;
STRING_PIECE = 2;
}
// The packed option can be enabled for repeated primitive fields to enable
// a more efficient representation on the wire. Rather than repeatedly
// writing the tag and type for each element, the entire array is encoded as
// a single length-delimited blob. In proto3, only explicitly setting it to
// false will avoid packed encoding.
optional bool packed = 2;
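// For example (illustrative): repeated int32 samples = 4 [packed = true];
// encodes one tag and one length followed by all the varints, instead of
// repeating the tag for every element.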
// The jstype option determines the JavaScript type used for values of the
// field. The option is permitted only for 64 bit integral and fixed types
// (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
// is represented as a JavaScript string, which avoids loss of precision that
// can happen when a large value is converted to a floating point JavaScript
// number.
// Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
// use the JavaScript "number" type. The behavior of the default option
// JS_NORMAL is implementation dependent.
//
// This option is an enum to permit additional types to be added, e.g.
// goog.math.Integer.
optional JSType jstype = 6 [default = JS_NORMAL];
enum JSType {
// Use the default type.
JS_NORMAL = 0;
// Use JavaScript strings.
JS_STRING = 1;
// Use JavaScript numbers.
JS_NUMBER = 2;
}
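// For example (illustrative): optional uint64 user_id = 1 [jstype = JS_STRING];
// keeps 64-bit values exact in JavaScript at the cost of string handling.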
// Should this field be parsed lazily? Lazy applies only to message-type
// fields. It means that when the outer message is initially parsed, the
// inner message's contents will not be parsed but instead stored in encoded
// form. The inner message will actually be parsed when it is first accessed.
//
// This is only a hint. Implementations are free to choose whether to use
// eager or lazy parsing regardless of the value of this option. However,
// setting this option true suggests that the protocol author believes that
// using lazy parsing on this field is worth the additional bookkeeping
// overhead typically needed to implement it.
//
// This option does not affect the public interface of any generated code;
// all method signatures remain the same. Furthermore, thread-safety of the
// interface is not affected by this option; const methods remain safe to
// call from multiple threads concurrently, while non-const methods continue
// to require exclusive access.
//
// Note that implementations may choose not to check required fields within
// a lazy sub-message. That is, calling IsInitialized() on the outer message
// may return true even if the inner message has missing required fields.
// This is necessary because otherwise the inner message would have to be
// parsed in order to perform the check, defeating the purpose of lazy
// parsing. An implementation which chooses not to check required fields
// must be consistent about it. That is, for any particular sub-message, the
// implementation must either *always* check its required fields, or *never*
// check its required fields, regardless of whether or not the message has
// been parsed.
optional bool lazy = 5 [default = false];
// Is this field deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for accessors, or it will be completely ignored; at the very least, this
// is a formalization for deprecating fields.
optional bool deprecated = 3 [default = false];
// For Google-internal migration only. Do not use.
optional bool weak = 10 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
reserved 4; // removed jtype
}
message OneofOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumOptions {
// Set this option to true to allow mapping different tag names to the same
// value.
optional bool allow_alias = 2;
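// For example (illustrative):
//   enum Shape {
//     option allow_alias = true;
//     SHAPE_UNSPECIFIED = 0;
//     CIRCLE = 1;
//     ROUND = 1;  // alias of CIRCLE, legal only with allow_alias
//   }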
// Is this enum deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum, or it will be completely ignored; at the very least, this
// is a formalization for deprecating enums.
optional bool deprecated = 3 [default = false];
reserved 5; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumValueOptions {
// Is this enum value deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum value, or it will be completely ignored; at the very least,
// this is a formalization for deprecating enum values.
optional bool deprecated = 1 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message ServiceOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this service deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the service, or it will be completely ignored; at the very least,
// this is a formalization for deprecating services.
optional bool deprecated = 33 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message MethodOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this method deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the method, or it will be completely ignored; at the very least,
// this is a formalization for deprecating methods.
optional bool deprecated = 33 [default = false];
// Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
// or neither? HTTP-based RPC implementations may choose the GET verb for
// safe methods, and the PUT verb for idempotent methods, instead of the
// default POST.
enum IdempotencyLevel {
IDEMPOTENCY_UNKNOWN = 0;
NO_SIDE_EFFECTS = 1; // implies idempotent
IDEMPOTENT = 2; // idempotent, but may have side effects
}
optional IdempotencyLevel idempotency_level = 34
[default = IDEMPOTENCY_UNKNOWN];
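// For example (illustrative):
//   rpc GetFoo(GetFooRequest) returns (Foo) {
//     option idempotency_level = NO_SIDE_EFFECTS;  // eligible for HTTP GET
//   }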
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
// A message representing an option the parser does not recognize. This only
// appears in options protos created by the compiler::Parser class.
// DescriptorPool resolves these when building Descriptor objects. Therefore,
// options protos in descriptor objects (e.g. returned by Descriptor::options(),
// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
// in them.
message UninterpretedOption {
// The name of the uninterpreted option. Each string represents a segment in
// a dot-separated name. is_extension is true iff a segment represents an
// extension (denoted with parentheses in options specs in .proto files).
// E.g., { ["foo", false], ["bar.baz", true], ["qux", false] } represents
// "foo.(bar.baz).qux".
message NamePart {
optional string name_part = 1;
optional bool is_extension = 2;
}
repeated NamePart name = 2;
// The value of the uninterpreted option, in whatever type the tokenizer
// identified it as during parsing. Exactly one of these should be set.
optional string identifier_value = 3;
optional uint64 positive_int_value = 4;
optional int64 negative_int_value = 5;
optional double double_value = 6;
optional bytes string_value = 7;
optional string aggregate_value = 8;
}
// ===================================================================
// Optional source code info
// Encapsulates information about the original source file from which a
// FileDescriptorProto was generated.
message SourceCodeInfo {
// A Location identifies a piece of source code in a .proto file which
// corresponds to a particular definition. This information is intended
// to be useful to IDEs, code indexers, documentation generators, and similar
// tools.
//
// For example, say we have a file like:
// message Foo {
// optional string foo = 1;
// }
// Let's look at just the field definition:
// optional string foo = 1;
// ^ ^^ ^^ ^ ^^^
// a bc de f ghi
// We have the following locations:
// span path represents
// [a,i) [ 4, 0, 2, 0 ] The whole field definition.
// [a,b) [ 4, 0, 2, 0, 4 ] The label (optional).
// [c,d) [ 4, 0, 2, 0, 5 ] The type (string).
// [e,f) [ 4, 0, 2, 0, 1 ] The name (foo).
// [g,h) [ 4, 0, 2, 0, 3 ] The number (1).
//
// Notes:
// - A location may refer to a repeated field itself (i.e. not to any
// particular index within it). This is used whenever a set of elements is
// logically enclosed in a single code segment. For example, an entire
// extend block (possibly containing multiple extension definitions) will
// have an outer location whose path refers to the "extensions" repeated
// field without an index.
// - Multiple locations may have the same path. This happens when a single
// logical declaration is spread out across multiple places. The most
// obvious example is the "extend" block again -- there may be multiple
// extend blocks in the same scope, each of which will have the same path.
// - A location's span is not always a subset of its parent's span. For
// example, the "extendee" of an extension declaration appears at the
// beginning of the "extend" block and is shared by all extensions within
// the block.
// - Just because a location's span is a subset of some other location's span
// does not mean that it is a descendant. For example, a "group" defines
// both a type and a field in a single declaration. Thus, the locations
// corresponding to the type and field and their components will overlap.
// - Code which tries to interpret locations should probably be designed to
// ignore those that it doesn't understand, as more types of locations could
// be recorded in the future.
repeated Location location = 1;
message Location {
// Identifies which part of the FileDescriptorProto was defined at this
// location.
//
// Each element is a field number or an index. They form a path from
// the root FileDescriptorProto to the place where the definition appears. For
// example, this path:
// [ 4, 3, 2, 7, 1 ]
// refers to:
// file.message_type(3) // 4, 3
// .field(7) // 2, 7
// .name() // 1
// This is because FileDescriptorProto.message_type has field number 4:
// repeated DescriptorProto message_type = 4;
// and DescriptorProto.field has field number 2:
// repeated FieldDescriptorProto field = 2;
// and FieldDescriptorProto.name has field number 1:
// optional string name = 1;
//
// Thus, the above path gives the location of a field name. If we removed
// the last element:
// [ 4, 3, 2, 7 ]
// this path refers to the whole field declaration (from the beginning
// of the label to the terminating semicolon).
repeated int32 path = 1 [packed = true];
// Always has exactly three or four elements: start line, start column,
// end line (optional, otherwise assumed same as start line), end column.
// These are packed into a single field for efficiency. Note that line
// and column numbers are zero-based -- typically you will want to add
// 1 to each before displaying to a user.
repeated int32 span = 2 [packed = true];
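// For example (illustrative): span [ 3, 4, 3, 25 ] and its three-element
// form [ 3, 4, 25 ] both describe zero-based line 3, columns 4 through 25
// (shown to a user as line 4).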
// If this SourceCodeInfo represents a complete declaration, these are any
// comments appearing before and after the declaration which appear to be
// attached to the declaration.
//
// A series of line comments appearing on consecutive lines, with no other
// tokens appearing on those lines, will be treated as a single comment.
//
// leading_detached_comments will keep paragraphs of comments that appear
// before (but not connected to) the current element. Each paragraph,
// separated by empty lines, will be one comment element in the repeated
// field.
//
// Only the comment content is provided; comment markers (e.g. //) are
// stripped out. For block comments, leading whitespace and an asterisk
// will be stripped from the beginning of each line other than the first.
// Newlines are included in the output.
//
// Examples:
//
// optional int32 foo = 1; // Comment attached to foo.
// // Comment attached to bar.
// optional int32 bar = 2;
//
// optional string baz = 3;
// // Comment attached to baz.
// // Another line attached to baz.
//
// // Comment attached to qux.
// //
// // Another line attached to qux.
// optional double qux = 4;
//
// // Detached comment for corge. This is not leading or trailing comments
// // to qux or corge because there are blank lines separating it from
// // both.
//
// // Detached comment for corge paragraph 2.
//
// optional string corge = 5;
// /* Block comment attached
// * to corge. Leading asterisks
// * will be removed. */
// /* Block comment attached to
// * grault. */
// optional int32 grault = 6;
//
// // ignored detached comments.
optional string leading_comments = 3;
optional string trailing_comments = 4;
repeated string leading_detached_comments = 6;
}
}
// Describes the relationship between generated code and its original source
// file. A GeneratedCodeInfo message is associated with only one generated
// source file, but may contain references to different source .proto files.
message GeneratedCodeInfo {
// An Annotation connects some span of text in generated code to an element
// of its generating .proto file.
repeated Annotation annotation = 1;
message Annotation {
// Identifies the element in the original source .proto file. This field
// is formatted the same as SourceCodeInfo.Location.path.
repeated int32 path = 1 [packed = true];
// Identifies the filesystem path to the original source .proto.
optional string source_file = 2;
// Identifies the starting offset in bytes in the generated code
// that relates to the identified object.
optional int32 begin = 3;
// Identifies the ending offset in bytes in the generated code that
// relates to the identified object. The end offset should be one past
// the last relevant byte (so the length of the text = end - begin).
optional int32 end = 4;
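// For example (illustrative): an annotation with source_file = "foo.proto",
// path = [ 4, 0, 2, 1 ], begin = 120, end = 131 says that bytes [120, 131)
// of the generated file came from the second field of the first message in
// foo.proto.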
}
}

@ -0,0 +1,867 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
// Author: kenton@google.com (Kenton Varda)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
//
// The messages in this file describe the definitions found in .proto files.
// A valid .proto file can be translated directly to a FileDescriptorProto
// without any other information (e.g. without reading its imports).
syntax = "proto2";
package upb_benchmark.sv;
option go_package = "google.golang.org/protobuf/types/descriptorpb";
option java_package = "com.google.protobuf";
option java_outer_classname = "DescriptorProtos";
option csharp_namespace = "Google.Protobuf.Reflection";
option objc_class_prefix = "GPB";
option cc_enable_arenas = true;
// The protocol compiler can output a FileDescriptorSet containing the .proto
// files it parses.
message FileDescriptorSet {
repeated FileDescriptorProto file = 1;
}
// Describes a complete .proto file.
message FileDescriptorProto {
optional string name = 1
[ctype = STRING_PIECE]; // file name, relative to root of source tree
optional string package = 2
[ctype = STRING_PIECE]; // e.g. "foo", "foo.bar", etc.
// Names of files imported by this file.
repeated string dependency = 3 [ctype = STRING_PIECE];
// Indexes of the public imported files in the dependency list above.
repeated int32 public_dependency = 10;
// Indexes of the weak imported files in the dependency list.
// For Google-internal migration only. Do not use.
repeated int32 weak_dependency = 11;
// All top-level definitions in this file.
repeated DescriptorProto message_type = 4;
repeated EnumDescriptorProto enum_type = 5;
repeated ServiceDescriptorProto service = 6;
repeated FieldDescriptorProto extension = 7;
optional FileOptions options = 8;
// This field contains optional information about the original source code.
// You may safely remove this entire field without harming runtime
// functionality of the descriptors -- the information is needed only by
// development tools.
optional SourceCodeInfo source_code_info = 9;
// The syntax of the proto file.
// The supported values are "proto2" and "proto3".
optional string syntax = 12 [ctype = STRING_PIECE];
}
// Describes a message type.
message DescriptorProto {
optional string name = 1 [ctype = STRING_PIECE];
repeated FieldDescriptorProto field = 2;
repeated FieldDescriptorProto extension = 6;
repeated DescriptorProto nested_type = 3;
repeated EnumDescriptorProto enum_type = 4;
message ExtensionRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Exclusive.
optional ExtensionRangeOptions options = 3;
}
repeated ExtensionRange extension_range = 5;
repeated OneofDescriptorProto oneof_decl = 8;
optional MessageOptions options = 7;
// Range of reserved tag numbers. Reserved tag numbers may not be used by
// fields or extension ranges in the same message. Reserved ranges may
// not overlap.
message ReservedRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Exclusive.
}
repeated ReservedRange reserved_range = 9;
// Reserved field names, which may not be used by fields in the same message.
// A given name may only be reserved once.
repeated string reserved_name = 10 [ctype = STRING_PIECE];
}
message ExtensionRangeOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
// Describes a field within a message.
message FieldDescriptorProto {
enum Type {
// 0 is reserved for errors.
// Order is weird for historical reasons.
TYPE_DOUBLE = 1;
TYPE_FLOAT = 2;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
// negative values are likely.
TYPE_INT64 = 3;
TYPE_UINT64 = 4;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
// negative values are likely.
TYPE_INT32 = 5;
TYPE_FIXED64 = 6;
TYPE_FIXED32 = 7;
TYPE_BOOL = 8;
TYPE_STRING = 9;
// Tag-delimited aggregate.
// Group type is deprecated and not supported in proto3. However, Proto3
// implementations should still be able to parse the group wire format and
// treat group fields as unknown fields.
TYPE_GROUP = 10;
TYPE_MESSAGE = 11; // Length-delimited aggregate.
// New in version 2.
TYPE_BYTES = 12;
TYPE_UINT32 = 13;
TYPE_ENUM = 14;
TYPE_SFIXED32 = 15;
TYPE_SFIXED64 = 16;
TYPE_SINT32 = 17; // Uses ZigZag encoding.
TYPE_SINT64 = 18; // Uses ZigZag encoding.
}
enum Label {
// 0 is reserved for errors
LABEL_OPTIONAL = 1;
LABEL_REQUIRED = 2;
LABEL_REPEATED = 3;
}
optional string name = 1 [ctype = STRING_PIECE];
optional int32 number = 3;
optional Label label = 4;
// If type_name is set, this need not be set. If both this and type_name
// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
optional Type type = 5;
// For message and enum types, this is the name of the type. If the name
// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
// rules are used to find the type (i.e. first the nested types within this
// message are searched, then within the parent, on up to the root
// namespace).
optional string type_name = 6 [ctype = STRING_PIECE];
// For extensions, this is the name of the type being extended. It is
// resolved in the same manner as type_name.
optional string extendee = 2 [ctype = STRING_PIECE];
// For numeric types, contains the original text representation of the value.
// For booleans, "true" or "false".
// For strings, contains the default text contents (not escaped in any way).
// For bytes, contains the C escaped value. All bytes >= 128 are escaped.
// TODO: Base-64 encode?
optional string default_value = 7 [ctype = STRING_PIECE];
// If set, gives the index of a oneof in the containing type's oneof_decl
// list. This field is a member of that oneof.
optional int32 oneof_index = 9;
// JSON name of this field. The value is set by the protocol compiler. If the
// user has set a "json_name" option on this field, that option's value
// will be used. Otherwise, it's deduced from the field's name by converting
// it to camelCase.
optional string json_name = 10 [ctype = STRING_PIECE];
optional FieldOptions options = 8;
// If true, this is a proto3 "optional". When a proto3 field is optional, it
// tracks presence regardless of field type.
//
// When proto3_optional is true, this field must belong to a oneof to
// signal to old proto3 clients that presence is tracked for this field. This
// oneof is known as a "synthetic" oneof, and this field must be its sole
// member (each proto3 optional field gets its own synthetic oneof). Synthetic
// oneofs exist in the descriptor only, and do not generate any API. Synthetic
// oneofs must be ordered after all "real" oneofs.
//
// For message fields, proto3_optional doesn't create any semantic change,
// since non-repeated message fields always track presence. However it still
// indicates the semantic detail of whether the user wrote "optional" or not.
// This can be useful for round-tripping the .proto file. For consistency we
// give message fields a synthetic oneof also, even though it is not required
// to track presence. This is especially important because the parser can't
// tell if a field is a message or an enum, so it must always create a
// synthetic oneof.
//
// Proto2 optional fields do not set this flag, because they already indicate
// optional with `LABEL_OPTIONAL`.
optional bool proto3_optional = 17;
}
// Describes a oneof.
message OneofDescriptorProto {
optional string name = 1 [ctype = STRING_PIECE];
optional OneofOptions options = 2;
}
// Describes an enum type.
message EnumDescriptorProto {
optional string name = 1 [ctype = STRING_PIECE];
repeated EnumValueDescriptorProto value = 2;
optional EnumOptions options = 3;
// Range of reserved numeric values. Reserved values may not be used by
// entries in the same enum. Reserved ranges may not overlap.
//
// Note that this is distinct from DescriptorProto.ReservedRange in that it
// is inclusive such that it can appropriately represent the entire int32
// domain.
message EnumReservedRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Inclusive.
}
// Range of reserved numeric values. Reserved numeric values may not be used
// by enum values in the same enum declaration. Reserved ranges may not
// overlap.
repeated EnumReservedRange reserved_range = 4;
// Reserved enum value names, which may not be reused. A given name may only
// be reserved once.
repeated string reserved_name = 5 [ctype = STRING_PIECE];
}
// Describes a value within an enum.
message EnumValueDescriptorProto {
optional string name = 1 [ctype = STRING_PIECE];
optional int32 number = 2;
optional EnumValueOptions options = 3;
}
// Describes a service.
message ServiceDescriptorProto {
optional string name = 1 [ctype = STRING_PIECE];
repeated MethodDescriptorProto method = 2;
optional ServiceOptions options = 3;
}
// Describes a method of a service.
message MethodDescriptorProto {
optional string name = 1 [ctype = STRING_PIECE];
// Input and output type names. These are resolved in the same way as
// FieldDescriptorProto.type_name, but must refer to a message type.
optional string input_type = 2 [ctype = STRING_PIECE];
optional string output_type = 3 [ctype = STRING_PIECE];
optional MethodOptions options = 4;
// Identifies whether the client streams multiple client messages.
optional bool client_streaming = 5 [default = false];
// Identifies whether the server streams multiple server messages.
optional bool server_streaming = 6 [default = false];
}
// ===================================================================
// Options
// Each of the definitions above may have "options" attached. These are
// just annotations which may cause code to be generated slightly differently
// or may contain hints for code that manipulates protocol messages.
//
// Clients may define custom options as extensions of the *Options messages.
// These extensions may not yet be known at parsing time, so the parser cannot
// store the values in them. Instead it stores them in a field in the *Options
// message called uninterpreted_option. This field must have the same name
// across all *Options messages. We then use this field to populate the
// extensions when we build a descriptor, at which point all protos have been
// parsed and so all extensions are known.
//
// Extension numbers for custom options may be chosen as follows:
// * For options which will only be used within a single application or
// organization, or for experimental options, use field numbers 50000
// through 99999. It is up to you to ensure that you do not use the
// same number for multiple options.
// * For options which will be published and used publicly by multiple
// independent entities, e-mail protobuf-global-extension-registry@google.com
// to reserve extension numbers. Simply provide your project name (e.g.
// Objective-C plugin) and your project website (if available) -- there's no
// need to explain how you intend to use them. Usually you only need one
// extension number. You can declare multiple options with only one extension
// number by putting them in a sub-message. See the Custom Options section of
// the docs for examples:
// https://developers.google.com/protocol-buffers/docs/proto#options
// If this turns out to be popular, a web service will be set up
// to automatically assign option numbers.
message FileOptions {
// Sets the Java package where classes generated from this .proto will be
// placed. By default, the proto package is used, but this is often
// inappropriate because proto packages do not normally start with backwards
// domain names.
optional string java_package = 1 [ctype = STRING_PIECE];
// If set, all the classes from the .proto file are wrapped in a single
// outer class with the given name. This applies to both Proto1
// (equivalent to the old "--one_java_file" option) and Proto2 (where
// a .proto always translates to a single class, but you may want to
// explicitly choose the class name).
optional string java_outer_classname = 8 [ctype = STRING_PIECE];
// If set true, then the Java code generator will generate a separate .java
// file for each top-level message, enum, and service defined in the .proto
// file. Thus, these types will *not* be nested inside the outer class
// named by java_outer_classname. However, the outer class will still be
// generated to contain the file's getDescriptor() method as well as any
// top-level extensions defined in the file.
optional bool java_multiple_files = 10 [default = false];
// This option does nothing.
optional bool java_generate_equals_and_hash = 20 [deprecated = true];
// If set true, then the Java2 code generator will generate code that
// throws an exception whenever an attempt is made to assign a non-UTF-8
// byte sequence to a string field.
// Message reflection will do the same.
// However, an extension field still accepts non-UTF-8 byte sequences.
// This option has no effect when used with the lite runtime.
optional bool java_string_check_utf8 = 27 [default = false];
// Generated classes can be optimized for speed or code size.
enum OptimizeMode {
SPEED = 1; // Generate complete code for parsing, serialization,
// etc.
CODE_SIZE = 2; // Use ReflectionOps to implement these methods.
LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
}
optional OptimizeMode optimize_for = 9 [default = SPEED];
// Sets the Go package where structs generated from this .proto will be
// placed. If omitted, the Go package will be derived from the following:
// - The basename of the package import path, if provided.
// - Otherwise, the package statement in the .proto file, if present.
// - Otherwise, the basename of the .proto file, without extension.
optional string go_package = 11 [ctype = STRING_PIECE];
// Should generic services be generated in each language? "Generic" services
// are not specific to any particular RPC system. They are generated by the
// main code generators in each language (without additional plugins).
// Generic services were the only kind of service generation supported by
// early versions of google.protobuf.
//
// Generic services are now considered deprecated in favor of using plugins
// that generate code specific to your particular RPC system. Therefore,
// these default to false. Old code which depends on generic services should
// explicitly set them to true.
optional bool cc_generic_services = 16 [default = false];
optional bool java_generic_services = 17 [default = false];
optional bool py_generic_services = 18 [default = false];
optional bool php_generic_services = 42 [default = false];
// Is this file deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for everything in the file, or it will be completely ignored; at the very
// least, this is a formalization for deprecating files.
optional bool deprecated = 23 [default = false];
// Enables the use of arenas for the proto messages in this file. This applies
// only to generated classes for C++.
optional bool cc_enable_arenas = 31 [default = true];
// Sets the Objective-C class prefix, which is prepended to all Objective-C
// generated classes from this .proto. There is no default.
optional string objc_class_prefix = 36 [ctype = STRING_PIECE];
// Namespace for generated classes; defaults to the package.
optional string csharp_namespace = 37 [ctype = STRING_PIECE];
// By default Swift generators will take the proto package and CamelCase it
// replacing '.' with underscore and use that to prefix the types/symbols
// defined. When this option is provided, they will use this value instead
// to prefix the types/symbols defined.
optional string swift_prefix = 39 [ctype = STRING_PIECE];
// Sets the php class prefix which is prepended to all php generated classes
// from this .proto. Default is empty.
optional string php_class_prefix = 40 [ctype = STRING_PIECE];
// Use this option to change the namespace of php generated classes. Default
// is empty. When this option is empty, the package name will be used for
// determining the namespace.
optional string php_namespace = 41 [ctype = STRING_PIECE];
// Use this option to change the namespace of php generated metadata classes.
// Default is empty. When this option is empty, the proto file name will be
// used for determining the namespace.
optional string php_metadata_namespace = 44 [ctype = STRING_PIECE];
// Use this option to change the package of ruby generated classes. Default
// is empty. When this option is not set, the package name will be used for
// determining the ruby package.
optional string ruby_package = 45 [ctype = STRING_PIECE];
// The parser stores options it doesn't recognize here.
// See the documentation for the "Options" section above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message.
// See the documentation for the "Options" section above.
extensions 1000 to max;
reserved 38;
}
message MessageOptions {
// Set true to use the old proto1 MessageSet wire format for extensions.
// This is provided for backwards-compatibility with the MessageSet wire
// format. You should not use this for any other reason: It's less
// efficient, has fewer features, and is more complicated.
//
// The message must be defined exactly as follows:
// message Foo {
// option message_set_wire_format = true;
// extensions 4 to max;
// }
// Note that the message cannot have any defined fields; MessageSets only
// have extensions.
//
// All extensions of your type must be singular messages; e.g. they cannot
// be int32s, enums, or repeated messages.
//
// Because this is an option, the above two restrictions are not enforced by
// the protocol compiler.
optional bool message_set_wire_format = 1 [default = false];
// Disables the generation of the standard "descriptor()" accessor, which can
// conflict with a field of the same name. This is meant to make migration
// from proto1 easier; new code should avoid fields named "descriptor".
optional bool no_standard_descriptor_accessor = 2 [default = false];
// Is this message deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the message, or it will be completely ignored; at the very least,
// this is a formalization for deprecating messages.
optional bool deprecated = 3 [default = false];
// Whether the message is an automatically generated map entry type for a
// map field.
//
// For map fields:
// map<KeyType, ValueType> map_field = 1;
// The parsed descriptor looks like:
// message MapFieldEntry {
// option map_entry = true;
// optional KeyType key = 1;
// optional ValueType value = 2;
// }
// repeated MapFieldEntry map_field = 1;
//
// Implementations may choose not to generate the map_entry=true message, but
// use a native map in the target language to hold the keys and values.
// The reflection APIs in such implementations still need to work as
// if the field is a repeated message field.
//
// NOTE: Do not set the option in .proto files. Always use the maps syntax
// instead. The option should only be implicitly set by the proto compiler
// parser.
optional bool map_entry = 7;
reserved 8; // javalite_serializable
reserved 9; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message FieldOptions {
// The ctype option instructs the C++ code generator to use a different
// representation of the field than it normally would. See the specific
// options below. This option is not yet implemented in the open source
// release -- sorry, we'll try to include it in a future version!
optional CType ctype = 1 [default = STRING];
enum CType {
// Default mode.
STRING = 0;
CORD = 1;
STRING_PIECE = 2;
}
// The packed option can be enabled for repeated primitive fields to enable
// a more efficient representation on the wire. Rather than repeatedly
// writing the tag and type for each element, the entire array is encoded as
// a single length-delimited blob. In proto3, only explicitly setting it to
// false will avoid packed encoding.
optional bool packed = 2;
// The jstype option determines the JavaScript type used for values of the
// field. The option is permitted only for 64 bit integral and fixed types
// (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
// is represented as a JavaScript string, which avoids loss of precision that
// can happen when a large value is converted to a floating point JavaScript
// number.
// Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
// use the JavaScript "number" type. The behavior of the default option
// JS_NORMAL is implementation dependent.
//
// This option is an enum to permit additional types to be added, e.g.
// goog.math.Integer.
optional JSType jstype = 6 [default = JS_NORMAL];
enum JSType {
// Use the default type.
JS_NORMAL = 0;
// Use JavaScript strings.
JS_STRING = 1;
// Use JavaScript numbers.
JS_NUMBER = 2;
}
// Should this field be parsed lazily? Lazy applies only to message-type
// fields. It means that when the outer message is initially parsed, the
// inner message's contents will not be parsed but instead stored in encoded
// form. The inner message will actually be parsed when it is first accessed.
//
// This is only a hint. Implementations are free to choose whether to use
// eager or lazy parsing regardless of the value of this option. However,
// setting this option true suggests that the protocol author believes that
// using lazy parsing on this field is worth the additional bookkeeping
// overhead typically needed to implement it.
//
// This option does not affect the public interface of any generated code;
// all method signatures remain the same. Furthermore, thread-safety of the
// interface is not affected by this option; const methods remain safe to
// call from multiple threads concurrently, while non-const methods continue
// to require exclusive access.
//
// Note that implementations may choose not to check required fields within
// a lazy sub-message. That is, calling IsInitialized() on the outer message
// may return true even if the inner message has missing required fields.
// This is necessary because otherwise the inner message would have to be
// parsed in order to perform the check, defeating the purpose of lazy
// parsing. An implementation which chooses not to check required fields
// must be consistent about it. That is, for any particular sub-message, the
// implementation must either *always* check its required fields, or *never*
// check its required fields, regardless of whether or not the message has
// been parsed.
optional bool lazy = 5 [default = false];
// Is this field deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for accessors, or it will be completely ignored; at the very least, this
// is a formalization for deprecating fields.
optional bool deprecated = 3 [default = false];
// For Google-internal migration only. Do not use.
optional bool weak = 10 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
reserved 4; // removed jtype
}
message OneofOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumOptions {
// Set this option to true to allow mapping different tag names to the same
// value.
optional bool allow_alias = 2;
// Is this enum deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum, or it will be completely ignored; at the very least, this
// is a formalization for deprecating enums.
optional bool deprecated = 3 [default = false];
reserved 5; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumValueOptions {
// Is this enum value deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum value, or it will be completely ignored; at the very least,
// this is a formalization for deprecating enum values.
optional bool deprecated = 1 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message ServiceOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this service deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the service, or it will be completely ignored; at the very least,
// this is a formalization for deprecating services.
optional bool deprecated = 33 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message MethodOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this method deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the method, or it will be completely ignored; at the very least,
// this is a formalization for deprecating methods.
optional bool deprecated = 33 [default = false];
// Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
// or neither? HTTP-based RPC implementations may choose the GET verb for
// safe methods, and the PUT verb for idempotent methods, instead of the
// default POST.
enum IdempotencyLevel {
IDEMPOTENCY_UNKNOWN = 0;
NO_SIDE_EFFECTS = 1; // implies idempotent
IDEMPOTENT = 2; // idempotent, but may have side effects
}
optional IdempotencyLevel idempotency_level = 34
[default = IDEMPOTENCY_UNKNOWN];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
// A message representing an option the parser does not recognize. This only
// appears in options protos created by the compiler::Parser class.
// DescriptorPool resolves these when building Descriptor objects. Therefore,
// options protos in descriptor objects (e.g. returned by Descriptor::options(),
// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
// in them.
message UninterpretedOption {
// The name of the uninterpreted option. Each string represents a segment in
// a dot-separated name. is_extension is true iff a segment represents an
// extension (denoted with parentheses in options specs in .proto files).
// E.g., { ["foo", false], ["bar.baz", true], ["qux", false] } represents
// "foo.(bar.baz).qux".
message NamePart {
optional string name_part = 1 [ctype = STRING_PIECE];
optional bool is_extension = 2;
}
repeated NamePart name = 2;
// The value of the uninterpreted option, in whatever type the tokenizer
// identified it as during parsing. Exactly one of these should be set.
optional string identifier_value = 3 [ctype = STRING_PIECE];
optional uint64 positive_int_value = 4;
optional int64 negative_int_value = 5;
optional double double_value = 6;
optional bytes string_value = 7;
optional string aggregate_value = 8 [ctype = STRING_PIECE];
}
// ===================================================================
// Optional source code info
// Encapsulates information about the original source file from which a
// FileDescriptorProto was generated.
message SourceCodeInfo {
// A Location identifies a piece of source code in a .proto file which
// corresponds to a particular definition. This information is intended
// to be useful to IDEs, code indexers, documentation generators, and similar
// tools.
//
// For example, say we have a file like:
// message Foo {
// optional string foo = 1 [ctype = STRING_PIECE];
// }
// Let's look at just the field definition:
// optional string foo = 1 [ctype = STRING_PIECE];
// ^ ^^ ^^ ^ ^^^
// a bc de f ghi
// We have the following locations:
// span path represents
// [a,i) [ 4, 0, 2, 0 ] The whole field definition.
// [a,b) [ 4, 0, 2, 0, 4 ] The label (optional).
// [c,d) [ 4, 0, 2, 0, 5 ] The type (string).
// [e,f) [ 4, 0, 2, 0, 1 ] The name (foo).
// [g,h) [ 4, 0, 2, 0, 3 ] The number (1).
//
// Notes:
// - A location may refer to a repeated field itself (i.e. not to any
// particular index within it). This is used whenever a set of elements is
// logically enclosed in a single code segment. For example, an entire
// extend block (possibly containing multiple extension definitions) will
// have an outer location whose path refers to the "extensions" repeated
// field without an index.
// - Multiple locations may have the same path. This happens when a single
// logical declaration is spread out across multiple places. The most
// obvious example is the "extend" block again -- there may be multiple
// extend blocks in the same scope, each of which will have the same path.
// - A location's span is not always a subset of its parent's span. For
// example, the "extendee" of an extension declaration appears at the
// beginning of the "extend" block and is shared by all extensions within
// the block.
// - Just because a location's span is a subset of some other location's span
// does not mean that it is a descendant. For example, a "group" defines
// both a type and a field in a single declaration. Thus, the locations
// corresponding to the type and field and their components will overlap.
// - Code which tries to interpret locations should probably be designed to
// ignore those that it doesn't understand, as more types of locations could
// be recorded in the future.
repeated Location location = 1;
message Location {
// Identifies which part of the FileDescriptorProto was defined at this
// location.
//
// Each element is a field number or an index. They form a path from
// the root FileDescriptorProto to the place where the definition appears. For
// example, this path:
//   [ 4, 3, 2, 7, 1 ]
// refers to:
//   file.message_type(3)  // 4, 3
//       .field(7)         // 2, 7
//       .name()           // 1
// This is because FileDescriptorProto.message_type has field number 4:
//   repeated DescriptorProto message_type = 4;
// and DescriptorProto.field has field number 2:
//   repeated FieldDescriptorProto field = 2;
// and FieldDescriptorProto.name has field number 1:
//   optional string name = 1 [ctype = STRING_PIECE];
//
// Thus, the above path gives the location of a field name. If we removed
// the last element:
//   [ 4, 3, 2, 7 ]
// this path refers to the whole field declaration (from the beginning
// of the label to the terminating semicolon).
repeated int32 path = 1 [packed = true];
// Always has exactly three or four elements: start line, start column,
// end line (optional, otherwise assumed same as start line), end column.
// These are packed into a single field for efficiency. Note that line
// and column numbers are zero-based -- typically you will want to add
// 1 to each before displaying to a user.
repeated int32 span = 2 [packed = true];
// If this SourceCodeInfo represents a complete declaration, these are any
// comments appearing before and after the declaration which appear to be
// attached to the declaration.
//
// A series of line comments appearing on consecutive lines, with no other
// tokens appearing on those lines, will be treated as a single comment.
//
// leading_detached_comments will keep paragraphs of comments that appear
// before (but not connected to) the current element. Each paragraph,
// separated by empty lines, will be one comment element in the repeated
// field.
//
// Only the comment content is provided; comment markers (e.g. //) are
// stripped out. For block comments, leading whitespace and an asterisk
// will be stripped from the beginning of each line other than the first.
// Newlines are included in the output.
//
// Examples:
//
//   optional int32 foo = 1;  // Comment attached to foo.
//   // Comment attached to bar.
//   optional int32 bar = 2;
//
//   optional string baz = 3 [ctype = STRING_PIECE];
//   // Comment attached to baz.
//   // Another line attached to baz.
//
//   // Comment attached to qux.
//   //
//   // Another line attached to qux.
//   optional double qux = 4;
//
//   // Detached comment for corge. This is not leading or trailing comments
//   // to qux or corge because there are blank lines separating it from
//   // both.
//
//   // Detached comment for corge paragraph 2.
//
//   optional string corge = 5 [ctype = STRING_PIECE];
//   /* Block comment attached
//    * to corge. Leading asterisks
//    * will be removed. */
//   /* Block comment attached to
//    * grault. */
//   optional int32 grault = 6;
//
//   // ignored detached comments.
optional string leading_comments = 3 [ctype = STRING_PIECE];
optional string trailing_comments = 4 [ctype = STRING_PIECE];
repeated string leading_detached_comments = 6 [ctype = STRING_PIECE];
}
}
// Describes the relationship between generated code and its original source
// file. A GeneratedCodeInfo message is associated with only one generated
// source file, but may contain references to different source .proto files.
message GeneratedCodeInfo {
// An Annotation connects some span of text in generated code to an element
// of its generating .proto file.
repeated Annotation annotation = 1;
message Annotation {
// Identifies the element in the original source .proto file. This field
// is formatted the same as SourceCodeInfo.Location.path.
repeated int32 path = 1 [packed = true];
// Identifies the filesystem path to the original source .proto.
optional string source_file = 2 [ctype = STRING_PIECE];
// Identifies the starting offset in bytes in the generated code
// that relates to the identified object.
optional int32 begin = 3;
// Identifies the ending offset in bytes in the generated code that
// relates to the identified object. The end offset should be one past
// the last relevant byte (so the length of the text = end - begin).
optional int32 end = 4;
}
}
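Since the Location semantics above are easiest to grasp in code, here is a minimal sketch (assuming the `google.protobuf` Python package and a well-formed path; no bounds checking) that resolves a `SourceCodeInfo.Location.path` against a `FileDescriptorProto` as the comment describes:
```python
# Sketch: walk a location path, where field numbers alternate with indexes
# into repeated fields, as described in the Location.path comment above.
from google.protobuf import descriptor_pb2

def resolve_path(file_proto, path):
  node = file_proto
  i = 0
  while i < len(path):
    field = node.DESCRIPTOR.fields_by_number[path[i]]
    node = getattr(node, field.name)
    i += 1
    # A repeated field may be followed by an index into it.
    if field.label == field.LABEL_REPEATED and i < len(path):
      node = node[path[i]]
      i += 1
  return node

# For a real FileDescriptorProto `fdp` (e.g. parsed from a descriptor set):
#   resolve_path(fdp, [4, 3, 2, 7, 1])  ->  fdp.message_type[3].field[7].name
# The matching Location.span is zero-based; add 1 to line/column for display.
```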

@ -0,0 +1,12 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2023 Google LLC. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
syntax = "proto3";
package upb_benchmark;
message Empty {}

@ -0,0 +1,69 @@
#!/usr/bin/python3
#
# Protocol Buffers - Google's data interchange format
# Copyright 2023 Google LLC. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google LLC nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import re
include = sys.argv[1]
msg_basename = sys.argv[2]
count = 1
m = re.search(r'(.*\D)(\d+)$', sys.argv[2])
if m:
  msg_basename = m.group(1)
  count = int(m.group(2))
print('''
#include "{include}"
char buf[1];
int main() {{
'''.format(include=include))
def RefMessage(name):
  print('''
{{
  {name} proto;
  proto.ParseFromArray(buf, 0);
  proto.SerializePartialToArray(&buf[0], 0);
}}
'''.format(name=name))
RefMessage(msg_basename)
for i in range(2, count + 1):
  RefMessage(msg_basename + str(i))
print('''
return 0;
}''')
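As a quick illustration of the basename/count convention the script parses from its second argument (the inputs below are hypothetical):
```python
# Hypothetical inputs: "Message64" splits into ("Message", 64), so the generated
# main() references Message, Message2, ..., Message64; "Message" keeps count = 1.
import re

for arg in ("Message64", "Message"):
  m = re.search(r'(.*\D)(\d+)$', arg)
  basename, count = (m.group(1), int(m.group(2))) if m else (arg, 1)
  print(arg, "->", basename, count)
```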

@ -0,0 +1,123 @@
#!/usr/bin/python3
#
# Protocol Buffers - Google's data interchange format
# Copyright 2023 Google LLC. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google LLC nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import random
base = sys.argv[1]
field_freqs = [
(('bool', 'optional'), 8.321),
(('bool', 'repeated'), 0.033),
(('bytes', 'optional'), 0.809),
(('bytes', 'repeated'), 0.065),
(('double', 'optional'), 2.845),
(('double', 'repeated'), 0.143),
(('fixed32', 'optional'), 0.084),
(('fixed32', 'repeated'), 0.012),
(('fixed64', 'optional'), 0.204),
(('fixed64', 'repeated'), 0.027),
(('float', 'optional'), 2.355),
(('float', 'repeated'), 0.132),
(('int32', 'optional'), 6.717),
(('int32', 'repeated'), 0.366),
(('int64', 'optional'), 9.678),
(('int64', 'repeated'), 0.425),
(('sfixed32', 'optional'), 0.018),
(('sfixed32', 'repeated'), 0.005),
(('sfixed64', 'optional'), 0.022),
(('sfixed64', 'repeated'), 0.005),
(('sint32', 'optional'), 0.026),
(('sint32', 'repeated'), 0.009),
(('sint64', 'optional'), 0.018),
(('sint64', 'repeated'), 0.006),
(('string', 'optional'), 25.461),
(('string', 'repeated'), 2.606),
(('Enum', 'optional'), 6.16),
(('Enum', 'repeated'), 0.576),
(('Message', 'optional'), 22.472),
(('Message', 'repeated'), 7.766),
(('uint32', 'optional'), 1.289),
(('uint32', 'repeated'), 0.051),
(('uint64', 'optional'), 1.044),
(('uint64', 'repeated'), 0.079),
]
population = [item[0] for item in field_freqs]
weights = [item[1] for item in field_freqs]
def choices(k):
  if sys.version_info >= (3, 6):
    return random.choices(population=population, weights=weights, k=k)
  else:
    print("WARNING: old Python version, field types are not properly weighted!")
    return [random.choice(population) for _ in range(k)]
with open(base + "/100_msgs.proto", "w") as f:
f.write('syntax = "proto3";\n')
f.write('package upb_benchmark;\n')
f.write('message Message {}\n')
for i in range(2, 101):
f.write('message Message{i} {{}}\n'.format(i=i))
with open(base + "/200_msgs.proto", "w") as f:
f.write('syntax = "proto3";\n')
f.write('package upb_benchmark;\n')
f.write('message Message {}\n')
for i in range(2, 501):
f.write('message Message{i} {{}}\n'.format(i=i))
with open(base + "/100_fields.proto", "w") as f:
f.write('syntax = "proto2";\n')
f.write('package upb_benchmark;\n')
f.write('enum Enum { ZERO = 0; }\n')
f.write('message Message {\n')
i = 1
random.seed(a=0, version=2)
for field in choices(100):
field_type, label = field
f.write(' {label} {field_type} field{i} = {i};\n'.format(i=i, label=label, field_type=field_type))
i += 1
f.write('}\n')
with open(base + "/200_fields.proto", "w") as f:
f.write('syntax = "proto2";\n')
f.write('package upb_benchmark;\n')
f.write('enum Enum { ZERO = 0; }\n')
f.write('message Message {\n')
i = 1
random.seed(a=0, version=2)
for field in choices(200):
field_type, label = field
f.write(' {label} {field_type} field{i} = {i};\n'.format(i=i, label=label,field_type=field_type))
i += 1
f.write('}\n')
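Note that the script reseeds the RNG with the same value before generating each `*_fields.proto` file, so the first 100 weighted draws repeat and `100_fields.proto` is a field-for-field prefix of `200_fields.proto`. A self-contained sketch of that property:
```python
# Sketch: reseeding with the same value replays the same weighted draws, which
# is why the two generated field files share their first 100 fields.
import random

population, weights = ["a", "b", "c"], [1.0, 2.0, 3.0]
random.seed(a=0, version=2)
first = random.choices(population=population, weights=weights, k=100)
random.seed(a=0, version=2)
second = random.choices(population=population, weights=weights, k=200)
assert second[:100] == first
```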

@ -0,0 +1,70 @@
#!/usr/bin/python3
#
# Protocol Buffers - Google's data interchange format
# Copyright 2023 Google LLC. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google LLC nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import re
include = sys.argv[1]
msg_basename = sys.argv[2]
count = 1
m = re.search(r'(.*\D)(\d+)$', sys.argv[2])
if m:
  msg_basename = m.group(1)
  count = int(m.group(2))
print('''
#include "{include}"
char buf[1];
int main() {{
  upb_Arena *arena = upb_Arena_New();
  size_t size;
'''.format(include=include))
def RefMessage(name):
  print('''
{{
  {name} *proto = {name}_parse(buf, 1, arena);
  {name}_serialize(proto, arena, &size);
}}
'''.format(name=name))
RefMessage(msg_basename)
for i in range(2, count + 1):
  RefMessage(msg_basename + str(i))
print('''
return 0;
}''')

@ -1,7 +1,8 @@
# Internal Starlark definitions for Protobuf.
load("@rules_cc//cc:defs.bzl", starlark_cc_proto_library = "cc_proto_library")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("@bazel_skylib//lib:selects.bzl", "selects")
load("@rules_pkg//pkg:mappings.bzl", "pkg_files", "strip_prefix")
load("//bazel:cc_proto_library.bzl", starlark_cc_proto_library = "cc_proto_library")
load(":cc_proto_blacklist_test.bzl", "cc_proto_blacklist_test")
load(":compiler_config_setting.bzl", "create_compiler_config_setting")
@ -13,10 +14,24 @@ package(
)
create_compiler_config_setting(
name = "config_msvc",
name = "config_msvc_cl",
value = "msvc-cl",
)
# Caveat: clang-cl support in protobuf is only best-effort / untested for now.
create_compiler_config_setting(
name = "config_clang_cl",
value = "clang-cl",
)
selects.config_setting_group(
name = "config_msvc",
match_any = [
":config_clang_cl",
":config_msvc_cl",
],
)
config_setting(
name = "aarch64",
values = {"cpu": "linux-aarch_64"},
@ -34,9 +49,17 @@ config_setting(
# For more info on the various crosstool_tops used by NDK Bazel builds, see:
# https://docs.bazel.build/versions/master/android-ndk.html#configuring-the-stl
# When using https://bazel.build/concepts/platforms
config_setting(
name = "config_android",
constraint_values = ["@platforms//os:android"],
)
# When using legacy flags like --android_crosstool_top, --android_cpu, and --fat_apk_cpu
config_setting(
name = "config_android-legacy-default-crosstool",
values = {
# Default of `--android_crosstool_top`
"crosstool_top": "//external:android/crosstool",
},
)
@ -83,6 +106,32 @@ config_setting(
},
)
selects.config_setting_group(
name = "config_win",
match_any = [
":config_win32",
":config_win64",
],
)
config_setting(
name = "config_osx_aarch64",
values = {"cpu": "osx-aarch_64"},
)
config_setting(
name = "config_osx_x86_64",
values = {"cpu": "osx-x86_64"},
)
selects.config_setting_group(
name = "config_osx",
match_any = [
":config_osx_aarch64",
":config_osx_x86_64",
],
)
# Internal testing:
starlark_cc_proto_library(

@ -15,6 +15,7 @@ COPTS = select({
"/wd4506", # no definition for inline function 'function'
"/wd4800", # 'type' : forcing value to bool 'true' or 'false' (performance warning)
"/wd4996", # The compiler encountered a deprecated declaration.
"/utf-8", # Set source and execution character sets to UTF-8
],
"//conditions:default": [
"-DHAVE_ZLIB",
@ -28,6 +29,7 @@ COPTS = select({
# Android and MSVC builds do not need to link in a separate pthread library.
LINK_OPTS = select({
"//build_defs:config_android": [],
"//build_defs:config_android-legacy-default-crosstool": [],
"//build_defs:config_android-stlport": [],
"//build_defs:config_android-libcpp": [],
"//build_defs:config_android-gnu-libstdcpp": [],
@ -35,16 +37,15 @@ LINK_OPTS = select({
"//build_defs:config_msvc": [
# Suppress linker warnings about files with no symbols defined.
"-ignore:4221",
"Shell32.lib",
],
"@platforms//os:macos": [
"-lpthread",
"-lm",
"-framework CoreFoundation",
],
"//conditions:default": [
"-lpthread",
"-lm",
],
})
# When cross-compiling for Windows we need to statically link pthread and the C++ library.
PROTOC_LINK_OPTS = select({
"//build_defs:config_win32": ["-static"],
"//build_defs:config_win64": ["-static"],
"//conditions:default": [],
})

@ -32,7 +32,7 @@ def inline_sh_binary(
native.genrule(
name = name + "_genrule",
srcs = srcs,
exec_tools = tools,
tools = tools,
outs = [name + ".sh"],
cmd = "cat <<'EOF' >$(OUTS)\n#!/bin/bash -exu\n%s\nEOF\n" % cmd,
visibility = ["//visibility:private"],
@ -77,7 +77,7 @@ def inline_sh_test(
native.genrule(
name = name + "_genrule",
srcs = srcs,
exec_tools = tools,
tools = tools,
outs = [name + ".sh"],
cmd = "cat <<'EOF' >$(OUTS)\n#!/bin/bash -exu\n%s\nEOF\n" % cmd,
visibility = ["//visibility:private"],

@ -0,0 +1,82 @@
"""Java options and protobuf-specific java build rules with those options."""
load("@rules_java//java:defs.bzl", "java_library")
load("@rules_jvm_external//:defs.bzl", "java_export")
load("//:protobuf_version.bzl", "PROTOBUF_JAVA_VERSION")
load("//java/osgi:osgi.bzl", "osgi_java_library")
JAVA_OPTS = [
"-source 8",
"-target 8",
"-Xep:Java8ApiChecker:ERROR",
]
BUNDLE_DOC_URL = "https://developers.google.com/protocol-buffers/"
BUNDLE_LICENSE = "https://opensource.org/licenses/BSD-3-Clause"
def protobuf_java_export(**kwargs):
    java_export(
        javacopts = JAVA_OPTS,
        **kwargs
    )

def protobuf_java_library(**kwargs):
    java_library(
        javacopts = JAVA_OPTS,
        **kwargs
    )
def protobuf_versioned_java_library(
        automatic_module_name,
        bundle_description,
        bundle_name,
        bundle_symbolic_name,
        bundle_additional_imports = [],
        bundle_additional_exports = [],
        **kwargs):
    """Extends `java_library` to add OSGi headers to the MANIFEST.MF using bndlib.

    This macro should be usable as a drop-in replacement for java_library.
    The additional arguments are given to the bndlib tool to generate an
    OSGi-compliant manifest file. See the
    [bnd documentation](https://bnd.bndtools.org/chapters/110-introduction.html).

    Takes all the args that are standard for a java_library target plus the following.

    Args:
      automatic_module_name: (required) The Automatic-Module-Name header that represents
          the name of the module when this bundle is used as an automatic module.
      bundle_description: (required) The Bundle-Description header defines a short
          description of this bundle.
      bundle_name: (required) The Bundle-Name header defines a readable name for this
          bundle. This should be a short, human-readable name that can contain spaces.
      bundle_symbolic_name: (required) The Bundle-SymbolicName header specifies a
          non-localizable name for this bundle. The bundle symbolic name together with
          a version must identify a unique bundle, though it can be installed multiple
          times in a framework. The bundle symbolic name should be based on the reverse
          domain name convention.
      bundle_additional_exports: The Export-Package header contains a declaration of
          exported packages. These are additional export package statements to be added
          before the default wildcard export "*;version={$Bundle-Version}".
      bundle_additional_imports: The Import-Package header declares the imported
          packages for this bundle. These are additional import package statements to
          be added before the default wildcard import "*".
      **kwargs: Additional keyword arguments that are passed to the internal
          java_library target.
    """
    osgi_java_library(
        javacopts = JAVA_OPTS,
        automatic_module_name = automatic_module_name,
        bundle_doc_url = BUNDLE_DOC_URL,
        bundle_license = BUNDLE_LICENSE,
        bundle_version = PROTOBUF_JAVA_VERSION,
        bundle_description = bundle_description,
        bundle_name = bundle_name,
        bundle_symbolic_name = bundle_symbolic_name,
        bundle_additional_exports = bundle_additional_exports,
        bundle_additional_imports = bundle_additional_imports + ["sun.misc;resolution:=optional"],
        **kwargs
    )
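A hypothetical BUILD usage of the macro (Starlark shares Python syntax; the load label, target name, and bundle values below are illustrative, not taken from this repository):
```python
load("//java:java_opts.bzl", "protobuf_versioned_java_library")  # assumed load label

protobuf_versioned_java_library(
    name = "core",
    srcs = glob(["src/main/java/**/*.java"]),
    automatic_module_name = "com.google.protobuf",
    bundle_description = "Core Protocol Buffers library",
    bundle_name = "Protocol Buffers [Core]",
    bundle_symbolic_name = "com.google.protobuf",
)
```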

@ -1,11 +0,0 @@
--- upbc/bootstrap_compiler.bzl
+++ upbc/bootstrap_compiler.bzl
@@ -20,7 +20,7 @@ _upbc_base = "//upbc:protoc-gen-upb"
# begin:github_only
_is_google3 = False
-_extra_proto_path = "-Iexternal/com_google_protobuf/src "
+_extra_proto_path = "-Isrc "
# end:github_only
def _upbc(stage):

@ -0,0 +1,3 @@
import common.bazelrc
build --cxxopt=-std=c++14 --host_cxxopt=-std=c++14

@ -0,0 +1,17 @@
This directory contains CI-specific tooling.
# Clang wrappers
CMake allows compiler wrappers such as ccache to be injected; the wrapper
intercepts compiler calls and short-circuits on cache hits. This is done by
specifying `CMAKE_C_COMPILER_LAUNCHER` and `CMAKE_CXX_COMPILER_LAUNCHER`
during CMake's configure step. Unfortunately, Xcode doesn't provide anything
like this, so we use basic wrapper scripts that invoke ccache + clang.
# Bazelrc files
To allow platform-specific `.bazelrc` flags during testing, we keep three
per-platform versions here along with a shared `common.bazelrc` that they all
include. Our GHA infrastructure selects the appropriate file for each test and
overwrites the default `.bazelrc` in our workspace, which is intended for
development only.
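As an illustration only (the real selection lives in the GHA workflow definitions, and the file names here are assumed), the per-platform step amounts to:
```python
# Hypothetical sketch: pick the bazelrc matching the host OS and overwrite the
# workspace default; the actual CI uses its own file names and glue.
import platform
import shutil

BAZELRC_BY_OS = {
    "Linux": "ci/Linux.bazelrc",
    "Darwin": "ci/macOS.bazelrc",
    "Windows": "ci/Windows.bazelrc",
}

shutil.copyfile(BAZELRC_BY_OS[platform.system()], ".bazelrc")
```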

@ -0,0 +1,5 @@
import common.bazelrc
# Workaround for maximum path length issues
startup --output_user_root=C:/tmp --windows_enable_symlinks
common --enable_runfiles

@ -0,0 +1,3 @@
#!/bin/sh
# This file wraps clang with ccache to enable faster runs of xcodebuild
ccache clang "$@"

@ -0,0 +1,3 @@
#!/bin/sh
# This file wraps clang with ccache to enable faster runs of xcodebuild
ccache clang++ "$@"

@ -0,0 +1,80 @@
build:dbg --compilation_mode=dbg
build:opt --compilation_mode=opt
build:san-common --config=dbg --strip=never --copt=-O0 --copt=-fno-omit-frame-pointer
build:asan --config=san-common --copt=-fsanitize=address --linkopt=-fsanitize=address
# ASAN hits ODR violations with shared linkage due to rules_proto.
build:asan --dynamic_mode=off
build:msan --config=san-common --copt=-fsanitize=memory --linkopt=-fsanitize=memory
build:msan --copt=-fsanitize-memory-track-origins
build:msan --copt=-fsanitize-memory-use-after-dtor
build:msan --action_env=MSAN_OPTIONS=poison_in_dtor=1
# Use our instrumented LLVM libc++ in Kokoro.
build:docker-msan --config=msan
build:docker-msan --linkopt=-L/opt/libcxx_msan/lib --linkopt=-lc++abi
build:docker-msan --linkopt=-Wl,-rpath,/opt/libcxx_msan/lib
build:docker-msan --cxxopt=-stdlib=libc++ --linkopt=-stdlib=libc++
build:tsan --config=san-common --copt=-fsanitize=thread --linkopt=-fsanitize=thread
build:ubsan --config=san-common --copt=-fsanitize=undefined --linkopt=-fsanitize=undefined
build:ubsan --action_env=UBSAN_OPTIONS=halt_on_error=1:print_stacktrace=1
# Workaround for the fact that Bazel links with $CC, not $CXX
# https://github.com/bazelbuild/bazel/issues/11122#issuecomment-613746748
build:ubsan --copt=-fno-sanitize=function --copt=-fno-sanitize=vptr
# Workaround Bazel 7 remote cache issues.
# See https://github.com/bazelbuild/bazel/issues/20161
build --experimental_remote_cache_eviction_retries=5
build --remote_download_outputs=all
# Build with all --incompatible flags that we can. This helps us prepare for
# upcoming breaking changes in Bazel. This list was generated for Bazel 6 by
# running bazelisk with the --migrate flag and filtering out all flags that
# default to true or are deprecated.
build --incompatible_check_sharding_support
build --incompatible_default_to_explicit_init_py
build --incompatible_disable_native_android_rules
build --incompatible_disable_target_provider_fields
build --incompatible_disallow_empty_glob
build --incompatible_dont_use_javasourceinfoprovider
build --incompatible_enable_android_toolchain_resolution
build --incompatible_enable_apple_toolchain_resolution
build --incompatible_exclusive_test_sandboxed
build --incompatible_remote_output_paths_relative_to_input_root
build --incompatible_remote_use_new_exit_code_for_lost_inputs
build --incompatible_sandbox_hermetic_tmp
build --incompatible_struct_has_no_methods
build --incompatible_top_level_aspects_require_providers
build --incompatible_use_cc_configure_from_rules_cc
build --incompatible_use_host_features
# We cannot yet build successfully with the following flags:
# --incompatible_check_testonly_for_output_files
# --incompatible_config_setting_private_default_visibility
# --incompatible_disable_starlark_host_transitions
# --incompatible_disallow_struct_provider_syntax
# --incompatible_no_implicit_file_export
# --incompatible_no_rule_outputs_param
# --incompatible_stop_exporting_language_modules
# --incompatible_strict_action_env
# --incompatible_visibility_private_attributes_at_definition
# We might be compatible with these flags, but they are not available in all
# Bazel versions we are currently using:
# --incompatible_disable_objc_library_transition
# --incompatible_fail_on_unknown_attributes
# --incompatible_merge_fixed_and_default_shell_env
# TODO: migrate all dependencies from WORKSPACE to MODULE.bazel
# https://github.com/protocolbuffers/protobuf/issues/14313
common --noenable_bzlmod
# Important: this flag ensures that we remain compliant with the C++ layering
# check.
build --features=layering_check

@ -0,0 +1,5 @@
import common.bazelrc
build --cxxopt=-std=c++14 --host_cxxopt=-std=c++14
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1
common --xcode_version_config=@com_google_protobuf//.github:host_xcodes

@ -8,7 +8,7 @@
set -ex
# Cd to the repo root.
cd $(dirname -- "$0")
cd $(dirname -- "$0")/..
previous_commit_title=$(git log -1 --pretty='%s')
@ -41,4 +41,5 @@ fi
git add -A
git diff --staged --quiet || git commit -am "$commit_message"
git push
git pull --rebase
git push --force-with-lease || echo "Conflicting commit hit, retrying in next job..."

@ -1,9 +0,0 @@
cmake_minimum_required(VERSION 3.5)
message(WARNING "Calling of cmake with source directory set to \"cmake\" subdirectory of Protocol Buffers project is deprecated. Top-level directory of Protocol Buffers project should be used instead.")
project(protobuf C CXX)
set(protobuf_DEPRECATED_CMAKE_SUBDIRECTORY_USAGE TRUE)
include(../CMakeLists.txt)

@ -1,9 +1,9 @@
This directory contains *CMake* files that can be used to build protobuf.
You need to have [CMake](http://www.cmake.org) and
[Git](http://git-scm.com) installed on your computer before proceeding. We
currently support CMake 3.5 and newer on both [Windows](#windows-builds) and
[Linux](#linux-builds).
You need to have [CMake](http://www.cmake.org),
[Git](http://git-scm.com), and [Abseil](https://github.com/abseil/abseil-cpp)
installed on your computer before proceeding. We currently support CMake 3.5
and newer on both [Windows](#windows-builds) and [Linux](#linux-builds).
Most of the instructions will be given using CMake's command-line interface, but
the same actions can be performed using appropriate GUI tools.
@ -16,8 +16,10 @@ By default, CMake will use whatever C++ version is the system default. Since
protobuf requires C++14 or newer, sometimes you will need to explicitly override
this. For example, the following:
cmake . -DCMAKE_CXX_STANDARD=14
cmake --build
```
cmake . -DCMAKE_CXX_STANDARD=14
cmake --build .
```
will build protobuf using C++14 (see [CXX_STANDARD](https://cmake.org/cmake/help/latest/prop_tgt/CXX_STANDARD.html#prop_tgt:CXX_STANDARD) for all available options).
@ -67,11 +69,6 @@ You can get the latest stable source packages from the release page:
https://github.com/protocolbuffers/protobuf/releases/latest
For example: if you only need C++, download `protobuf-cpp-[VERSION].tar.gz`; if
you need C++ and Java, download `protobuf-java-[VERSION].tar.gz` (every package
contains C++ source already); if you need C++ and multiple other languages,
download `protobuf-all-[VERSION].tar.gz`.
Or you can use git to clone from protobuf git repository.
C:\Path\to> mkdir src & cd src
@ -119,6 +116,20 @@ Create a temporary *build* folder and change your working directory to it:
cd C:\Path\to\build\protobuf
C:\Path\to\build\protobuf>
During configuration you will also be specifying where CMake should expect to
find your Abseil installation. To do so, first set `-Dprotobuf_ABSL_PROVIDER=package`
and then set `-DCMAKE_PREFIX_PATH` to the path where you installed Abseil.
For example:
```console
C:\Path\to\build\protobuf> cmake -S. -Bcmake-out \
-DCMAKE_INSTALL_PREFIX=/tmp/protobuf \
-DCMAKE_CXX_STANDARD=14 \
-Dprotobuf_ABSL_PROVIDER=package \
-DCMAKE_PREFIX_PATH=/tmp/absl # Path to where I installed Abseil
```
The *Makefile* and *Ninja* generators can build the project in only one
configuration, so you need a separate build folder for each configuration.

@ -39,9 +39,19 @@ set(_protobuf_FIND_ABSL "if(NOT TARGET absl::strings)\n find_package(absl CONFI
if (BUILD_SHARED_LIBS AND MSVC)
# On MSVC Abseil is bundled into a single DLL.
set(protobuf_ABSL_USED_TARGETS abseil_dll)
set(protobuf_ABSL_USED_TEST_TARGETS abseil_test_dll)
# This condition is necessary because, as of abseil 20230125.3, when abseil is
# consumed via add_subdirectory the abseil_dll target is named abseil_dll,
# while if abseil is consumed via find_package the target is called
# absl::abseil_dll. Once https://github.com/abseil/abseil-cpp/pull/1466 is
# merged and released in the minimum version of abseil required by protobuf,
# it will be possible to always link absl::abseil_dll and absl::abseil_test_dll
# and remove this condition.
if(protobuf_ABSL_PROVIDER STREQUAL "package")
set(protobuf_ABSL_USED_TARGETS absl::abseil_dll)
set(protobuf_ABSL_USED_TEST_TARGETS absl::abseil_test_dll)
else()
set(protobuf_ABSL_USED_TARGETS abseil_dll)
set(protobuf_ABSL_USED_TEST_TARGETS abseil_test_dll)
endif()
else()
set(protobuf_ABSL_USED_TARGETS
absl::absl_check
@ -62,13 +72,17 @@ else()
absl::flat_hash_set
absl::function_ref
absl::hash
absl::if_constexpr
absl::layout
absl::log_initialize
absl::log_globals
absl::log_severity
absl::memory
absl::node_hash_map
absl::node_hash_set
absl::optional
absl::random_distributions
absl::random_random
absl::span
absl::status
absl::statusor

@ -1,62 +1,102 @@
if (NOT EXISTS "${protobuf_SOURCE_DIR}/third_party/jsoncpp/CMakeLists.txt")
message(FATAL_ERROR
"Cannot find third_party/jsoncpp directory that's needed to "
"build conformance tests. If you use git, make sure you have cloned "
"submodules:\n"
" git submodule update --init --recursive\n"
"If instead you want to skip them, run cmake with:\n"
" cmake -Dprotobuf_BUILD_CONFORMANCE=OFF\n")
if (protobuf_JSONCPP_PROVIDER STREQUAL "module")
if (NOT EXISTS "${protobuf_SOURCE_DIR}/third_party/jsoncpp/CMakeLists.txt")
message(FATAL_ERROR
"Cannot find third_party/jsoncpp directory that's needed to "
"build conformance tests. If you use git, make sure you have cloned "
"submodules:\n"
" git submodule update --init --recursive\n"
"If instead you want to skip them, run cmake with:\n"
" cmake -Dprotobuf_BUILD_CONFORMANCE=OFF\n")
endif()
elseif(protobuf_JSONCPP_PROVIDER STREQUAL "package")
find_package(jsoncpp REQUIRED)
endif()
file(MAKE_DIRECTORY ${protobuf_BINARY_DIR}/conformance)
add_custom_command(
OUTPUT
${protobuf_SOURCE_DIR}/conformance/conformance.pb.h
${protobuf_SOURCE_DIR}/conformance/conformance.pb.cc
DEPENDS ${protobuf_PROTOC_EXE} ${protobuf_SOURCE_DIR}/conformance/conformance.proto
COMMAND ${protobuf_PROTOC_EXE} ${protobuf_SOURCE_DIR}/conformance/conformance.proto
--proto_path=${protobuf_SOURCE_DIR}/conformance
--cpp_out=${protobuf_SOURCE_DIR}/conformance
${protobuf_BINARY_DIR}/conformance/conformance.pb.h
${protobuf_BINARY_DIR}/conformance/conformance.pb.cc
${protobuf_BINARY_DIR}/conformance/test_protos/test_messages_edition2023.pb.h
${protobuf_BINARY_DIR}/conformance/test_protos/test_messages_edition2023.pb.cc
DEPENDS ${protobuf_PROTOC_EXE}
${protobuf_SOURCE_DIR}/conformance/conformance.proto
${protobuf_SOURCE_DIR}/conformance/test_protos/test_messages_edition2023.proto
COMMAND ${protobuf_PROTOC_EXE}
${protobuf_SOURCE_DIR}/conformance/conformance.proto
${protobuf_SOURCE_DIR}/conformance/test_protos/test_messages_edition2023.proto
--proto_path=${protobuf_SOURCE_DIR}
--cpp_out=${protobuf_BINARY_DIR}
)
add_custom_command(
OUTPUT
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.cc
DEPENDS ${protobuf_PROTOC_EXE} ${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.proto
${protobuf_PROTOC_EXE} ${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.proto
COMMAND ${protobuf_PROTOC_EXE} ${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.proto
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.proto
${protobuf_BINARY_DIR}/editions/golden/test_messages_proto3_editions.pb.h
${protobuf_BINARY_DIR}/editions/golden/test_messages_proto3_editions.pb.cc
${protobuf_BINARY_DIR}/editions/golden/test_messages_proto2_editions.pb.h
${protobuf_BINARY_DIR}/editions/golden/test_messages_proto2_editions.pb.cc
DEPENDS ${protobuf_PROTOC_EXE}
${protobuf_SOURCE_DIR}/editions/golden/test_messages_proto3_editions.proto
${protobuf_SOURCE_DIR}/editions/golden/test_messages_proto2_editions.proto
COMMAND ${protobuf_PROTOC_EXE}
${protobuf_SOURCE_DIR}/editions/golden/test_messages_proto3_editions.proto
${protobuf_SOURCE_DIR}/editions/golden/test_messages_proto2_editions.proto
--proto_path=${protobuf_SOURCE_DIR}
--proto_path=${protobuf_SOURCE_DIR}/src
--cpp_out=${protobuf_SOURCE_DIR}/src
--cpp_out=${protobuf_BINARY_DIR}
)
file(MAKE_DIRECTORY ${protobuf_BINARY_DIR}/src)
add_custom_command(
OUTPUT
${protobuf_BINARY_DIR}/src/google/protobuf/test_messages_proto3.pb.h
${protobuf_BINARY_DIR}/src/google/protobuf/test_messages_proto3.pb.cc
${protobuf_BINARY_DIR}/src/google/protobuf/test_messages_proto2.pb.h
${protobuf_BINARY_DIR}/src/google/protobuf/test_messages_proto2.pb.cc
DEPENDS ${protobuf_PROTOC_EXE}
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.proto
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.proto
COMMAND ${protobuf_PROTOC_EXE}
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.proto
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.proto
--proto_path=${protobuf_SOURCE_DIR}/src
--cpp_out=${protobuf_BINARY_DIR}/src
)
add_library(libconformance_common STATIC
${protobuf_BINARY_DIR}/conformance/conformance.pb.h
${protobuf_BINARY_DIR}/conformance/conformance.pb.cc
${protobuf_BINARY_DIR}/conformance/test_protos/test_messages_edition2023.pb.h
${protobuf_BINARY_DIR}/conformance/test_protos/test_messages_edition2023.pb.cc
${protobuf_BINARY_DIR}/editions/golden/test_messages_proto3_editions.pb.h
${protobuf_BINARY_DIR}/editions/golden/test_messages_proto3_editions.pb.cc
${protobuf_BINARY_DIR}/editions/golden/test_messages_proto2_editions.pb.h
${protobuf_BINARY_DIR}/editions/golden/test_messages_proto2_editions.pb.cc
${protobuf_BINARY_DIR}/src/google/protobuf/test_messages_proto2.pb.h
${protobuf_BINARY_DIR}/src/google/protobuf/test_messages_proto2.pb.cc
${protobuf_BINARY_DIR}/src/google/protobuf/test_messages_proto3.pb.h
${protobuf_BINARY_DIR}/src/google/protobuf/test_messages_proto3.pb.cc
)
target_link_libraries(libconformance_common
${protobuf_LIB_PROTOBUF}
${protobuf_ABSL_USED_TARGETS}
)
add_executable(conformance_test_runner
${protobuf_SOURCE_DIR}/conformance/binary_json_conformance_suite.cc
${protobuf_SOURCE_DIR}/conformance/binary_json_conformance_suite.h
${protobuf_SOURCE_DIR}/conformance/conformance.pb.h
${protobuf_SOURCE_DIR}/conformance/conformance.pb.cc
${protobuf_SOURCE_DIR}/conformance/conformance_test.cc
${protobuf_SOURCE_DIR}/conformance/conformance_test_runner.cc
${protobuf_SOURCE_DIR}/conformance/conformance_test_main.cc
${protobuf_SOURCE_DIR}/conformance/text_format_conformance_suite.cc
${protobuf_SOURCE_DIR}/conformance/text_format_conformance_suite.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.cc
)
add_executable(conformance_cpp
${protobuf_SOURCE_DIR}/conformance/conformance.pb.h
${protobuf_SOURCE_DIR}/conformance/conformance.pb.cc
${protobuf_SOURCE_DIR}/conformance/conformance_cpp.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.cc
)
target_include_directories(
@ -70,20 +110,35 @@ target_include_directories(
target_include_directories(conformance_test_runner PRIVATE ${ABSL_ROOT_DIR})
target_include_directories(conformance_cpp PRIVATE ${ABSL_ROOT_DIR})
target_link_libraries(conformance_test_runner ${protobuf_LIB_PROTOBUF})
target_link_libraries(conformance_test_runner ${protobuf_ABSL_USED_TARGETS})
target_link_libraries(conformance_cpp ${protobuf_LIB_PROTOBUF})
target_link_libraries(conformance_cpp ${protobuf_ABSL_USED_TARGETS})
target_link_libraries(conformance_test_runner
libconformance_common
${protobuf_LIB_PROTOBUF}
${protobuf_ABSL_USED_TARGETS}
)
target_link_libraries(conformance_cpp
libconformance_common
${protobuf_LIB_PROTOBUF}
${protobuf_ABSL_USED_TARGETS}
)
add_test(NAME conformance_cpp_test
COMMAND ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/conformance_test_runner
--failure_list ${protobuf_SOURCE_DIR}/conformance/failure_list_cpp.txt
--text_format_failure_list ${protobuf_SOURCE_DIR}/conformance/text_format_failure_list_cpp.txt
--output_dir ${protobuf_TEST_XML_OUTDIR}
--maximum_edition 2023
${CMAKE_CURRENT_BINARY_DIR}/conformance_cpp
DEPENDS conformance_test_runner conformance_cpp)
set(JSONCPP_WITH_TESTS OFF CACHE BOOL "Disable tests")
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/third_party/jsoncpp third_party/jsoncpp)
target_include_directories(conformance_test_runner PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/third_party/jsoncpp/include)
target_link_libraries(conformance_test_runner jsoncpp_lib)
if(protobuf_JSONCPP_PROVIDER STREQUAL "module")
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/third_party/jsoncpp third_party/jsoncpp)
target_include_directories(conformance_test_runner PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/third_party/jsoncpp/include)
if(BUILD_SHARED_LIBS)
target_link_libraries(conformance_test_runner jsoncpp_lib)
else()
target_link_libraries(conformance_test_runner jsoncpp_static)
endif()
else()
target_link_libraries(conformance_test_runner jsoncpp)
endif()
