commit c19d666e96
2842 changed files with 374249 additions and 155635 deletions
@@ -0,0 +1,8 @@
# Bazel Central Registry

When protobuf is released, we want it to be published to the Bazel Central
Registry automatically: <https://registry.bazel.build>

This folder contains configuration files to automate the publish step. See
<https://github.com/bazel-contrib/publish-to-bcr/blob/main/templates/README.md>
for authoritative documentation about these files.
@@ -0,0 +1,23 @@
{
  "homepage": "https://github.com/protocolbuffers/protobuf",
  "maintainers": [
    {
      "email": "sandyzhang@google.com",
      "github": "zhangskz",
      "name": "Sandy Zhang"
    },
    {
      "email": "mkruskal@google.com",
      "github": "mkruskal-google",
      "name": "Mike Kruskal"
    },
    {
      "email": "gberg@google.com",
      "github": "googleberg",
      "name": "Jerry Berg"
    }
  ],
  "repository": ["github:protocolbuffers/protobuf"],
  "versions": [],
  "yanked_versions": {}
}
@@ -0,0 +1,36 @@
matrix:
  platform: ["debian10", "macos", "ubuntu2004", "windows"]
  bazel: [6.x, 7.x]

tasks:
  verify_targets:
    name: "Verify build targets"
    platform: ${{ platform }}
    bazel: ${{ bazel }}
    build_flags:
      - '--host_cxxopt=-std=c++14'
      - '--cxxopt=-std=c++14'
    build_targets:
      - '@protobuf//:protobuf'
      - '@protobuf//:protobuf_lite'
      - '@protobuf//:protobuf_python'
      - '@protobuf//:protobuf_rust'
      - '@protobuf//:protoc'
      - '@protobuf//:test_messages_proto2_cc_proto'
      - '@protobuf//:test_messages_proto3_cc_proto'

bcr_test_module:
  module_path: "examples"
  matrix:
    platform: ["debian10", "macos", "ubuntu2004", "windows"]
    bazel: [6.x, 7.x]
  tasks:
    run_test_module:
      name: "Run test module"
      platform: ${{ platform }}
      bazel: ${{ bazel }}
      build_flags:
        - '--host_cxxopt=-std=c++14'
        - '--cxxopt=-std=c++14'
      build_targets:
        - "//..."
@@ -0,0 +1,5 @@
{
  "integrity": "**leave this alone**",
  "strip_prefix": "{REPO}-{VERSION}",
  "url": "https://github.com/{OWNER}/{REPO}/releases/download/{TAG}/{REPO}-{VERSION}.zip"
}
@@ -0,0 +1,3 @@
BasedOnStyle: Google
DerivePointerAlignment: false
PointerAlignment: Left
@@ -0,0 +1,54 @@
# This information is extracted from the MacOS runner specs located at:
# https://github.com/actions/runner-images/blob/main/images/macos/macos-12-Readme.md
#
# When updating, also ensure the "xcode_destination" entries in
# `.github/workflows/test_objectivec.yml` are supported for the given versions
# of Xcode.
xcode_version(
    name = "version15_2_15C500b",
    aliases = [
        "15C500b",
        "15.2",
    ],
    default_ios_sdk_version = "17.2",
    default_macos_sdk_version = "14.2",
    default_tvos_sdk_version = "17.2",
    default_watchos_sdk_version = "10.2",
    version = "15.2.0.15C500b",
)

xcode_version(
    name = "version14_2_14C18",
    aliases = [
        "14C18",
        "14.2",
    ],
    default_ios_sdk_version = "16.2",
    default_macos_sdk_version = "13.1",
    default_tvos_sdk_version = "16.1",
    default_watchos_sdk_version = "9.1",
    version = "14.2.0.14C18",
)

xcode_version(
    name = "version14_1_0_14B47b",
    aliases = [
        "14B47b",
        "14.1",
    ],
    default_ios_sdk_version = "16.1",
    default_macos_sdk_version = "13.0",
    default_tvos_sdk_version = "16.1",
    default_watchos_sdk_version = "9.1",
    version = "14.1.0.14B47b",
)

xcode_config(
    name = "host_xcodes",
    default = ":version14_2_14C18",
    versions = [
        ":version15_2_15C500b",
        ":version14_2_14C18",
        ":version14_1_0_14B47b",
    ],
)
@@ -0,0 +1,8 @@
version: 2
updates:
  - package-ecosystem: "github-actions" # Necessary to update action hashes
    directory: "/"
    schedule:
      interval: "weekly"
    # Allow up to 3 opened pull requests for github-actions versions
    open-pull-requests-limit: 3
@@ -1,15 +0,0 @@
mergeable:
  pull_requests:
    label:
      and:
        - must_exclude:
            regex: '^disposition/DO NOT MERGE'
            message: 'Pull request marked not mergeable'
        - must_include:
            regex: 'mergeable:force-allow'
            message: 'Pull requests should not be merged directly and should instead
              be handled by Copybara.

              To enable Github merges, add the `mergeable:force-allow` label and get a second
              approval. This should almost never be used except for releases or as a break glass
              measure after discussing with the team.'
@@ -0,0 +1,216 @@
This directory contains all of our automatically triggered workflows.

# Test runner

Our top level `test_runner.yml` is responsible for kicking off all tests, which
are represented as reusable workflows. This is carefully constructed to satisfy
the design laid out in go/protobuf-gha-protected-resources (see below), and
duplicating it across every workflow file would be difficult to maintain. As an
added bonus, we can manually dispatch our full test suite with a single button
and monitor the progress of all of them simultaneously in GitHub's actions UI.

There are five ways our test suite can be triggered:

- **Post-submit tests** (`push`): These are run over newly submitted code
  that we can assume has been thoroughly reviewed. There are no additional
  security concerns here and these jobs can be given highly privileged access to
  our internal resources and caches.

- **Pre-submit tests from a branch** (`pull_request`): These are run over
  every PR as changes are made. Since they are coming from branches in our
  repository, they have secret access by default and can also be given highly
  privileged access. However, we expect *many* of these events per change,
  and likely many from abandoned/exploratory changes. Given the much higher
  frequency, we restrict the ability to *write* to our more expensive caches.

- **Pre-submit tests from a fork** (`pull_request_target`): These are run
  over every PR from a forked repository as changes are made. These have much
  more restricted access, since they could be coming from anywhere. To protect
  our secret keys and our resources, tests will not run until a commit has been
  labeled `safe to submit`. Further commits will require further approvals to
  run our test suite. Once marked as safe, we will provide read-only access to
  our caches and Docker images, but will generally disallow any writes to shared
  resources.

- **Continuous tests** (`schedule`): These are run on a fixed schedule. We
  currently have them set up to run daily, and they can help identify
  non-hermetic issues in tests that don't get run often (such as due to test
  caching) or during slow periods like weekends and holidays. Similar to
  post-submit tests, these are run over submitted code and are highly
  privileged in the resources they can use.

- **Manual testing** (`workflow_dispatch`): Our test runner can be triggered
  manually over any branch. This is treated similarly to pre-submit tests, but
  can be highly privileged since it can only be triggered by the protobuf team.
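
For orientation, a top-level runner that satisfies this design can declare all
five triggers and fan out to reusable workflows. The sketch below is
illustrative, not the actual `test_runner.yml`; the reusable workflow path and
its `safe-checkout` input mirror the files added later in this change.

```yaml
name: Tests

on:
  push:
    branches: [main]
  pull_request:
  pull_request_target:
  schedule:
    - cron: 0 10 * * *
  workflow_dispatch:

permissions:
  contents: read

jobs:
  bazel:
    # Reusable workflows keep the event/privilege logic in one place.
    uses: ./.github/workflows/test_bazel.yml
    with:
      # For PR events, pin the tests to the (approved) head commit.
      safe-checkout: ${{ github.event.pull_request.head.sha || github.sha }}
    secrets: inherit
```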

# Staleness handling

While Bazel handles code generation seamlessly, we do support build systems that
don't. There are a handful of cases where we need to check in generated files
that can become stale over time. In order to provide a good developer
experience, we've implemented a system to make this more manageable.

- Stale files should have a corresponding `staleness_test` Bazel target. This
  should be marked `manual` to avoid getting picked up in CI, but will fail if
  files become stale. It also provides a `--fix` flag to update the stale files.

- Bazel tests will never depend on the checked-in versions, and will generate
  new ones on-the-fly during build.

- Non-Bazel tests will always regenerate necessary files before starting. This
  is done using our `bash` and `docker` actions, which should be used for any
  non-Bazel tests. This way, no tests will fail due to stale files.

- A post-submit job will immediately regenerate any stale files and commit them
  if they've changed.

- A scheduled job will run late at night every day to make sure the post-submit
  is working as expected (that is, it will run all the staleness tests).

The `regenerate_stale_files.sh` script is the central script responsible for
regenerating all stale files.
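
For illustration, exercising a staleness target from CI or a local checkout
might look like this (the target name is hypothetical):

```yaml
- name: Check that generated files are up to date
  run: |
    # Fails if the checked-in generated files have become stale.
    bazel test //src:example_staleness_test
    # Locally, a developer can instead update the files in place:
    # bazel run //src:example_staleness_test -- --fix
```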

# Forked PRs

Because we need secret access to run our tests, we use the `pull_request_target`
event for PRs coming from forked repositories. We do check out the code from the
PR's head, but the workflow files themselves are always fetched from the *base*
branch (that is, the branch we're merging to). Therefore, any changes to these
files won't be tested, so we explicitly ban PRs that touch these files.
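
A minimal sketch of this pattern, using the stock checkout action rather than
our wrapper:

```yaml
on:
  pull_request_target:

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      # The workflow definition itself comes from the base branch; only the
      # code under test is taken from the (labeled-as-safe) PR head.
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
```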

# Caches

We have a number of different caching strategies to help speed up tests. These
live either in GCP buckets or in our GitHub repository cache. The former has
a lot of resources available and we don't have to worry as much about bloat.
On the other hand, the GitHub repository cache is limited to 10GB, and will
start pruning old caches when it exceeds that threshold. Therefore, we need
to be very careful about the size and quantity of our caches in order to
maximize the gains.

## Bazel remote cache

As described in https://bazel.build/remote/caching, remote caching allows us to
offload a lot of our build steps to a remote server that holds a cache of
previous builds. We use our GCP project for this storage, and configure
*every* Bazel call to use it. This provides substantial performance
improvements at minimal cost.

We do not allow forked PRs to upload updates to our Bazel caches, but they
do use them. Every other event is given read/write access to the caches.
Because Bazel behaves poorly under certain environment changes (such as a
change of toolchain or operating system), we try to use finely-grained caches.
Each job should typically have its own cache to avoid cross-contamination.
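
The flags involved look roughly like this; the bucket name is hypothetical, and
the real configuration lives inside our custom actions:

```yaml
- name: Build with a GCS-backed remote cache
  run: |
    bazel build //... \
      --remote_cache=https://storage.googleapis.com/example-bazel-cache/my-job \
      --google_default_credentials \
      --remote_upload_local_results=false  # read-only, e.g. for forked PRs
```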

## Bazel repository cache

When Bazel starts up, it downloads all the external dependencies for a given
build and stores them in the repository cache. This cache is *separate* from
the remote cache, and only exists locally. Because we have so many Bazel
dependencies, this can be a source of frequent flakes due to network issues.

To avoid this, we keep a cached version of the repository cache in GitHub's
action cache. Our full set of repository dependencies ends up being ~300MB,
which is fairly expensive given our 10GB maximum. The most expensive ones seem
to come from Java, which has some very large downstream dependencies.

Given the cost, we take a more conservative approach for this cache. Only push
events will ever write to this cache, but all events can read from it.
Additionally, we only store three caches for any given commit, one per platform.
This means that multiple jobs are trying to update the same cache, leading to a
race. GitHub rejects all but one of these updates, so we designed the system so
that caches are only updated if they've actually changed. That way, over time
(and multiple pushes) the repository caches will incrementally grow to encompass
all of our dependencies. A scheduled job will run monthly to clear these caches
to prevent unbounded growth as our dependencies evolve.
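
Sketched as steps, with the key scheme matching the clearing workflow later in
this change (details simplified):

```yaml
- uses: actions/cache@v3
  with:
    path: .repository-cache
    key: repository-cache-${{ github.ref_name }}-${{ runner.os }}
- name: Build against the restored repository cache
  run: bazel build //... --repository_cache=.repository-cache
```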

## ccache

In order to speed up non-Bazel builds to be on par with Bazel, we make use of
[ccache](https://ccache.dev/). This intercepts all calls to the compiler, and
caches the result. Subsequent calls with a cache-hit will very quickly
short-circuit and return the already computed result. This has minimal effect
on any *single* job, since we typically only run a single build. However, by
caching the ccache results in GitHub's action cache we can substantially
decrease the build time of subsequent runs.

One useful feature of ccache is that you can set a maximum cache size, and it
will automatically prune older results to keep below that limit. On Linux and
Mac cmake builds, we generally get 30MB caches and set a 100MB cache limit. On
Windows, with debug symbol stripping we get ~70MB and set a 200MB cache limit.

Because CMake builds tend to be our slowest, bottlenecking the entire CI
process, we use a fairly expensive strategy with ccache. All events will cache
their ccache directory, keyed by the commit and the branch. This means that each
PR and each branch will write its own set of caches. When looking up which
cache to use initially, each job will first look for a recent cache in its
current branch. If it can't find one, it will accept a cache from the base
branch (for example, PRs will initially use the latest cache from their target
branch).

While the ccache caches quickly overrun our GitHub action cache, they also
quickly become useless. Since GitHub prunes caches based on the time they were
last used, this just means that we'll see quicker turnover.
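
A plausible shape for this setup, using the 100MB limit mentioned above (key
scheme illustrative):

```yaml
- uses: actions/cache@v3
  with:
    path: .ccache
    key: ccache-${{ github.ref_name }}-${{ github.sha }}
    # On a miss, fall back to this branch, then to the base branch.
    restore-keys: |
      ccache-${{ github.ref_name }}-
      ccache-${{ github.base_ref }}-
- name: Build with ccache
  run: |
    export CCACHE_DIR=$PWD/.ccache
    export CCACHE_MAXSIZE=100M
    cmake . -DCMAKE_CXX_COMPILER_LAUNCHER=ccache
    cmake --build .
```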

## sccache

An alternative to ccache is [sccache](https://github.com/mozilla/sccache). The
two tools are very similar in function, but sccache requires (and allows) much
less configuration and supports GCS storage right out of the box. By hooking
this up to our project that we already use for Bazel caching, we're able to get
even bigger CMake wins in CI because we're no longer constrained by GitHub's
10GB cache limit.

Similar to the Bazel remote cache, we give read access to every CI run, but
disallow writing in PRs from forks.
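
sccache is configured through environment variables rather than a config file;
a sketch with a hypothetical bucket:

```yaml
- name: Point sccache at GCS
  run: |
    echo "SCCACHE_GCS_BUCKET=example-sccache-bucket" >> $GITHUB_ENV
    echo "SCCACHE_GCS_RW_MODE=READ_ONLY" >> $GITHUB_ENV  # READ_WRITE on trusted events
    echo "CMAKE_CXX_COMPILER_LAUNCHER=sccache" >> $GITHUB_ENV
```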

## Bazelisk

Bazelisk will automatically download a pinned version of Bazel on first use.
This can lead to flakes, and to avoid that we cache the result keyed on the
Bazel version. Only push events will write to this cache, but it's unlikely
to change very often.
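
For example, keyed on the pinned version file (paths assume a Linux runner):

```yaml
- uses: actions/cache@v3
  with:
    path: ~/.cache/bazelisk
    key: bazelisk-${{ runner.os }}-${{ hashFiles('.bazelversion') }}
```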

## Docker images

Instead of downloading a fresh Docker image for every test run, we can save it
as a tar and cache it using `docker image save` and later restore it using
`docker image load`. This can decrease download times and also reduce flakes.
Note that Docker's load can actually be significantly slower than a pull in
certain situations. Therefore, we should reserve this strategy for only Docker
images that are causing noticeable flakes.
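
A sketch of the save/load dance around `actions/cache` (image name
hypothetical):

```yaml
- uses: actions/cache@v3
  id: docker-cache
  with:
    path: image.tar
    key: docker-example-test-image-v1
- if: steps.docker-cache.outputs.cache-hit != 'true'
  run: |
    docker pull example.registry/test-image:latest
    docker image save example.registry/test-image:latest -o image.tar
- if: steps.docker-cache.outputs.cache-hit == 'true'
  run: docker image load -i image.tar
```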

## Pip dependencies

The `actions/setup-python` action we use for Python supports automated caching
of pip dependencies. We enable this to avoid having to download these
dependencies on every run, which can lead to flakes.
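
Enabling it is a single input on the action:

```yaml
- uses: actions/setup-python@v4
  with:
    python-version: '3.9'
    cache: pip  # keys the cache on the requirements files
```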

# Custom actions

We've defined a number of custom actions to abstract out shared pieces of our
workflows.

- **Bazel**: use this for running all Bazel tests. It can take either a single
  Bazel command or a more general bash command. In the latter case, it provides
  environment variables for running Bazel with all our standardized settings.

- **Bazel-Docker**: nearly identical to the **Bazel** action, this additionally
  runs everything in a specified Docker image.

- **Bash**: use this for running non-Bazel tests. It takes a bash command and
  runs it verbatim. It also handles the regeneration of stale files (which does
  use Bazel), which non-Bazel tests might depend on.

- **Docker**: nearly identical to the **Bash** action, this additionally runs
  everything in a specified Docker image.

- **ccache**: this sets up a ccache environment, and initializes some
  environment variables for standardized usage of ccache.

- **Cross-compile protoc**: this abstracts out the compilation of protoc using
  our cross-compilation infrastructure. It will set a `PROTOC` environment
  variable that gets automatically picked up by a lot of our infrastructure.
  This is most useful in conjunction with the **Bash** action with non-Bazel
  tests.
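
A typical invocation of the **Bazel** action, as seen throughout the workflows
below (the cache name is per-job):

```yaml
- name: Run tests
  uses: protocolbuffers/protobuf-ci/bazel@v3
  with:
    credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
    bazel-cache: my_job  # illustrative
    bazel: test //src/...
```
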
@@ -0,0 +1,35 @@
name: Clear expensive caches to prevent unbounded growth

on:
  schedule:
    # Run every 4 months at 10 AM UTC (2 AM PDT)
    - cron: 0 10 5 */4 *

  # manual
  workflow_dispatch:

permissions:
  contents: read

jobs:
  bazel-repository-cache:
    strategy:
      fail-fast: false # Don't cancel all jobs if one fails.
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    name: Clear Bazel repository cache ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    permissions:
      actions: write # permission is required to delete caches
      contents: read
    steps:
      - uses: actions/cache@627f0f41f6904a5b1efbaed9f96d9eb58e92e920 # v3.2.4
        with:
          path: ${{ github.workspace }}/${{ steps.output.outputs.repository-cache }}
          key: repository-cache-${{ github.ref_name }}-${{ runner.os }}-reset-${{ github.sha }}

      - name: Create an empty cache with a single file
        run: |
          rm -rf .repository-cache
          mkdir -p .repository-cache
          touch .repository-cache/reset_file
@@ -1,18 +0,0 @@
# GitHub Action to automate the identification of common misspellings in text files.
# https://github.com/codespell-project/actions-codespell
# https://github.com/codespell-project/codespell
name: codespell
on: [push, pull_request]
permissions:
  contents: read # to fetch code (actions/checkout)
jobs:
  codespell:
    name: Check for spelling errors
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: codespell-project/actions-codespell@master
        with:
          check_filenames: true
          skip: ./.git,./third_party,./conformance/third_party,*.snk,*.pb,*.pb.cc,*.pb.h,./src/google/protobuf/testdata,./objectivec/Tests,./python/compatibility_tests/v2.5.0/tests/google/protobuf/internal,./.github/workflows/codespell.yml
          ignore_words_list: "alow,alse,atleast,ba,chec,cleare,copyable,cloneable,dedup,dur,errorprone,falsy,files',fo,fundementals,hel,importd,inout,leapyear,nd,nin,ois,ons,parseable,process',ro,te,testof,ue,unparseable,wasn,wee,gae,keyserver,objext,od,optin,streem,sur,falsy"
@@ -0,0 +1,30 @@
name: Forked PR workflow check

# This workflow prevents modifications to our workflow files in PRs from forked
# repositories. Since tests in these PRs always use the workflows in the
# *target* branch, modifications to these files can't be properly tested.

on:
  # safe presubmit
  pull_request:
    branches:
      - main
      - '[0-9]+.x'
      # The 21.x branch still uses Kokoro
      - '!21.x'
      # For testing purposes so we can stage this on the `gha` branch.
      - gha
    paths:
      - '.github/workflows/**'

permissions:
  contents: read

jobs:
  check:
    name: Check PR source
    runs-on: ubuntu-latest
    steps:
      - run: >
          ${{ github.event.pull_request.head.repo.full_name == 'protocolbuffers/protobuf' }} ||
          (echo "This pull request is from an unsafe fork (${{ github.event.pull_request.head.repo.full_name }}) and isn't allowed to modify workflow files!" && exit 1)
@@ -0,0 +1,78 @@
name: Protobuf Janitor

on:
  schedule:
    # Run daily at 10 AM UTC (2 AM PDT)
    - cron: 0 10 * * *
  workflow_dispatch:

permissions: {}

jobs:
  stale-prs:
    name: Close Stale Copybara PRs
    runs-on: ubuntu-latest
    permissions:
      contents: write # to allow deleting branches
      pull-requests: write # to allow closing the PR
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      GH_REPO: ${{ github.repository }}
    steps:
      - run: |
          set -ex
          STALE_PRS=$(gh pr list --author "app/copybara-service" --limit 500 \
            --json "number" --search "updated:<=$(date --date="-7 day" +%F)" \
            | jq ".[].number")
          for pr in $STALE_PRS; do
            echo "Closing #$pr..."
            gh pr close --comment "Auto-closing Copybara pull request" --delete-branch "$pr"
          done

  stale-others:
    name: Close stale non-copybara PRs and issues
    runs-on: ubuntu-latest
    permissions:
      issues: write # allow the action to comment on, add labels to, and close issues
      pull-requests: write # allow the action to comment on, add labels to, and close PRs
    steps:
      - uses: actions/stale@b69b346013879cedbf50c69f572cd85439a41936
        with:
          stale-issue-message: >
            We triage inactive PRs and issues in order to make it easier to find
            active work. If this issue should remain active or becomes active
            again, please add a comment.


            This issue is labeled `inactive` because the last activity was over
            90 days ago.
          close-issue-message: >
            We triage inactive PRs and issues in order to make it easier to find
            active work. If this issue should remain active or becomes active
            again, please reopen it.


            This issue was closed and archived because there has been no new
            activity in the 14 days since the `inactive` label was added.
          stale-pr-message: >
            We triage inactive PRs and issues in order to make it easier to find
            active work. If this PR should remain active, please add a comment.


            This PR is labeled `inactive` because the last activity was over 90
            days ago. This PR will be closed and archived after 14 additional
            days without activity.
          close-pr-message: >
            We triage inactive PRs and issues in order to make it easier to find
            active work. If this PR should remain active or becomes active
            again, please reopen it.


            This PR was closed and archived because there has been no new
            activity in the 14 days since the `inactive` label was added.
          stale-issue-label: 'inactive'
          stale-pr-label: 'inactive'
          exempt-issue-labels: 'help wanted'
          days-before-stale: 90
          days-before-close: 14
          operations-per-run: 100
@@ -1,40 +0,0 @@
name: 'ObjC CocoaPods'

on:
  push:
    paths:
      - '.github/workflows/objc_cocoapods.yml'
      - 'Protobuf.podspec'
      - 'objectivec/**'
      - '!objectivec/DevTools/**'
      - '!objectivec/ProtocolBuffers_*.xcodeproj/**'
      - '!objectivec/Tests/**'
  pull_request:
    paths:
      - '.github/workflows/objc_cocoapods.yml'
      - 'Protobuf.podspec'
      - 'objectivec/**'
      - '!objectivec/DevTools/**'
      - '!objectivec/ProtocolBuffers_*.xcodeproj/**'
      - '!objectivec/Tests/**'

permissions:
  contents: read # to fetch code (actions/checkout)

jobs:
  pod-lib-lint:
    runs-on: macos-latest
    strategy:
      fail-fast: false
      matrix:
        # Add back 'watchos'. See CocoaPods/CocoaPods#11558
        PLATFORM: ["ios", "macos", "tvos"]
        CONFIGURATION: ["Debug", "Release"]
    steps:
      - uses: actions/checkout@v3
      - name: Pod lib lint
        run: |
          pod lib lint --verbose \
            --configuration=${{ matrix.CONFIGURATION }} \
            --platforms=${{ matrix.PLATFORM }} \
            Protobuf.podspec
@@ -0,0 +1,60 @@
# This workflow uses actions that are not certified by GitHub. They are provided
# by a third-party and are governed by separate terms of service, privacy
# policy, and support documentation.

name: Scorecard supply-chain security
on:
  # For Branch-Protection check. Only the default branch is supported. See
  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
  branch_protection_rule:
  # To guarantee Maintained check is occasionally updated. See
  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
  schedule:
    - cron: '20 5 * * 2'
  push:
    branches: [ "main" ]

# Declare default permissions as read only.
permissions: read-all

jobs:
  analysis:
    name: Scorecard analysis
    runs-on: ubuntu-latest
    permissions:
      security-events: write # to upload the results to code-scanning dashboard
      id-token: write # to publish results and get a badge

    steps:
      - name: "Checkout code"
        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
        with:
          persist-credentials: false

      - name: "Run analysis"
        uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
        with:
          results_file: results.sarif
          results_format: sarif
          # (Optional) "write" PAT token. Uncomment the `repo_token` line below
          # if you want to enable the Branch-Protection check on a *public*
          # repository. To create the PAT, follow the steps in
          # https://github.com/ossf/scorecard-action#authentication-with-fine-grained-pat-optional.
          # repo_token: ${{ secrets.SCORECARD_TOKEN }}

          # Allows the repository to include the Scorecard badge.
          publish_results: true

      # Upload the results as artifacts (optional). Commenting out will disable
      # uploads of run results in SARIF format to the repository Actions tab.
      - name: "Upload artifact"
        uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
        with:
          name: SARIF file
          path: results.sarif
          retention-days: 5

      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
        uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4
        with:
          sarif_file: results.sarif
@@ -0,0 +1,63 @@
name: Staleness tests

on:
  schedule:
    # Run daily at 10 AM UTC (2 AM PDT)
    - cron: 0 10 * * *
  workflow_call:
    inputs:
      safe-checkout:
        required: false
        description: "The SHA key for the commit we want to run over"
        type: string
  workflow_dispatch:

permissions: {}
jobs:
  test:
    strategy:
      fail-fast: false
      matrix:
        branch: [main, 25.x, 27.x]
        os: [{ name: Linux, value: ubuntu-latest }]

    name: Test staleness ${{ matrix.os.name }} ${{ github.head_ref && 'PR' || matrix.branch }}
    runs-on: ${{ matrix.os.value }}
    if: ${{ github.event.repository.full_name == 'protocolbuffers/protobuf' }}
    steps:
      - name: Checkout ${{ github.head_ref && 'PR' || matrix.branch }}
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout || github.head_ref || matrix.branch }}

      - name: Mark runs associated with commits
        if: ${{ github.event_name != 'schedule' && github.event_name != 'workflow_dispatch' }}
        run: echo "COMMIT_TRIGGERED_RUN=1" >> $GITHUB_ENV

      - name: Mark runs from the main branch
        if: ${{ github.base_ref == 'main' || github.ref == 'refs/heads/main' }}
        run: echo "MAIN_RUN=1" >> $GITHUB_ENV

      - name: Run all staleness tests
        # Run all tests if either of the following is true, otherwise simply run the query to make
        # sure it continues to work:
        # 1) If this is not a commit-based run it means it's scheduled or manually dispatched. In
        #    this case we want to make sure there are no stale files.
        # 2) Release branches don't work with automated commits (see b/287117570). Until this is
        #    fixed, we want to run the tests to force manual regeneration when necessary.
        #
        # In branches where automatic updates work as post-submits, we don't want to run staleness
        # tests along with user changes. Any stale files will be automatically fixed in a follow-up
        # commit.
        uses: protocolbuffers/protobuf-ci/bazel@v3
        with:
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          bazel-cache: staleness
          bash: >
            set -ex;
            echo "Please run ./regenerate_stale_files.sh to regenerate stale files";
            if [[ -z $COMMIT_TRIGGERED_RUN || -z $MAIN_RUN ]]; then
              bazel query 'attr(tags, "staleness_test", //...)' | xargs bazel test $BAZEL_FLAGS;
            else
              bazel query 'attr(tags, "staleness_test", //...)';
            fi
@@ -0,0 +1,54 @@
name: Bazel Tests

on:
  workflow_call:
    inputs:
      safe-checkout:
        required: true
        description: "The SHA key for the commit we want to run over"
        type: string

permissions:
  contents: read

jobs:
  examples:
    strategy:
      fail-fast: false
      matrix:
        runner: [ubuntu, windows, macos]
        bazelversion: ['7.1.2']
        bzlmod: [true, false]
        include:
          - runner: ubuntu
            bazelversion: '6.4.0'
            bzlmod: true
          - runner: ubuntu
            bazelversion: '6.4.0'
            bzlmod: false
    runs-on: ${{ matrix.runner }}-latest
    name: Examples ${{ matrix.runner }} ${{ matrix.bazelversion }}${{ matrix.bzlmod && ' (bzlmod)' || '' }}
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}

      - name: Windows startup flags
        if: runner.os == 'Windows'
        working-directory: examples
        shell: bash
        run: echo "startup --output_user_root=C:/ --windows_enable_symlinks" >> .bazelrc

      - name: Configure Bazel version
        working-directory: examples
        shell: bash
        run: echo "${{ matrix.bazelversion }}" > .bazelversion

      - name: Run tests
        uses: protocolbuffers/protobuf-ci/bazel@v3
        with:
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          bazel-cache: examples
          version: ${{ matrix.bazelversion }}
          bash: cd examples && bazel build //... $BAZEL_FLAGS --enable_bzlmod=${{ matrix.bzlmod }}
@@ -0,0 +1,118 @@
name: C# Tests

on:
  workflow_call:
    inputs:
      safe-checkout:
        required: true
        description: "The SHA key for the commit we want to run over"
        type: string

permissions:
  contents: read

jobs:
  linux:
    name: Linux
    runs-on: ubuntu-latest
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}

      # TODO Run this with Bazel once codegen is handled properly.
      - name: Run tests
        uses: protocolbuffers/protobuf-ci/docker@v3
        with:
          image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:3.1.415-6.0.100-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          entrypoint: /bin/bash
          command: >-
            -c "
            cd csharp &&
            dotnet restore src/Google.Protobuf.sln &&
            dotnet build -c Release src/Google.Protobuf.sln &&
            dotnet test -c Release -f net6.0 src/Google.Protobuf.Test/Google.Protobuf.Test.csproj"

      - name: Clear bazel between docker instances
        run: sudo rm -rf _build .repository-cache

      - name: Run conformance tests
        uses: protocolbuffers/protobuf-ci/bazel-docker@v3
        with:
          image: us-docker.pkg.dev/protobuf-build/containers/test/linux/csharp:3.1.415-6.0.100-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          bazel-cache: csharp_linux
          bazel: test //csharp:conformance_test --action_env=DOTNET_CLI_TELEMETRY_OPTOUT=1 --test_env=DOTNET_CLI_HOME=/home/bazel

  windows:
    name: Windows
    runs-on: windows-2019
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}

      - name: Setup dotnet
        uses: actions/setup-dotnet@3447fd6a9f9e57506b15f895c5b76d3b197dc7c2 # v3.2.0
        with:
          dotnet-version: '6.0.x'

      # Workaround for incompatibility between gcloud and windows-2019 runners.
      - name: Install Python
        uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
        with:
          python-version: '3.9'
      - name: Use custom python for gcloud
        run: echo "CLOUDSDK_PYTHON=${Python3_ROOT_DIR}\\python3" >> $GITHUB_ENV
        shell: bash

      - name: Run tests
        uses: protocolbuffers/protobuf-ci/bash@v3
        with:
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          command: |
            dotnet build csharp/src/Google.Protobuf.sln
            dotnet test csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj

  linux-aarch64:
    name: Linux aarch64
    runs-on: ubuntu-latest
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}

      - name: Build protobuf C# tests under x86_64 docker image
        # Tests are built with "dotnet publish" because we want all the dependencies to be copied
        # to the destination directory (we want to avoid references to ~/.nuget that won't be
        # available in the subsequent docker run).
        uses: protocolbuffers/protobuf-ci/docker@v3
        with:
          image: mcr.microsoft.com/dotnet/sdk:6.0.100-bullseye-slim
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          command: >-
            /bin/bash -cex '
            DOTNET_CLI_TELEMETRY_OPTOUT=true
            DOTNET_SKIP_FIRST_TIME_EXPERIENCE=true
            dotnet publish -c Release -f net6.0 /workspace/csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj'

      - name: Use an actual aarch64 docker image to run protobuf C# tests with an emulator
        # "dotnet vstest" allows running tests from a pre-built project.
        # * mount the protobuf root as /work to be able to access the crosscompiled files
        # * to avoid running the process inside docker as root (which can pollute the workspace
        #   with files owned by root), we force running under the current user's UID and GID. To
        #   be able to do that, we need to provide a home directory for the user; otherwise the
        #   UID would be homeless under the docker container and pip install wouldn't work. For
        #   simplicity, we just map the user's home to a throwaway temporary directory.
        uses: protocolbuffers/protobuf-ci/docker@v3
        with:
          image: mcr.microsoft.com/dotnet/sdk:6.0.100-bullseye-slim-arm64v8
          skip-staleness-check: true
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          command: >-
            /bin/bash -cex '
            DOTNET_CLI_TELEMETRY_OPTOUT=true
            DOTNET_SKIP_FIRST_TIME_EXPERIENCE=true
            dotnet vstest /workspace/csharp/src/Google.Protobuf.Test/bin/Release/net6.0/publish/Google.Protobuf.Test.dll'
@@ -0,0 +1,120 @@
name: Java Tests

on:
  workflow_call:
    inputs:
      safe-checkout:
        required: true
        description: "The SHA key for the commit we want to run over"
        type: string

permissions:
  contents: read

jobs:
  linux:
    strategy:
      fail-fast: false
      matrix:
        include:
          - name: OpenJDK 8
            version: '8'
            image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:8-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
            # TODO: b/318555165 - enable the layering check. Currently it does
            # not work correctly with the toolchain in this Docker image.
            targets: //java/... //java/internal:java_version --features=-layering_check
          - name: OpenJDK 11
            version: '11'
            image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:11-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
            targets: //java/... //java/internal:java_version
          - name: OpenJDK 17
            version: '17'
            image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:17-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
            targets: //java/... //java/internal:java_version
          - name: aarch64
            version: 'aarch64'
            image: us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-63dd26c0c7a808d92673a3e52e848189d4ab0f17
            targets: //java/... //src/google/protobuf/compiler:protoc_aarch64_test

    name: Linux ${{ matrix.name }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}
      - name: Run tests
        uses: protocolbuffers/protobuf-ci/bazel-docker@v3
        with:
          image: ${{ matrix.image }}
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          bazel-cache: java_linux/${{ matrix.version }}
          bazel: test ${{ matrix.targets }} --test_env=KOKORO_JAVA_VERSION

  # TODO restore this test (or a better one) when gRPC has rebuilt with 26.x
  # linkage-monitor:
  #   name: Linux Linkage Monitor
  #   runs-on: ubuntu-latest
  #   steps:
  #     - name: Checkout pending changes
  #       uses: protocolbuffers/protobuf-ci/checkout@v3
  #       with:
  #         ref: ${{ inputs.safe-checkout }}
  #     - name: Run Linkage Monitor test
  #       uses: protocolbuffers/protobuf-ci/bazel-docker@v3
  #       with:
  #         image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:8-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
  #         credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
  #         bazel-cache: java_linux/8
  #         # TODO: b/318555165 - enable the layering check. Currently it does
  #         # not work correctly with the toolchain in this Docker image.
  #         bazel: test --test_output=all //java:linkage_monitor --spawn_strategy=standalone --features=-layering_check

  protobuf-bom:
    name: Protobuf Maven BOM
    runs-on: ubuntu-latest
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}
      - name: Build protoc
        id: build-protoc
        uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
        with:
          image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          architecture: linux-x86_64
      - name: Move protoc into place and clean up
        run: |
          mv ${{ steps.build-protoc.outputs.protoc }} protoc
          sudo rm -rf _build
      - name: Install snapshot version locally (not using generated pom.xml)
        run: |
          mvn -e -B -Dhttps.protocols=TLSv1.2 install -Dmaven.test.skip=true
        working-directory: java
      - name: Generate pom.xml files from the template
        uses: protocolbuffers/protobuf-ci/bazel-docker@v3
        with:
          image: us-docker.pkg.dev/protobuf-build/containers/test/linux/java:11-1fdbb997433cb22c1e49ef75ad374a8d6bb88702
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          bazel-cache: java_linux/11
          # protobuf-java and protobuf-java-util are the members of the BOM.
          bash: |
            bazel build //java/core:core_mvn-pom //java/util:util_mvn-pom
            cp bazel-bin/java/core/core_mvn-pom.xml .
            cp bazel-bin/java/util/util_mvn-pom.xml .
      - name: Copy the generated pom.xml files to the local Maven repository
        shell: bash
        run: |
          LOCAL_MAVEN_GROUP_DIR="${HOME}/.m2/repository/com/google/protobuf"
          VERSION=$(grep "<version>" core_mvn-pom.xml | sed "s/<version>\(.*\)<\/version>/\1/" | xargs)
          cp core_mvn-pom.xml ${LOCAL_MAVEN_GROUP_DIR}/protobuf-java/${VERSION}/protobuf-java-${VERSION}.pom
          cp util_mvn-pom.xml ${LOCAL_MAVEN_GROUP_DIR}/protobuf-java-util/${VERSION}/protobuf-java-util-${VERSION}.pom
      - name: Clean up
        run: |
          sudo rm -rf _build
      - name: Validate Protobuf BOM
        uses: googleapis/java-cloud-bom/tests/validate-bom@fd56f04bb0bc581776a74031591f0b3bc5e7920a # v26.13.0
        with:
          bom-path: java/bom/pom.xml
@@ -0,0 +1,136 @@
name: Objective-C Tests

on:
  workflow_call:
    inputs:
      safe-checkout:
        required: true
        description: "The SHA key for the commit we want to run over"
        type: string

permissions:
  contents: read

jobs:
  xcode:
    strategy:
      fail-fast: false # Don't cancel all jobs if one fails.
      matrix:
        platform: ["macOS", "iOS"]
        xc_config: ["Debug", "Release"]
        # The "destination" entries need to match what is available for the selected Xcode.
        # See `.github/BUILD.bazel` for the Xcode info.
        include:
          - platform: "macOS"
            destination: "platform=macOS"
            xc_project: "ProtocolBuffers_OSX.xcodeproj"
          - platform: "iOS"
            destination: "platform=iOS Simulator,name=iPhone 13,OS=latest"
            xc_project: "ProtocolBuffers_iOS.xcodeproj"

    name: Xcode ${{ matrix.platform }} ${{ matrix.xc_config }}
    runs-on: macos-12
    env:
      DEVELOPER_DIR: /Applications/Xcode_14.1.app/Contents/Developer
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}

      - name: Setup ccache
        uses: protocolbuffers/protobuf-ci/ccache@v3
        with:
          cache-prefix: objectivec_${{ matrix.platform }}_${{ matrix.xc_config }}
          support-modules: true

      - name: Run tests
        uses: protocolbuffers/protobuf-ci/bash@v3
        env:
          CC: ${{ github.workspace }}/ci/clang_wrapper
          CXX: ${{ github.workspace }}/ci/clang_wrapper++
        with:
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          command: |
            xcodebuild \
              -project "objectivec/${{ matrix.xc_project }}" \
              -scheme ProtocolBuffers \
              -configuration ${{ matrix.xc_config }} \
              -destination "${{ matrix.destination }}" \
              test \
              | xcpretty

      - name: Report ccache stats
        shell: bash
        run: ccache -s -v

  pod-lib-lint:
    strategy:
      fail-fast: false # Don't cancel all jobs if one fails.
      matrix:
        PLATFORM: ["ios", "macos", "tvos", "watchos", "visionos"]
        CONFIGURATION: ["Debug", "Release"]
        include:
          - OS: macos-12
            XCODE: "14.1"
          - OS: macos-14
            PLATFORM: "visionos"
            XCODE: "15.2"
    name: CocoaPods ${{ matrix.PLATFORM }} ${{ matrix.CONFIGURATION }}
    runs-on: ${{ matrix.OS }}
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}
      - name: Xcode version
        run: sudo xcode-select -switch /Applications/Xcode_${{ matrix.XCODE }}.app
      - name: Pod lib lint
        uses: protocolbuffers/protobuf-ci/bazel@v3
        with:
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          bazel-cache: cocoapods/${{ matrix.XCODE }}
          bash: |
            ./regenerate_stale_files.sh $BAZEL_FLAGS --xcode_version="${{ matrix.XCODE }}"
            pod lib lint --verbose \
              --configuration=${{ matrix.CONFIGURATION }} \
              --platforms=${{ matrix.PLATFORM }} \
              Protobuf.podspec

  bazel:
    strategy:
      fail-fast: false # Don't cancel all jobs if one fails.
      matrix:
        config:
          - name: Optimized
            flags: --config=opt
            bazel_action: test
          - name: Debug
            flags: --config=dbg
            bazel_action: test
          # Current github runners are all Intel based, so just build/compile
          # for Apple Silicon to detect issues there.
          - name: Apple_Silicon_Optimized
            flags: --config=opt --cpu=darwin_arm64
            bazel_action: build
          - name: Apple_Silicon_Debug
            flags: --config=dbg --cpu=darwin_arm64
            bazel_action: build
        # TODO: Could add iOS to at least build the objc_library targets for that.
        platform: ["macOS"]
        include:
          - platform: "macOS"
            bazel_targets: //objectivec/...
    name: Bazel ${{ matrix.platform }} ${{ matrix.config.name }}
    runs-on: macos-12
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}
      - name: bazel ${{ matrix.config.bazel_action }}
        uses: protocolbuffers/protobuf-ci/bazel@v3
        with:
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          bazel: ${{ matrix.config.bazel_action }} ${{ matrix.config.flags }} ${{ matrix.bazel_targets }}
          bazel-cache: objc_${{ matrix.platform }}_${{ matrix.config.name }}
@@ -0,0 +1,205 @@
name: PHP Tests

on:
  workflow_call:
    inputs:
      safe-checkout:
        required: true
        description: "The SHA key for the commit we want to run over"
        type: string

permissions:
  contents: read

jobs:
  linux:
    strategy:
      fail-fast: false # Don't cancel all jobs if one fails.
      matrix:
        include:
          - name: 8.1 Optimized
            version: "8.1.14"
            version-short: "8.1"
            command: composer test \&\& composer test_c
          - name: 8.1 Debug
            version: 8.1.14-dbg
            version-short: "8.1"
            command: composer test \&\& composer test_c
          - name: 8.1 Memory Leak
            version: 8.1.14-dbg
            version-short: "8.1"
            # Run specialized memory leak & multirequest tests.
            command: composer test_c \&\& tests/multirequest.sh \&\& tests/memory_leak_test.sh
          - name: 8.1 Valgrind
            version: 8.1.14-dbg
            version-short: "8.1"
            command: composer test_valgrind
          - name: 8.3 Optimized
            version: "8.3.1"
            version-short: "8.3"
            command: composer test \&\& composer test_c

    name: Linux ${{ matrix.name }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}
      - name: Setup composer
        uses: protocolbuffers/protobuf-ci/composer-setup@v3
        with:
          cache-prefix: php-${{ matrix.version-short }}
          directory: php
      - name: Run tests
        uses: protocolbuffers/protobuf-ci/docker@v3
        with:
          image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php:${{ matrix.version }}-66964dc8b07b6d1fc73a5cc14e59e84c1c534cea
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          extra-flags: -e COMPOSER_HOME=/workspace/composer-cache
          command: ${{ matrix.command }}

  linux-32bit:
    strategy:
      fail-fast: false # Don't cancel all jobs if one fails.
      matrix:
        version: ['8.1']
        suffix: ['', '-zts']
        test: ['test', 'test_c']
        exclude:
          - suffix: '-zts'
            test: 'test'
        include:
          - suffix: '-zts'
            suffix_name: ' Thread Safe'
          - test: 'test_c'
            test_name: ' Extension'

    name: Linux 32-bit ${{ matrix.version }}${{ matrix.suffix_name }}${{ matrix.test_name }}
    runs-on: ubuntu-latest
    env:
      image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:836f2cedcfe351d9a30055076630408e61994fc7d783e8333a99570968990eeb
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}

      - name: Cross compile protoc for i386
        id: cross-compile
        uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
        with:
          image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          architecture: linux-i386

      - name: Setup composer
        uses: protocolbuffers/protobuf-ci/composer-setup@v3
        with:
          cache-prefix: php-${{ matrix.version }}
          directory: php

      - name: Run tests
        uses: protocolbuffers/protobuf-ci/docker@v3
        with:
          image: ${{ env.image }}
          platform: linux/386
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          extra-flags: -e COMPOSER_HOME=/workspace/composer-cache -e PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }}
          command: >-
            /bin/bash -cex '
            PATH="/usr/local/php-${{ matrix.version }}${{ matrix.suffix }}/bin:$PATH";
            cd php && php -v && php -m;
            composer update --ignore-platform-reqs;
            composer ${{ matrix.test }}'

  linux-aarch64:
    name: Linux aarch64
    runs-on: ubuntu-latest
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}

      - name: Cross compile protoc for aarch64
        id: cross-compile
        uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3
        with:
          image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          architecture: linux-aarch64

      - name: Setup composer
        uses: protocolbuffers/protobuf-ci/composer-setup@v3
        with:
          cache-prefix: php-8.1
          directory: php

      - name: Run tests
        uses: protocolbuffers/protobuf-ci/docker@v3
        with:
          image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php-aarch64@sha256:77ff9fdec867bbfb290ee0b10d8b7a3e5e434155daa5ec93de7341c7592b858d
          platform: linux/arm64
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          extra-flags: -e COMPOSER_HOME=/workspace/composer-cache -e PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }}
          command: >-
            -c '
            cd php;
            composer update --ignore-platform-reqs;
            composer test;
            composer test_c'

  macos:
    strategy:
      fail-fast: false # Don't cancel all jobs if one fails.
      matrix:
        version: ['8.2', '8.3']

    name: MacOS PHP ${{ matrix.version }}
    runs-on: macos-12
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}

      - name: Uninstall problematic libgd
        run: brew uninstall --ignore-dependencies gd

      - name: Install dependencies
        run: brew install coreutils gd

      - name: Pin PHP version
        uses: shivammathur/setup-php@8872c784b04a1420e81191df5d64fbd59d3d3033 # 2.30.2
        with:
          php-version: ${{ matrix.version }}

      - name: Check PHP version
        run: php --version | grep ${{ matrix.version }} || (echo "Invalid PHP version - $(php --version)" && exit 1)

      - name: Setup composer
        uses: protocolbuffers/protobuf-ci/composer-setup@v3
        with:
          cache-prefix: php-${{ matrix.version }}
          directory: php

      - name: Run tests
        uses: protocolbuffers/protobuf-ci/bash@v3
        with:
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          command: |
            pushd php
            php -v
            php -m
            composer update
            composer test_c
            popd

      - name: Run conformance tests
        uses: protocolbuffers/protobuf-ci/bazel@v3
        with:
          credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
          bazel-cache: php_macos/${{ matrix.version }}
          bazel: test //php:conformance_test_c --action_env=PATH --test_env=PATH
@@ -1,57 +0,0 @@
name: PHP Extension Tests

on:
  workflow_call:
    inputs:
      safe-checkout:
        required: true
        description: "The SHA key for the commit we want to run over"
        type: string

permissions:
  contents: read # to fetch code (actions/checkout)

jobs:
  build-php:
    name: Build
    runs-on: ubuntu-latest
    container: ${{ matrix.php-image }}
    strategy:
      matrix:
        php-image:
          - php:7.4-cli
          - php:8.1-cli
    # TODO(b/266868629) Dockerize these instead of installing all the
    # dependencies on each run.
    steps:
      - name: Install python3
        run: |
          apt-get update -q
          apt-get install -qy python3
      - name: Install bazel
        run: |
          apt-get install -qy wget
          mkdir $HOME/bin
          wget -O $HOME/bin/bazel https://github.com/bazelbuild/bazel/releases/download/5.3.2/bazel-5.3.2-linux-x86_64
          chmod a+x $HOME/bin/bazel
      - name: Install git
        run: |
          apt-get install -qy --no-install-recommends git
      - name: Checkout
        uses: actions/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}
          submodules: recursive
      - name: Create package
        run: |
          cd $GITHUB_WORKSPACE
          rm -rf bazel-bin/php/protobuf-*.tgz
          $HOME/bin/bazel build php:release
      - name: Compile extension
        run: |
          cd /tmp
          MAKE="make -j$(nproc)" pecl install $GITHUB_WORKSPACE/bazel-bin/php/protobuf-*.tgz
      - name: Enable extension
        run: docker-php-ext-enable protobuf
      - name: Inspect extension
        run: php --ri protobuf
@ -0,0 +1,188 @@ |
||||
name: Ruby Tests |
||||
|
||||
on: |
||||
workflow_call: |
||||
inputs: |
||||
safe-checkout: |
||||
required: true |
||||
description: "The SHA key for the commit we want to run over" |
||||
type: string |
||||
|
||||
permissions: |
||||
contents: read |
||||
|
||||
jobs: |
||||
linux: |
||||
strategy: |
||||
fail-fast: false |
||||
matrix: |
||||
include: |
||||
# Test both FFI and Native implementations on the highest and lowest |
||||
# Ruby versions for CRuby and JRuby, but only on Bazel 5.x. |
||||
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: NATIVE } |
||||
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: FFI } |
||||
- { name: Ruby 3.1, ruby: ruby-3.1.0 } |
||||
- { name: Ruby 3.2, ruby: ruby-3.2.0 } |
||||
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: NATIVE } |
||||
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: FFI } |
||||
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: NATIVE } |
||||
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: FFI } |
||||
|
||||
name: Linux ${{ matrix.name }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }} |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
- name: Run tests |
||||
uses: protocolbuffers/protobuf-ci/bazel-docker@v3 |
||||
with: |
||||
image: ${{ matrix.image || format('us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:{0}-6.3.0-9848710ff1370795ee7517570a20b81e140112ec', matrix.ruby) }} |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
bazel-cache: ruby_linux/${{ matrix.ruby }}_${{ matrix.bazel }} |
||||
bazel: test //ruby/... //ruby/tests:ruby_version --test_env=KOKORO_RUBY_VERSION --test_env=BAZEL=true ${{ matrix.ffi == 'FFI' && '--//ruby:ffi=enabled --test_env=PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI' || '' }} |
||||
- name: Archive log artifacts |
||||
uses: actions/upload-artifact@v4 |
||||
with: |
||||
name: test-logs-${{ matrix.ruby }}_${{ matrix.ffi || 'NATIVE' }} |
||||
path: logs |
||||
|
||||
linux-32bit: |
||||
name: Linux 32-bit |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
|
||||
- name: Cross compile protoc for i386 |
||||
id: cross-compile |
||||
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3 |
||||
with: |
||||
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
architecture: linux-i386 |
||||
|
||||
- name: Run tests |
||||
uses: protocolbuffers/protobuf-ci/docker@v3 |
||||
with: |
||||
image: i386/ruby:3.0.2-buster |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
command: >- |
||||
/bin/bash -cex ' |
||||
gem install bundler -v 2.5.6; |
||||
cd /workspace/ruby; |
||||
bundle; |
||||
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake; |
||||
rake clobber_package gem; |
||||
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake test' |
||||
|
||||
linux-aarch64: |
||||
name: Linux aarch64 |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
|
||||
- name: Cross compile protoc for aarch64 |
||||
id: cross-compile |
||||
uses: protocolbuffers/protobuf-ci/cross-compile-protoc@v3 |
||||
with: |
||||
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
architecture: linux-aarch64 |
||||
|
||||
- name: Run tests |
||||
uses: protocolbuffers/protobuf-ci/docker@v3 |
||||
with: |
||||
image: arm64v8/ruby:3.0.2-buster |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
command: >- |
||||
/bin/bash -cex ' |
||||
gem install bundler -v 2.5.6; |
||||
cd /workspace/ruby; |
||||
bundle; |
||||
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake; |
||||
rake clobber_package gem; |
||||
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake test' |
||||
|
||||
macos: |
||||
strategy: |
||||
fail-fast: false # Don't cancel all jobs if one fails. |
||||
matrix: |
||||
include: |
||||
# Test both FFI and Native implementations on the highest and lowest |
||||
# Ruby versions for CRuby, but only on Bazel 5.x. |
||||
# Quote version numbers, otherwise 3.0 will render as 3 |
||||
- { version: "3.0", ffi: NATIVE } |
||||
- { version: "3.0", ffi: FFI } |
||||
- { version: "3.1" } |
||||
- { version: "3.2" } |
||||
- { version: "3.3", ffi: NATIVE } |
||||
- { version: "3.3", ffi: FFI } |
||||
|
||||
name: macOS Ruby ${{ matrix.version }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }} |
||||
runs-on: macos-12 |
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
|
||||
- name: Pin Ruby version |
||||
uses: ruby/setup-ruby@961f85197f92e4842e3cb92a4f97bd8e010cdbaf # v1.165.0 |
||||
with: |
||||
ruby-version: ${{ matrix.version }} |
||||
|
||||
- name: Validate version |
||||
run: ruby --version | grep ${{ matrix.version }} || (echo "Invalid Ruby version - $(ruby --version)" && exit 1) |
||||
|
||||
- name: Run tests |
||||
uses: protocolbuffers/protobuf-ci/bazel@v3 |
||||
with: |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
bazel-cache: ruby_macos/${{ matrix.version }} |
||||
bazel: test //ruby/... --test_env=KOKORO_RUBY_VERSION=${{ matrix.version }} --test_env=BAZEL=true ${{ matrix.ffi == 'FFI' && '--//ruby:ffi=enabled --test_env=PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI' || '' }} |
||||
|
||||
test_ruby_gems: |
||||
strategy: |
||||
fail-fast: false |
||||
matrix: |
||||
include: |
||||
# Test both FFI and Native implementations on the highest and lowest |
||||
# Ruby versions for CRuby and JRuby, but only on Bazel 5.x. |
||||
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: NATIVE} |
||||
- { name: Ruby 3.0, ruby: ruby-3.0.2, ffi: FFI} |
||||
- { name: Ruby 3.1, ruby: ruby-3.1.0} |
||||
- { name: Ruby 3.2, ruby: ruby-3.2.0} |
||||
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: NATIVE } |
||||
- { name: Ruby 3.3, ruby: ruby-3.3.0, ffi: FFI } |
||||
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: NATIVE } |
||||
- { name: JRuby 9.4, ruby: jruby-9.4.6.0, ffi: FFI } |
||||
name: Install ${{ matrix.name }}${{ matrix.ffi == 'FFI' && ' FFI' || '' }} |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
- name: Run tests |
||||
uses: protocolbuffers/protobuf-ci/bazel-docker@v3 |
||||
with: |
||||
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/ruby:${{ matrix.ruby }}-6.3.0-9848710ff1370795ee7517570a20b81e140112ec |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
bazel-cache: ruby_install/${{ matrix.ruby }}_${{ matrix.bazel }} |
||||
bash: > |
||||
bazel --version; |
||||
ruby --version; |
||||
./regenerate_stale_files.sh $BAZEL_FLAGS; |
||||
bazel build //ruby:release //:protoc ${{ matrix.ffi == 'FFI' && '--//ruby:ffi=enabled' || '' }} $BAZEL_FLAGS; |
||||
gem install bazel-bin/ruby/google-protobuf-*; |
||||
bazel-bin/protoc --proto_path=src --proto_path=ruby/tests --proto_path=ruby --ruby_out=ruby tests/test_import_proto2.proto; |
||||
bazel-bin/protoc --proto_path=src --proto_path=ruby/tests --proto_path=ruby --ruby_out=ruby tests/basic_test.proto; |
||||
${{ matrix.ffi == 'FFI' && 'PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI' || '' }} ruby ruby/tests/basic.rb; |
||||
${{ matrix.ffi == 'FFI' && 'PROTOCOL_BUFFERS_RUBY_IMPLEMENTATION=FFI' || '' }} ruby ruby/tests/implementation.rb |
@ -0,0 +1,32 @@ |
||||
name: Rust Tests |
||||
|
||||
on: |
||||
workflow_call: |
||||
inputs: |
||||
safe-checkout: |
||||
required: true |
||||
description: "The SHA key for the commit we want to run over" |
||||
type: string |
||||
|
||||
permissions: |
||||
contents: read |
||||
|
||||
jobs: |
||||
linux: |
||||
name: Linux |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
- name: Run tests |
||||
uses: protocolbuffers/protobuf-ci/bazel-docker@v3 |
||||
with: |
||||
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.1.1-97f82260fd504923d8af642d567afb2d83a1959d" |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
bazel-cache: rust_linux |
||||
bazel: >- |
||||
test //rust:protobuf_upb_test //rust:protobuf_cpp_test |
||||
//rust/test/rust_proto_library_unit_test:rust_upb_aspect_test |
||||
//src/google/protobuf/compiler/rust/... |
@ -0,0 +1,283 @@ |
||||
name: μpb Tests |
||||
|
||||
on: |
||||
workflow_call: |
||||
inputs: |
||||
safe-checkout: |
||||
required: true |
||||
description: "The SHA key for the commit we want to run over" |
||||
type: string |
||||
|
||||
permissions: |
||||
contents: read |
||||
|
||||
jobs: |
||||
linux-clang: |
||||
strategy: |
||||
fail-fast: false # Don't cancel all jobs if one fails. |
||||
matrix: |
||||
config: |
||||
- { name: "Bazel 7", bazel_version: "7.1.1" } |
||||
- { name: "Fastbuild" } |
||||
- { name: "Optimized", flags: "-c opt" } |
||||
- { name: "ASAN", flags: "--config=asan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/...", runner: ubuntu-20-large } |
||||
- { name: "UBSAN", flags: "--config=ubsan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/... -//lua/..." } |
||||
- { name: "32-bit", flags: "--copt=-m32 --linkopt=-m32", exclude-targets: "-//benchmarks:benchmark -//python/..." } |
||||
# TODO: Add 32-bit ASAN test |
||||
# TODO: Restore the FastTable tests |
||||
|
||||
name: ${{ matrix.config.name }} |
||||
runs-on: ${{ matrix.config.runner || 'ubuntu-latest' }} |
||||
|
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
- name: Run tests |
||||
uses: protocolbuffers/protobuf-ci/bazel-docker@v3 |
||||
with: |
||||
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize:${{ matrix.config.bazel_version || '6.3.0' }}-75f2a85ece6526cc3d54087018c0f1097d78d42b |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
bazel-cache: upb-bazel |
||||
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //bazel/... //benchmarks/... //lua/... //protos/... //hpb_generator/... //python/... //upb/... //upb_generator/... ${{ matrix.config.flags }} |
||||
exclude-targets: ${{ matrix.config.exclude-targets }} |
||||
|
||||
linux-gcc: |
||||
strategy: |
||||
fail-fast: false # Don't cancel all jobs if one fails. |
||||
name: GCC Optimized |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
- name: Run tests |
||||
uses: protocolbuffers/protobuf-ci/bazel-docker@v3 |
||||
with: |
||||
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:12.2-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17" |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
bazel-cache: "upb-bazel-gcc" |
||||
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 -c opt //bazel/... //benchmarks/... //lua/... //protos/... //hpb_generator/... //python/... //upb/... //upb_generator/... |
||||
|
||||
windows: |
||||
strategy: |
||||
fail-fast: false # Don't cancel all jobs if one fails. |
||||
name: Windows |
||||
runs-on: windows-2022 |
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 |
||||
with: |
||||
cache: pip |
||||
cache-dependency-path: 'python/requirements.txt' |
||||
- name: Run tests |
||||
uses: protocolbuffers/protobuf-ci/bazel@v3 |
||||
with: |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
bazel-cache: "upb-bazel-windows" |
||||
bazel: test --cxxopt=/std:c++17 --host_cxxopt=/std:c++17 //upb/... //upb_generator/... //python/... //protos/... //hpb_generator/... |
||||
version: 6.3.0 |
||||
exclude-targets: -//python:conformance_test -//upb/reflection:def_builder_test |
||||
|
||||
macos: |
||||
strategy: |
||||
fail-fast: false # Don't cancel all jobs if one fails. |
||||
matrix: |
||||
config: |
||||
- { name: "macOS", bazel-command: "test" } |
||||
- { name: "macOS ARM (build only)", bazel-command: "build", flags: "--cpu=darwin_arm64" } |
||||
name: ${{ matrix.config.name }} |
||||
runs-on: macos-12 |
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 |
||||
with: |
||||
cache: pip |
||||
cache-dependency-path: 'python/requirements.txt' |
||||
- name: Run tests |
||||
uses: protocolbuffers/protobuf-ci/bazel@v3 |
||||
with: |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
bazel-cache: "upb-bazel-macos" |
||||
bazel: ${{ matrix.config.bazel-command }} --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 ${{ matrix.config.flags }} //bazel/... //benchmarks/... //lua/... //protos/... //hpb_generator/... //python/... //upb/... //upb_generator/... |
||||
version: 6.3.0 |
||||
|
||||
no-python: |
||||
strategy: |
||||
fail-fast: false # Don't cancel all jobs if one fails. |
||||
name: No System Python |
||||
runs-on: ubuntu-latest |
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
- name: Run tests |
||||
uses: protocolbuffers/protobuf-ci/bazel-docker@v3 |
||||
with: |
||||
image: us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:6.3.0-91a0ac83e968068672bc6001a4d474cfd9a50f1d |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
bazel-cache: "upb-bazel-no-python" |
||||
bash: >- |
||||
which python3 && |
||||
mv `which python3` /tmp && |
||||
! which python3 && |
||||
bazel test $BAZEL_FLAGS --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //python/... -- -//python/dist:source_wheel |
||||
|
||||
build_wheels: |
||||
name: Build Wheels |
||||
runs-on: ubuntu-latest |
||||
if: ${{ github.event_name != 'pull_request_target' }} |
||||
steps: |
||||
- name: Checkout pending changes |
||||
uses: protocolbuffers/protobuf-ci/checkout@v3 |
||||
with: |
||||
ref: ${{ inputs.safe-checkout }} |
||||
- name: Build Wheels |
||||
uses: protocolbuffers/protobuf-ci/bazel-docker@v3 |
||||
with: |
||||
image: us-docker.pkg.dev/protobuf-build/release-containers/linux/apple:6.3.0-53225851b051e66f8543e972c143f35be757a181 |
||||
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }} |
||||
bazel-cache: upb-bazel-python |
||||
bazel: build --crosstool_top=//toolchain:clang_suite --//toolchain:release=true --symlink_prefix=/ -c dbg //python/dist //python/dist:test_wheel //python/dist:source_wheel |
||||
- name: Move Wheels |
||||
run: mkdir wheels && find _build/out \( -name 'protobuf*.whl' -o -name 'protobuf-*.tar.gz' \) -exec mv '{}' wheels ';' |
||||
- uses: actions/upload-artifact@v3 |
||||
with: |
||||
name: python-wheels |
||||
path: wheels/ |
||||
- uses: actions/upload-artifact@v3 |
||||
with: |
||||
name: requirements |
||||
# Tests shouldn't have access to the whole upb repo, so upload the one file we need |
||||
path: python/requirements.txt |
||||
|
||||
test_wheels: |
||||
name: Test Wheels |
||||
needs: build_wheels |
||||
strategy: |
||||
fail-fast: false # Don't cancel all jobs if one fails. |
||||
matrix: |
||||
include: |
||||
# Linux and Mac use the limited API, so all Python versions will use |
||||
# a single wheel. As a result we can just test the oldest and newest |
||||
# supported Python versions and assume this gives us sufficient test |
||||
# coverage. |
||||
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'binary' } |
||||
- { os: macos-11, python-version: "3.8", architecture: x64, type: 'binary' } |
||||
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'binary' } |
||||
- { os: macos-12, python-version: "3.12", architecture: x64, type: 'binary' } |
||||
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'source' } |
||||
- { os: macos-11, python-version: "3.8", architecture: x64, type: 'source' } |
||||
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'source' } |
||||
- { os: macos-12, python-version: "3.12", architecture: x64, type: 'source' } |
||||
|
||||
# Windows uses the full API up until Python 3.10. |
||||
- { os: windows-2019, python-version: "3.8", architecture: x86, type: 'binary' } |
||||
- { os: windows-2019, python-version: "3.9", architecture: x86, type: 'binary' } |
||||
- { os: windows-2019, python-version: "3.10", architecture: x86, type: 'binary' } |
||||
- { os: windows-2019, python-version: "3.11", architecture: x86, type: 'binary' } |
||||
- { os: windows-2019, python-version: "3.12", architecture: x86, type: 'binary' } |
||||
- { os: windows-2019, python-version: "3.8", architecture: x64, type: 'binary' } |
||||
- { os: windows-2019, python-version: "3.9", architecture: x64, type: 'binary' } |
||||
- { os: windows-2019, python-version: "3.10", architecture: x64, type: 'binary' } |
||||
- { os: windows-2019, python-version: "3.11", architecture: x64, type: 'binary' } |
||||
- { os: windows-2019, python-version: "3.12", architecture: x64, type: 'binary' } |
||||
runs-on: ${{ matrix.os }} |
||||
if: ${{ github.event_name != 'pull_request_target' }} |
||||
defaults: |
||||
run: |
||||
shell: bash |
||||
steps: |
||||
- name: Download Wheels |
||||
uses: actions/download-artifact@v3 |
||||
with: |
||||
name: python-wheels |
||||
path: wheels |
||||
- name: Download Requirements |
||||
uses: actions/download-artifact@v3 |
||||
with: |
||||
name: requirements |
||||
path: requirements |
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 |
||||
with: |
||||
python-version: ${{ matrix.python-version }} |
||||
architecture: ${{ matrix.architecture }} |
||||
- name: Setup Python venv |
||||
run: | |
||||
python -m pip install --upgrade pip |
||||
python -m venv env |
||||
# Windows uses 'Scripts' instead of 'bin' |
||||
source env/bin/activate || source env/Scripts/activate |
||||
echo "VIRTUAL ENV:" $VIRTUAL_ENV |
||||
- name: Install tzdata |
||||
run: pip install tzdata |
||||
# Only needed on Windows; Linux ships with tzdata. |
||||
if: ${{ contains(matrix.os, 'windows') }} |
||||
- name: Install requirements |
||||
run: pip install -r requirements/requirements.txt |
||||
- name: Install Protobuf Binary Wheel |
||||
run: pip install -vvv --no-index --find-links wheels protobuf |
||||
if: ${{ matrix.type == 'binary' }} |
||||
- name: Install Protobuf Source Wheel |
||||
run: | |
||||
cd wheels |
||||
tar -xzvf *.tar.gz |
||||
cd protobuf-*/ |
||||
pip install . |
||||
if: ${{ matrix.type == 'source' }} |
||||
- name: Test that module is importable |
||||
run: python -v -c 'from google._upb import _message; assert "google._upb._message.MessageMeta" in str(_message.MessageMeta)' |
||||
- name: Install Protobuf Test Wheel |
||||
run: pip install -vvv --no-index --find-links wheels protobuftests |
||||
- name: Run the unit tests |
||||
run: | |
||||
TESTS=$(pip show -f protobuftests | grep pb_unit_tests.*py$ | sed 's,/,.,g' | sed 's,\\,.,g' | sed -E 's,.py$,,g') |
||||
for test in $TESTS; do |
||||
python -m unittest -v $test |
||||
done |
||||
|
||||
test_pure_python_wheels: |
||||
name: Test Pure Python Wheels |
||||
needs: build_wheels |
||||
strategy: |
||||
fail-fast: false # Don't cancel all jobs if one fails. |
||||
matrix: |
||||
python-version: ["3.8", "3.12"] |
||||
runs-on: ubuntu-latest |
||||
if: ${{ github.event_name != 'pull_request_target' }} |
||||
steps: |
||||
- name: Download Wheels |
||||
uses: actions/download-artifact@v3 |
||||
with: |
||||
name: python-wheels |
||||
path: wheels |
||||
- name: Delete Binary Wheels |
||||
run: find wheels -type f | grep -v none-any | xargs rm |
||||
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0 |
||||
with: |
||||
python-version: ${{ matrix.python-version }} |
||||
- name: Setup Python venv |
||||
run: | |
||||
python -m pip install --upgrade pip |
||||
python -m venv env |
||||
source env/bin/activate |
||||
echo "VIRTUAL ENV:" $VIRTUAL_ENV |
||||
- name: Install numpy |
||||
run: pip install numpy |
||||
- name: Install Protobuf Wheels |
||||
run: pip install -vvv --no-index --find-links wheels protobuf protobuftests |
||||
- name: Run the unit tests |
||||
run: | |
||||
TESTS=$(pip show -f protobuftests | grep _test.py | grep --invert-match _pybind11_test.py | sed 's,/,.,g' | sed -E 's,.py$,,g') |
||||
for test in $TESTS; do |
||||
python -m unittest -v $test |
||||
done |
@ -1,75 +0,0 @@ |
||||
2022-07-01 Unreleased version |
||||
# C++ |
||||
* Removed cpp_generated_lib_linked support in protoc. |
||||
* Reduced .pb.o object file size slightly by explicitly instantiating |
||||
InternalMetadata templates in the runtime. |
||||
* Breaking change: Add C++20 reserved keywords. |
||||
* Fixed crash in ThreadLocalStorage for pre-C++17 compilers on 32-bit ARM. |
||||
* Clarified that JSON API non-OK statuses are not a stable API. |
||||
* Added a default implementation of MessageDifferencer::Reporter methods. |
||||
* proto2::MapPair is now an alias to std::pair. |
||||
* Hide C++ RepeatedField::UnsafeArenaSwap |
||||
* Use table-driven parser for reflection-based objects. |
||||
* Update Map's InternalSwap() to take a pointer to the other Map. |
||||
* Add ARM-optimized Varint decoding functions. |
||||
* Minor optimization for parsing groups |
||||
* Declare ReflectiveProtoHook class |
||||
* Reduce size of VarintParse code in protocol buffers, by calling the shared |
||||
routine after handling just one-byte varint encoding inline, rather than |
||||
handling one-byte and two-byte varints inline (see the sketch after this list). |
||||
* Avoid inlining some large, heavily duplicated routines in repeated_ptr_field.h |
||||
* Add ReflectiveProtoHook to Reflection. |
||||
* Turn on table-driven parser for reflection-based objects. |
||||
* Save code space by avoiding inlining of the MessageLite::~MessageLite destructor, which is large in aggregate. |
||||
* Undefine the macro `linux` when compiling protobuf |
||||
* Reduce memory consumption of MessageSet parsing. |
||||
* Breaking change: Delete Arena::Init |
||||
* Add PROTOBUF_POISON/UNPOISON macros to reduce noise in the source. |
||||
* Put alignment functions in "arena_align.h" |
||||
* Split off `cleanup` arena functions into "arena_cleanup.h" |
||||
* Fix signed/unsigned mismatch in CHECK_EQ. |
||||
* Remove Atomic<>; it's not pulling its weight. |
||||
* Move AllocationPolicy out of arena_impl, and unify arena_config for Bazel. |
||||
* Fix failure case in table-driven parser. |
||||
* Add a new JSON parser. |
||||
* Removed old JSON parsing code. |
||||
* Introduce the Printer::{SetRedactDebugString,SetRandomizeDebugString} private flags. |
||||
* Introduce global flags to control Printer::{SetRedactDebugString, SetRandomizeDebugString}. |
||||
* proto3 string fields no longer trigger clang-tidy warning bugprone-branch-clone. |
||||
* Fix the API of DescriptorUpgrader::set_allow_unknown_dependencies so that it always sets the value to true, and also populates it into the DescriptorPool. |
||||
* Report line numbers consistently in text-format deprecated-field warnings. |
||||
* Reserve C++20 keywords |
||||
* Fixed C++ code generation for protos that use int32_t, uint32_t, int64_t, uint64_t, size_t as field names. |
||||
* Annotate generated C++ public aliases for enum types. |
||||
* Change default arena max block size from 8K to 32K. |
||||
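
To make the VarintParse size reduction noted in the list above concrete, here is a minimal sketch of the pattern in Python with hypothetical names (the real implementation lives in the C++ runtime): only the common one-byte encoding is decoded inline, and anything longer falls through to one shared out-of-line routine, so the inlined code stays small.

def parse_varint_shared(data, pos):
    # Out-of-line slow path: decode a little-endian base-128 varint of any length.
    result = 0
    shift = 0
    while True:
        b = data[pos]
        pos += 1
        result |= (b & 0x7F) << shift
        if not (b & 0x80):
            return result, pos
        shift += 7

def parse_varint(data, pos):
    # Inlined fast path: handle just the one-byte encoding here.
    b = data[pos]
    if not (b & 0x80):  # high bit clear means a single-byte varint
        return b, pos + 1
    return parse_varint_shared(data, pos)  # shared routine for longer encodings
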
|
||||
# Kotlin |
||||
* Suppress deprecation warnings in Kotlin generated code. |
||||
* Kotlin generated code comments now use kdoc format instead of javadoc. |
||||
* Escape keywords in package names in proto generated code |
||||
* Add Kotlin enum int value getters and setters |
||||
|
||||
# Java |
||||
* Performance improvement for repeated use of FieldMaskUtil#merge by caching |
||||
constructed FieldMaskTrees (see the sketch after this list). |
||||
* Optimized Java proto serialization gencode for protos having many extension ranges with few fields in between. |
||||
* More thoroughly annotate public generated code in Java lite protocol buffers. |
||||
* Fixed bug in proto3 Java lite repeated enum fields: copyOnWrite was not called before modifying a previously built message, causing modification of already "built" messages that should be immutable. |
||||
* Fix Java reflection serialization of empty packed fields. |
||||
* Refactored the Java full runtime to reuse sub-message builders and to prepare for migrating parsing logic from the parse constructor to the builder. |
||||
* Move proto wireformat parsing functionality from the private "parsing constructor" to the Builder class. |
||||
* Change the Lite runtime to prefer merging from the wireformat into mutable messages rather than building up a new immutable object before merging. This results in fewer allocations and copy operations. |
||||
* Make message-type extensions merge from wire-format instead of building up instances and merging afterwards. This has much better performance. |
||||
* Fix TextFormat parser to build up recurring (but supposedly not repeated) sub-messages directly from text rather than building a new sub-message and merging the fully formed message into the existing field. |
||||
* Fix bug in nested builder caching logic where cleared sub-field builders would remain dirty after a clear and build in a parent layer. https://github.com/protocolbuffers/protobuf/issues/10624 |
||||
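
As a hedged illustration of the FieldMaskUtil#merge caching noted in the list above, here is a small Python sketch with invented names (the actual change is in the Java runtime): the tree derived from a mask is memoized, so repeated merges with the same mask skip the construction step. Masks are modeled as tuples of dotted paths so they can serve as dictionary keys.

_tree_cache = {}

def _build_tree(mask):
    # Expensive step: turn ('a.b', 'c') into {'a': {'b': {}}, 'c': {}}.
    tree = {}
    for path in mask:
        node = tree
        for part in path.split('.'):
            node = node.setdefault(part, {})
    return tree

def merge(mask, source, dest):
    # Reuse a cached tree when the same mask is merged repeatedly.
    tree = _tree_cache.get(mask)
    if tree is None:
        tree = _tree_cache[mask] = _build_tree(mask)
    _merge_node(tree, source, dest)

def _merge_node(node, source, dest):
    for key, child in node.items():
        if child:  # interior node: descend into the nested dicts
            _merge_node(child, source.get(key, {}), dest.setdefault(key, {}))
        elif key in source:  # leaf: copy the masked field across
            dest[key] = source[key]
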
|
||||
# Python |
||||
* Changes ordering of printed fields in .pyi files from lexicographic to the same ordering found in the proto descriptor. |
||||
* Adds GeneratedCodeInfo annotations to python proto .pyi outputs as a base64-encoded docstring in the last line of the .pyi file for code analysis tools (see the sketch after this list). |
||||
* Fix message factory's behavior in the python cpp extension to return the same message classes for the same descriptor, even if the factories are different. |
||||
* Add type annotation for enum value fields in enum classes. |
||||
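
For the .pyi annotation entry above, here is a sketch of how a code-analysis tool might read that annotation back in Python. The base64 payload sitting in the last line follows the entry's description, but the exact framing of that line (how it is quoted) is an assumption here, not a documented format.

import base64
from google.protobuf import descriptor_pb2

def read_pyi_annotations(pyi_path):
    # Take the last non-empty line of the generated stub and strip any
    # quoting around it (assumed framing), leaving the base64 payload.
    with open(pyi_path) as f:
        last_line = f.read().rstrip().splitlines()[-1]
    payload = base64.b64decode(last_line.strip().strip('"\''))
    # GeneratedCodeInfo is defined in descriptor.proto, so descriptor_pb2
    # carries a Python class for it.
    info = descriptor_pb2.GeneratedCodeInfo()
    info.ParseFromString(payload)
    return info
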
|
||||
# Compiler |
||||
* Print full path name of source .proto file on error |
||||
* Include proto message type in the annotation comments. |
@ -0,0 +1,933 @@ |
||||
{ |
||||
"checksum": "8863e5b8f3da7cf4502f68bea0d455dec4834bf25ff070caaa58a8e1c5ea1a3d", |
||||
"crates": { |
||||
"aho-corasick 1.1.2": { |
||||
"name": "aho-corasick", |
||||
"version": "1.1.2", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/aho-corasick/1.1.2/download", |
||||
"sha256": "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "aho_corasick", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "aho_corasick", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"crate_features": { |
||||
"common": [ |
||||
"default", |
||||
"perf-literal", |
||||
"std" |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "memchr 2.6.4", |
||||
"target": "memchr" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2021", |
||||
"version": "1.1.2" |
||||
}, |
||||
"license": "Unlicense OR MIT" |
||||
}, |
||||
"autocfg 1.1.0": { |
||||
"name": "autocfg", |
||||
"version": "1.1.0", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/autocfg/1.1.0/download", |
||||
"sha256": "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "autocfg", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "autocfg", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"edition": "2015", |
||||
"version": "1.1.0" |
||||
}, |
||||
"license": "Apache-2.0 OR MIT" |
||||
}, |
||||
"direct-cargo-bazel-deps 0.0.1": { |
||||
"name": "direct-cargo-bazel-deps", |
||||
"version": "0.0.1", |
||||
"repository": null, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "direct_cargo_bazel_deps", |
||||
"crate_root": ".direct_cargo_bazel_deps.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "direct_cargo_bazel_deps", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "googletest 0.11.0", |
||||
"target": "googletest" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2018", |
||||
"proc_macro_deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "paste 1.0.14", |
||||
"target": "paste" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"version": "0.0.1" |
||||
}, |
||||
"license": null |
||||
}, |
||||
"googletest 0.11.0": { |
||||
"name": "googletest", |
||||
"version": "0.11.0", |
||||
"repository": { |
||||
"Git": { |
||||
"remote": "https://github.com/google/googletest-rust", |
||||
"commitish": { |
||||
"Rev": "471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f" |
||||
}, |
||||
"strip_prefix": "googletest" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "googletest", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "googletest", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "num-traits 0.2.17", |
||||
"target": "num_traits" |
||||
}, |
||||
{ |
||||
"id": "regex 1.10.0", |
||||
"target": "regex" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2021", |
||||
"proc_macro_deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "googletest_macro 0.11.0", |
||||
"target": "googletest_macro" |
||||
}, |
||||
{ |
||||
"id": "rustversion 1.0.14", |
||||
"target": "rustversion" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"version": "0.11.0" |
||||
}, |
||||
"license": "Apache-2.0" |
||||
}, |
||||
"googletest_macro 0.11.0": { |
||||
"name": "googletest_macro", |
||||
"version": "0.11.0", |
||||
"repository": { |
||||
"Git": { |
||||
"remote": "https://github.com/google/googletest-rust", |
||||
"commitish": { |
||||
"Rev": "471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f" |
||||
}, |
||||
"strip_prefix": "googletest_macro" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"ProcMacro": { |
||||
"crate_name": "googletest_macro", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "googletest_macro", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "quote 1.0.33", |
||||
"target": "quote" |
||||
}, |
||||
{ |
||||
"id": "syn 2.0.43", |
||||
"target": "syn" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2021", |
||||
"version": "0.11.0" |
||||
}, |
||||
"license": "Apache-2.0" |
||||
}, |
||||
"memchr 2.6.4": { |
||||
"name": "memchr", |
||||
"version": "2.6.4", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/memchr/2.6.4/download", |
||||
"sha256": "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "memchr", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "memchr", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"crate_features": { |
||||
"common": [ |
||||
"alloc", |
||||
"default", |
||||
"std" |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2021", |
||||
"version": "2.6.4" |
||||
}, |
||||
"license": "Unlicense OR MIT" |
||||
}, |
||||
"num-traits 0.2.17": { |
||||
"name": "num-traits", |
||||
"version": "0.2.17", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/num-traits/0.2.17/download", |
||||
"sha256": "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "num_traits", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
}, |
||||
{ |
||||
"BuildScript": { |
||||
"crate_name": "build_script_build", |
||||
"crate_root": "build.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "num_traits", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"crate_features": { |
||||
"common": [ |
||||
"default", |
||||
"std" |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "num-traits 0.2.17", |
||||
"target": "build_script_build" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2018", |
||||
"version": "0.2.17" |
||||
}, |
||||
"build_script_attrs": { |
||||
"data_glob": [ |
||||
"**" |
||||
], |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "autocfg 1.1.0", |
||||
"target": "autocfg" |
||||
} |
||||
], |
||||
"selects": {} |
||||
} |
||||
}, |
||||
"license": "MIT OR Apache-2.0" |
||||
}, |
||||
"paste 1.0.14": { |
||||
"name": "paste", |
||||
"version": "1.0.14", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/paste/1.0.14/download", |
||||
"sha256": "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"ProcMacro": { |
||||
"crate_name": "paste", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
}, |
||||
{ |
||||
"BuildScript": { |
||||
"crate_name": "build_script_build", |
||||
"crate_root": "build.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "paste", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "paste 1.0.14", |
||||
"target": "build_script_build" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2018", |
||||
"version": "1.0.14" |
||||
}, |
||||
"build_script_attrs": { |
||||
"data_glob": [ |
||||
"**" |
||||
] |
||||
}, |
||||
"license": "MIT OR Apache-2.0" |
||||
}, |
||||
"proc-macro2 1.0.69": { |
||||
"name": "proc-macro2", |
||||
"version": "1.0.69", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/proc-macro2/1.0.69/download", |
||||
"sha256": "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "proc_macro2", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
}, |
||||
{ |
||||
"BuildScript": { |
||||
"crate_name": "build_script_build", |
||||
"crate_root": "build.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "proc_macro2", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"crate_features": { |
||||
"common": [ |
||||
"proc-macro" |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "proc-macro2 1.0.69", |
||||
"target": "build_script_build" |
||||
}, |
||||
{ |
||||
"id": "unicode-ident 1.0.12", |
||||
"target": "unicode_ident" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2021", |
||||
"version": "1.0.69" |
||||
}, |
||||
"build_script_attrs": { |
||||
"data_glob": [ |
||||
"**" |
||||
] |
||||
}, |
||||
"license": "MIT OR Apache-2.0" |
||||
}, |
||||
"quote 1.0.33": { |
||||
"name": "quote", |
||||
"version": "1.0.33", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/quote/1.0.33/download", |
||||
"sha256": "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "quote", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "quote", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"crate_features": { |
||||
"common": [ |
||||
"default", |
||||
"proc-macro" |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "proc-macro2 1.0.69", |
||||
"target": "proc_macro2" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2018", |
||||
"version": "1.0.33" |
||||
}, |
||||
"license": "MIT OR Apache-2.0" |
||||
}, |
||||
"regex 1.10.0": { |
||||
"name": "regex", |
||||
"version": "1.10.0", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/regex/1.10.0/download", |
||||
"sha256": "d119d7c7ca818f8a53c300863d4f87566aac09943aef5b355bb83969dae75d87" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "regex", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "regex", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"crate_features": { |
||||
"common": [ |
||||
"default", |
||||
"perf", |
||||
"perf-backtrack", |
||||
"perf-cache", |
||||
"perf-dfa", |
||||
"perf-inline", |
||||
"perf-literal", |
||||
"perf-onepass", |
||||
"std", |
||||
"unicode", |
||||
"unicode-age", |
||||
"unicode-bool", |
||||
"unicode-case", |
||||
"unicode-gencat", |
||||
"unicode-perl", |
||||
"unicode-script", |
||||
"unicode-segment" |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "aho-corasick 1.1.2", |
||||
"target": "aho_corasick" |
||||
}, |
||||
{ |
||||
"id": "memchr 2.6.4", |
||||
"target": "memchr" |
||||
}, |
||||
{ |
||||
"id": "regex-automata 0.4.1", |
||||
"target": "regex_automata" |
||||
}, |
||||
{ |
||||
"id": "regex-syntax 0.8.1", |
||||
"target": "regex_syntax" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2021", |
||||
"version": "1.10.0" |
||||
}, |
||||
"license": "MIT OR Apache-2.0" |
||||
}, |
||||
"regex-automata 0.4.1": { |
||||
"name": "regex-automata", |
||||
"version": "0.4.1", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/regex-automata/0.4.1/download", |
||||
"sha256": "465c6fc0621e4abc4187a2bda0937bfd4f722c2730b29562e19689ea796c9a4b" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "regex_automata", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "regex_automata", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"crate_features": { |
||||
"common": [ |
||||
"alloc", |
||||
"dfa-onepass", |
||||
"hybrid", |
||||
"meta", |
||||
"nfa-backtrack", |
||||
"nfa-pikevm", |
||||
"nfa-thompson", |
||||
"perf-inline", |
||||
"perf-literal", |
||||
"perf-literal-multisubstring", |
||||
"perf-literal-substring", |
||||
"std", |
||||
"syntax", |
||||
"unicode", |
||||
"unicode-age", |
||||
"unicode-bool", |
||||
"unicode-case", |
||||
"unicode-gencat", |
||||
"unicode-perl", |
||||
"unicode-script", |
||||
"unicode-segment", |
||||
"unicode-word-boundary" |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "aho-corasick 1.1.2", |
||||
"target": "aho_corasick" |
||||
}, |
||||
{ |
||||
"id": "memchr 2.6.4", |
||||
"target": "memchr" |
||||
}, |
||||
{ |
||||
"id": "regex-syntax 0.8.1", |
||||
"target": "regex_syntax" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2021", |
||||
"version": "0.4.1" |
||||
}, |
||||
"license": "MIT OR Apache-2.0" |
||||
}, |
||||
"regex-syntax 0.8.1": { |
||||
"name": "regex-syntax", |
||||
"version": "0.8.1", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/regex-syntax/0.8.1/download", |
||||
"sha256": "56d84fdd47036b038fc80dd333d10b6aab10d5d31f4a366e20014def75328d33" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "regex_syntax", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "regex_syntax", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"crate_features": { |
||||
"common": [ |
||||
"default", |
||||
"std", |
||||
"unicode", |
||||
"unicode-age", |
||||
"unicode-bool", |
||||
"unicode-case", |
||||
"unicode-gencat", |
||||
"unicode-perl", |
||||
"unicode-script", |
||||
"unicode-segment" |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2021", |
||||
"version": "0.8.1" |
||||
}, |
||||
"license": "MIT OR Apache-2.0" |
||||
}, |
||||
"rustversion 1.0.14": { |
||||
"name": "rustversion", |
||||
"version": "1.0.14", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/rustversion/1.0.14/download", |
||||
"sha256": "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"ProcMacro": { |
||||
"crate_name": "rustversion", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
}, |
||||
{ |
||||
"BuildScript": { |
||||
"crate_name": "build_script_build", |
||||
"crate_root": "build/build.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "rustversion", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "rustversion 1.0.14", |
||||
"target": "build_script_build" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2018", |
||||
"version": "1.0.14" |
||||
}, |
||||
"build_script_attrs": { |
||||
"data_glob": [ |
||||
"**" |
||||
] |
||||
}, |
||||
"license": "MIT OR Apache-2.0" |
||||
}, |
||||
"syn 2.0.43": { |
||||
"name": "syn", |
||||
"version": "2.0.43", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/syn/2.0.43/download", |
||||
"sha256": "ee659fb5f3d355364e1f3e5bc10fb82068efbf824a1e9d1c9504244a6469ad53" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "syn", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "syn", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"crate_features": { |
||||
"common": [ |
||||
"clone-impls", |
||||
"default", |
||||
"derive", |
||||
"full", |
||||
"parsing", |
||||
"printing", |
||||
"proc-macro", |
||||
"quote" |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"deps": { |
||||
"common": [ |
||||
{ |
||||
"id": "proc-macro2 1.0.69", |
||||
"target": "proc_macro2" |
||||
}, |
||||
{ |
||||
"id": "quote 1.0.33", |
||||
"target": "quote" |
||||
}, |
||||
{ |
||||
"id": "unicode-ident 1.0.12", |
||||
"target": "unicode_ident" |
||||
} |
||||
], |
||||
"selects": {} |
||||
}, |
||||
"edition": "2021", |
||||
"version": "2.0.43" |
||||
}, |
||||
"license": "MIT OR Apache-2.0" |
||||
}, |
||||
"unicode-ident 1.0.12": { |
||||
"name": "unicode-ident", |
||||
"version": "1.0.12", |
||||
"repository": { |
||||
"Http": { |
||||
"url": "https://static.crates.io/crates/unicode-ident/1.0.12/download", |
||||
"sha256": "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" |
||||
} |
||||
}, |
||||
"targets": [ |
||||
{ |
||||
"Library": { |
||||
"crate_name": "unicode_ident", |
||||
"crate_root": "src/lib.rs", |
||||
"srcs": [ |
||||
"**/*.rs" |
||||
] |
||||
} |
||||
} |
||||
], |
||||
"library_target_name": "unicode_ident", |
||||
"common_attrs": { |
||||
"compile_data_glob": [ |
||||
"**" |
||||
], |
||||
"edition": "2018", |
||||
"version": "1.0.12" |
||||
}, |
||||
"license": "(MIT OR Apache-2.0) AND Unicode-DFS-2016" |
||||
} |
||||
}, |
||||
"binary_crates": [], |
||||
"workspace_members": { |
||||
"direct-cargo-bazel-deps 0.0.1": "" |
||||
}, |
||||
"conditions": { |
||||
"aarch64-apple-darwin": [ |
||||
"aarch64-apple-darwin" |
||||
], |
||||
"aarch64-apple-ios": [ |
||||
"aarch64-apple-ios" |
||||
], |
||||
"aarch64-apple-ios-sim": [ |
||||
"aarch64-apple-ios-sim" |
||||
], |
||||
"aarch64-fuchsia": [ |
||||
"aarch64-fuchsia" |
||||
], |
||||
"aarch64-linux-android": [ |
||||
"aarch64-linux-android" |
||||
], |
||||
"aarch64-pc-windows-msvc": [ |
||||
"aarch64-pc-windows-msvc" |
||||
], |
||||
"aarch64-unknown-linux-gnu": [ |
||||
"aarch64-unknown-linux-gnu" |
||||
], |
||||
"arm-unknown-linux-gnueabi": [ |
||||
"arm-unknown-linux-gnueabi" |
||||
], |
||||
"armv7-linux-androideabi": [ |
||||
"armv7-linux-androideabi" |
||||
], |
||||
"armv7-unknown-linux-gnueabi": [ |
||||
"armv7-unknown-linux-gnueabi" |
||||
], |
||||
"i686-apple-darwin": [ |
||||
"i686-apple-darwin" |
||||
], |
||||
"i686-linux-android": [ |
||||
"i686-linux-android" |
||||
], |
||||
"i686-pc-windows-msvc": [ |
||||
"i686-pc-windows-msvc" |
||||
], |
||||
"i686-unknown-freebsd": [ |
||||
"i686-unknown-freebsd" |
||||
], |
||||
"i686-unknown-linux-gnu": [ |
||||
"i686-unknown-linux-gnu" |
||||
], |
||||
"powerpc-unknown-linux-gnu": [ |
||||
"powerpc-unknown-linux-gnu" |
||||
], |
||||
"riscv32imc-unknown-none-elf": [ |
||||
"riscv32imc-unknown-none-elf" |
||||
], |
||||
"riscv64gc-unknown-none-elf": [ |
||||
"riscv64gc-unknown-none-elf" |
||||
], |
||||
"s390x-unknown-linux-gnu": [ |
||||
"s390x-unknown-linux-gnu" |
||||
], |
||||
"thumbv7em-none-eabi": [ |
||||
"thumbv7em-none-eabi" |
||||
], |
||||
"thumbv8m.main-none-eabi": [ |
||||
"thumbv8m.main-none-eabi" |
||||
], |
||||
"wasm32-unknown-unknown": [ |
||||
"wasm32-unknown-unknown" |
||||
], |
||||
"wasm32-wasi": [ |
||||
"wasm32-wasi" |
||||
], |
||||
"x86_64-apple-darwin": [ |
||||
"x86_64-apple-darwin" |
||||
], |
||||
"x86_64-apple-ios": [ |
||||
"x86_64-apple-ios" |
||||
], |
||||
"x86_64-fuchsia": [ |
||||
"x86_64-fuchsia" |
||||
], |
||||
"x86_64-linux-android": [ |
||||
"x86_64-linux-android" |
||||
], |
||||
"x86_64-pc-windows-msvc": [ |
||||
"x86_64-pc-windows-msvc" |
||||
], |
||||
"x86_64-unknown-freebsd": [ |
||||
"x86_64-unknown-freebsd" |
||||
], |
||||
"x86_64-unknown-linux-gnu": [ |
||||
"x86_64-unknown-linux-gnu" |
||||
], |
||||
"x86_64-unknown-none": [ |
||||
"x86_64-unknown-none" |
||||
] |
||||
} |
||||
} |
@ -0,0 +1,137 @@ |
||||
# This file is automatically @generated by Cargo. |
||||
# It is not intended for manual editing. |
||||
version = 3 |
||||
|
||||
[[package]] |
||||
name = "aho-corasick" |
||||
version = "1.1.2" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" |
||||
dependencies = [ |
||||
"memchr", |
||||
] |
||||
|
||||
[[package]] |
||||
name = "autocfg" |
||||
version = "1.1.0" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" |
||||
|
||||
[[package]] |
||||
name = "direct-cargo-bazel-deps" |
||||
version = "0.0.1" |
||||
dependencies = [ |
||||
"googletest", |
||||
"paste", |
||||
] |
||||
|
||||
[[package]] |
||||
name = "googletest" |
||||
version = "0.11.0" |
||||
source = "git+https://github.com/google/googletest-rust?rev=471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f#471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f" |
||||
dependencies = [ |
||||
"googletest_macro", |
||||
"num-traits", |
||||
"regex", |
||||
"rustversion", |
||||
] |
||||
|
||||
[[package]] |
||||
name = "googletest_macro" |
||||
version = "0.11.0" |
||||
source = "git+https://github.com/google/googletest-rust?rev=471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f#471d4a2a8e8bc74f6d7d9c8eecb4d4e3157b2a9f" |
||||
dependencies = [ |
||||
"quote", |
||||
"syn", |
||||
] |
||||
|
||||
[[package]] |
||||
name = "memchr" |
||||
version = "2.6.4" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" |
||||
|
||||
[[package]] |
||||
name = "num-traits" |
||||
version = "0.2.17" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" |
||||
dependencies = [ |
||||
"autocfg", |
||||
] |
||||
|
||||
[[package]] |
||||
name = "paste" |
||||
version = "1.0.14" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" |
||||
|
||||
[[package]] |
||||
name = "proc-macro2" |
||||
version = "1.0.69" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" |
||||
dependencies = [ |
||||
"unicode-ident", |
||||
] |
||||
|
||||
[[package]] |
||||
name = "quote" |
||||
version = "1.0.33" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" |
||||
dependencies = [ |
||||
"proc-macro2", |
||||
] |
||||
|
||||
[[package]] |
||||
name = "regex" |
||||
version = "1.10.0" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "d119d7c7ca818f8a53c300863d4f87566aac09943aef5b355bb83969dae75d87" |
||||
dependencies = [ |
||||
"aho-corasick", |
||||
"memchr", |
||||
"regex-automata", |
||||
"regex-syntax", |
||||
] |
||||
|
||||
[[package]] |
||||
name = "regex-automata" |
||||
version = "0.4.1" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "465c6fc0621e4abc4187a2bda0937bfd4f722c2730b29562e19689ea796c9a4b" |
||||
dependencies = [ |
||||
"aho-corasick", |
||||
"memchr", |
||||
"regex-syntax", |
||||
] |
||||
|
||||
[[package]] |
||||
name = "regex-syntax" |
||||
version = "0.8.1" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "56d84fdd47036b038fc80dd333d10b6aab10d5d31f4a366e20014def75328d33" |
||||
|
||||
[[package]] |
||||
name = "rustversion" |
||||
version = "1.0.14" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" |
||||
|
||||
[[package]] |
||||
name = "syn" |
||||
version = "2.0.43" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "ee659fb5f3d355364e1f3e5bc10fb82068efbf824a1e9d1c9504244a6469ad53" |
||||
dependencies = [ |
||||
"proc-macro2", |
||||
"quote", |
||||
"unicode-ident", |
||||
] |
||||
|
||||
[[package]] |
||||
name = "unicode-ident" |
||||
version = "1.0.12" |
||||
source = "registry+https://github.com/rust-lang/crates.io-index" |
||||
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" |
@ -0,0 +1,72 @@ |
||||
# TODO: migrate all dependencies from WORKSPACE to MODULE.bazel |
||||
# https://github.com/protocolbuffers/protobuf/issues/14313 |
||||
|
||||
module( |
||||
name = "protobuf", |
||||
version = "28.0-dev", # Automatically updated on release |
||||
compatibility_level = 1, |
||||
repo_name = "com_google_protobuf", |
||||
) |
||||
|
||||
# LOWER BOUND dependency versions. |
||||
# Bzlmod follows MVS: |
||||
# https://bazel.build/versions/6.0.0/build/bzlmod#version-resolution |
||||
# Thus the highest version in the module graph is resolved. |
||||
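
As a quick illustration of why the versions below are lower bounds, here is a tiny Python sketch of the "highest version wins" rule with made-up module data. Bazel's real MVS resolution works over the full module graph and a richer version syntax; the naive numeric comparison below is an assumption for the example only.

def resolve_mvs(requirements):
    # requirements: (module, version) pairs gathered from the module graph.
    # MVS keeps, per module, the highest version anyone requested.
    resolved = {}
    for module, version in requirements:
        key = tuple(int(x) for x in version.split('.'))  # naive compare
        if module not in resolved or key > resolved[module][0]:
            resolved[module] = (key, version)
    return {m: v for m, (_, v) in resolved.items()}

# If protobuf asks for zlib 1.2.11 and another module asks for 1.3:
print(resolve_mvs([('zlib', '1.2.11'), ('zlib', '1.3')]))  # {'zlib': '1.3'}
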
bazel_dep(name = "abseil-cpp", version = "20230802.0.bcr.1", repo_name = "com_google_absl") |
||||
bazel_dep(name = "bazel_skylib", version = "1.4.1") |
||||
bazel_dep(name = "jsoncpp", version = "1.9.5") |
||||
bazel_dep(name = "rules_cc", version = "0.0.9") |
||||
bazel_dep(name = "rules_fuzzing", version = "0.5.2") |
||||
bazel_dep(name = "rules_java", version = "5.3.5") |
||||
bazel_dep(name = "rules_jvm_external", version = "5.1") |
||||
bazel_dep(name = "rules_pkg", version = "0.7.0") |
||||
bazel_dep(name = "rules_python", version = "0.28.0") |
||||
bazel_dep(name = "rules_rust", version = "0.45.1") |
||||
bazel_dep(name = "platforms", version = "0.0.8") |
||||
bazel_dep(name = "zlib", version = "1.2.11") |
||||
|
||||
# TODO: remove after toolchain types are moved to protobuf |
||||
bazel_dep(name = "rules_proto", version = "4.0.0") |
||||
|
||||
SUPPORTED_PYTHON_VERSIONS = [ |
||||
"3.8", |
||||
"3.9", |
||||
"3.10", |
||||
"3.11", |
||||
"3.12", |
||||
] |
||||
python = use_extension("@rules_python//python/extensions:python.bzl", "python") |
||||
[ |
||||
python.toolchain( |
||||
is_default = python_version == SUPPORTED_PYTHON_VERSIONS[-1], |
||||
python_version = python_version, |
||||
) |
||||
for python_version in SUPPORTED_PYTHON_VERSIONS |
||||
] |
||||
use_repo(python, system_python = "python_{}".format(SUPPORTED_PYTHON_VERSIONS[-1].replace(".", "_"))) |
||||
pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip") |
||||
[ |
||||
pip.parse( |
||||
hub_name = "pip_deps", |
||||
python_version = python_version, |
||||
requirements_lock = "//python:requirements.txt", |
||||
) |
||||
for python_version in SUPPORTED_PYTHON_VERSIONS |
||||
] |
||||
use_repo(pip, "pip_deps") |
||||
|
||||
rust = use_extension("@rules_rust//rust:extensions.bzl", "rust") |
||||
rust.toolchain(edition = "2021") |
||||
use_repo(rust, "rust_toolchains") |
||||
register_toolchains("@rust_toolchains//:all") |
||||
crate = use_extension("@rules_rust//crate_universe:extension.bzl", "crate") |
||||
crate.spec( |
||||
package = "googletest", |
||||
version = ">0.0.0", |
||||
) |
||||
crate.spec( |
||||
package = "paste", |
||||
version = ">=1", |
||||
) |
||||
crate.from_specs() |
||||
use_repo(crate, crate_index = "crates") |
@ -0,0 +1,14 @@ |
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> |
||||
<plist version="1.0"> |
||||
<dict> |
||||
<key>NSPrivacyTracking</key> |
||||
<false/> |
||||
<key>NSPrivacyTrackingDomains</key> |
||||
<array/> |
||||
<key>NSPrivacyCollectedDataTypes</key> |
||||
<array/> |
||||
<key>NSPrivacyAccessedAPITypes</key> |
||||
<array/> |
||||
</dict> |
||||
</plist> |
@ -0,0 +1,57 @@ |
||||
# Copyright (c) 2009-2021, Google LLC |
||||
# All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
|
||||
load("@bazel_skylib//:bzl_library.bzl", "bzl_library") |
||||
|
||||
licenses(["notice"]) |
||||
|
||||
bzl_library( |
||||
name = "proto_library_bzl", |
||||
srcs = ["proto_library.bzl"], |
||||
visibility = ["//visibility:public"], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "cc_proto_library_bzl", |
||||
srcs = ["cc_proto_library.bzl"], |
||||
visibility = ["//visibility:public"], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "java_proto_library_bzl", |
||||
srcs = ["java_proto_library.bzl"], |
||||
visibility = ["//visibility:public"], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "java_lite_proto_library_bzl", |
||||
srcs = ["java_lite_proto_library.bzl"], |
||||
visibility = ["//visibility:public"], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "py_proto_library_bzl", |
||||
srcs = ["py_proto_library.bzl"], |
||||
visibility = ["//visibility:public"], |
||||
deps = [ |
||||
"//bazel/common:proto_common_bzl", |
||||
"//bazel/common:proto_info_bzl", |
||||
"@rules_python//python:py_info_bzl", |
||||
], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "upb_proto_library_bzl", |
||||
srcs = [ |
||||
"upb_c_proto_library.bzl", |
||||
"upb_minitable_proto_library.bzl", |
||||
"upb_proto_library.bzl", |
||||
"upb_proto_reflection_library.bzl", |
||||
], |
||||
visibility = ["//visibility:public"], |
||||
deps = ["//bazel/private:upb_proto_library_internal_bzl"], |
||||
) |
@ -0,0 +1,3 @@ |
||||
"""cc_proto_library rule""" |
||||
|
||||
cc_proto_library = native.cc_proto_library |
@ -0,0 +1,34 @@ |
||||
load("@bazel_skylib//:bzl_library.bzl", "bzl_library") |
||||
|
||||
bzl_library( |
||||
name = "proto_common_bzl", |
||||
srcs = [ |
||||
"proto_common.bzl", |
||||
], |
||||
visibility = ["//visibility:public"], |
||||
deps = [ |
||||
"//bazel/private:native_bzl", |
||||
], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "proto_info_bzl", |
||||
srcs = [ |
||||
"proto_info.bzl", |
||||
], |
||||
visibility = ["//visibility:public"], |
||||
deps = [ |
||||
"//bazel/private:native_bzl", |
||||
], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "proto_lang_toolchain_info_bzl", |
||||
srcs = [ |
||||
"proto_lang_toolchain_info.bzl", |
||||
], |
||||
visibility = ["//visibility:public"], |
||||
deps = [ |
||||
":proto_common.bzl", |
||||
], |
||||
) |
@ -0,0 +1,5 @@ |
||||
"""proto_common""" |
||||
|
||||
load("//bazel/private:native.bzl", "native_proto_common") |
||||
|
||||
proto_common = native_proto_common |
@ -0,0 +1,5 @@ |
||||
"""ProtoInfo""" |
||||
|
||||
load("//bazel/private:native.bzl", "NativeProtoInfo") |
||||
|
||||
ProtoInfo = NativeProtoInfo |
@ -0,0 +1,5 @@ |
||||
"""ProtoLangToolchainInfo""" |
||||
|
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
|
||||
ProtoLangToolchainInfo = proto_common.ProtoLangToolchainInfo |
@ -0,0 +1,3 @@ |
||||
"""java_lite_proto_library rule""" |
||||
|
||||
java_lite_proto_library = native.java_lite_proto_library |
@ -0,0 +1,3 @@ |
||||
"""java_proto_library rule""" |
||||
|
||||
java_proto_library = native.java_proto_library |
@ -0,0 +1,42 @@ |
||||
# Copyright (c) 2009-2021, Google LLC |
||||
# All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
|
||||
load("@bazel_skylib//:bzl_library.bzl", "bzl_library") |
||||
|
||||
licenses(["notice"]) |
||||
|
||||
bzl_library( |
||||
name = "upb_proto_library_internal_bzl", |
||||
srcs = [ |
||||
"upb_proto_library_internal/aspect.bzl", |
||||
"upb_proto_library_internal/cc_library_func.bzl", |
||||
"upb_proto_library_internal/copts.bzl", |
||||
"upb_proto_library_internal/rule.bzl", |
||||
], |
||||
visibility = ["//bazel:__pkg__"], |
||||
deps = [ |
||||
"//bazel/common:proto_common_bzl", |
||||
"@bazel_skylib//lib:paths", |
||||
"@bazel_tools//tools/cpp:toolchain_utils.bzl", |
||||
], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "native_bzl", |
||||
srcs = [ |
||||
"native.bzl", |
||||
], |
||||
visibility = ["//bazel:__subpackages__"], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "proto_toolchain_rule_bzl", |
||||
srcs = [ |
||||
"proto_toolchain_rule.bzl", |
||||
], |
||||
visibility = ["//bazel:__subpackages__"], |
||||
) |
@ -0,0 +1,5 @@ |
||||
"""Renames toplevel symbols so they can be exported in Starlark under the same name""" |
||||
|
||||
NativeProtoInfo = ProtoInfo |
||||
|
||||
native_proto_common = proto_common_do_not_use |
@ -0,0 +1,47 @@ |
||||
"""A Starlark implementation of the proto_toolchain rule.""" |
||||
|
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo") |
||||
|
||||
def _impl(ctx): |
||||
kwargs = {} |
||||
if getattr(proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False): |
||||
kwargs["toolchain_type"] = "@rules_proto//proto:toolchain_type" |
||||
|
||||
return [ |
||||
DefaultInfo( |
||||
files = depset(), |
||||
runfiles = ctx.runfiles(), |
||||
), |
||||
platform_common.ToolchainInfo( |
||||
proto = ProtoLangToolchainInfo( |
||||
out_replacement_format_flag = ctx.attr.command_line, |
||||
output_files = ctx.attr.output_files, |
||||
plugin = None, |
||||
runtime = None, |
||||
proto_compiler = ctx.attr.proto_compiler.files_to_run, |
||||
protoc_opts = ctx.fragments.proto.experimental_protoc_opts, |
||||
progress_message = ctx.attr.progress_message, |
||||
mnemonic = ctx.attr.mnemonic, |
||||
**kwargs |
||||
), |
||||
), |
||||
] |
||||
|
||||
proto_toolchain = rule( |
||||
_impl, |
||||
attrs = |
||||
{ |
||||
"progress_message": attr.string(default = "Generating Descriptor Set proto_library %{label}"), |
||||
"mnemonic": attr.string(default = "GenProtoDescriptorSet"), |
||||
"command_line": attr.string(default = "--descriptor_set_out=%s"), |
||||
"output_files": attr.string(values = ["single", "multiple", "legacy"], default = "single"), |
||||
"proto_compiler": attr.label( |
||||
cfg = "exec", |
||||
executable = True, |
||||
allow_files = True, # Used by mocks in tests. Consider fixing tests and removing it. |
||||
), |
||||
}, |
||||
provides = [platform_common.ToolchainInfo], |
||||
fragments = ["proto"], |
||||
) |
@ -0,0 +1,303 @@ |
||||
"""Implementation of the aspect that powers the upb_*_proto_library() rules.""" |
||||
|
||||
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain") |
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load(":upb_proto_library_internal/cc_library_func.bzl", "cc_library_func") |
||||
load(":upb_proto_library_internal/copts.bzl", "UpbProtoLibraryCoptsInfo") |
||||
|
||||
# begin:github_only |
||||
_is_google3 = False |
||||
# end:github_only |
||||
|
||||
# begin:google_only |
||||
# _is_google3 = True |
||||
# end:google_only |
||||
|
||||
GeneratedSrcsInfo = provider( |
||||
"Provides generated headers and sources", |
||||
fields = { |
||||
"srcs": "list of srcs", |
||||
"hdrs": "list of hdrs", |
||||
"thunks": "Experimental, do not use. List of srcs defining C API. Incompatible with hdrs.", |
||||
}, |
||||
) |
||||
|
||||
def output_dir(ctx, proto_info): |
||||
"""Returns the output directory where generated proto files will be placed. |
||||
|
||||
Args: |
||||
ctx: Rule context. |
||||
proto_info: ProtoInfo provider. |
||||
|
||||
Returns: |
||||
A string specifying the output directory |
||||
""" |
||||
proto_root = proto_info.proto_source_root |
||||
if proto_root.startswith(ctx.bin_dir.path): |
||||
path = proto_root |
||||
else: |
||||
path = ctx.bin_dir.path + "/" + proto_root |
||||
|
||||
if proto_root == ".": |
||||
path = ctx.bin_dir.path |
||||
return path |
||||
|
||||
def _concat_lists(lists): |
||||
ret = [] |
||||
for lst in lists: |
||||
ret = ret + lst |
||||
return ret |
||||
|
||||
def _merge_generated_srcs(srcs): |
||||
return GeneratedSrcsInfo( |
||||
srcs = _concat_lists([s.srcs for s in srcs]), |
||||
hdrs = _concat_lists([s.hdrs for s in srcs]), |
||||
thunks = _concat_lists([s.thunks for s in srcs]), |
||||
) |
||||
|
||||
def _get_implicit_weak_field_sources(ctx, proto_info): |
||||
# Creating one .cc file for each Message in a proto allows the linker to be more aggressive |
||||
# about removing unused classes. However, since the number of outputs won't be known at Blaze |
||||
# analysis time, all of the generated source files are put in a directory and a TreeArtifact is |
||||
# used to represent them. |
||||
proto_artifacts = [] |
||||
for proto_source in proto_info.direct_sources: |
||||
# We can have slashes in the target name. For example, proto_source can be |
||||
# dir/a.proto. However, proto_source.basename will return a.proto, when in |
||||
# reality the BUILD file declares it as dir/a.proto, because the target name |
||||
# contains a slash. There is no good workaround for this, so we use |
||||
# ctx.label.package to determine whether the target name contains a slash, |
||||
# similar to what declare_directory does. |
||||
if not proto_source.short_path.startswith(ctx.label.package): |
||||
fail("This should never happen, proto source {} path does not start with {}.".format( |
||||
proto_source.short_path, |
||||
ctx.label.package, |
||||
)) |
||||
proto_source_name = proto_source.short_path[len(ctx.label.package) + 1:] |
||||
last_dot = proto_source_name.rfind(".") |
||||
if last_dot != -1: |
||||
proto_source_name = proto_source_name[:last_dot] |
||||
proto_artifacts.append(ctx.actions.declare_directory(proto_source_name + ".upb_weak_minitables")) |
||||
|
||||
return proto_artifacts |
||||
|
||||
def _get_feature_configuration(ctx, cc_toolchain, proto_info): |
||||
requested_features = list(ctx.features) |
||||
|
||||
# Disable the whole-archive behavior for protobuf generated code when the |
||||
# proto_one_output_per_message feature is enabled. |
||||
requested_features.append("disable_whole_archive_for_static_lib_if_proto_one_output_per_message") |
||||
unsupported_features = list(ctx.disabled_features) |
||||
if len(proto_info.direct_sources) != 0: |
||||
requested_features.append("header_modules") |
||||
else: |
||||
unsupported_features.append("header_modules") |
||||
return cc_common.configure_features( |
||||
ctx = ctx, |
||||
cc_toolchain = cc_toolchain, |
||||
requested_features = requested_features, |
||||
unsupported_features = unsupported_features, |
||||
) |
||||
|
||||
def _generate_srcs_list(ctx, generator, proto_info): |
||||
if len(proto_info.direct_sources) == 0: |
||||
return GeneratedSrcsInfo(srcs = [], hdrs = [], thunks = [])  # the provider has no "includes" field |
||||
|
||||
ext = "." + generator |
||||
srcs = [] |
||||
thunks = [] |
||||
hdrs = proto_common.declare_generated_files( |
||||
ctx.actions, |
||||
extension = ext + ".h", |
||||
proto_info = proto_info, |
||||
) |
||||
if not (generator == "upb" and _is_google3): |
||||
# TODO: The OSS build should also exclude this file for the upb generator, |
||||
# as it is empty and unnecessary. We only added it to make the OSS build happy on |
||||
# Windows and macOS. |
||||
srcs += proto_common.declare_generated_files( |
||||
ctx.actions, |
||||
extension = ext + ".c", |
||||
proto_info = proto_info, |
||||
) |
||||
if generator == "upb": |
||||
thunks = proto_common.declare_generated_files( |
||||
ctx.actions, |
||||
extension = ext + ".thunks.c", |
||||
proto_info = proto_info, |
||||
) |
||||
ctx.actions.run_shell( |
||||
inputs = hdrs, |
||||
outputs = thunks, |
||||
command = " && ".join([ |
||||
"sed 's/UPB_INLINE //' {} > {}".format(hdr.path, thunk.path) |
||||
for (hdr, thunk) in zip(hdrs, thunks) |
||||
]), |
||||
progress_message = "Generating thunks for upb protos API for: " + ctx.label.name, |
||||
mnemonic = "GenUpbProtosThunks", |
||||
) |
||||
|
||||
return GeneratedSrcsInfo( |
||||
srcs = srcs, |
||||
hdrs = hdrs, |
||||
thunks = thunks, |
||||
) |
||||
|
||||
def _generate_upb_protos(ctx, generator, proto_info, feature_configuration): |
||||
implicit_weak = generator == "upb_minitable" and cc_common.is_enabled( |
||||
feature_configuration = feature_configuration, |
||||
feature_name = "proto_one_output_per_message", |
||||
) |
||||
|
||||
srcs = _generate_srcs_list(ctx, generator, proto_info) |
||||
additional_args = ctx.actions.args() |
||||
|
||||
if implicit_weak: |
||||
srcs.srcs.extend(_get_implicit_weak_field_sources(ctx, proto_info)) |
||||
additional_args.add("--upb_minitable_opt=one_output_per_message") |
||||
|
||||
proto_common.compile( |
||||
actions = ctx.actions, |
||||
proto_info = proto_info, |
||||
proto_lang_toolchain_info = _get_lang_toolchain(ctx, generator), |
||||
generated_files = srcs.srcs + srcs.hdrs, |
||||
experimental_exec_group = "proto_compiler", |
||||
additional_args = additional_args, |
||||
) |
||||
|
||||
return srcs |
||||
|
||||
def _generate_name(ctx, generator, thunks = False): |
||||
if thunks: |
||||
return ctx.rule.attr.name + "." + generator + ".thunks" |
||||
return ctx.rule.attr.name + "." + generator |
||||
|
||||
def _get_dep_cc_infos(target, ctx, generator, cc_provider, dep_cc_provider): |
||||
rule_deps = ctx.rule.attr.deps |
||||
dep_ccinfos = [dep[cc_provider].cc_info for dep in rule_deps] |
||||
if dep_cc_provider: |
||||
# This gives access to our direct sibling; e.g., foo.upb.h can #include "foo.upb_minitable.h". |
||||
dep_ccinfos.append(target[dep_cc_provider].cc_info) |
||||
|
||||
# This gives access to imports; e.g., foo.upb.h can #include "import1.upb_minitable.h", |
||||
# but not transitive imports: foo.upb.h cannot #include "transitive_import1.upb_minitable.h". |
||||
dep_ccinfos += [dep[dep_cc_provider].cc_info for dep in rule_deps] |
||||
|
||||
return dep_ccinfos |
||||
|
||||
def _get_lang_toolchain(ctx, generator): |
||||
lang_toolchain_name = "_" + generator + "_toolchain" |
||||
return getattr(ctx.attr, lang_toolchain_name)[proto_common.ProtoLangToolchainInfo] |
||||
|
||||
def _compile_upb_protos(ctx, files, generator, dep_ccinfos, cc_provider, proto_info): |
||||
cc_info = cc_library_func( |
||||
ctx = ctx, |
||||
name = _generate_name(ctx, generator), |
||||
hdrs = files.hdrs, |
||||
srcs = files.srcs, |
||||
includes = [output_dir(ctx, proto_info)], |
||||
copts = ctx.attr._copts[UpbProtoLibraryCoptsInfo].copts, |
||||
dep_ccinfos = dep_ccinfos, |
||||
) |
||||
|
||||
if files.thunks: |
||||
cc_info_with_thunks = cc_library_func( |
||||
ctx = ctx, |
||||
name = _generate_name(ctx, generator, files.thunks), |
||||
hdrs = [], |
||||
srcs = files.thunks, |
||||
includes = [output_dir(ctx, proto_info)], |
||||
copts = ctx.attr._copts[UpbProtoLibraryCoptsInfo].copts, |
||||
dep_ccinfos = dep_ccinfos + [cc_info], |
||||
) |
||||
return cc_provider( |
||||
cc_info = cc_info, |
||||
cc_info_with_thunks = cc_info_with_thunks, |
||||
) |
||||
else: |
||||
return cc_provider( |
||||
cc_info = cc_info, |
||||
) |
||||
|
||||
_GENERATORS = ["upb", "upbdefs", "upb_minitable"] |
||||
|
||||
def _get_hint_providers(ctx, generator): |
||||
if generator not in _GENERATORS: |
||||
fail("Please add new generator '{}' to _GENERATORS list".format(generator)) |
||||
|
||||
possible_owners = [] |
||||
for generator in _GENERATORS: |
||||
possible_owners.append(ctx.label.relative(_generate_name(ctx, generator))) |
||||
possible_owners.append(ctx.label.relative(_generate_name(ctx, generator, thunks = True))) |
||||
|
||||
if hasattr(cc_common, "CcSharedLibraryHintInfo"): |
||||
return [cc_common.CcSharedLibraryHintInfo(owners = possible_owners)] |
||||
elif hasattr(cc_common, "CcSharedLibraryHintInfo_6_X_constructor_do_not_use"): |
||||
# This branch can be deleted once 6.X is not supported by upb rules |
||||
return [cc_common.CcSharedLibraryHintInfo_6_X_constructor_do_not_use(owners = possible_owners)] |
||||
|
||||
return [] |
||||
|
||||
def upb_proto_aspect_impl( |
||||
target, |
||||
ctx, |
||||
generator, |
||||
cc_provider, |
||||
dep_cc_provider, |
||||
file_provider, |
||||
provide_cc_shared_library_hints = True): |
||||
"""A shared aspect implementation for upb_*proto_library() rules. |
||||
|
||||
Args: |
||||
target: The `target` parameter from the aspect function. |
||||
ctx: The `ctx` parameter from the aspect function. |
||||
generator: A string describing which aspect we are generating. This triggers several special |
||||
behaviors, and ideally this will be refactored to be less magical. |
||||
cc_provider: The provider that this aspect will attach to the target. Should contain a |
||||
`cc_info` field. The aspect will ensure that each compilation action can compile and link |
||||
against this provider's cc_info for all proto_library() deps. |
||||
dep_cc_provider: For aspects that depend on other aspects, this is the provider of the aspect |
||||
that we depend on. The aspect will be able to include the header files from this provider. |
||||
file_provider: A provider that this aspect will attach to the target to expose the source |
||||
files generated by this aspect. These files are primarily useful for returning in |
||||
DefaultInfo(), so users who build the upb_*proto_library() rule directly can view the |
||||
generated sources. |
||||
provide_cc_shared_library_hints: Whether shared library hints should be provided. |
||||
|
||||
Returns: |
||||
The `cc_provider` and `file_provider` providers as described above. |
||||
""" |
||||
dep_ccinfos = _get_dep_cc_infos(target, ctx, generator, cc_provider, dep_cc_provider) |
||||
if not getattr(ctx.rule.attr, "srcs", []): |
||||
# This target doesn't declare any sources; reexport all of its deps instead. |
||||
# This is known as an "alias library": |
||||
# https://bazel.build/reference/be/protocol-buffer#proto_library.srcs |
||||
files = _merge_generated_srcs([dep[file_provider].srcs for dep in ctx.rule.attr.deps]) |
||||
wrapped_cc_info = cc_provider( |
||||
cc_info = cc_common.merge_cc_infos(direct_cc_infos = dep_ccinfos), |
||||
) |
||||
else: |
||||
proto_info = target[ProtoInfo] |
||||
cc_toolchain = find_cpp_toolchain(ctx) |
||||
feature_configuration = _get_feature_configuration(ctx, cc_toolchain, proto_info) |
||||
files = _generate_upb_protos( |
||||
ctx, |
||||
generator, |
||||
proto_info, |
||||
feature_configuration, |
||||
) |
||||
wrapped_cc_info = _compile_upb_protos( |
||||
ctx, |
||||
files, |
||||
generator, |
||||
dep_ccinfos + [_get_lang_toolchain(ctx, generator).runtime[CcInfo]], |
||||
cc_provider, |
||||
proto_info, |
||||
) |
||||
|
||||
hints = _get_hint_providers(ctx, generator) if provide_cc_shared_library_hints else [] |
||||
|
||||
return hints + [ |
||||
file_provider(srcs = files), |
||||
wrapped_cc_info, |
||||
] |
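
For orientation, here is a condensed sketch of how a concrete aspect wires `upb_proto_aspect_impl` together. It abbreviates the real wiring that appears in `upb_minitable_proto_library.bzl` later in this change; the aspect and provider names here are shortened stand-ins.

```starlark
load("//bazel/private:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//bazel/private:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")

_WrappedCcInfo = provider("Wrapped CcInfo", fields = ["cc_info"])
_SrcsInfo = provider("Generated sources", fields = ["srcs"])

def _aspect_impl(target, ctx):
    return upb_proto_aspect_impl(
        target = target,
        ctx = ctx,
        generator = "upb_minitable",
        cc_provider = _WrappedCcInfo,
        dep_cc_provider = None,  # no sibling aspect to pull headers from
        file_provider = _SrcsInfo,
    )

my_minitable_aspect = aspect(
    implementation = _aspect_impl,
    attrs = {
        # The impl looks up the toolchain via "_" + generator + "_toolchain".
        "_upb_minitable_toolchain": attr.label(default = Label("//upb_generator:protoc-gen-upb_minitable_toolchain")),
        "_copts": attr.label(default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use"),
        "_cc_toolchain": attr.label(default = "@bazel_tools//tools/cpp:current_cc_toolchain"),
    },
    attr_aspects = ["deps"],
    fragments = ["cpp"],
    toolchains = upb_use_cpp_toolchain(),
    exec_groups = {"proto_compiler": exec_group()},
)
```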
@ -0,0 +1,86 @@ |
||||
"""A function to compile C/C++ code, like cc_library() but from Starlark.""" |
||||
|
||||
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain", "use_cpp_toolchain") |
||||
|
||||
# begin:google_only |
||||
# |
||||
# def upb_use_cpp_toolchain(): |
||||
# # TODO: We shouldn't need to add this to the result of use_cpp_toolchain(). |
||||
# return [ |
||||
# config_common.toolchain_type( |
||||
# "@bazel_tools//tools/cpp:cc_runtimes_toolchain_type", |
||||
# mandatory = False, |
||||
# ), |
||||
# ] + use_cpp_toolchain() |
||||
# |
||||
# end:google_only |
||||
|
||||
# begin:github_only |
||||
def upb_use_cpp_toolchain(): |
||||
return use_cpp_toolchain() |
||||
|
||||
# end:github_only |
||||
|
||||
def cc_library_func(ctx, name, hdrs, srcs, copts, includes, dep_ccinfos): |
||||
"""Like cc_library(), but callable from rules. |
||||
|
||||
Args: |
||||
ctx: Rule context. |
||||
name: Unique name used to generate output files. |
||||
hdrs: Public headers that can be #included from other rules. |
||||
srcs: C/C++ source files. |
||||
copts: Additional options for cc compilation. |
||||
includes: Additional include paths. |
||||
dep_ccinfos: CcInfo providers of dependencies we should build/link against. |
||||
|
||||
Returns: |
||||
CcInfo provider for this compilation. |
||||
""" |
||||
|
||||
# begin:google_only |
||||
# cc_runtimes_toolchain = ctx.toolchains["@bazel_tools//tools/cpp:cc_runtimes_toolchain_type"] |
||||
# if cc_runtimes_toolchain: |
||||
# dep_ccinfos += [ |
||||
# target[CcInfo] |
||||
# for target in cc_runtimes_toolchain.cc_runtimes_info.runtimes |
||||
# ] |
||||
# |
||||
# end:google_only |
||||
|
||||
compilation_contexts = [info.compilation_context for info in dep_ccinfos] |
||||
linking_contexts = [info.linking_context for info in dep_ccinfos] |
||||
toolchain = find_cpp_toolchain(ctx) |
||||
feature_configuration = cc_common.configure_features( |
||||
ctx = ctx, |
||||
cc_toolchain = toolchain, |
||||
requested_features = ctx.features, |
||||
unsupported_features = ctx.disabled_features, |
||||
) |
||||
|
||||
(compilation_context, compilation_outputs) = cc_common.compile( |
||||
actions = ctx.actions, |
||||
feature_configuration = feature_configuration, |
||||
cc_toolchain = toolchain, |
||||
name = name, |
||||
srcs = srcs, |
||||
includes = includes, |
||||
public_hdrs = hdrs, |
||||
user_compile_flags = copts, |
||||
compilation_contexts = compilation_contexts, |
||||
) |
||||
|
||||
# buildifier: disable=unused-variable |
||||
(linking_context, linking_outputs) = cc_common.create_linking_context_from_compilation_outputs( |
||||
actions = ctx.actions, |
||||
name = name, |
||||
feature_configuration = feature_configuration, |
||||
cc_toolchain = toolchain, |
||||
compilation_outputs = compilation_outputs, |
||||
linking_contexts = linking_contexts, |
||||
disallow_dynamic_library = cc_common.is_enabled(feature_configuration = feature_configuration, feature_name = "targets_windows") or not cc_common.is_enabled(feature_configuration = feature_configuration, feature_name = "supports_dynamic_linker"), |
||||
) |
||||
|
||||
return CcInfo( |
||||
compilation_context = compilation_context, |
||||
linking_context = linking_context, |
||||
) |
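
A minimal sketch of calling `cc_library_func` from an ordinary rule implementation; everything except `cc_library_func` and `upb_use_cpp_toolchain` is hypothetical. Note the rule must declare a `_cc_toolchain` attribute, request the `cpp` fragment, and register the C++ toolchain, just as the aspects in this change do.

```starlark
load(
    "//bazel/private:upb_proto_library_internal/cc_library_func.bzl",
    "cc_library_func",
    "upb_use_cpp_toolchain",
)

def _my_cc_rule_impl(ctx):
    # Compile and link the given sources, building against the deps' CcInfos.
    cc_info = cc_library_func(
        ctx = ctx,
        name = ctx.label.name,
        hdrs = ctx.files.hdrs,
        srcs = ctx.files.srcs,
        copts = [],
        includes = [],
        dep_ccinfos = [dep[CcInfo] for dep in ctx.attr.deps],
    )
    return [cc_info]

my_cc_rule = rule(
    implementation = _my_cc_rule_impl,
    attrs = {
        "hdrs": attr.label_list(allow_files = [".h"]),
        "srcs": attr.label_list(allow_files = [".c", ".cc"]),
        "deps": attr.label_list(providers = [CcInfo]),
        "_cc_toolchain": attr.label(default = "@bazel_tools//tools/cpp:current_cc_toolchain"),
    },
    fragments = ["cpp"],
    toolchains = upb_use_cpp_toolchain(),
)
```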
@ -0,0 +1,16 @@ |
||||
"""Dummy rule to expose select() copts to aspects.""" |
||||
|
||||
UpbProtoLibraryCoptsInfo = provider( |
||||
"Provides copts for upb proto targets", |
||||
fields = { |
||||
"copts": "copts for upb_proto_library()", |
||||
}, |
||||
) |
||||
|
||||
def upb_proto_library_copts_impl(ctx): |
||||
return UpbProtoLibraryCoptsInfo(copts = ctx.attr.copts) |
||||
|
||||
upb_proto_library_copts = rule( |
||||
implementation = upb_proto_library_copts_impl, |
||||
attrs = {"copts": attr.string_list(default = [])}, |
||||
) |
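
The indirection exists because an aspect cannot evaluate `select()` in its own attribute defaults; wrapping the copts in this tiny rule lets the aspects depend on a label (the `_copts` attribute used throughout this change) and read the resolved value from the provider at analysis time. A hypothetical instantiation, with illustrative config settings and flags:

```starlark
load("//bazel/private:upb_proto_library_internal/copts.bzl", "upb_proto_library_copts")

upb_proto_library_copts(
    name = "upb_proto_library_copts__for_generated_code_only_do_not_use",
    copts = select({
        ":msvc": ["/wd4090"],  # hypothetical per-configuration flags
        "//conditions:default": [],
    }),
    visibility = ["//visibility:public"],
)
```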
@ -0,0 +1,39 @@ |
||||
"""Internal rule implementation for upb_*_proto_library() rules.""" |
||||
|
||||
def _filter_none(elems): |
||||
out = [] |
||||
for elem in elems: |
||||
if elem: |
||||
out.append(elem) |
||||
return out |
||||
|
||||
def upb_proto_rule_impl(ctx, cc_info_provider, srcs_provider): |
||||
"""An implementation for upb_*proto_library() rules. |
||||
|
||||
Args: |
||||
ctx: The rule `ctx` argument |
||||
cc_info_provider: The provider containing a wrapped CcInfo that will be exposed to users who |
||||
depend on this rule. |
||||
srcs_provider: The provider containing the generated source files. This will be used to make |
||||
the DefaultInfo return the source files. |
||||
|
||||
Returns: |
||||
Providers for this rule. |
||||
""" |
||||
if len(ctx.attr.deps) != 1: |
||||
fail("only one deps dependency allowed.") |
||||
dep = ctx.attr.deps[0] |
||||
srcs = dep[srcs_provider].srcs |
||||
cc_info = dep[cc_info_provider].cc_info |
||||
|
||||
lib = cc_info.linking_context.linker_inputs.to_list()[0].libraries[0] |
||||
files = _filter_none([ |
||||
lib.static_library, |
||||
lib.pic_static_library, |
||||
lib.dynamic_library, |
||||
]) |
||||
return [ |
||||
DefaultInfo(files = depset(files + srcs.hdrs + srcs.srcs)), |
||||
srcs, |
||||
cc_info, |
||||
] |
@ -0,0 +1,3 @@ |
||||
"""proto_library rule""" |
||||
|
||||
proto_library = native.proto_library |
@ -0,0 +1,211 @@ |
||||
"""The implementation of the `py_proto_library` rule and its aspect.""" |
||||
|
||||
load("@rules_python//python:py_info.bzl", "PyInfo") |
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load("//bazel/common:proto_info.bzl", "ProtoInfo") |
||||
|
||||
PY_PROTO_TOOLCHAIN = "@rules_python//python/proto:toolchain_type" |
||||
|
||||
_PyProtoInfo = provider( |
||||
doc = "Encapsulates information needed by the Python proto rules.", |
||||
fields = { |
||||
"imports": """ |
||||
(depset[str]) The field forwarding PyInfo.imports coming from |
||||
the proto language runtime dependency.""", |
||||
"runfiles_from_proto_deps": """ |
||||
(depset[File]) Files from the transitive closure implicit proto |
||||
dependencies""", |
||||
"transitive_sources": """(depset[File]) The Python sources.""", |
||||
}, |
||||
) |
||||
|
||||
def _filter_provider(provider, *attrs): |
||||
return [dep[provider] for attr in attrs for dep in attr if provider in dep] |
||||
|
||||
def _incompatible_toolchains_enabled(): |
||||
return getattr(proto_common, "INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION", False) |
||||
|
||||
def _py_proto_aspect_impl(target, ctx): |
||||
"""Generates and compiles Python code for a proto_library. |
||||
|
||||
The function runs protobuf compiler on the `proto_library` target generating |
||||
a .py file for each .proto file. |
||||
|
||||
Args: |
||||
target: (Target) A target providing `ProtoInfo`. Usually this means a |
||||
`proto_library` target, but not always; you must expect to visit |
||||
non-`proto_library` targets, too. |
||||
ctx: (RuleContext) The rule context. |
||||
|
||||
Returns: |
||||
([_PyProtoInfo]) Providers collecting transitive information about |
||||
generated files. |
||||
""" |
||||
|
||||
_proto_library = ctx.rule.attr |
||||
|
||||
# Check Proto file names |
||||
for proto in target[ProtoInfo].direct_sources: |
||||
if proto.is_source and "-" in proto.dirname: |
||||
fail("Cannot generate Python code for a .proto whose path contains '-' ({}).".format( |
||||
proto.path, |
||||
)) |
||||
|
||||
if _incompatible_toolchains_enabled(): |
||||
toolchain = ctx.toolchains[PY_PROTO_TOOLCHAIN] |
||||
if not toolchain: |
||||
fail("No toolchains registered for '%s'." % PY_PROTO_TOOLCHAIN) |
||||
proto_lang_toolchain_info = toolchain.proto |
||||
else: |
||||
proto_lang_toolchain_info = getattr(ctx.attr, "_aspect_proto_toolchain")[proto_common.ProtoLangToolchainInfo] |
||||
|
||||
api_deps = [proto_lang_toolchain_info.runtime] |
||||
|
||||
generated_sources = [] |
||||
proto_info = target[ProtoInfo] |
||||
proto_root = proto_info.proto_source_root |
||||
if proto_info.direct_sources: |
||||
# Generate py files |
||||
generated_sources = proto_common.declare_generated_files( |
||||
actions = ctx.actions, |
||||
proto_info = proto_info, |
||||
extension = "_pb2.py", |
||||
name_mapper = lambda name: name.replace("-", "_").replace(".", "/"), |
||||
) |
||||
|
||||
# Handles multiple repository and virtual import cases |
||||
if proto_root.startswith(ctx.bin_dir.path): |
||||
proto_root = proto_root[len(ctx.bin_dir.path) + 1:] |
||||
|
||||
plugin_output = ctx.bin_dir.path + "/" + proto_root |
||||
proto_root = ctx.workspace_name + "/" + proto_root |
||||
|
||||
proto_common.compile( |
||||
actions = ctx.actions, |
||||
proto_info = proto_info, |
||||
proto_lang_toolchain_info = proto_lang_toolchain_info, |
||||
generated_files = generated_sources, |
||||
plugin_output = plugin_output, |
||||
) |
||||
|
||||
# Generated sources == Python sources |
||||
python_sources = generated_sources |
||||
|
||||
deps = _filter_provider(_PyProtoInfo, getattr(_proto_library, "deps", [])) |
||||
runfiles_from_proto_deps = depset( |
||||
transitive = [dep[DefaultInfo].default_runfiles.files for dep in api_deps] + |
||||
[dep.runfiles_from_proto_deps for dep in deps], |
||||
) |
||||
transitive_sources = depset( |
||||
direct = python_sources, |
||||
transitive = [dep.transitive_sources for dep in deps], |
||||
) |
||||
|
||||
return [ |
||||
_PyProtoInfo( |
||||
imports = depset( |
||||
# Adding to PYTHONPATH so the generated modules can be |
||||
# imported. This is necessary when there is |
||||
# strip_import_prefix, the Python modules are generated under |
||||
# _virtual_imports. But it's undesirable otherwise, because it |
||||
# will put the repo root at the top of the PYTHONPATH, ahead of |
||||
# directories added through `imports` attributes. |
||||
[proto_root] if "_virtual_imports" in proto_root else [], |
||||
transitive = [dep[PyInfo].imports for dep in api_deps] + [dep.imports for dep in deps], |
||||
), |
||||
runfiles_from_proto_deps = runfiles_from_proto_deps, |
||||
transitive_sources = transitive_sources, |
||||
), |
||||
] |
||||
|
||||
_py_proto_aspect = aspect( |
||||
implementation = _py_proto_aspect_impl, |
||||
attrs = {} if _incompatible_toolchains_enabled() else { |
||||
"_aspect_proto_toolchain": attr.label( |
||||
default = "//python:python_toolchain", |
||||
), |
||||
}, |
||||
attr_aspects = ["deps"], |
||||
required_providers = [ProtoInfo], |
||||
provides = [_PyProtoInfo], |
||||
toolchains = [PY_PROTO_TOOLCHAIN] if _incompatible_toolchains_enabled() else [], |
||||
) |
||||
|
||||
def _py_proto_library_rule(ctx): |
||||
"""Merges results of `py_proto_aspect` in `deps`. |
||||
|
||||
Args: |
||||
ctx: (RuleContext) The rule context. |
||||
Returns: |
||||
([PyInfo, DefaultInfo, OutputGroupInfo]) |
||||
""" |
||||
if not ctx.attr.deps: |
||||
fail("'deps' attribute mustn't be empty.") |
||||
|
||||
pyproto_infos = _filter_provider(_PyProtoInfo, ctx.attr.deps) |
||||
default_outputs = depset( |
||||
transitive = [info.transitive_sources for info in pyproto_infos], |
||||
) |
||||
|
||||
return [ |
||||
DefaultInfo( |
||||
files = default_outputs, |
||||
default_runfiles = ctx.runfiles(transitive_files = depset( |
||||
transitive = |
||||
[default_outputs] + |
||||
[info.runfiles_from_proto_deps for info in pyproto_infos], |
||||
)), |
||||
), |
||||
OutputGroupInfo( |
||||
default = depset(), |
||||
), |
||||
PyInfo( |
||||
transitive_sources = default_outputs, |
||||
imports = depset(transitive = [info.imports for info in pyproto_infos]), |
||||
# Proto always produces 2- and 3- compatible source files |
||||
has_py2_only_sources = False, |
||||
has_py3_only_sources = False, |
||||
), |
||||
] |
||||
|
||||
py_proto_library = rule( |
||||
implementation = _py_proto_library_rule, |
||||
doc = """ |
||||
Use `py_proto_library` to generate Python libraries from `.proto` files. |
||||
|
||||
The convention is to name the `py_proto_library` rule `foo_py_pb2`, |
||||
when it is wrapping `proto_library` rule `foo_proto`. |
||||
|
||||
`deps` must point to a `proto_library` rule. |
||||
|
||||
Example: |
||||
|
||||
```starlark |
||||
py_library( |
||||
name = "lib", |
||||
deps = [":foo_py_pb2"], |
||||
) |
||||
|
||||
py_proto_library( |
||||
name = "foo_py_pb2", |
||||
deps = [":foo_proto"], |
||||
) |
||||
|
||||
proto_library( |
||||
name = "foo_proto", |
||||
srcs = ["foo.proto"], |
||||
) |
||||
```""", |
||||
attrs = { |
||||
"deps": attr.label_list( |
||||
doc = """ |
||||
The list of `proto_library` rules to generate Python libraries for. |
||||
|
||||
Usually this is just the one target: the proto library of interest. |
||||
It can be any target providing `ProtoInfo`.""", |
||||
providers = [ProtoInfo], |
||||
aspects = [_py_proto_aspect], |
||||
), |
||||
}, |
||||
provides = [PyInfo], |
||||
) |
@ -0,0 +1,17 @@ |
||||
# Copyright (c) 2009-2021, Google LLC |
||||
# All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
|
||||
"""Temporary alias to repository rule for using Python 3.x headers from the system.""" |
||||
|
||||
load( |
||||
"//python/dist:system_python.bzl", |
||||
_system_python = "system_python", |
||||
) |
||||
|
||||
# TODO: Temporary alias. This is deprecated and to be removed in a future |
||||
# release. Users should now get system_python from protobuf_deps.bzl. |
||||
system_python = _system_python |
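
For reference, a sketch of what direct use of the underlying repository rule looks like in a WORKSPACE, along the lines of what `protobuf_deps()` does for you; the `minimum_python_version` value is illustrative.

```starlark
load("@com_google_protobuf//python/dist:system_python.bzl", "system_python")

# Registers a repository wrapping the system's Python 3.x headers.
system_python(
    name = "system_python",
    minimum_python_version = "3.7",  # illustrative
)
```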
@ -0,0 +1,23 @@ |
||||
load("@bazel_skylib//:bzl_library.bzl", "bzl_library") |
||||
|
||||
bzl_library( |
||||
name = "proto_toolchain_bzl", |
||||
srcs = [ |
||||
"proto_toolchain.bzl", |
||||
], |
||||
visibility = ["//visibility:public"], |
||||
deps = [ |
||||
"//bazel/private:proto_toolchain_rule_bzl", |
||||
], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "proto_lang_toolchain_bzl", |
||||
srcs = [ |
||||
"proto_lang_toolchain.bzl", |
||||
], |
||||
visibility = ["//visibility:public"], |
||||
deps = [ |
||||
"//bazel/common:proto_common_bzl", |
||||
], |
||||
) |
@ -0,0 +1,34 @@ |
||||
"""proto_lang_toolchain rule""" |
||||
|
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
|
||||
def proto_lang_toolchain(*, name, toolchain_type = None, exec_compatible_with = [], target_compatible_with = [], **attrs): |
||||
"""Creates a proto_lang_toolchain and corresponding toolchain target. |
||||
|
||||
Toolchain target is only created when toolchain_type is set. |
||||
|
||||
https://docs.bazel.build/versions/master/be/protocol-buffer.html#proto_lang_toolchain |
||||
|
||||
Args: |
||||
|
||||
name: name of the toolchain |
||||
toolchain_type: The toolchain type |
||||
exec_compatible_with: ([constraints]) List of constraints the prebuilt binaries are compatible with. |
||||
target_compatible_with: ([constraints]) List of constraints the target libraries are compatible with. |
||||
**attrs: Rule attributes |
||||
""" |
||||
|
||||
if getattr(proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False): |
||||
attrs["toolchain_type"] = toolchain_type |
||||
|
||||
# buildifier: disable=native-proto |
||||
native.proto_lang_toolchain(name = name, **attrs) |
||||
|
||||
if toolchain_type: |
||||
native.toolchain( |
||||
name = name + "_toolchain", |
||||
toolchain_type = toolchain_type, |
||||
exec_compatible_with = exec_compatible_with, |
||||
target_compatible_with = target_compatible_with, |
||||
toolchain = name, |
||||
) |
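
A hypothetical BUILD instantiation of this macro; the load path assumes the macro's package, and the runtime label is illustrative. Because `toolchain_type` is set here, a companion `:python_toolchain_toolchain` target is also created and can be passed to `register_toolchains()`.

```starlark
load("//bazel/toolchains:proto_lang_toolchain.bzl", "proto_lang_toolchain")  # assumed load path

proto_lang_toolchain(
    name = "python_toolchain",
    command_line = "--python_out=%s",
    progress_message = "Generating Python proto_library %{label}",
    runtime = ":python_runtime",  # hypothetical runtime target
    toolchain_type = "@rules_python//python/proto:toolchain_type",
)
```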
@ -0,0 +1,26 @@ |
||||
"""Macro wrapping the proto_toolchain implementation. |
||||
|
||||
The macro additionally creates toolchain target when toolchain_type is given. |
||||
""" |
||||
|
||||
load("//bazel/private:proto_toolchain_rule.bzl", _proto_toolchain_rule = "proto_toolchain") |
||||
|
||||
def proto_toolchain(*, name, proto_compiler, exec_compatible_with = []): |
||||
"""Creates a proto_toolchain and toolchain target for proto_library. |
||||
|
||||
Toolchain target is suffixed with "_toolchain". |
||||
|
||||
Args: |
||||
name: name of the toolchain |
||||
proto_compiler: (Label) Either proto compiler sources or a prebuilt binary. |
||||
exec_compatible_with: ([constraints]) List of constraints the prebuilt binary is compatible with. |
||||
""" |
||||
_proto_toolchain_rule(name = name, proto_compiler = proto_compiler) |
||||
|
||||
native.toolchain( |
||||
name = name + "_toolchain", |
||||
toolchain_type = "@rules_proto//proto:toolchain_type", |
||||
exec_compatible_with = exec_compatible_with, |
||||
target_compatible_with = [], |
||||
toolchain = name, |
||||
) |
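
A hypothetical BUILD instantiation; the load path assumes the macro's package and the compiler label is illustrative. This creates both the `:protoc_sources` toolchain rule and a `:protoc_sources_toolchain` target of type `@rules_proto//proto:toolchain_type`, which callers register via `register_toolchains()`.

```starlark
load("//bazel/toolchains:proto_toolchain.bzl", "proto_toolchain")  # assumed load path

proto_toolchain(
    name = "protoc_sources",
    proto_compiler = "//:protoc",  # hypothetical compiler label
)
```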
@ -0,0 +1,69 @@ |
||||
"""upb_c_proto_library() exposes upb's generated C API for protobuf (foo.upb.h)""" |
||||
|
||||
load("//bazel:upb_minitable_proto_library.bzl", "UpbMinitableCcInfo", "upb_minitable_proto_library_aspect") |
||||
load("//bazel/private:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl") |
||||
load("//bazel/private:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain") |
||||
load("//bazel/private:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl") |
||||
|
||||
UpbWrappedCcInfo = provider( |
||||
"Provider for cc_info for protos", |
||||
fields = ["cc_info", "cc_info_with_thunks"], |
||||
) |
||||
|
||||
_UpbWrappedGeneratedSrcsInfo = provider( |
||||
"Provider for generated sources", |
||||
fields = ["srcs"], |
||||
) |
||||
|
||||
def _upb_c_proto_library_aspect_impl(target, ctx): |
||||
return upb_proto_aspect_impl( |
||||
target = target, |
||||
ctx = ctx, |
||||
generator = "upb", |
||||
cc_provider = UpbWrappedCcInfo, |
||||
dep_cc_provider = UpbMinitableCcInfo, |
||||
file_provider = _UpbWrappedGeneratedSrcsInfo, |
||||
provide_cc_shared_library_hints = False, |
||||
) |
||||
|
||||
upb_c_proto_library_aspect = aspect( |
||||
attrs = { |
||||
"_copts": attr.label( |
||||
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use", |
||||
), |
||||
"_upb_toolchain": attr.label( |
||||
default = Label("//upb_generator:protoc-gen-upb_toolchain"), |
||||
), |
||||
"_cc_toolchain": attr.label( |
||||
default = "@bazel_tools//tools/cpp:current_cc_toolchain", |
||||
), |
||||
}, |
||||
implementation = _upb_c_proto_library_aspect_impl, |
||||
requires = [upb_minitable_proto_library_aspect], |
||||
required_aspect_providers = [UpbMinitableCcInfo], |
||||
provides = [ |
||||
UpbWrappedCcInfo, |
||||
_UpbWrappedGeneratedSrcsInfo, |
||||
], |
||||
attr_aspects = ["deps"], |
||||
fragments = ["cpp"], |
||||
toolchains = upb_use_cpp_toolchain(), |
||||
exec_groups = { |
||||
"proto_compiler": exec_group(), |
||||
}, |
||||
) |
||||
|
||||
def _upb_c_proto_library_rule_impl(ctx): |
||||
return upb_proto_rule_impl(ctx, UpbWrappedCcInfo, _UpbWrappedGeneratedSrcsInfo) |
||||
|
||||
upb_c_proto_library = rule( |
||||
implementation = _upb_c_proto_library_rule_impl, |
||||
attrs = { |
||||
"deps": attr.label_list( |
||||
aspects = [upb_c_proto_library_aspect], |
||||
allow_rules = ["proto_library"], |
||||
providers = [ProtoInfo], |
||||
), |
||||
}, |
||||
provides = [CcInfo], |
||||
) |
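
A typical BUILD usage, following the pattern in the benchmarks package later in this change; `foo.proto` and the target names are illustrative.

```starlark
load("//bazel:proto_library.bzl", "proto_library")
load("//bazel:upb_c_proto_library.bzl", "upb_c_proto_library")

proto_library(
    name = "foo_proto",
    srcs = ["foo.proto"],
)

upb_c_proto_library(
    name = "foo_upb_proto",
    deps = [":foo_proto"],
)

# C code depending on :foo_upb_proto may #include "foo.upb.h".
cc_library(
    name = "foo_user",
    srcs = ["foo_user.c"],
    deps = [":foo_upb_proto"],
)
```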
@ -0,0 +1,77 @@ |
||||
"""upb_minitable_proto_library() exposes upb's generated minitables (foo.upb_minitable.h)""" |
||||
|
||||
load("//bazel/private:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl") |
||||
load("//bazel/private:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain") |
||||
load("//bazel/private:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl") |
||||
|
||||
UpbMinitableCcInfo = provider( |
||||
"Provider for cc_info for protos", |
||||
fields = ["cc_info"], |
||||
) |
||||
|
||||
_UpbWrappedGeneratedSrcsInfo = provider( |
||||
"Provider for generated sources", |
||||
fields = ["srcs"], |
||||
) |
||||
|
||||
def _upb_minitable_proto_library_aspect_impl(target, ctx): |
||||
return upb_proto_aspect_impl( |
||||
target = target, |
||||
ctx = ctx, |
||||
generator = "upb_minitable", |
||||
cc_provider = UpbMinitableCcInfo, |
||||
dep_cc_provider = None, |
||||
file_provider = _UpbWrappedGeneratedSrcsInfo, |
||||
) |
||||
|
||||
def _get_upb_minitable_proto_library_aspect_provides(): |
||||
provides = [ |
||||
UpbMinitableCcInfo, |
||||
_UpbWrappedGeneratedSrcsInfo, |
||||
] |
||||
|
||||
if hasattr(cc_common, "CcSharedLibraryHintInfo"): |
||||
provides.append(cc_common.CcSharedLibraryHintInfo) |
||||
elif hasattr(cc_common, "CcSharedLibraryHintInfo_6_X_getter_do_not_use"): |
||||
# This branch can be deleted once 6.X is not supported by upb rules |
||||
provides.append(cc_common.CcSharedLibraryHintInfo_6_X_getter_do_not_use) |
||||
|
||||
return provides |
||||
|
||||
upb_minitable_proto_library_aspect = aspect( |
||||
attrs = { |
||||
"_copts": attr.label( |
||||
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use", |
||||
), |
||||
"_upb_minitable_toolchain": attr.label( |
||||
default = Label("//upb_generator:protoc-gen-upb_minitable_toolchain"), |
||||
), |
||||
"_cc_toolchain": attr.label( |
||||
default = "@bazel_tools//tools/cpp:current_cc_toolchain", |
||||
), |
||||
"_fasttable_enabled": attr.label(default = "//upb:fasttable_enabled"), |
||||
}, |
||||
implementation = _upb_minitable_proto_library_aspect_impl, |
||||
provides = _get_upb_minitable_proto_library_aspect_provides(), |
||||
attr_aspects = ["deps"], |
||||
fragments = ["cpp"], |
||||
toolchains = upb_use_cpp_toolchain(), |
||||
exec_groups = { |
||||
"proto_compiler": exec_group(), |
||||
}, |
||||
) |
||||
|
||||
def _upb_minitable_proto_library_rule_impl(ctx): |
||||
return upb_proto_rule_impl(ctx, UpbMinitableCcInfo, _UpbWrappedGeneratedSrcsInfo) |
||||
|
||||
upb_minitable_proto_library = rule( |
||||
implementation = _upb_minitable_proto_library_rule_impl, |
||||
attrs = { |
||||
"deps": attr.label_list( |
||||
aspects = [upb_minitable_proto_library_aspect], |
||||
allow_rules = ["proto_library"], |
||||
providers = [ProtoInfo], |
||||
), |
||||
}, |
||||
provides = [CcInfo], |
||||
) |
@ -0,0 +1,35 @@ |
||||
# Copyright (c) 2009-2021, Google LLC |
||||
# All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
|
||||
"""Public rules for using upb protos: |
||||
- upb_proto_library() |
||||
- upb_proto_reflection_library() |
||||
""" |
||||
|
||||
load( |
||||
"//bazel:upb_c_proto_library.bzl", |
||||
_UpbWrappedCcInfo = "UpbWrappedCcInfo", |
||||
_upb_c_proto_library = "upb_c_proto_library", |
||||
_upb_c_proto_library_aspect = "upb_c_proto_library_aspect", |
||||
) |
||||
load( |
||||
"//bazel:upb_proto_reflection_library.bzl", |
||||
_upb_proto_reflection_library = "upb_proto_reflection_library", |
||||
) |
||||
load( |
||||
"//bazel/private:upb_proto_library_internal/aspect.bzl", |
||||
_GeneratedSrcsInfo = "GeneratedSrcsInfo", |
||||
) |
||||
|
||||
# Temporary alias, see b/291827469. |
||||
upb_proto_library = _upb_c_proto_library |
||||
|
||||
upb_c_proto_library = _upb_c_proto_library |
||||
upb_proto_reflection_library = _upb_proto_reflection_library |
||||
GeneratedSrcsInfo = _GeneratedSrcsInfo |
||||
UpbWrappedCcInfo = _UpbWrappedCcInfo |
||||
upb_proto_library_aspect = _upb_c_proto_library_aspect |
@ -0,0 +1,67 @@ |
||||
"""upb_c_proto_reflection_library() exposes upb reflection for protobuf (foo.upbdefs.h)""" |
||||
|
||||
load("//bazel:upb_minitable_proto_library.bzl", "UpbMinitableCcInfo", "upb_minitable_proto_library_aspect") |
||||
load("//bazel/private:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl") |
||||
load("//bazel/private:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain") |
||||
load("//bazel/private:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl") |
||||
|
||||
_UpbDefsWrappedCcInfo = provider("Provider for cc_info for protos", fields = ["cc_info"]) |
||||
|
||||
_WrappedDefsGeneratedSrcsInfo = provider( |
||||
"Provider for generated reflective sources", |
||||
fields = ["srcs"], |
||||
) |
||||
|
||||
def _upb_proto_reflection_library_aspect_impl(target, ctx): |
||||
return upb_proto_aspect_impl( |
||||
target = target, |
||||
ctx = ctx, |
||||
generator = "upbdefs", |
||||
cc_provider = _UpbDefsWrappedCcInfo, |
||||
dep_cc_provider = UpbMinitableCcInfo, |
||||
file_provider = _WrappedDefsGeneratedSrcsInfo, |
||||
provide_cc_shared_library_hints = False, |
||||
) |
||||
|
||||
_upb_proto_reflection_library_aspect = aspect( |
||||
attrs = { |
||||
"_copts": attr.label( |
||||
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use", |
||||
), |
||||
"_upbdefs_toolchain": attr.label( |
||||
default = Label("//upb_generator:protoc-gen-upbdefs_toolchain"), |
||||
cfg = getattr(proto_common, "proto_lang_toolchain_cfg", "target"), |
||||
), |
||||
"_cc_toolchain": attr.label( |
||||
default = "@bazel_tools//tools/cpp:current_cc_toolchain", |
||||
), |
||||
}, |
||||
implementation = _upb_proto_reflection_library_aspect_impl, |
||||
requires = [upb_minitable_proto_library_aspect], |
||||
required_aspect_providers = [UpbMinitableCcInfo], |
||||
provides = [ |
||||
_UpbDefsWrappedCcInfo, |
||||
_WrappedDefsGeneratedSrcsInfo, |
||||
], |
||||
attr_aspects = ["deps"], |
||||
fragments = ["cpp"], |
||||
toolchains = upb_use_cpp_toolchain(), |
||||
exec_groups = { |
||||
"proto_compiler": exec_group(), |
||||
}, |
||||
) |
||||
|
||||
def _upb_proto_reflection_library_rule_impl(ctx): |
||||
return upb_proto_rule_impl(ctx, _UpbDefsWrappedCcInfo, _WrappedDefsGeneratedSrcsInfo) |
||||
|
||||
upb_proto_reflection_library = rule( |
||||
implementation = _upb_proto_reflection_library_rule_impl, |
||||
attrs = { |
||||
"deps": attr.label_list( |
||||
aspects = [_upb_proto_reflection_library_aspect], |
||||
allow_rules = ["proto_library"], |
||||
providers = [ProtoInfo], |
||||
), |
||||
}, |
||||
provides = [CcInfo], |
||||
) |
@ -0,0 +1,245 @@ |
||||
# Copyright (c) 2009-2021, Google LLC |
||||
# All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
|
||||
load("@rules_python//python:defs.bzl", "py_binary") |
||||
load("//bazel:cc_proto_library.bzl", "cc_proto_library") |
||||
load("//bazel:proto_library.bzl", "proto_library") |
||||
load( |
||||
"//bazel:upb_proto_library.bzl", |
||||
"upb_c_proto_library", |
||||
"upb_proto_reflection_library", |
||||
) |
||||
load( |
||||
":build_defs.bzl", |
||||
"cc_optimizefor_proto_library", |
||||
"expand_suffixes", |
||||
"tmpl_cc_binary", |
||||
) |
||||
|
||||
# begin:google_only |
||||
# package(default_applicable_licenses = ["//upb:license"]) |
||||
# end:google_only |
||||
|
||||
licenses(["notice"]) |
||||
|
||||
proto_library( |
||||
name = "descriptor_proto", |
||||
srcs = ["descriptor.proto"], |
||||
) |
||||
|
||||
upb_c_proto_library( |
||||
name = "benchmark_descriptor_upb_proto", |
||||
deps = [":descriptor_proto"], |
||||
) |
||||
|
||||
upb_proto_reflection_library( |
||||
name = "benchmark_descriptor_upb_proto_reflection", |
||||
deps = [":descriptor_proto"], |
||||
) |
||||
|
||||
upb_proto_reflection_library( |
||||
name = "ads_upb_proto_reflection", |
||||
deps = ["@com_google_googleapis//:ads_proto"], |
||||
) |
||||
|
||||
cc_proto_library( |
||||
name = "benchmark_descriptor_cc_proto", |
||||
deps = [":descriptor_proto"], |
||||
) |
||||
|
||||
proto_library( |
||||
name = "benchmark_descriptor_sv_proto", |
||||
srcs = ["descriptor_sv.proto"], |
||||
) |
||||
|
||||
cc_proto_library( |
||||
name = "benchmark_descriptor_sv_cc_proto", |
||||
deps = [":benchmark_descriptor_sv_proto"], |
||||
) |
||||
|
||||
cc_test( |
||||
name = "benchmark", |
||||
testonly = 1, |
||||
srcs = ["benchmark.cc"], |
||||
deps = [ |
||||
":ads_upb_proto_reflection", |
||||
":benchmark_descriptor_cc_proto", |
||||
":benchmark_descriptor_sv_cc_proto", |
||||
":benchmark_descriptor_upb_proto", |
||||
":benchmark_descriptor_upb_proto_reflection", |
||||
"//:protobuf", |
||||
"//src/google/protobuf/json", |
||||
"//upb:base", |
||||
"//upb:descriptor_upb_proto", |
||||
"//upb:json", |
||||
"//upb:mem", |
||||
"//upb:reflection", |
||||
"//upb:wire", |
||||
"@com_github_google_benchmark//:benchmark_main", |
||||
"@com_google_absl//absl/container:flat_hash_set", |
||||
"@com_google_absl//absl/log:absl_check", |
||||
"@com_google_googletest//:gtest", |
||||
"@com_google_googletest//:gtest_main", |
||||
], |
||||
) |
||||
|
||||
# Size benchmarks. |
||||
|
||||
SIZE_BENCHMARKS = { |
||||
"empty": "Empty", |
||||
"descriptor": "FileDescriptorSet", |
||||
"100_msgs": "Message100", |
||||
"200_msgs": "Message200", |
||||
"100_fields": "Message", |
||||
"200_fields": "Message", |
||||
} |
||||
|
||||
py_binary( |
||||
name = "gen_synthetic_protos", |
||||
srcs = ["gen_synthetic_protos.py"], |
||||
python_version = "PY3", |
||||
) |
||||
|
||||
py_binary( |
||||
name = "gen_upb_binary_c", |
||||
srcs = ["gen_upb_binary_c.py"], |
||||
python_version = "PY3", |
||||
) |
||||
|
||||
py_binary( |
||||
name = "gen_protobuf_binary_cc", |
||||
srcs = ["gen_protobuf_binary_cc.py"], |
||||
python_version = "PY3", |
||||
) |
||||
|
||||
genrule( |
||||
name = "do_gen_synthetic_protos", |
||||
outs = [ |
||||
"100_msgs.proto", |
||||
"200_msgs.proto", |
||||
"100_fields.proto", |
||||
"200_fields.proto", |
||||
], |
||||
cmd = "$(execpath :gen_synthetic_protos) $(RULEDIR)", |
||||
tools = [":gen_synthetic_protos"], |
||||
) |
||||
|
||||
proto_library( |
||||
name = "100_msgs_proto", |
||||
srcs = ["100_msgs.proto"], |
||||
) |
||||
|
||||
proto_library( |
||||
name = "200_msgs_proto", |
||||
srcs = ["200_msgs.proto"], |
||||
) |
||||
|
||||
proto_library( |
||||
name = "100_fields_proto", |
||||
srcs = ["100_fields.proto"], |
||||
) |
||||
|
||||
proto_library( |
||||
name = "200_fields_proto", |
||||
srcs = ["200_fields.proto"], |
||||
) |
||||
|
||||
proto_library( |
||||
name = "empty_proto", |
||||
srcs = ["empty.proto"], |
||||
) |
||||
|
||||
[( |
||||
upb_c_proto_library( |
||||
name = k + "_upb_proto", |
||||
deps = [":" + k + "_proto"], |
||||
), |
||||
cc_proto_library( |
||||
name = k + "_cc_proto", |
||||
deps = [":" + k + "_proto"], |
||||
), |
||||
tmpl_cc_binary( |
||||
name = k + "_upb_binary", |
||||
testonly = 1, |
||||
args = [ |
||||
package_name() + "/" + k + ".upb.h", |
||||
"upb_benchmark_" + v, |
||||
], |
||||
gen = ":gen_upb_binary_c", |
||||
deps = [ |
||||
":" + k + "_upb_proto", |
||||
], |
||||
), |
||||
tmpl_cc_binary( |
||||
name = k + "_protobuf_binary", |
||||
testonly = 1, |
||||
args = [ |
||||
package_name() + "/" + k + ".pb.h", |
||||
"upb_benchmark::" + v, |
||||
], |
||||
gen = ":gen_protobuf_binary_cc", |
||||
deps = [ |
||||
":" + k + "_cc_proto", |
||||
], |
||||
), |
||||
cc_optimizefor_proto_library( |
||||
name = k + "_cc_lite_proto", |
||||
srcs = [k + ".proto"], |
||||
outs = [k + "_lite.proto"], |
||||
optimize_for = "LITE_RUNTIME", |
||||
), |
||||
tmpl_cc_binary( |
||||
name = k + "_lite_protobuf_binary", |
||||
testonly = 1, |
||||
args = [ |
||||
package_name() + "/" + k + "_lite.pb.h", |
||||
"upb_benchmark::" + v, |
||||
], |
||||
gen = ":gen_protobuf_binary_cc", |
||||
deps = [ |
||||
":" + k + "_cc_lite_proto", |
||||
], |
||||
), |
||||
cc_optimizefor_proto_library( |
||||
name = k + "_cc_codesize_proto", |
||||
srcs = [k + ".proto"], |
||||
outs = [k + "_codesize.proto"], |
||||
optimize_for = "CODE_SIZE", |
||||
), |
||||
tmpl_cc_binary( |
||||
name = k + "_codesize_protobuf_binary", |
||||
testonly = 1, |
||||
args = [ |
||||
package_name() + "/" + k + "_codesize.pb.h", |
||||
"upb_benchmark::" + v, |
||||
], |
||||
gen = ":gen_protobuf_binary_cc", |
||||
deps = [ |
||||
":" + k + "_cc_codesize_proto", |
||||
], |
||||
), |
||||
) for k, v in SIZE_BENCHMARKS.items()] |
||||
|
||||
genrule( |
||||
name = "size_data", |
||||
testonly = 1, |
||||
srcs = expand_suffixes( |
||||
SIZE_BENCHMARKS.keys(), |
||||
suffixes = [ |
||||
"_upb_binary", |
||||
"_protobuf_binary", |
||||
"_lite_protobuf_binary", |
||||
"_codesize_protobuf_binary", |
||||
], |
||||
), |
||||
outs = ["size_data.txt"], |
||||
# We want --format=GNU which counts rodata with data, not text. |
||||
cmd = "size $$($$OSTYPE == 'linux-gnu' ? '--format=GNU -d' : '') $(SRCS) > $@", |
||||
# "size" sometimes isn't available remotely. |
||||
local = 1, |
||||
tags = ["no-remote-exec"], |
||||
) |
@ -0,0 +1,59 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2023 Google LLC. All rights reserved. |
||||
# https://developers.google.com/protocol-buffers/ |
||||
# |
||||
# Redistribution and use in source and binary forms, with or without |
||||
# modification, are permitted provided that the following conditions are |
||||
# met: |
||||
# |
||||
# * Redistributions of source code must retain the above copyright |
||||
# notice, this list of conditions and the following disclaimer. |
||||
# * Redistributions in binary form must reproduce the above |
||||
# copyright notice, this list of conditions and the following disclaimer |
||||
# in the documentation and/or other materials provided with the |
||||
# distribution. |
||||
# * Neither the name of Google LLC nor the names of its |
||||
# contributors may be used to endorse or promote products derived from |
||||
# this software without specific prior written permission. |
||||
# |
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
|
||||
load( |
||||
"@protobuf//bazel:proto_library.bzl", |
||||
"proto_library", |
||||
) |
||||
|
||||
proto_library( |
||||
name = "ads_proto", |
||||
srcs = glob([ |
||||
"google/ads/googleads/v16/**/*.proto", |
||||
"google/api/**/*.proto", |
||||
"google/rpc/**/*.proto", |
||||
"google/longrunning/**/*.proto", |
||||
"google/logging/**/*.proto", |
||||
]), |
||||
#srcs = ["google/ads/googleads/v5/services/google_ads_service.proto"], |
||||
visibility = ["//visibility:public"], |
||||
deps = [ |
||||
"@com_google_protobuf//:any_proto", |
||||
"@com_google_protobuf//:api_proto", |
||||
"@com_google_protobuf//:descriptor_proto", |
||||
"@com_google_protobuf//:duration_proto", |
||||
"@com_google_protobuf//:empty_proto", |
||||
"@com_google_protobuf//:field_mask_proto", |
||||
"@com_google_protobuf//:struct_proto", |
||||
"@com_google_protobuf//:timestamp_proto", |
||||
"@com_google_protobuf//:type_proto", |
||||
"@com_google_protobuf//:wrappers_proto", |
||||
], |
||||
) |
@ -0,0 +1,471 @@ |
||||
// Protocol Buffers - Google's data interchange format |
||||
// Copyright 2023 Google LLC. All rights reserved. |
||||
// |
||||
// Use of this source code is governed by a BSD-style |
||||
// license that can be found in the LICENSE file or at |
||||
// https://developers.google.com/open-source/licenses/bsd |
||||
|
||||
#include <benchmark/benchmark.h> |
||||
|
||||
#include <math.h> |
||||
#include <stdint.h> |
||||
#include <string.h> |
||||
|
||||
#include <string> |
||||
#include <vector> |
||||
|
||||
#include "google/ads/googleads/v16/services/google_ads_service.upbdefs.h" |
||||
#include "google/protobuf/descriptor.pb.h" |
||||
#include "absl/container/flat_hash_set.h" |
||||
#include "absl/log/absl_check.h" |
||||
#include "google/protobuf/dynamic_message.h" |
||||
#include "google/protobuf/json/json.h" |
||||
#include "benchmarks/descriptor.pb.h" |
||||
#include "benchmarks/descriptor.upb.h" |
||||
#include "benchmarks/descriptor.upbdefs.h" |
||||
#include "benchmarks/descriptor_sv.pb.h" |
||||
#include "upb/base/string_view.h" |
||||
#include "upb/base/upcast.h" |
||||
#include "upb/json/decode.h" |
||||
#include "upb/json/encode.h" |
||||
#include "upb/mem/arena.h" |
||||
#include "upb/reflection/def.hpp" |
||||
#include "upb/wire/decode.h" |
||||
|
||||
upb_StringView descriptor = |
||||
benchmarks_descriptor_proto_upbdefinit.descriptor; |
||||
namespace protobuf = ::google::protobuf; |
||||
|
||||
// A buffer big enough to parse descriptor.proto without going to heap. |
||||
// We use 64-bit ints here to force alignment. |
||||
int64_t buf[8191]; |
||||
|
||||
void CollectFileDescriptors( |
||||
const _upb_DefPool_Init* file, |
||||
std::vector<upb_StringView>& serialized_files, |
||||
absl::flat_hash_set<const _upb_DefPool_Init*>& seen) { |
||||
if (!seen.insert(file).second) return; |
||||
for (_upb_DefPool_Init** deps = file->deps; *deps; deps++) { |
||||
CollectFileDescriptors(*deps, serialized_files, seen); |
||||
} |
||||
serialized_files.push_back(file->descriptor); |
||||
} |
||||
|
||||
static void BM_ArenaOneAlloc(benchmark::State& state) { |
||||
for (auto _ : state) { |
||||
upb_Arena* arena = upb_Arena_New(); |
||||
upb_Arena_Malloc(arena, 1); |
||||
upb_Arena_Free(arena); |
||||
} |
||||
} |
||||
BENCHMARK(BM_ArenaOneAlloc); |
||||
|
||||
static void BM_ArenaInitialBlockOneAlloc(benchmark::State& state) { |
||||
for (auto _ : state) { |
||||
upb_Arena* arena = upb_Arena_Init(buf, sizeof(buf), nullptr); |
||||
upb_Arena_Malloc(arena, 1); |
||||
upb_Arena_Free(arena); |
||||
} |
||||
} |
||||
BENCHMARK(BM_ArenaInitialBlockOneAlloc); |
||||
|
||||
static void BM_ArenaFuseUnbalanced(benchmark::State& state) { |
||||
std::vector<upb_Arena*> arenas(state.range(0)); |
||||
size_t n = 0; |
||||
for (auto _ : state) { |
||||
for (auto& arena : arenas) { |
||||
arena = upb_Arena_New(); |
||||
} |
||||
for (auto& arena : arenas) { |
||||
upb_Arena_Fuse(arenas[0], arena); |
||||
} |
||||
for (auto& arena : arenas) { |
||||
upb_Arena_Free(arena); |
||||
} |
||||
n += arenas.size(); |
||||
} |
||||
state.SetItemsProcessed(n); |
||||
} |
||||
BENCHMARK(BM_ArenaFuseUnbalanced)->Range(2, 128); |
||||
|
||||
static void BM_ArenaFuseBalanced(benchmark::State& state) { |
||||
std::vector<upb_Arena*> arenas(state.range(0)); |
||||
size_t n = 0; |
||||
|
||||
for (auto _ : state) { |
||||
for (auto& arena : arenas) { |
||||
arena = upb_Arena_New(); |
||||
} |
||||
|
||||
// Perform a series of fuses that keeps the halves balanced. |
||||
const size_t max = ceil(log2(double(arenas.size()))); |
||||
for (size_t n = 0; n <= max; n++) { |
||||
size_t step = 1 << n; |
||||
for (size_t i = 0; i + step < arenas.size(); i += (step * 2)) { |
||||
upb_Arena_Fuse(arenas[i], arenas[i + step]); |
||||
} |
||||
} |
||||
|
||||
for (auto& arena : arenas) { |
||||
upb_Arena_Free(arena); |
||||
} |
||||
n += arenas.size(); |
||||
} |
||||
state.SetItemsProcessed(n); |
||||
} |
||||
BENCHMARK(BM_ArenaFuseBalanced)->Range(2, 128); |
||||
|
||||
enum LoadDescriptorMode { |
||||
NoLayout, |
||||
WithLayout, |
||||
}; |
||||
|
||||
// This function is mostly copied from upb/def.c, but it is modified to avoid |
||||
// passing in the pre-generated mini-tables, in order to force upb to compute |
||||
// them dynamically. Generally you would never want to do this, but we want to |
||||
// simulate the cost we would pay if we were loading these types purely from |
||||
// descriptors, with no mini-tables available. |
||||
bool LoadDefInit_BuildLayout(upb_DefPool* s, const _upb_DefPool_Init* init, |
||||
size_t* bytes) { |
||||
_upb_DefPool_Init** deps = init->deps; |
||||
google_protobuf_FileDescriptorProto* file; |
||||
upb_Arena* arena; |
||||
upb_Status status; |
||||
|
||||
upb_Status_Clear(&status); |
||||
|
||||
if (upb_DefPool_FindFileByName(s, init->filename)) { |
||||
return true; |
||||
} |
||||
|
||||
arena = upb_Arena_New(); |
||||
|
||||
for (; *deps; deps++) { |
||||
if (!LoadDefInit_BuildLayout(s, *deps, bytes)) goto err; |
||||
} |
||||
|
||||
file = google_protobuf_FileDescriptorProto_parse_ex( |
||||
init->descriptor.data, init->descriptor.size, nullptr, |
||||
kUpb_DecodeOption_AliasString, arena); |
||||
*bytes += init->descriptor.size; |
||||
|
||||
if (!file) { |
||||
upb_Status_SetErrorFormat( |
||||
&status, |
||||
"Failed to parse compiled-in descriptor for file '%s'. This should " |
||||
"never happen.", |
||||
init->filename); |
||||
goto err; |
||||
} |
||||
|
||||
// KEY DIFFERENCE: Here we pass in only the descriptor, and not the |
||||
// pre-generated minitables. |
||||
if (!upb_DefPool_AddFile(s, file, &status)) { |
||||
goto err; |
||||
} |
||||
|
||||
upb_Arena_Free(arena); |
||||
return true; |
||||
|
||||
err: |
||||
fprintf(stderr, |
||||
"Error loading compiled-in descriptor for file '%s' (this should " |
||||
"never happen): %s\n", |
||||
init->filename, upb_Status_ErrorMessage(&status)); |
||||
exit(1); |
||||
} |
||||
|
||||
template <LoadDescriptorMode Mode> |
||||
static void BM_LoadAdsDescriptor_Upb(benchmark::State& state) { |
||||
size_t bytes_per_iter = 0; |
||||
for (auto _ : state) { |
||||
upb::DefPool defpool; |
||||
if (Mode == NoLayout) { |
||||
google_ads_googleads_v16_services_SearchGoogleAdsRequest_getmsgdef( |
||||
defpool.ptr()); |
||||
bytes_per_iter = _upb_DefPool_BytesLoaded(defpool.ptr()); |
||||
} else { |
||||
bytes_per_iter = 0; |
||||
LoadDefInit_BuildLayout( |
||||
defpool.ptr(), |
||||
&google_ads_googleads_v16_services_google_ads_service_proto_upbdefinit, |
||||
&bytes_per_iter); |
||||
} |
||||
} |
||||
state.SetBytesProcessed(state.iterations() * bytes_per_iter); |
||||
} |
||||
BENCHMARK_TEMPLATE(BM_LoadAdsDescriptor_Upb, NoLayout); |
||||
BENCHMARK_TEMPLATE(BM_LoadAdsDescriptor_Upb, WithLayout); |
||||
|
||||
template <LoadDescriptorMode Mode> |
||||
static void BM_LoadAdsDescriptor_Proto2(benchmark::State& state) { |
||||
extern _upb_DefPool_Init |
||||
google_ads_googleads_v16_services_google_ads_service_proto_upbdefinit; |
||||
std::vector<upb_StringView> serialized_files; |
||||
absl::flat_hash_set<const _upb_DefPool_Init*> seen_files; |
||||
CollectFileDescriptors( |
||||
&google_ads_googleads_v16_services_google_ads_service_proto_upbdefinit, |
||||
serialized_files, seen_files); |
||||
size_t bytes_per_iter = 0; |
||||
for (auto _ : state) { |
||||
bytes_per_iter = 0; |
||||
protobuf::Arena arena; |
||||
protobuf::DescriptorPool pool; |
||||
for (auto file : serialized_files) { |
||||
absl::string_view input(file.data, file.size); |
||||
auto proto = |
||||
protobuf::Arena::Create<protobuf::FileDescriptorProto>(&arena); |
||||
bool ok = proto->ParseFrom<protobuf::MessageLite::kMergePartial>(input) && |
||||
pool.BuildFile(*proto) != nullptr; |
||||
if (!ok) { |
||||
printf("Failed to add file.\n"); |
||||
exit(1); |
||||
} |
||||
bytes_per_iter += input.size(); |
||||
} |
||||
|
||||
if (Mode == WithLayout) { |
||||
protobuf::DynamicMessageFactory factory; |
||||
const protobuf::Descriptor* d = pool.FindMessageTypeByName( |
||||
"google.ads.googleads.v16.services.SearchGoogleAdsResponse"); |
||||
if (!d) { |
||||
printf("Failed to find descriptor.\n"); |
||||
exit(1); |
||||
} |
||||
factory.GetPrototype(d); |
||||
} |
||||
} |
||||
state.SetBytesProcessed(state.iterations() * bytes_per_iter); |
||||
} |
||||
BENCHMARK_TEMPLATE(BM_LoadAdsDescriptor_Proto2, NoLayout); |
||||
BENCHMARK_TEMPLATE(BM_LoadAdsDescriptor_Proto2, WithLayout); |
||||
|
||||
enum CopyStrings { |
||||
Copy, |
||||
Alias, |
||||
}; |
||||
|
||||
enum ArenaMode { |
||||
NoArena, |
||||
UseArena, |
||||
InitBlock, |
||||
}; |
||||
|
||||
template <ArenaMode AMode, CopyStrings Copy> |
||||
static void BM_Parse_Upb_FileDesc(benchmark::State& state) { |
||||
for (auto _ : state) { |
||||
upb_Arena* arena; |
||||
if (AMode == InitBlock) { |
||||
arena = upb_Arena_Init(buf, sizeof(buf), nullptr); |
||||
} else { |
||||
arena = upb_Arena_New(); |
||||
} |
||||
upb_benchmark_FileDescriptorProto* set = |
||||
upb_benchmark_FileDescriptorProto_parse_ex( |
||||
descriptor.data, descriptor.size, nullptr, |
||||
Copy == Alias ? kUpb_DecodeOption_AliasString : 0, arena); |
||||
if (!set) { |
||||
printf("Failed to parse.\n"); |
||||
exit(1); |
||||
} |
||||
upb_Arena_Free(arena); |
||||
} |
||||
state.SetBytesProcessed(state.iterations() * descriptor.size); |
||||
} |
||||
BENCHMARK_TEMPLATE(BM_Parse_Upb_FileDesc, UseArena, Copy); |
||||
BENCHMARK_TEMPLATE(BM_Parse_Upb_FileDesc, UseArena, Alias); |
||||
BENCHMARK_TEMPLATE(BM_Parse_Upb_FileDesc, InitBlock, Copy); |
||||
BENCHMARK_TEMPLATE(BM_Parse_Upb_FileDesc, InitBlock, Alias); |
||||
|
||||
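// Proto2Factory selects how the target message is allocated: plain heap
|
||||
// allocation (NoArena), a protobuf::Arena (UseArena), or an arena seeded
|
||||
// with a caller-provided initial block (InitBlock).
|
||||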
template <ArenaMode AMode, class P> |
||||
struct Proto2Factory; |
||||
|
||||
template <class P> |
||||
struct Proto2Factory<NoArena, P> { |
||||
public: |
||||
P* GetProto() { return &proto; } |
||||
|
||||
private: |
||||
P proto; |
||||
}; |
||||
|
||||
template <class P> |
||||
struct Proto2Factory<UseArena, P> { |
||||
public: |
||||
P* GetProto() { return protobuf::Arena::Create<P>(&arena); } |
||||
|
||||
private: |
||||
protobuf::Arena arena; |
||||
}; |
||||
|
||||
template <class P> |
||||
struct Proto2Factory<InitBlock, P> { |
||||
public: |
||||
Proto2Factory() : arena(GetOptions()) {} |
||||
P* GetProto() { return protobuf::Arena::Create<P>(&arena); } |
||||
|
||||
private: |
||||
protobuf::ArenaOptions GetOptions() { |
||||
protobuf::ArenaOptions opts; |
||||
opts.initial_block = (char*)buf; |
||||
opts.initial_block_size = sizeof(buf); |
||||
return opts; |
||||
} |
||||
|
||||
protobuf::Arena arena; |
||||
}; |
||||
|
||||
using FileDesc = ::upb_benchmark::FileDescriptorProto; |
||||
using FileDescSV = ::upb_benchmark::sv::FileDescriptorProto; |
||||
|
||||
template <class P, ArenaMode AMode, CopyStrings kCopy> |
||||
void BM_Parse_Proto2(benchmark::State& state) { |
||||
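// With kMergePartialWithAliasing, string fields may alias the input
|
||||
// buffer instead of copying it, so the input must outlive the message;
|
||||
// here `descriptor` outlives every iteration.
|
||||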
constexpr protobuf::MessageLite::ParseFlags kParseFlags = |
||||
kCopy == Copy |
||||
? protobuf::MessageLite::ParseFlags::kMergePartial |
||||
: protobuf::MessageLite::ParseFlags::kMergePartialWithAliasing; |
||||
for (auto _ : state) { |
||||
Proto2Factory<AMode, P> proto_factory; |
||||
auto proto = proto_factory.GetProto(); |
||||
absl::string_view input(descriptor.data, descriptor.size); |
||||
bool ok = proto->template ParseFrom<kParseFlags>(input); |
||||
if (!ok) { |
||||
printf("Failed to parse.\n"); |
||||
exit(1); |
||||
} |
||||
} |
||||
state.SetBytesProcessed(state.iterations() * descriptor.size); |
||||
} |
||||
BENCHMARK_TEMPLATE(BM_Parse_Proto2, FileDesc, NoArena, Copy); |
||||
BENCHMARK_TEMPLATE(BM_Parse_Proto2, FileDesc, UseArena, Copy); |
||||
BENCHMARK_TEMPLATE(BM_Parse_Proto2, FileDesc, InitBlock, Copy); |
||||
BENCHMARK_TEMPLATE(BM_Parse_Proto2, FileDescSV, InitBlock, Alias); |
||||
|
||||
static void BM_SerializeDescriptor_Proto2(benchmark::State& state) { |
||||
upb_benchmark::FileDescriptorProto proto; |
||||
proto.ParseFromArray(descriptor.data, descriptor.size); |
||||
for (auto _ : state) { |
||||
proto.SerializePartialToArray(buf, sizeof(buf)); |
||||
} |
||||
state.SetBytesProcessed(state.iterations() * descriptor.size); |
||||
} |
||||
BENCHMARK(BM_SerializeDescriptor_Proto2); |
||||
|
||||
static upb_benchmark_FileDescriptorProto* UpbParseDescriptor(upb_Arena* arena) { |
||||
upb_benchmark_FileDescriptorProto* set = |
||||
upb_benchmark_FileDescriptorProto_parse(descriptor.data, descriptor.size, |
||||
arena); |
||||
if (!set) { |
||||
printf("Failed to parse.\n"); |
||||
exit(1); |
||||
} |
||||
return set; |
||||
} |
||||
|
||||
static void BM_SerializeDescriptor_Upb(benchmark::State& state) { |
||||
int64_t total = 0; |
||||
upb_Arena* arena = upb_Arena_New(); |
||||
upb_benchmark_FileDescriptorProto* set = UpbParseDescriptor(arena); |
||||
for (auto _ : state) { |
||||
upb_Arena* enc_arena = upb_Arena_Init(buf, sizeof(buf), nullptr); |
||||
size_t size; |
||||
char* data = |
||||
upb_benchmark_FileDescriptorProto_serialize(set, enc_arena, &size); |
||||
if (!data) { |
||||
printf("Failed to serialize.\n"); |
||||
exit(1); |
||||
} |
||||
total += size; |
||||
} |
||||
state.SetBytesProcessed(total); |
||||
} |
||||
BENCHMARK(BM_SerializeDescriptor_Upb); |
||||
|
||||
static absl::string_view UpbJsonEncode(upb_benchmark_FileDescriptorProto* proto, |
||||
const upb_MessageDef* md, |
||||
upb_Arena* arena) { |
||||
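// Standard two-pass pattern: the first upb_JsonEncode call computes the
|
||||
// required size, the second writes into the newly allocated buffer.
|
||||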
size_t size = |
||||
upb_JsonEncode(UPB_UPCAST(proto), md, nullptr, 0, nullptr, 0, nullptr); |
||||
char* buf = reinterpret_cast<char*>(upb_Arena_Malloc(arena, size + 1)); |
||||
upb_JsonEncode(UPB_UPCAST(proto), md, nullptr, 0, buf, size, nullptr); |
||||
return absl::string_view(buf, size); |
||||
} |
||||
|
||||
static void BM_JsonParse_Upb(benchmark::State& state) { |
||||
upb_Arena* arena = upb_Arena_New(); |
||||
upb_benchmark_FileDescriptorProto* set = |
||||
upb_benchmark_FileDescriptorProto_parse(descriptor.data, descriptor.size, |
||||
arena); |
||||
if (!set) { |
||||
printf("Failed to parse.\n"); |
||||
exit(1); |
||||
} |
||||
|
||||
upb::DefPool defpool; |
||||
const upb_MessageDef* md = |
||||
upb_benchmark_FileDescriptorProto_getmsgdef(defpool.ptr()); |
||||
auto json = UpbJsonEncode(set, md, arena); |
||||
|
||||
for (auto _ : state) { |
||||
upb_Arena* arena = upb_Arena_New(); |
||||
upb_benchmark_FileDescriptorProto* proto = |
||||
upb_benchmark_FileDescriptorProto_new(arena); |
||||
upb_JsonDecode(json.data(), json.size(), UPB_UPCAST(proto), md, |
||||
defpool.ptr(), 0, arena, nullptr); |
||||
upb_Arena_Free(arena); |
||||
} |
||||
state.SetBytesProcessed(state.iterations() * json.size()); |
||||
} |
||||
BENCHMARK(BM_JsonParse_Upb); |
||||
|
||||
static void BM_JsonParse_Proto2(benchmark::State& state) { |
||||
protobuf::FileDescriptorProto proto; |
||||
absl::string_view input(descriptor.data, descriptor.size); |
||||
proto.ParseFromString(input); |
||||
std::string json; |
||||
ABSL_CHECK_OK(google::protobuf::json::MessageToJsonString(proto, &json)); |
||||
for (auto _ : state) { |
||||
protobuf::FileDescriptorProto proto; |
||||
ABSL_CHECK_OK(google::protobuf::json::JsonStringToMessage(json, &proto)); |
||||
} |
||||
state.SetBytesProcessed(state.iterations() * json.size()); |
||||
} |
||||
BENCHMARK(BM_JsonParse_Proto2); |
||||
|
||||
static void BM_JsonSerialize_Upb(benchmark::State& state) { |
||||
upb_Arena* arena = upb_Arena_New(); |
||||
upb_benchmark_FileDescriptorProto* set = |
||||
upb_benchmark_FileDescriptorProto_parse(descriptor.data, descriptor.size, |
||||
arena); |
||||
ABSL_CHECK(set != nullptr); |
||||
|
||||
upb::DefPool defpool; |
||||
const upb_MessageDef* md = |
||||
upb_benchmark_FileDescriptorProto_getmsgdef(defpool.ptr()); |
||||
auto json = UpbJsonEncode(set, md, arena); |
||||
std::string json_str; |
||||
json_str.resize(json.size()); |
||||
|
||||
for (auto _ : state) { |
||||
// This isn't a fully fair comparison, as it assumes we already know the
|
||||
// correct size of the buffer. In practice, we usually need to run the
|
||||
// encoder twice, once to discover the size of the buffer.
|
||||
upb_JsonEncode(UPB_UPCAST(set), md, nullptr, 0, json_str.data(), |
||||
json_str.size(), nullptr); |
||||
} |
||||
state.SetBytesProcessed(state.iterations() * json.size()); |
||||
} |
||||
BENCHMARK(BM_JsonSerialize_Upb); |
||||
|
||||
static void BM_JsonSerialize_Proto2(benchmark::State& state) { |
||||
protobuf::FileDescriptorProto proto; |
||||
absl::string_view input(descriptor.data, descriptor.size); |
||||
proto.ParseFromString(input); |
||||
std::string json; |
||||
for (auto _ : state) { |
||||
json.clear(); |
||||
ABSL_CHECK_OK(google::protobuf::json::MessageToJsonString(proto, &json)); |
||||
} |
||||
state.SetBytesProcessed(state.iterations() * json.size()); |
||||
} |
||||
BENCHMARK(BM_JsonSerialize_Proto2); |
@ -0,0 +1,63 @@ |
||||
# Copyright (c) 2009-2021, Google LLC |
||||
# All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
|
||||
load("//bazel:cc_proto_library.bzl", "cc_proto_library") |
||||
load("//bazel:proto_library.bzl", "proto_library") |
||||
|
||||
# begin:google_only |
||||
# _is_google3 = True |
||||
# end:google_only |
||||
|
||||
# begin:github_only |
||||
_is_google3 = False |
||||
# end:github_only |
||||
|
||||
def tmpl_cc_binary(name, gen, args, replacements = [], **kwargs): |
||||
srcs = [name + ".cc"] |
||||
native.genrule( |
||||
name = name + "_gen_srcs", |
||||
tools = [gen], |
||||
outs = srcs, |
||||
cmd = "$(location " + gen + ") " + " ".join(args) + " > $@", |
||||
) |
||||
|
||||
if _is_google3: |
||||
kwargs["malloc"] = "//base:system_malloc" |
||||
kwargs["features"] = ["-static_linking_mode"] |
||||
native.cc_binary( |
||||
name = name, |
||||
srcs = srcs, |
||||
**kwargs |
||||
) |
||||
|
||||
def cc_optimizefor_proto_library(name, srcs, outs, optimize_for): |
||||
if len(srcs) != 1: |
||||
fail("Currently srcs must have exactly 1 element") |
||||
|
||||
native.genrule( |
||||
name = name + "_gen_proto", |
||||
srcs = srcs, |
||||
outs = outs, |
||||
cmd = "cp $< $@ && chmod a+w $@ && echo 'option optimize_for = " + optimize_for + ";' >> $@", |
||||
) |
||||
|
||||
proto_library( |
||||
name = name + "_proto", |
||||
srcs = outs, |
||||
) |
||||
|
||||
cc_proto_library( |
||||
name = name, |
||||
deps = [":" + name + "_proto"], |
||||
) |
||||
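|
||||
# Example use (hypothetical target and file names): |
||||
#   cc_optimizefor_proto_library( |
||||
#       name = "descriptor_lite", |
||||
#       srcs = ["descriptor.proto"], |
||||
#       outs = ["descriptor_lite.proto"], |
||||
#       optimize_for = "LITE_RUNTIME", |
||||
#   ) |
||||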
|
||||
def expand_suffixes(vals, suffixes): |
||||
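# e.g. expand_suffixes(["foo", "bar"], ["_a", "_b"]) returns |
||||
# ["foo_a", "foo_b", "bar_a", "bar_b"]. |
||||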
ret = [] |
||||
for val in vals: |
||||
for suffix in suffixes: |
||||
ret.append(val + suffix) |
||||
return ret |
@ -0,0 +1,123 @@ |
||||
#!/usr/bin/python3 |
||||
# |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2023 Google LLC. All rights reserved. |
||||
# https://developers.google.com/protocol-buffers/ |
||||
# |
||||
# Redistribution and use in source and binary forms, with or without |
||||
# modification, are permitted provided that the following conditions are |
||||
# met: |
||||
# |
||||
# * Redistributions of source code must retain the above copyright |
||||
# notice, this list of conditions and the following disclaimer. |
||||
# * Redistributions in binary form must reproduce the above |
||||
# copyright notice, this list of conditions and the following disclaimer |
||||
# in the documentation and/or other materials provided with the |
||||
# distribution. |
||||
# * Neither the name of Google LLC nor the names of its |
||||
# contributors may be used to endorse or promote products derived from |
||||
# this software without specific prior written permission. |
||||
# |
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
|
||||
"""Benchmarks the current working directory against a given baseline. |
||||
|
||||
This script benchmarks both size and speed. |
||||
""" |
||||
|
||||
import contextlib |
||||
import json |
||||
import os |
||||
import re |
||||
import subprocess |
||||
import sys |
||||
import tempfile |
||||
|
||||
@contextlib.contextmanager |
||||
def GitWorktree(commit): |
||||
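# Check out `commit` into a temporary worktree and chdir into it for the |
||||
# duration of the with-block; clean up the worktree afterwards. |
||||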
tmpdir = tempfile.mkdtemp() |
||||
subprocess.run(['git', 'worktree', 'add', '-q', '-d', tmpdir, commit], check=True) |
||||
cwd = os.getcwd() |
||||
os.chdir(tmpdir) |
||||
try: |
||||
yield tmpdir |
||||
finally: |
||||
os.chdir(cwd) |
||||
subprocess.run(['git', 'worktree', 'remove', tmpdir], check=True) |
||||
|
||||
def Run(cmd): |
||||
subprocess.check_call(cmd, shell=True) |
||||
|
||||
def Benchmark(outbase, bench_cpu=True, runs=12, fasttable=False): |
||||
tmpfile = "/tmp/bench-output.json" |
||||
Run("rm -rf {}".format(tmpfile)) |
||||
#Run("CC=clang bazel test ...") |
||||
if fasttable: |
||||
extra_args = " --//:fasttable_enabled=true" |
||||
else: |
||||
extra_args = "" |
||||
|
||||
if bench_cpu: |
||||
Run("CC=clang bazel build -c opt --copt=-march=native benchmarks:benchmark" + extra_args) |
||||
Run("./bazel-bin/benchmarks/benchmark --benchmark_out_format=json --benchmark_out={} --benchmark_repetitions={} --benchmark_min_time=0.05 --benchmark_enable_random_interleaving=true".format(tmpfile, runs)) |
||||
with open(tmpfile) as f: |
||||
bench_json = json.load(f) |
||||
|
||||
# Translate into the format expected by benchstat. |
||||
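# Each non-aggregate run becomes one benchstat-style line, e.g. |
||||
# "BenchmarkArenaOneAlloc 80000000 20.5 ns/op" (illustrative values). |
||||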
txt_filename = outbase + ".txt" |
||||
with open(txt_filename, "w") as f: |
||||
for run in bench_json["benchmarks"]: |
||||
if run["run_type"] == "aggregate": |
||||
continue |
||||
name = run["name"] |
||||
name = name.replace(" ", "") |
||||
name = re.sub(r'^BM_', 'Benchmark', name) |
||||
values = (name, run["iterations"], run["cpu_time"]) |
||||
print("{} {} {} ns/op".format(*values), file=f) |
||||
Run("sort {} -o {} ".format(txt_filename, txt_filename)) |
||||
|
||||
Run("CC=clang bazel build -c opt --copt=-g --copt=-march=native :conformance_upb" |
||||
+ extra_args) |
||||
Run("cp -f bazel-bin/conformance_upb {}.bin".format(outbase)) |
||||
|
||||
|
||||
baseline = "main" |
||||
bench_cpu = True |
||||
fasttable = False |
||||
|
||||
if len(sys.argv) > 1: |
||||
baseline = sys.argv[1] |
||||
|
||||
# Quickly verify that the baseline exists. |
||||
with GitWorktree(baseline): |
||||
pass |
||||
|
||||
# Benchmark our current directory first, since it's more likely to be broken. |
||||
Benchmark("/tmp/new", bench_cpu, fasttable=fasttable) |
||||
|
||||
# Benchmark the baseline. |
||||
with GitWorktree(baseline): |
||||
Benchmark("/tmp/old", bench_cpu, fasttable=fasttable) |
||||
|
||||
print() |
||||
print() |
||||
|
||||
if bench_cpu: |
||||
Run("~/go/bin/benchstat /tmp/old.txt /tmp/new.txt") |
||||
|
||||
print() |
||||
print() |
||||
|
||||
Run("objcopy --strip-debug /tmp/old.bin /tmp/old.bin.stripped") |
||||
Run("objcopy --strip-debug /tmp/new.bin /tmp/new.bin.stripped") |
||||
Run("~/code/bloaty/bloaty /tmp/new.bin.stripped -- /tmp/old.bin.stripped --debug-file=/tmp/old.bin --debug-file=/tmp/new.bin -d compileunits,symbols") |
@ -0,0 +1,865 @@ |
||||
// Protocol Buffers - Google's data interchange format |
||||
// Copyright 2008 Google Inc. All rights reserved. |
||||
// |
||||
// Use of this source code is governed by a BSD-style |
||||
// license that can be found in the LICENSE file or at |
||||
// https://developers.google.com/open-source/licenses/bsd |
||||
|
||||
// Author: kenton@google.com (Kenton Varda) |
||||
// Based on original Protocol Buffers design by |
||||
// Sanjay Ghemawat, Jeff Dean, and others. |
||||
// |
||||
// The messages in this file describe the definitions found in .proto files. |
||||
// A valid .proto file can be translated directly to a FileDescriptorProto |
||||
// without any other information (e.g. without reading its imports). |
||||
|
||||
syntax = "proto2"; |
||||
|
||||
package upb_benchmark; |
||||
|
||||
option go_package = "google.golang.org/protobuf/types/descriptorpb"; |
||||
option java_package = "com.google.protobuf"; |
||||
option java_outer_classname = "DescriptorProtos"; |
||||
option csharp_namespace = "Google.Protobuf.Reflection"; |
||||
option objc_class_prefix = "UPBB"; |
||||
option cc_enable_arenas = true; |
||||
|
||||
// The protocol compiler can output a FileDescriptorSet containing the .proto |
||||
// files it parses. |
||||
message FileDescriptorSet { |
||||
repeated FileDescriptorProto file = 1; |
||||
} |
||||
|
||||
// Describes a complete .proto file. |
||||
message FileDescriptorProto { |
||||
optional string name = 1; // file name, relative to root of source tree |
||||
optional string package = 2; // e.g. "foo", "foo.bar", etc. |
||||
|
||||
// Names of files imported by this file. |
||||
repeated string dependency = 3; |
||||
// Indexes of the public imported files in the dependency list above. |
||||
repeated int32 public_dependency = 10; |
||||
// Indexes of the weak imported files in the dependency list. |
||||
// For Google-internal migration only. Do not use. |
||||
repeated int32 weak_dependency = 11; |
||||
|
||||
// All top-level definitions in this file. |
||||
repeated DescriptorProto message_type = 4; |
||||
repeated EnumDescriptorProto enum_type = 5; |
||||
repeated ServiceDescriptorProto service = 6; |
||||
repeated FieldDescriptorProto extension = 7; |
||||
|
||||
optional FileOptions options = 8; |
||||
|
||||
// This field contains optional information about the original source code. |
||||
// You may safely remove this entire field without harming runtime |
||||
// functionality of the descriptors -- the information is needed only by |
||||
// development tools. |
||||
optional SourceCodeInfo source_code_info = 9; |
||||
|
||||
// The syntax of the proto file. |
||||
// The supported values are "proto2" and "proto3". |
||||
optional string syntax = 12; |
||||
} |
||||
|
||||
// Describes a message type. |
||||
message DescriptorProto { |
||||
optional string name = 1; |
||||
|
||||
repeated FieldDescriptorProto field = 2; |
||||
repeated FieldDescriptorProto extension = 6; |
||||
|
||||
repeated DescriptorProto nested_type = 3; |
||||
repeated EnumDescriptorProto enum_type = 4; |
||||
|
||||
message ExtensionRange { |
||||
optional int32 start = 1; // Inclusive. |
||||
optional int32 end = 2; // Exclusive. |
||||
|
||||
optional ExtensionRangeOptions options = 3; |
||||
} |
||||
repeated ExtensionRange extension_range = 5; |
||||
|
||||
repeated OneofDescriptorProto oneof_decl = 8; |
||||
|
||||
optional MessageOptions options = 7; |
||||
|
||||
// Range of reserved tag numbers. Reserved tag numbers may not be used by |
||||
// fields or extension ranges in the same message. Reserved ranges may |
||||
// not overlap. |
||||
message ReservedRange { |
||||
optional int32 start = 1; // Inclusive. |
||||
optional int32 end = 2; // Exclusive. |
||||
} |
||||
repeated ReservedRange reserved_range = 9; |
||||
// Reserved field names, which may not be used by fields in the same message. |
||||
// A given name may only be reserved once. |
||||
repeated string reserved_name = 10; |
||||
} |
||||
|
||||
message ExtensionRangeOptions { |
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
// Describes a field within a message. |
||||
message FieldDescriptorProto { |
||||
enum Type { |
||||
// 0 is reserved for errors. |
||||
// Order is weird for historical reasons. |
||||
TYPE_DOUBLE = 1; |
||||
TYPE_FLOAT = 2; |
||||
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if |
||||
// negative values are likely. |
||||
TYPE_INT64 = 3; |
||||
TYPE_UINT64 = 4; |
||||
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if |
||||
// negative values are likely. |
||||
TYPE_INT32 = 5; |
||||
TYPE_FIXED64 = 6; |
||||
TYPE_FIXED32 = 7; |
||||
TYPE_BOOL = 8; |
||||
TYPE_STRING = 9; |
||||
// Tag-delimited aggregate. |
||||
// Group type is deprecated and not supported in proto3. However, Proto3 |
||||
// implementations should still be able to parse the group wire format and |
||||
// treat group fields as unknown fields. |
||||
TYPE_GROUP = 10; |
||||
TYPE_MESSAGE = 11; // Length-delimited aggregate. |
||||
|
||||
// New in version 2. |
||||
TYPE_BYTES = 12; |
||||
TYPE_UINT32 = 13; |
||||
TYPE_ENUM = 14; |
||||
TYPE_SFIXED32 = 15; |
||||
TYPE_SFIXED64 = 16; |
||||
TYPE_SINT32 = 17; // Uses ZigZag encoding. |
||||
TYPE_SINT64 = 18; // Uses ZigZag encoding. |
||||
} |
||||
|
||||
enum Label { |
||||
// 0 is reserved for errors |
||||
LABEL_OPTIONAL = 1; |
||||
LABEL_REQUIRED = 2; |
||||
LABEL_REPEATED = 3; |
||||
} |
||||
|
||||
optional string name = 1; |
||||
optional int32 number = 3; |
||||
optional Label label = 4; |
||||
|
||||
// If type_name is set, this need not be set. If both this and type_name |
||||
// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. |
||||
optional Type type = 5; |
||||
|
||||
// For message and enum types, this is the name of the type. If the name |
||||
// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping |
||||
// rules are used to find the type (i.e. first the nested types within this |
||||
// message are searched, then within the parent, on up to the root |
||||
// namespace). |
||||
optional string type_name = 6; |
||||
|
||||
// For extensions, this is the name of the type being extended. It is |
||||
// resolved in the same manner as type_name. |
||||
optional string extendee = 2; |
||||
|
||||
// For numeric types, contains the original text representation of the value. |
||||
// For booleans, "true" or "false". |
||||
// For strings, contains the default text contents (not escaped in any way). |
||||
// For bytes, contains the C escaped value. All bytes >= 128 are escaped. |
||||
// TODO: Base-64 encode? |
||||
optional string default_value = 7; |
||||
|
||||
// If set, gives the index of a oneof in the containing type's oneof_decl |
||||
// list. This field is a member of that oneof. |
||||
optional int32 oneof_index = 9; |
||||
|
||||
// JSON name of this field. The value is set by protocol compiler. If the |
||||
// user has set a "json_name" option on this field, that option's value |
||||
// will be used. Otherwise, it's deduced from the field's name by converting |
||||
// it to camelCase. |
||||
optional string json_name = 10; |
||||
|
||||
optional FieldOptions options = 8; |
||||
|
||||
// If true, this is a proto3 "optional". When a proto3 field is optional, it |
||||
// tracks presence regardless of field type. |
||||
// |
||||
// When proto3_optional is true, this field must belong to a oneof to |
||||
// signal to old proto3 clients that presence is tracked for this field. This |
||||
// oneof is known as a "synthetic" oneof, and this field must be its sole |
||||
// member (each proto3 optional field gets its own synthetic oneof). Synthetic |
||||
// oneofs exist in the descriptor only, and do not generate any API. Synthetic |
||||
// oneofs must be ordered after all "real" oneofs. |
||||
// |
||||
// For message fields, proto3_optional doesn't create any semantic change, |
||||
// since non-repeated message fields always track presence. However it still |
||||
// indicates the semantic detail of whether the user wrote "optional" or not. |
||||
// This can be useful for round-tripping the .proto file. For consistency we |
||||
// give message fields a synthetic oneof also, even though it is not required |
||||
// to track presence. This is especially important because the parser can't |
||||
// tell if a field is a message or an enum, so it must always create a |
||||
// synthetic oneof. |
||||
// |
||||
// Proto2 optional fields do not set this flag, because they already indicate |
||||
// optional with `LABEL_OPTIONAL`. |
||||
optional bool proto3_optional = 17; |
||||
} |
||||
|
||||
// Describes a oneof. |
||||
message OneofDescriptorProto { |
||||
optional string name = 1; |
||||
optional OneofOptions options = 2; |
||||
} |
||||
|
||||
// Describes an enum type. |
||||
message EnumDescriptorProto { |
||||
optional string name = 1; |
||||
|
||||
repeated EnumValueDescriptorProto value = 2; |
||||
|
||||
optional EnumOptions options = 3; |
||||
|
||||
// Range of reserved numeric values. Reserved values may not be used by |
||||
// entries in the same enum. Reserved ranges may not overlap. |
||||
// |
||||
// Note that this is distinct from DescriptorProto.ReservedRange in that it |
||||
// is inclusive such that it can appropriately represent the entire int32 |
||||
// domain. |
||||
message EnumReservedRange { |
||||
optional int32 start = 1; // Inclusive. |
||||
optional int32 end = 2; // Inclusive. |
||||
} |
||||
|
||||
// Range of reserved numeric values. Reserved numeric values may not be used |
||||
// by enum values in the same enum declaration. Reserved ranges may not |
||||
// overlap. |
||||
repeated EnumReservedRange reserved_range = 4; |
||||
|
||||
// Reserved enum value names, which may not be reused. A given name may only |
||||
// be reserved once. |
||||
repeated string reserved_name = 5; |
||||
} |
||||
|
||||
// Describes a value within an enum. |
||||
message EnumValueDescriptorProto { |
||||
optional string name = 1; |
||||
optional int32 number = 2; |
||||
|
||||
optional EnumValueOptions options = 3; |
||||
} |
||||
|
||||
// Describes a service. |
||||
message ServiceDescriptorProto { |
||||
optional string name = 1; |
||||
repeated MethodDescriptorProto method = 2; |
||||
|
||||
optional ServiceOptions options = 3; |
||||
} |
||||
|
||||
// Describes a method of a service. |
||||
message MethodDescriptorProto { |
||||
optional string name = 1; |
||||
|
||||
// Input and output type names. These are resolved in the same way as |
||||
// FieldDescriptorProto.type_name, but must refer to a message type. |
||||
optional string input_type = 2; |
||||
optional string output_type = 3; |
||||
|
||||
optional MethodOptions options = 4; |
||||
|
||||
// Identifies if client streams multiple client messages |
||||
optional bool client_streaming = 5 [default = false]; |
||||
// Identifies if server streams multiple server messages |
||||
optional bool server_streaming = 6 [default = false]; |
||||
} |
||||
|
||||
// =================================================================== |
||||
// Options |
||||
|
||||
// Each of the definitions above may have "options" attached. These are |
||||
// just annotations which may cause code to be generated slightly differently |
||||
// or may contain hints for code that manipulates protocol messages. |
||||
// |
||||
// Clients may define custom options as extensions of the *Options messages. |
||||
// These extensions may not yet be known at parsing time, so the parser cannot |
||||
// store the values in them. Instead it stores them in a field in the *Options |
||||
// message called uninterpreted_option. This field must have the same name |
||||
// across all *Options messages. We then use this field to populate the |
||||
// extensions when we build a descriptor, at which point all protos have been |
||||
// parsed and so all extensions are known. |
||||
// |
||||
// Extension numbers for custom options may be chosen as follows: |
||||
// * For options which will only be used within a single application or |
||||
// organization, or for experimental options, use field numbers 50000 |
||||
// through 99999. It is up to you to ensure that you do not use the |
||||
// same number for multiple options. |
||||
// * For options which will be published and used publicly by multiple |
||||
// independent entities, e-mail protobuf-global-extension-registry@google.com |
||||
// to reserve extension numbers. Simply provide your project name (e.g. |
||||
// Objective-C plugin) and your project website (if available) -- there's no |
||||
// need to explain how you intend to use them. Usually you only need one |
||||
// extension number. You can declare multiple options with only one extension |
||||
// number by putting them in a sub-message. See the Custom Options section of |
||||
// the docs for examples: |
||||
// https://developers.google.com/protocol-buffers/docs/proto#options |
||||
// If this turns out to be popular, a web service will be set up |
||||
// to automatically assign option numbers. |
||||
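// |
||||
// For example (illustrative only; not part of this file): |
||||
//   message MyProjectOptions { |
||||
//     optional bool enable_feature = 1; |
||||
//   } |
||||
//   extend google.protobuf.FileOptions { |
||||
//     optional MyProjectOptions my_project_opts = 50000; |
||||
//   } |
||||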
|
||||
message FileOptions { |
||||
// Sets the Java package where classes generated from this .proto will be |
||||
// placed. By default, the proto package is used, but this is often |
||||
// inappropriate because proto packages do not normally start with backwards |
||||
// domain names. |
||||
optional string java_package = 1; |
||||
|
||||
// If set, all the classes from the .proto file are wrapped in a single |
||||
// outer class with the given name. This applies to both Proto1 |
||||
// (equivalent to the old "--one_java_file" option) and Proto2 (where |
||||
// a .proto always translates to a single class, but you may want to |
||||
// explicitly choose the class name). |
||||
optional string java_outer_classname = 8; |
||||
|
||||
// If set true, then the Java code generator will generate a separate .java |
||||
// file for each top-level message, enum, and service defined in the .proto |
||||
// file. Thus, these types will *not* be nested inside the outer class |
||||
// named by java_outer_classname. However, the outer class will still be |
||||
// generated to contain the file's getDescriptor() method as well as any |
||||
// top-level extensions defined in the file. |
||||
optional bool java_multiple_files = 10 [default = false]; |
||||
|
||||
// This option does nothing. |
||||
optional bool java_generate_equals_and_hash = 20 [deprecated = true]; |
||||
|
||||
// If set true, then the Java2 code generator will generate code that |
||||
// throws an exception whenever an attempt is made to assign a non-UTF-8 |
||||
// byte sequence to a string field. |
||||
// Message reflection will do the same. |
||||
// However, an extension field still accepts non-UTF-8 byte sequences. |
||||
// This option has no effect when used with the lite runtime. |
||||
optional bool java_string_check_utf8 = 27 [default = false]; |
||||
|
||||
// Generated classes can be optimized for speed or code size. |
||||
enum OptimizeMode { |
||||
SPEED = 1; // Generate complete code for parsing, serialization, |
||||
// etc. |
||||
CODE_SIZE = 2; // Use ReflectionOps to implement these methods. |
||||
LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. |
||||
} |
||||
optional OptimizeMode optimize_for = 9 [default = SPEED]; |
||||
|
||||
// Sets the Go package where structs generated from this .proto will be |
||||
// placed. If omitted, the Go package will be derived from the following: |
||||
// - The basename of the package import path, if provided. |
||||
// - Otherwise, the package statement in the .proto file, if present. |
||||
// - Otherwise, the basename of the .proto file, without extension. |
||||
optional string go_package = 11; |
||||
|
||||
// Should generic services be generated in each language? "Generic" services |
||||
// are not specific to any particular RPC system. They are generated by the |
||||
// main code generators in each language (without additional plugins). |
||||
// Generic services were the only kind of service generation supported by |
||||
// early versions of google.protobuf. |
||||
// |
||||
// Generic services are now considered deprecated in favor of using plugins |
||||
// that generate code specific to your particular RPC system. Therefore, |
||||
// these default to false. Old code which depends on generic services should |
||||
// explicitly set them to true. |
||||
optional bool cc_generic_services = 16 [default = false]; |
||||
optional bool java_generic_services = 17 [default = false]; |
||||
optional bool py_generic_services = 18 [default = false]; |
||||
optional bool php_generic_services = 42 [default = false]; |
||||
|
||||
// Is this file deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for everything in the file, or it will be completely ignored; in the very |
||||
// least, this is a formalization for deprecating files. |
||||
optional bool deprecated = 23 [default = false]; |
||||
|
||||
// Enables the use of arenas for the proto messages in this file. This applies |
||||
// only to generated classes for C++. |
||||
optional bool cc_enable_arenas = 31 [default = true]; |
||||
|
||||
// Sets the objective c class prefix which is prepended to all objective c |
||||
// generated classes from this .proto. There is no default. |
||||
optional string objc_class_prefix = 36; |
||||
|
||||
// Namespace for generated classes; defaults to the package. |
||||
optional string csharp_namespace = 37; |
||||
|
||||
// By default Swift generators will take the proto package and CamelCase it |
||||
// replacing '.' with underscore and use that to prefix the types/symbols |
||||
// defined. When this option is provided, they will use this value instead |
||||
// to prefix the types/symbols defined. |
||||
optional string swift_prefix = 39; |
||||
|
||||
// Sets the php class prefix which is prepended to all php generated classes |
||||
// from this .proto. Default is empty. |
||||
optional string php_class_prefix = 40; |
||||
|
||||
// Use this option to change the namespace of php generated classes. Default |
||||
// is empty. When this option is empty, the package name will be used for |
||||
// determining the namespace. |
||||
optional string php_namespace = 41; |
||||
|
||||
// Use this option to change the namespace of php generated metadata classes. |
||||
// Default is empty. When this option is empty, the proto file name will be |
||||
// used for determining the namespace. |
||||
optional string php_metadata_namespace = 44; |
||||
|
||||
// Use this option to change the package of ruby generated classes. Default |
||||
// is empty. When this option is not set, the package name will be used for |
||||
// determining the ruby package. |
||||
optional string ruby_package = 45; |
||||
|
||||
// The parser stores options it doesn't recognize here. |
||||
// See the documentation for the "Options" section above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. |
||||
// See the documentation for the "Options" section above. |
||||
extensions 1000 to max; |
||||
|
||||
reserved 38; |
||||
} |
||||
|
||||
message MessageOptions { |
||||
// Set true to use the old proto1 MessageSet wire format for extensions. |
||||
// This is provided for backwards-compatibility with the MessageSet wire |
||||
// format. You should not use this for any other reason: It's less |
||||
// efficient, has fewer features, and is more complicated. |
||||
// |
||||
// The message must be defined exactly as follows: |
||||
// message Foo { |
||||
// option message_set_wire_format = true; |
||||
// extensions 4 to max; |
||||
// } |
||||
// Note that the message cannot have any defined fields; MessageSets only |
||||
// have extensions. |
||||
// |
||||
// All extensions of your type must be singular messages; e.g. they cannot |
||||
// be int32s, enums, or repeated messages. |
||||
// |
||||
// Because this is an option, the above two restrictions are not enforced by |
||||
// the protocol compiler. |
||||
optional bool message_set_wire_format = 1 [default = false]; |
||||
|
||||
// Disables the generation of the standard "descriptor()" accessor, which can |
||||
// conflict with a field of the same name. This is meant to make migration |
||||
// from proto1 easier; new code should avoid fields named "descriptor". |
||||
optional bool no_standard_descriptor_accessor = 2 [default = false]; |
||||
|
||||
// Is this message deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for the message, or it will be completely ignored; in the very least, |
||||
// this is a formalization for deprecating messages. |
||||
optional bool deprecated = 3 [default = false]; |
||||
|
||||
// Whether the message is an automatically generated map entry type for the |
||||
// maps field. |
||||
// |
||||
// For maps fields: |
||||
// map<KeyType, ValueType> map_field = 1; |
||||
// The parsed descriptor looks like: |
||||
// message MapFieldEntry { |
||||
// option map_entry = true; |
||||
// optional KeyType key = 1; |
||||
// optional ValueType value = 2; |
||||
// } |
||||
// repeated MapFieldEntry map_field = 1; |
||||
// |
||||
// Implementations may choose not to generate the map_entry=true message, but |
||||
// use a native map in the target language to hold the keys and values. |
||||
// The reflection APIs in such implementations still need to work as |
||||
// if the field is a repeated message field. |
||||
// |
||||
// NOTE: Do not set the option in .proto files. Always use the maps syntax |
||||
// instead. The option should only be implicitly set by the proto compiler |
||||
// parser. |
||||
optional bool map_entry = 7; |
||||
|
||||
reserved 8; // javalite_serializable |
||||
reserved 9; // javanano_as_lite |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
message FieldOptions { |
||||
// The ctype option instructs the C++ code generator to use a different |
||||
// representation of the field than it normally would. See the specific |
||||
// options below. This option is not yet implemented in the open source |
||||
// release -- sorry, we'll try to include it in a future version! |
||||
optional CType ctype = 1 [default = STRING]; |
||||
enum CType { |
||||
// Default mode. |
||||
STRING = 0; |
||||
|
||||
CORD = 1; |
||||
|
||||
STRING_PIECE = 2; |
||||
} |
||||
// The packed option can be enabled for repeated primitive fields to enable |
||||
// a more efficient representation on the wire. Rather than repeatedly |
||||
// writing the tag and type for each element, the entire array is encoded as |
||||
// a single length-delimited blob. In proto3, only explicitly setting it to |
||||
// false will avoid using packed encoding. |
||||
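// For example (illustrative only): |
||||
//   repeated int32 samples = 4 [packed = true]; |
||||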
optional bool packed = 2; |
||||
|
||||
// The jstype option determines the JavaScript type used for values of the |
||||
// field. The option is permitted only for 64 bit integral and fixed types |
||||
// (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING |
||||
// is represented as a JavaScript string, which avoids loss of precision that |
||||
// can happen when a large value is converted to a floating point JavaScript number. |
||||
// Specifying JS_NUMBER for the jstype causes the generated JavaScript code to |
||||
// use the JavaScript "number" type. The behavior of the default option |
||||
// JS_NORMAL is implementation dependent. |
||||
// |
||||
// This option is an enum to permit additional types to be added, e.g. |
||||
// goog.math.Integer. |
||||
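// For example (illustrative only): |
||||
//   optional uint64 user_id = 1 [jstype = JS_STRING]; |
||||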
optional JSType jstype = 6 [default = JS_NORMAL]; |
||||
enum JSType { |
||||
// Use the default type. |
||||
JS_NORMAL = 0; |
||||
|
||||
// Use JavaScript strings. |
||||
JS_STRING = 1; |
||||
|
||||
// Use JavaScript numbers. |
||||
JS_NUMBER = 2; |
||||
} |
||||
|
||||
// Should this field be parsed lazily? Lazy applies only to message-type |
||||
// fields. It means that when the outer message is initially parsed, the |
||||
// inner message's contents will not be parsed but instead stored in encoded |
||||
// form. The inner message will actually be parsed when it is first accessed. |
||||
// |
||||
// This is only a hint. Implementations are free to choose whether to use |
||||
// eager or lazy parsing regardless of the value of this option. However, |
||||
// setting this option true suggests that the protocol author believes that |
||||
// using lazy parsing on this field is worth the additional bookkeeping |
||||
// overhead typically needed to implement it. |
||||
// |
||||
// This option does not affect the public interface of any generated code; |
||||
// all method signatures remain the same. Furthermore, thread-safety of the |
||||
// interface is not affected by this option; const methods remain safe to |
||||
// call from multiple threads concurrently, while non-const methods continue |
||||
// to require exclusive access. |
||||
// |
||||
// |
||||
// Note that implementations may choose not to check required fields within |
||||
// a lazy sub-message. That is, calling IsInitialized() on the outer message |
||||
// may return true even if the inner message has missing required fields. |
||||
// This is necessary because otherwise the inner message would have to be |
||||
// parsed in order to perform the check, defeating the purpose of lazy |
||||
// parsing. An implementation which chooses not to check required fields |
||||
// must be consistent about it. That is, for any particular sub-message, the |
||||
// implementation must either *always* check its required fields, or *never* |
||||
// check its required fields, regardless of whether or not the message has |
||||
// been parsed. |
||||
optional bool lazy = 5 [default = false]; |
||||
|
||||
// Is this field deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for accessors, or it will be completely ignored; in the very least, this |
||||
// is a formalization for deprecating fields. |
||||
optional bool deprecated = 3 [default = false]; |
||||
|
||||
// For Google-internal migration only. Do not use. |
||||
optional bool weak = 10 [default = false]; |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
|
||||
reserved 4; // removed jtype |
||||
} |
||||
|
||||
message OneofOptions { |
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
message EnumOptions { |
||||
// Set this option to true to allow mapping different tag names to the same |
||||
// value. |
||||
optional bool allow_alias = 2; |
||||
|
||||
// Is this enum deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for the enum, or it will be completely ignored; in the very least, this |
||||
// is a formalization for deprecating enums. |
||||
optional bool deprecated = 3 [default = false]; |
||||
|
||||
reserved 5; // javanano_as_lite |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
message EnumValueOptions { |
||||
// Is this enum value deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for the enum value, or it will be completely ignored; in the very least, |
||||
// this is a formalization for deprecating enum values. |
||||
optional bool deprecated = 1 [default = false]; |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
message ServiceOptions { |
||||
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC |
||||
// framework. We apologize for hoarding these numbers to ourselves, but |
||||
// we were already using them long before we decided to release Protocol |
||||
// Buffers. |
||||
|
||||
// Is this service deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for the service, or it will be completely ignored; in the very least, |
||||
// this is a formalization for deprecating services. |
||||
optional bool deprecated = 33 [default = false]; |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
message MethodOptions { |
||||
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC |
||||
// framework. We apologize for hoarding these numbers to ourselves, but |
||||
// we were already using them long before we decided to release Protocol |
||||
// Buffers. |
||||
|
||||
// Is this method deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for the method, or it will be completely ignored; in the very least, |
||||
// this is a formalization for deprecating methods. |
||||
optional bool deprecated = 33 [default = false]; |
||||
|
||||
// Is this method side-effect-free (or safe in HTTP parlance), or idempotent, |
||||
// or neither? HTTP based RPC implementation may choose GET verb for safe |
||||
// methods, and PUT verb for idempotent methods instead of the default POST. |
||||
enum IdempotencyLevel { |
||||
IDEMPOTENCY_UNKNOWN = 0; |
||||
NO_SIDE_EFFECTS = 1; // implies idempotent |
||||
IDEMPOTENT = 2; // idempotent, but may have side effects |
||||
} |
||||
optional IdempotencyLevel idempotency_level = 34 |
||||
[default = IDEMPOTENCY_UNKNOWN]; |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
// A message representing an option the parser does not recognize. This only |
||||
// appears in options protos created by the compiler::Parser class. |
||||
// DescriptorPool resolves these when building Descriptor objects. Therefore, |
||||
// options protos in descriptor objects (e.g. returned by Descriptor::options(), |
||||
// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions |
||||
// in them. |
||||
message UninterpretedOption { |
||||
// The name of the uninterpreted option. Each string represents a segment in |
||||
// a dot-separated name. is_extension is true iff a segment represents an |
||||
// extension (denoted with parentheses in options specs in .proto files). |
||||
// E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents |
||||
// "foo.(bar.baz).qux". |
||||
message NamePart { |
||||
optional string name_part = 1; |
||||
optional bool is_extension = 2; |
||||
} |
||||
repeated NamePart name = 2; |
||||
|
||||
// The value of the uninterpreted option, in whatever type the tokenizer |
||||
// identified it as during parsing. Exactly one of these should be set. |
||||
optional string identifier_value = 3; |
||||
optional uint64 positive_int_value = 4; |
||||
optional int64 negative_int_value = 5; |
||||
optional double double_value = 6; |
||||
optional bytes string_value = 7; |
||||
optional string aggregate_value = 8; |
||||
} |
||||
|
||||
// =================================================================== |
||||
// Optional source code info |
||||
|
||||
// Encapsulates information about the original source file from which a |
||||
// FileDescriptorProto was generated. |
||||
message SourceCodeInfo { |
||||
// A Location identifies a piece of source code in a .proto file which |
||||
// corresponds to a particular definition. This information is intended |
||||
// to be useful to IDEs, code indexers, documentation generators, and similar |
||||
// tools. |
||||
// |
||||
// For example, say we have a file like: |
||||
// message Foo { |
||||
// optional string foo = 1; |
||||
// } |
||||
// Let's look at just the field definition: |
||||
// optional string foo = 1; |
||||
// ^ ^^ ^^ ^ ^^^ |
||||
// a bc de f ghi |
||||
// We have the following locations: |
||||
// span path represents |
||||
// [a,i) [ 4, 0, 2, 0 ] The whole field definition. |
||||
// [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). |
||||
// [c,d) [ 4, 0, 2, 0, 5 ] The type (string). |
||||
// [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). |
||||
// [g,h) [ 4, 0, 2, 0, 3 ] The number (1). |
||||
// |
||||
// Notes: |
||||
// - A location may refer to a repeated field itself (i.e. not to any |
||||
// particular index within it). This is used whenever a set of elements are |
||||
// logically enclosed in a single code segment. For example, an entire |
||||
// extend block (possibly containing multiple extension definitions) will |
||||
// have an outer location whose path refers to the "extensions" repeated |
||||
// field without an index. |
||||
// - Multiple locations may have the same path. This happens when a single |
||||
// logical declaration is spread out across multiple places. The most |
||||
// obvious example is the "extend" block again -- there may be multiple |
||||
// extend blocks in the same scope, each of which will have the same path. |
||||
// - A location's span is not always a subset of its parent's span. For |
||||
// example, the "extendee" of an extension declaration appears at the |
||||
// beginning of the "extend" block and is shared by all extensions within |
||||
// the block. |
||||
// - Just because a location's span is a subset of some other location's span |
||||
// does not mean that it is a descendant. For example, a "group" defines |
||||
// both a type and a field in a single declaration. Thus, the locations |
||||
// corresponding to the type and field and their components will overlap. |
||||
// - Code which tries to interpret locations should probably be designed to |
||||
// ignore those that it doesn't understand, as more types of locations could |
||||
// be recorded in the future. |
||||
repeated Location location = 1; |
||||
message Location { |
||||
// Identifies which part of the FileDescriptorProto was defined at this |
||||
// location. |
||||
// |
||||
// Each element is a field number or an index. They form a path from |
||||
// the root FileDescriptorProto to the place where the definition occurs. For |
||||
// example, this path: |
||||
// [ 4, 3, 2, 7, 1 ] |
||||
// refers to: |
||||
// file.message_type(3) // 4, 3 |
||||
// .field(7) // 2, 7 |
||||
// .name() // 1 |
||||
// This is because FileDescriptorProto.message_type has field number 4: |
||||
// repeated DescriptorProto message_type = 4; |
||||
// and DescriptorProto.field has field number 2: |
||||
// repeated FieldDescriptorProto field = 2; |
||||
// and FieldDescriptorProto.name has field number 1: |
||||
// optional string name = 1; |
||||
// |
||||
// Thus, the above path gives the location of a field name. If we removed |
||||
// the last element: |
||||
// [ 4, 3, 2, 7 ] |
||||
// this path refers to the whole field declaration (from the beginning |
||||
// of the label to the terminating semicolon). |
||||
repeated int32 path = 1 [packed = true]; |
||||
|
||||
// Always has exactly three or four elements: start line, start column, |
||||
// end line (optional, otherwise assumed same as start line), end column. |
||||
// These are packed into a single field for efficiency. Note that line |
||||
// and column numbers are zero-based -- typically you will want to add |
||||
// 1 to each before displaying to a user. |
||||
repeated int32 span = 2 [packed = true]; |
||||
|
||||
// If this SourceCodeInfo represents a complete declaration, these are any |
||||
// comments appearing before and after the declaration which appear to be |
||||
// attached to the declaration. |
||||
// |
||||
// A series of line comments appearing on consecutive lines, with no other |
||||
// tokens appearing on those lines, will be treated as a single comment. |
||||
// |
||||
// leading_detached_comments will keep paragraphs of comments that appear |
||||
// before (but not connected to) the current element. Each paragraph, |
||||
// separated by empty lines, will be one comment element in the repeated |
||||
// field. |
||||
// |
||||
// Only the comment content is provided; comment markers (e.g. //) are |
||||
// stripped out. For block comments, leading whitespace and an asterisk |
||||
// will be stripped from the beginning of each line other than the first. |
||||
// Newlines are included in the output. |
||||
// |
||||
// Examples: |
||||
// |
||||
// optional int32 foo = 1; // Comment attached to foo. |
||||
// // Comment attached to bar. |
||||
// optional int32 bar = 2; |
||||
// |
||||
// optional string baz = 3; |
||||
// // Comment attached to baz. |
||||
// // Another line attached to baz. |
||||
// |
||||
// // Comment attached to qux. |
||||
// // |
||||
// // Another line attached to qux. |
||||
// optional double qux = 4; |
||||
// |
||||
// // Detached comment for corge. This is not leading or trailing comments |
||||
// // to qux or corge because there are blank lines separating it from |
||||
// // both. |
||||
// |
||||
// // Detached comment for corge paragraph 2. |
||||
// |
||||
// optional string corge = 5; |
||||
// /* Block comment attached |
||||
// * to corge. Leading asterisks |
||||
// * will be removed. */ |
||||
// /* Block comment attached to |
||||
// * grault. */ |
||||
// optional int32 grault = 6; |
||||
// |
||||
// // ignored detached comments. |
||||
optional string leading_comments = 3; |
||||
optional string trailing_comments = 4; |
||||
repeated string leading_detached_comments = 6; |
||||
} |
||||
} |
||||
|
||||
// Describes the relationship between generated code and its original source |
||||
// file. A GeneratedCodeInfo message is associated with only one generated |
||||
// source file, but may contain references to different source .proto files. |
||||
message GeneratedCodeInfo { |
||||
// An Annotation connects some span of text in generated code to an element |
||||
// of its generating .proto file. |
||||
repeated Annotation annotation = 1; |
||||
message Annotation { |
||||
// Identifies the element in the original source .proto file. This field |
||||
// is formatted the same as SourceCodeInfo.Location.path. |
||||
repeated int32 path = 1 [packed = true]; |
||||
|
||||
// Identifies the filesystem path to the original source .proto. |
||||
optional string source_file = 2; |
||||
|
||||
// Identifies the starting offset in bytes in the generated code |
||||
// that relates to the identified object. |
||||
optional int32 begin = 3; |
||||
|
||||
// Identifies the ending offset in bytes in the generated code that |
||||
// relates to the identified offset. The end offset should be one past |
||||
// the last relevant byte (so the length of the text = end - begin). |
||||
optional int32 end = 4; |
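// Because the range is half-open, a plain slice recovers the annotated
// text; for example (variable names are hypothetical):
//
//   annotated = generated_source[annotation.begin:annotation.end]
//   assert len(annotated) == annotation.end - annotation.begin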
||||
} |
||||
} |
@ -0,0 +1,867 @@ |
||||
// Protocol Buffers - Google's data interchange format |
||||
// Copyright 2008 Google Inc. All rights reserved. |
||||
// |
||||
// Use of this source code is governed by a BSD-style |
||||
// license that can be found in the LICENSE file or at |
||||
// https://developers.google.com/open-source/licenses/bsd |
||||
|
||||
// Author: kenton@google.com (Kenton Varda) |
||||
// Based on original Protocol Buffers design by |
||||
// Sanjay Ghemawat, Jeff Dean, and others. |
||||
// |
||||
// The messages in this file describe the definitions found in .proto files. |
||||
// A valid .proto file can be translated directly to a FileDescriptorProto |
||||
// without any other information (e.g. without reading its imports). |
||||
|
||||
syntax = "proto2"; |
||||
|
||||
package upb_benchmark.sv; |
||||
|
||||
option go_package = "google.golang.org/protobuf/types/descriptorpb"; |
||||
option java_package = "com.google.protobuf"; |
||||
option java_outer_classname = "DescriptorProtos"; |
||||
option csharp_namespace = "Google.Protobuf.Reflection"; |
||||
option objc_class_prefix = "GPB"; |
||||
option cc_enable_arenas = true; |
||||
|
||||
// The protocol compiler can output a FileDescriptorSet containing the .proto |
||||
// files it parses. |
||||
message FileDescriptorSet { |
||||
repeated FileDescriptorProto file = 1; |
||||
} |
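// For example, a set written by `protoc --descriptor_set_out=out.pb x.proto`
// (file names here are placeholders) can be read back with the stock Python
// runtime:
//
//   from google.protobuf import descriptor_pb2
//
//   with open("out.pb", "rb") as f:
//       fds = descriptor_pb2.FileDescriptorSet.FromString(f.read())
//   for fd in fds.file:
//       print(fd.name, len(fd.message_type), "top-level messages")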
||||
|
||||
// Describes a complete .proto file. |
||||
message FileDescriptorProto { |
||||
optional string name = 1 |
||||
[ctype = STRING_PIECE]; // file name, relative to root of source tree |
||||
optional string package = 2 |
||||
[ctype = STRING_PIECE]; // e.g. "foo", "foo.bar", etc. |
||||
|
||||
// Names of files imported by this file. |
||||
repeated string dependency = 3 [ctype = STRING_PIECE]; |
||||
// Indexes of the public imported files in the dependency list above. |
||||
repeated int32 public_dependency = 10; |
||||
// Indexes of the weak imported files in the dependency list. |
||||
// For Google-internal migration only. Do not use. |
||||
repeated int32 weak_dependency = 11; |
||||
|
||||
// All top-level definitions in this file. |
||||
repeated DescriptorProto message_type = 4; |
||||
repeated EnumDescriptorProto enum_type = 5; |
||||
repeated ServiceDescriptorProto service = 6; |
||||
repeated FieldDescriptorProto extension = 7; |
||||
|
||||
optional FileOptions options = 8; |
||||
|
||||
// This field contains optional information about the original source code. |
||||
// You may safely remove this entire field without harming runtime |
||||
// functionality of the descriptors -- the information is needed only by |
||||
// development tools. |
||||
optional SourceCodeInfo source_code_info = 9; |
||||
|
||||
// The syntax of the proto file. |
||||
// The supported values are "proto2" and "proto3". |
||||
optional string syntax = 12 [ctype = STRING_PIECE]; |
||||
} |
||||
|
||||
// Describes a message type. |
||||
message DescriptorProto { |
||||
optional string name = 1 [ctype = STRING_PIECE]; |
||||
|
||||
repeated FieldDescriptorProto field = 2; |
||||
repeated FieldDescriptorProto extension = 6; |
||||
|
||||
repeated DescriptorProto nested_type = 3; |
||||
repeated EnumDescriptorProto enum_type = 4; |
||||
|
||||
message ExtensionRange { |
||||
optional int32 start = 1; // Inclusive. |
||||
optional int32 end = 2; // Exclusive. |
||||
|
||||
optional ExtensionRangeOptions options = 3; |
||||
} |
||||
repeated ExtensionRange extension_range = 5; |
||||
|
||||
repeated OneofDescriptorProto oneof_decl = 8; |
||||
|
||||
optional MessageOptions options = 7; |
||||
|
||||
// Range of reserved tag numbers. Reserved tag numbers may not be used by |
||||
// fields or extension ranges in the same message. Reserved ranges may |
||||
// not overlap. |
||||
message ReservedRange { |
||||
optional int32 start = 1; // Inclusive. |
||||
optional int32 end = 2; // Exclusive. |
||||
} |
||||
repeated ReservedRange reserved_range = 9; |
||||
// Reserved field names, which may not be used by fields in the same message. |
||||
// A given name may only be reserved once. |
||||
repeated string reserved_name = 10 [ctype = STRING_PIECE]; |
||||
} |
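// A small sketch (hypothetical helper) of checking a candidate field number
// against the half-open [start, end) ReservedRange entries above:
//
//   def is_reserved(message_proto, number):
//       return any(r.start <= number < r.end
//                  for r in message_proto.reserved_range)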
||||
|
||||
message ExtensionRangeOptions { |
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
// Describes a field within a message. |
||||
message FieldDescriptorProto { |
||||
enum Type { |
||||
// 0 is reserved for errors. |
||||
// Order is weird for historical reasons. |
||||
TYPE_DOUBLE = 1; |
||||
TYPE_FLOAT = 2; |
||||
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if |
||||
// negative values are likely. |
||||
TYPE_INT64 = 3; |
||||
TYPE_UINT64 = 4; |
||||
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if |
||||
// negative values are likely. |
||||
TYPE_INT32 = 5; |
||||
TYPE_FIXED64 = 6; |
||||
TYPE_FIXED32 = 7; |
||||
TYPE_BOOL = 8; |
||||
TYPE_STRING = 9; |
||||
// Tag-delimited aggregate. |
||||
// Group type is deprecated and not supported in proto3. However, Proto3 |
||||
// implementations should still be able to parse the group wire format and |
||||
// treat group fields as unknown fields. |
||||
TYPE_GROUP = 10; |
||||
TYPE_MESSAGE = 11; // Length-delimited aggregate. |
||||
|
||||
// New in version 2. |
||||
TYPE_BYTES = 12; |
||||
TYPE_UINT32 = 13; |
||||
TYPE_ENUM = 14; |
||||
TYPE_SFIXED32 = 15; |
||||
TYPE_SFIXED64 = 16; |
||||
TYPE_SINT32 = 17; // Uses ZigZag encoding. |
||||
TYPE_SINT64 = 18; // Uses ZigZag encoding. |
||||
} |
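// ZigZag interleaves negative values with positive ones so small magnitudes
// stay small on the wire; a sketch of the 64-bit mapping (Python's
// arbitrary-precision ints make the arithmetic shift exact):
//
//   def zigzag64(n):
//       return (n << 1) ^ (n >> 63)   # -1 -> 1, 1 -> 2, -2 -> 3
//
// A plain TYPE_INT64 spends ten varint bytes on -1, while zigzag64(-1) == 1
// fits in a single byte.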
||||
|
||||
enum Label { |
||||
// 0 is reserved for errors |
||||
LABEL_OPTIONAL = 1; |
||||
LABEL_REQUIRED = 2; |
||||
LABEL_REPEATED = 3; |
||||
} |
||||
|
||||
optional string name = 1 [ctype = STRING_PIECE]; |
||||
optional int32 number = 3; |
||||
optional Label label = 4; |
||||
|
||||
// If type_name is set, this need not be set. If both this and type_name |
||||
// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. |
||||
optional Type type = 5; |
||||
|
||||
// For message and enum types, this is the name of the type. If the name |
||||
// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping |
||||
// rules are used to find the type (i.e. first the nested types within this |
||||
// message are searched, then within the parent, on up to the root |
||||
// namespace). |
||||
optional string type_name = 6 [ctype = STRING_PIECE]; |
||||
|
||||
// For extensions, this is the name of the type being extended. It is |
||||
// resolved in the same manner as type_name. |
||||
optional string extendee = 2 [ctype = STRING_PIECE]; |
||||
|
||||
// For numeric types, contains the original text representation of the value. |
||||
// For booleans, "true" or "false". |
||||
// For strings, contains the default text contents (not escaped in any way). |
||||
// For bytes, contains the C escaped value. All bytes >= 128 are escaped. |
||||
// TODO: Base-64 encode? |
||||
optional string default_value = 7 [ctype = STRING_PIECE]; |
||||
|
||||
// If set, gives the index of a oneof in the containing type's oneof_decl |
||||
// list. This field is a member of that oneof. |
||||
optional int32 oneof_index = 9; |
||||
|
||||
// JSON name of this field. The value is set by protocol compiler. If the |
||||
// user has set a "json_name" option on this field, that option's value |
||||
// will be used. Otherwise, it's deduced from the field's name by converting |
||||
// it to camelCase. |
||||
optional string json_name = 10 [ctype = STRING_PIECE]; |
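// A sketch of that deduction (the real compiler's corner cases may differ;
// to_json_name is a hypothetical helper):
//
//   def to_json_name(field_name):
//       head, *rest = field_name.split("_")
//       return head + "".join(p.capitalize() for p in rest)
//
//   # to_json_name("foo_bar_baz") == "fooBarBaz"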
||||
|
||||
optional FieldOptions options = 8; |
||||
|
||||
// If true, this is a proto3 "optional". When a proto3 field is optional, it |
||||
// tracks presence regardless of field type. |
||||
// |
||||
// When proto3_optional is true, this field must belong to a oneof to |
||||
// signal to old proto3 clients that presence is tracked for this field. This |
||||
// oneof is known as a "synthetic" oneof, and this field must be its sole |
||||
// member (each proto3 optional field gets its own synthetic oneof). Synthetic |
||||
// oneofs exist in the descriptor only, and do not generate any API. Synthetic |
||||
// oneofs must be ordered after all "real" oneofs. |
||||
// |
||||
// For message fields, proto3_optional doesn't create any semantic change, |
||||
// since non-repeated message fields always track presence. However it still |
||||
// indicates the semantic detail of whether the user wrote "optional" or not. |
||||
// This can be useful for round-tripping the .proto file. For consistency we |
||||
// give message fields a synthetic oneof also, even though it is not required |
||||
// to track presence. This is especially important because the parser can't |
||||
// tell if a field is a message or an enum, so it must always create a |
||||
// synthetic oneof. |
||||
// |
||||
// Proto2 optional fields do not set this flag, because they already indicate |
||||
// optional with `LABEL_OPTIONAL`. |
||||
optional bool proto3_optional = 17; |
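// Given those rules, a tool can recover the synthetic oneof's name from the
// descriptor alone (sketch; synthetic_oneof_name is a hypothetical helper):
//
//   def synthetic_oneof_name(message_proto, field_proto):
//       if field_proto.proto3_optional and field_proto.HasField("oneof_index"):
//           return message_proto.oneof_decl[field_proto.oneof_index].name
//       return None  # not a proto3 optional field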
||||
} |
||||
|
||||
// Describes a oneof. |
||||
message OneofDescriptorProto { |
||||
optional string name = 1 [ctype = STRING_PIECE]; |
||||
optional OneofOptions options = 2; |
||||
} |
||||
|
||||
// Describes an enum type. |
||||
message EnumDescriptorProto { |
||||
optional string name = 1 [ctype = STRING_PIECE]; |
||||
|
||||
repeated EnumValueDescriptorProto value = 2; |
||||
|
||||
optional EnumOptions options = 3; |
||||
|
||||
// Range of reserved numeric values. Reserved values may not be used by |
||||
// entries in the same enum. Reserved ranges may not overlap. |
||||
// |
||||
// Note that this is distinct from DescriptorProto.ReservedRange in that it |
||||
// is inclusive such that it can appropriately represent the entire int32 |
||||
// domain. |
||||
message EnumReservedRange { |
||||
optional int32 start = 1; // Inclusive. |
||||
optional int32 end = 2; // Inclusive. |
||||
} |
||||
|
||||
// Range of reserved numeric values. Reserved numeric values may not be used |
||||
// by enum values in the same enum declaration. Reserved ranges may not |
||||
// overlap. |
||||
repeated EnumReservedRange reserved_range = 4; |
||||
|
||||
// Reserved enum value names, which may not be reused. A given name may only |
||||
// be reserved once. |
||||
repeated string reserved_name = 5 [ctype = STRING_PIECE]; |
||||
} |
||||
|
||||
// Describes a value within an enum. |
||||
message EnumValueDescriptorProto { |
||||
optional string name = 1 [ctype = STRING_PIECE]; |
||||
optional int32 number = 2; |
||||
|
||||
optional EnumValueOptions options = 3; |
||||
} |
||||
|
||||
// Describes a service. |
||||
message ServiceDescriptorProto { |
||||
optional string name = 1 [ctype = STRING_PIECE]; |
||||
repeated MethodDescriptorProto method = 2; |
||||
|
||||
optional ServiceOptions options = 3; |
||||
} |
||||
|
||||
// Describes a method of a service. |
||||
message MethodDescriptorProto { |
||||
optional string name = 1 [ctype = STRING_PIECE]; |
||||
|
||||
// Input and output type names. These are resolved in the same way as |
||||
// FieldDescriptorProto.type_name, but must refer to a message type. |
||||
optional string input_type = 2 [ctype = STRING_PIECE]; |
||||
optional string output_type = 3 [ctype = STRING_PIECE]; |
||||
|
||||
optional MethodOptions options = 4; |
||||
|
||||
// Identifies if client streams multiple client messages |
||||
optional bool client_streaming = 5 [default = false]; |
||||
// Identifies if server streams multiple server messages |
||||
optional bool server_streaming = 6 [default = false]; |
||||
} |
||||
|
||||
// =================================================================== |
||||
// Options |
||||
|
||||
// Each of the definitions above may have "options" attached. These are |
||||
// just annotations which may cause code to be generated slightly differently |
||||
// or may contain hints for code that manipulates protocol messages. |
||||
// |
||||
// Clients may define custom options as extensions of the *Options messages. |
||||
// These extensions may not yet be known at parsing time, so the parser cannot |
||||
// store the values in them. Instead it stores them in a field in the *Options |
||||
// message called uninterpreted_option. This field must have the same name |
||||
// across all *Options messages. We then use this field to populate the |
||||
// extensions when we build a descriptor, at which point all protos have been |
||||
// parsed and so all extensions are known. |
||||
// |
||||
// Extension numbers for custom options may be chosen as follows: |
||||
// * For options which will only be used within a single application or |
||||
// organization, or for experimental options, use field numbers 50000 |
||||
// through 99999. It is up to you to ensure that you do not use the |
||||
// same number for multiple options. |
||||
// * For options which will be published and used publicly by multiple |
||||
// independent entities, e-mail protobuf-global-extension-registry@google.com |
||||
// to reserve extension numbers. Simply provide your project name (e.g. |
||||
// Objective-C plugin) and your project website (if available) -- there's no |
||||
// need to explain how you intend to use them. Usually you only need one |
||||
// extension number. You can declare multiple options with only one extension |
||||
// number by putting them in a sub-message. See the Custom Options section of |
||||
// the docs for examples: |
||||
// https://developers.google.com/protocol-buffers/docs/proto#options |
||||
// If this turns out to be popular, a web service will be set up |
||||
// to automatically assign option numbers. |
||||
|
||||
message FileOptions { |
||||
// Sets the Java package where classes generated from this .proto will be |
||||
// placed. By default, the proto package is used, but this is often |
||||
// inappropriate because proto packages do not normally start with backwards |
||||
// domain names. |
||||
optional string java_package = 1 [ctype = STRING_PIECE]; |
||||
|
||||
// If set, all the classes from the .proto file are wrapped in a single |
||||
// outer class with the given name. This applies to both Proto1 |
||||
// (equivalent to the old "--one_java_file" option) and Proto2 (where |
||||
// a .proto always translates to a single class, but you may want to |
||||
// explicitly choose the class name). |
||||
optional string java_outer_classname = 8 [ctype = STRING_PIECE]; |
||||
|
||||
// If set true, then the Java code generator will generate a separate .java |
||||
// file for each top-level message, enum, and service defined in the .proto |
||||
// file. Thus, these types will *not* be nested inside the outer class |
||||
// named by java_outer_classname. However, the outer class will still be |
||||
// generated to contain the file's getDescriptor() method as well as any |
||||
// top-level extensions defined in the file. |
||||
optional bool java_multiple_files = 10 [default = false]; |
||||
|
||||
// This option does nothing. |
||||
optional bool java_generate_equals_and_hash = 20 [deprecated = true]; |
||||
|
||||
// If set true, then the Java code generator will generate code that |
||||
// throws an exception whenever an attempt is made to assign a non-UTF-8 |
||||
// byte sequence to a string field. |
||||
// Message reflection will do the same. |
||||
// However, an extension field still accepts non-UTF-8 byte sequences. |
||||
// This option has no effect when used with the lite runtime. |
||||
optional bool java_string_check_utf8 = 27 [default = false]; |
||||
|
||||
// Generated classes can be optimized for speed or code size. |
||||
enum OptimizeMode { |
||||
SPEED = 1; // Generate complete code for parsing, serialization, |
||||
// etc. |
||||
CODE_SIZE = 2; // Use ReflectionOps to implement these methods. |
||||
LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. |
||||
} |
||||
optional OptimizeMode optimize_for = 9 [default = SPEED]; |
||||
|
||||
// Sets the Go package where structs generated from this .proto will be |
||||
// placed. If omitted, the Go package will be derived from the following: |
||||
// - The basename of the package import path, if provided. |
||||
// - Otherwise, the package statement in the .proto file, if present. |
||||
// - Otherwise, the basename of the .proto file, without extension. |
||||
optional string go_package = 11 [ctype = STRING_PIECE]; |
||||
|
||||
// Should generic services be generated in each language? "Generic" services |
||||
// are not specific to any particular RPC system. They are generated by the |
||||
// main code generators in each language (without additional plugins). |
||||
// Generic services were the only kind of service generation supported by |
||||
// early versions of google.protobuf. |
||||
// |
||||
// Generic services are now considered deprecated in favor of using plugins |
||||
// that generate code specific to your particular RPC system. Therefore, |
||||
// these default to false. Old code which depends on generic services should |
||||
// explicitly set them to true. |
||||
optional bool cc_generic_services = 16 [default = false]; |
||||
optional bool java_generic_services = 17 [default = false]; |
||||
optional bool py_generic_services = 18 [default = false]; |
||||
optional bool php_generic_services = 42 [default = false]; |
||||
|
||||
// Is this file deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for everything in the file, or it will be completely ignored; at the very |
||||
// least, this is a formalization for deprecating files. |
||||
optional bool deprecated = 23 [default = false]; |
||||
|
||||
// Enables the use of arenas for the proto messages in this file. This applies |
||||
// only to generated classes for C++. |
||||
optional bool cc_enable_arenas = 31 [default = true]; |
||||
|
||||
// Sets the objective c class prefix which is prepended to all objective c |
||||
// generated classes from this .proto. There is no default. |
||||
optional string objc_class_prefix = 36 [ctype = STRING_PIECE]; |
||||
|
||||
// Namespace for generated classes; defaults to the package. |
||||
optional string csharp_namespace = 37 [ctype = STRING_PIECE]; |
||||
|
||||
// By default Swift generators will take the proto package and CamelCase it |
||||
// replacing '.' with underscore and use that to prefix the types/symbols |
||||
// defined. When this option is provided, they will use this value instead |
||||
// to prefix the types/symbols defined. |
||||
optional string swift_prefix = 39 [ctype = STRING_PIECE]; |
||||
|
||||
// Sets the php class prefix which is prepended to all php generated classes |
||||
// from this .proto. Default is empty. |
||||
optional string php_class_prefix = 40 [ctype = STRING_PIECE]; |
||||
|
||||
// Use this option to change the namespace of php generated classes. Default |
||||
// is empty. When this option is empty, the package name will be used for |
||||
// determining the namespace. |
||||
optional string php_namespace = 41 [ctype = STRING_PIECE]; |
||||
|
||||
// Use this option to change the namespace of php generated metadata classes. |
||||
// Default is empty. When this option is empty, the proto file name will be |
||||
// used for determining the namespace. |
||||
optional string php_metadata_namespace = 44 [ctype = STRING_PIECE]; |
||||
|
||||
// Use this option to change the package of ruby generated classes. Default |
||||
// is empty. When this option is not set, the package name will be used for |
||||
// determining the ruby package. |
||||
optional string ruby_package = 45 [ctype = STRING_PIECE]; |
||||
|
||||
// The parser stores options it doesn't recognize here. |
||||
// See the documentation for the "Options" section above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. |
||||
// See the documentation for the "Options" section above. |
||||
extensions 1000 to max; |
||||
|
||||
reserved 38; |
||||
} |
||||
|
||||
message MessageOptions { |
||||
// Set true to use the old proto1 MessageSet wire format for extensions. |
||||
// This is provided for backwards-compatibility with the MessageSet wire |
||||
// format. You should not use this for any other reason: It's less |
||||
// efficient, has fewer features, and is more complicated. |
||||
// |
||||
// The message must be defined exactly as follows: |
||||
// message Foo { |
||||
// option message_set_wire_format = true; |
||||
// extensions 4 to max; |
||||
// } |
||||
// Note that the message cannot have any defined fields; MessageSets only |
||||
// have extensions. |
||||
// |
||||
// All extensions of your type must be singular messages; e.g. they cannot |
||||
// be int32s, enums, or repeated messages. |
||||
// |
||||
// Because this is an option, the above two restrictions are not enforced by |
||||
// the protocol compiler. |
||||
optional bool message_set_wire_format = 1 [default = false]; |
||||
|
||||
// Disables the generation of the standard "descriptor()" accessor, which can |
||||
// conflict with a field of the same name. This is meant to make migration |
||||
// from proto1 easier; new code should avoid fields named "descriptor". |
||||
optional bool no_standard_descriptor_accessor = 2 [default = false]; |
||||
|
||||
// Is this message deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for the message, or it will be completely ignored; in the very least, |
||||
// this is a formalization for deprecating messages. |
||||
optional bool deprecated = 3 [default = false]; |
||||
|
||||
// Whether the message is an automatically generated map entry type for the |
||||
// maps field. |
||||
// |
||||
// For maps fields: |
||||
// map<KeyType, ValueType> map_field = 1; |
||||
// The parsed descriptor looks like: |
||||
// message MapFieldEntry { |
||||
// option map_entry = true; |
||||
// optional KeyType key = 1; |
||||
// optional ValueType value = 2; |
||||
// } |
||||
// repeated MapFieldEntry map_field = 1; |
||||
// |
||||
// Implementations may choose not to generate the map_entry=true message, but |
||||
// use a native map in the target language to hold the keys and values. |
||||
// The reflection APIs in such implementations still need to work as |
||||
// if the field is a repeated message field. |
||||
// |
||||
// NOTE: Do not set the option in .proto files. Always use the maps syntax |
||||
// instead. The option should only be implicitly set by the proto compiler |
||||
// parser. |
||||
optional bool map_entry = 7; |
||||
|
||||
reserved 8; // javalite_serializable |
||||
reserved 9; // javanano_as_lite |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
message FieldOptions { |
||||
// The ctype option instructs the C++ code generator to use a different |
||||
// representation of the field than it normally would. See the specific |
||||
// options below. This option is not yet implemented in the open source |
||||
// release -- sorry, we'll try to include it in a future version! |
||||
optional CType ctype = 1 [default = STRING]; |
||||
enum CType { |
||||
// Default mode. |
||||
STRING = 0; |
||||
|
||||
CORD = 1; |
||||
|
||||
STRING_PIECE = 2; |
||||
} |
||||
// The packed option can be enabled for repeated primitive fields to enable |
||||
// a more efficient representation on the wire. Rather than repeatedly |
||||
// writing the tag and type for each element, the entire array is encoded as |
||||
// a single length-delimited blob. In proto3, only explicitly setting it to |
||||
// false will avoid using packed encoding. |
||||
optional bool packed = 2; |
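// A back-of-the-envelope sketch of the saving for a repeated varint field
// (the field number and values are made up for illustration):
//
//   def varint_len(n):
//       length = 1
//       while n > 0x7F:
//           n >>= 7
//           length += 1
//       return length
//
//   values = [1, 2, 300]                                 # field number 4
//   unpacked = sum(1 + varint_len(v) for v in values)    # tag per element: 7
//   payload = sum(varint_len(v) for v in values)         # 4
//   packed = 1 + varint_len(payload) + payload           # tag + len + data: 6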
||||
|
||||
// The jstype option determines the JavaScript type used for values of the |
||||
// field. The option is permitted only for 64 bit integral and fixed types |
||||
// (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING |
||||
// is represented as a JavaScript string, which avoids loss of precision that |
||||
// can happen when a large value is converted to a floating point JavaScript number. |
||||
// Specifying JS_NUMBER for the jstype causes the generated JavaScript code to |
||||
// use the JavaScript "number" type. The behavior of the default option |
||||
// JS_NORMAL is implementation dependent. |
||||
// |
||||
// This option is an enum to permit additional types to be added, e.g. |
||||
// goog.math.Integer. |
||||
optional JSType jstype = 6 [default = JS_NORMAL]; |
||||
enum JSType { |
||||
// Use the default type. |
||||
JS_NORMAL = 0; |
||||
|
||||
// Use JavaScript strings. |
||||
JS_STRING = 1; |
||||
|
||||
// Use JavaScript numbers. |
||||
JS_NUMBER = 2; |
||||
} |
||||
|
||||
// Should this field be parsed lazily? Lazy applies only to message-type |
||||
// fields. It means that when the outer message is initially parsed, the |
||||
// inner message's contents will not be parsed but instead stored in encoded |
||||
// form. The inner message will actually be parsed when it is first accessed. |
||||
// |
||||
// This is only a hint. Implementations are free to choose whether to use |
||||
// eager or lazy parsing regardless of the value of this option. However, |
||||
// setting this option true suggests that the protocol author believes that |
||||
// using lazy parsing on this field is worth the additional bookkeeping |
||||
// overhead typically needed to implement it. |
||||
// |
||||
// This option does not affect the public interface of any generated code; |
||||
// all method signatures remain the same. Furthermore, thread-safety of the |
||||
// interface is not affected by this option; const methods remain safe to |
||||
// call from multiple threads concurrently, while non-const methods continue |
||||
// to require exclusive access. |
||||
// |
||||
// |
||||
// Note that implementations may choose not to check required fields within |
||||
// a lazy sub-message. That is, calling IsInitialized() on the outer message |
||||
// may return true even if the inner message has missing required fields. |
||||
// This is necessary because otherwise the inner message would have to be |
||||
// parsed in order to perform the check, defeating the purpose of lazy |
||||
// parsing. An implementation which chooses not to check required fields |
||||
// must be consistent about it. That is, for any particular sub-message, the |
||||
// implementation must either *always* check its required fields, or *never* |
||||
// check its required fields, regardless of whether or not the message has |
||||
// been parsed. |
||||
optional bool lazy = 5 [default = false]; |
||||
|
||||
// Is this field deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for accessors, or it will be completely ignored; at the very least, this |
||||
// is a formalization for deprecating fields. |
||||
optional bool deprecated = 3 [default = false]; |
||||
|
||||
// For Google-internal migration only. Do not use. |
||||
optional bool weak = 10 [default = false]; |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
|
||||
reserved 4; // removed jtype |
||||
} |
||||
|
||||
message OneofOptions { |
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
message EnumOptions { |
||||
// Set this option to true to allow mapping different tag names to the same |
||||
// value. |
||||
optional bool allow_alias = 2; |
||||
|
||||
// Is this enum deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for the enum, or it will be completely ignored; at the very least, this |
||||
// is a formalization for deprecating enums. |
||||
optional bool deprecated = 3 [default = false]; |
||||
|
||||
reserved 5; // javanano_as_lite |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
message EnumValueOptions { |
||||
// Is this enum value deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for the enum value, or it will be completely ignored; at the very least, |
||||
// this is a formalization for deprecating enum values. |
||||
optional bool deprecated = 1 [default = false]; |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
message ServiceOptions { |
||||
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC |
||||
// framework. We apologize for hoarding these numbers to ourselves, but |
||||
// we were already using them long before we decided to release Protocol |
||||
// Buffers. |
||||
|
||||
// Is this service deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for the service, or it will be completely ignored; at the very least, |
||||
// this is a formalization for deprecating services. |
||||
optional bool deprecated = 33 [default = false]; |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
message MethodOptions { |
||||
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC |
||||
// framework. We apologize for hoarding these numbers to ourselves, but |
||||
// we were already using them long before we decided to release Protocol |
||||
// Buffers. |
||||
|
||||
// Is this method deprecated? |
||||
// Depending on the target platform, this can emit Deprecated annotations |
||||
// for the method, or it will be completely ignored; at the very least, |
||||
// this is a formalization for deprecating methods. |
||||
optional bool deprecated = 33 [default = false]; |
||||
|
||||
// Is this method side-effect-free (or safe in HTTP parlance), or idempotent, |
||||
// or neither? HTTP based RPC implementation may choose GET verb for safe |
||||
// methods, and PUT verb for idempotent methods instead of the default POST. |
||||
enum IdempotencyLevel { |
||||
IDEMPOTENCY_UNKNOWN = 0; |
||||
NO_SIDE_EFFECTS = 1; // implies idempotent |
||||
IDEMPOTENT = 2; // idempotent, but may have side effects |
||||
} |
||||
optional IdempotencyLevel idempotency_level = 34 |
||||
[default = IDEMPOTENCY_UNKNOWN]; |
||||
|
||||
// The parser stores options it doesn't recognize here. See above. |
||||
repeated UninterpretedOption uninterpreted_option = 999; |
||||
|
||||
// Clients can define custom options in extensions of this message. See above. |
||||
extensions 1000 to max; |
||||
} |
||||
|
||||
// A message representing an option the parser does not recognize. This only |
||||
// appears in options protos created by the compiler::Parser class. |
||||
// DescriptorPool resolves these when building Descriptor objects. Therefore, |
||||
// options protos in descriptor objects (e.g. returned by Descriptor::options(), |
||||
// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions |
||||
// in them. |
||||
message UninterpretedOption { |
||||
// The name of the uninterpreted option. Each string represents a segment in |
||||
// a dot-separated name. is_extension is true iff a segment represents an |
||||
// extension (denoted with parentheses in options specs in .proto files). |
||||
// E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents |
||||
// "foo.(bar.baz).qux". |
||||
message NamePart { |
||||
optional string name_part = 1 [ctype = STRING_PIECE]; |
||||
optional bool is_extension = 2; |
||||
} |
||||
repeated NamePart name = 2; |
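// A sketch (hypothetical helper) of rebuilding the dotted name from the
// parts, mirroring the "foo.(bar.baz).qux" example above:
//
//   def option_name(opt):
//       return ".".join("(%s)" % p.name_part if p.is_extension
//                       else p.name_part for p in opt.name)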
||||
|
||||
// The value of the uninterpreted option, in whatever type the tokenizer |
||||
// identified it as during parsing. Exactly one of these should be set. |
||||
optional string identifier_value = 3 [ctype = STRING_PIECE]; |
||||
optional uint64 positive_int_value = 4; |
||||
optional int64 negative_int_value = 5; |
||||
optional double double_value = 6; |
||||
optional bytes string_value = 7; |
||||
optional string aggregate_value = 8 [ctype = STRING_PIECE]; |
||||
} |
||||
|
||||
// =================================================================== |
||||
// Optional source code info |
||||
|
||||
// Encapsulates information about the original source file from which a |
||||
// FileDescriptorProto was generated. |
||||
message SourceCodeInfo { |
||||
// A Location identifies a piece of source code in a .proto file which |
||||
// corresponds to a particular definition. This information is intended |
||||
// to be useful to IDEs, code indexers, documentation generators, and similar |
||||
// tools. |
||||
// |
||||
// For example, say we have a file like: |
||||
// message Foo { |
||||
// optional string foo = 1 [ctype = STRING_PIECE]; |
||||
// } |
||||
// Let's look at just the field definition: |
||||
// optional string foo = 1 [ctype = STRING_PIECE]; |
||||
// ^ ^^ ^^ ^ ^^^ |
||||
// a bc de f ghi |
||||
// We have the following locations: |
||||
// span path represents |
||||
// [a,i) [ 4, 0, 2, 0 ] The whole field definition. |
||||
// [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). |
||||
// [c,d) [ 4, 0, 2, 0, 5 ] The type (string). |
||||
// [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). |
||||
// [g,h) [ 4, 0, 2, 0, 3 ] The number (1). |
||||
// |
||||
// Notes: |
||||
// - A location may refer to a repeated field itself (i.e. not to any |
||||
// particular index within it). This is used whenever a set of elements is |
||||
// logically enclosed in a single code segment. For example, an entire |
||||
// extend block (possibly containing multiple extension definitions) will |
||||
// have an outer location whose path refers to the "extensions" repeated |
||||
// field without an index. |
||||
// - Multiple locations may have the same path. This happens when a single |
||||
// logical declaration is spread out across multiple places. The most |
||||
// obvious example is the "extend" block again -- there may be multiple |
||||
// extend blocks in the same scope, each of which will have the same path. |
||||
// - A location's span is not always a subset of its parent's span. For |
||||
// example, the "extendee" of an extension declaration appears at the |
||||
// beginning of the "extend" block and is shared by all extensions within |
||||
// the block. |
||||
// - Just because a location's span is a subset of some other location's span |
||||
// does not mean that it is a descendant. For example, a "group" defines |
||||
// both a type and a field in a single declaration. Thus, the locations |
||||
// corresponding to the type and field and their components will overlap. |
||||
// - Code which tries to interpret locations should probably be designed to |
||||
// ignore those that it doesn't understand, as more types of locations could |
||||
// be recorded in the future. |
||||
repeated Location location = 1; |
||||
message Location { |
||||
// Identifies which part of the FileDescriptorProto was defined at this |
||||
// location. |
||||
// |
||||
// Each element is a field number or an index. They form a path from |
||||
// the root FileDescriptorProto to the place where the definition occurs. For |
||||
// example, this path: |
||||
// [ 4, 3, 2, 7, 1 ] |
||||
// refers to: |
||||
// file.message_type(3) // 4, 3 |
||||
// .field(7) // 2, 7 |
||||
// .name() // 1 |
||||
// This is because FileDescriptorProto.message_type has field number 4: |
||||
// repeated DescriptorProto message_type = 4; |
||||
// and DescriptorProto.field has field number 2: |
||||
// repeated FieldDescriptorProto field = 2; |
||||
// and FieldDescriptorProto.name has field number 1: |
||||
// optional string name = 1 [ctype = STRING_PIECE]; |
||||
// |
||||
// Thus, the above path gives the location of a field name. If we removed |
||||
// the last element: |
||||
// [ 4, 3, 2, 7 ] |
||||
// this path refers to the whole field declaration (from the beginning |
||||
// of the label to the terminating semicolon). |
||||
repeated int32 path = 1 [packed = true]; |
||||
|
||||
// Always has exactly three or four elements: start line, start column, |
||||
// end line (optional, otherwise assumed same as start line), end column. |
||||
// These are packed into a single field for efficiency. Note that line |
||||
// and column numbers are zero-based -- typically you will want to add |
||||
// 1 to each before displaying to a user. |
||||
repeated int32 span = 2 [packed = true]; |
||||
|
||||
// If this SourceCodeInfo represents a complete declaration, these are any |
||||
// comments appearing before and after the declaration which appear to be |
||||
// attached to the declaration. |
||||
// |
||||
// A series of line comments appearing on consecutive lines, with no other |
||||
// tokens appearing on those lines, will be treated as a single comment. |
||||
// |
||||
// leading_detached_comments will keep paragraphs of comments that appear |
||||
// before (but not connected to) the current element. Each paragraph, |
||||
// separated by empty lines, will be one comment element in the repeated |
||||
// field. |
||||
// |
||||
// Only the comment content is provided; comment markers (e.g. //) are |
||||
// stripped out. For block comments, leading whitespace and an asterisk |
||||
// will be stripped from the beginning of each line other than the first. |
||||
// Newlines are included in the output. |
||||
// |
||||
// Examples: |
||||
// |
||||
// optional int32 foo = 1; // Comment attached to foo. |
||||
// // Comment attached to bar. |
||||
// optional int32 bar = 2; |
||||
// |
||||
// optional string baz = 3 [ctype = STRING_PIECE]; |
||||
// // Comment attached to baz. |
||||
// // Another line attached to baz. |
||||
// |
||||
// // Comment attached to qux. |
||||
// // |
||||
// // Another line attached to qux. |
||||
// optional double qux = 4; |
||||
// |
||||
// // Detached comment for corge. This is not leading or trailing comments |
||||
// // to qux or corge because there are blank lines separating it from |
||||
// // both. |
||||
// |
||||
// // Detached comment for corge paragraph 2. |
||||
// |
||||
// optional string corge = 5 [ctype = STRING_PIECE]; |
||||
// /* Block comment attached |
||||
// * to corge. Leading asterisks |
||||
// * will be removed. */ |
||||
// /* Block comment attached to |
||||
// * grault. */ |
||||
// optional int32 grault = 6; |
||||
// |
||||
// // ignored detached comments. |
||||
optional string leading_comments = 3 [ctype = STRING_PIECE]; |
||||
optional string trailing_comments = 4 [ctype = STRING_PIECE]; |
||||
repeated string leading_detached_comments = 6 [ctype = STRING_PIECE]; |
||||
} |
||||
} |
||||
|
||||
// Describes the relationship between generated code and its original source |
||||
// file. A GeneratedCodeInfo message is associated with only one generated |
||||
// source file, but may contain references to different source .proto files. |
||||
message GeneratedCodeInfo { |
||||
// An Annotation connects some span of text in generated code to an element |
||||
// of its generating .proto file. |
||||
repeated Annotation annotation = 1; |
||||
message Annotation { |
||||
// Identifies the element in the original source .proto file. This field |
||||
// is formatted the same as SourceCodeInfo.Location.path. |
||||
repeated int32 path = 1 [packed = true]; |
||||
|
||||
// Identifies the filesystem path to the original source .proto. |
||||
optional string source_file = 2 [ctype = STRING_PIECE]; |
||||
|
||||
// Identifies the starting offset in bytes in the generated code |
||||
// that relates to the identified object. |
||||
optional int32 begin = 3; |
||||
|
||||
// Identifies the ending offset in bytes in the generated code that |
||||
// relates to the identified offset. The end offset should be one past |
||||
// the last relevant byte (so the length of the text = end - begin). |
||||
optional int32 end = 4; |
||||
} |
||||
} |
@ -0,0 +1,12 @@ |
||||
// Protocol Buffers - Google's data interchange format |
||||
// Copyright 2023 Google LLC. All rights reserved. |
||||
// |
||||
// Use of this source code is governed by a BSD-style |
||||
// license that can be found in the LICENSE file or at |
||||
// https://developers.google.com/open-source/licenses/bsd |
||||
|
||||
syntax = "proto3"; |
||||
|
||||
package upb_benchmark; |
||||
|
||||
message Empty {} |
@ -0,0 +1,69 @@ |
||||
#!/usr/bin/python3 |
||||
# |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2023 Google LLC. All rights reserved. |
||||
# https://developers.google.com/protocol-buffers/ |
||||
# |
||||
# Redistribution and use in source and binary forms, with or without |
||||
# modification, are permitted provided that the following conditions are |
||||
# met: |
||||
# |
||||
# * Redistributions of source code must retain the above copyright |
||||
# notice, this list of conditions and the following disclaimer. |
||||
# * Redistributions in binary form must reproduce the above |
||||
# copyright notice, this list of conditions and the following disclaimer |
||||
# in the documentation and/or other materials provided with the |
||||
# distribution. |
||||
# * Neither the name of Google LLC nor the names of its |
||||
# contributors may be used to endorse or promote products derived from |
||||
# this software without specific prior written permission. |
||||
# |
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
|
||||
import sys |
||||
import re |
||||
|
||||
include = sys.argv[1] |
||||
msg_basename = sys.argv[2] |
||||
count = 1 |
||||
|
||||
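# If the message name ends in digits (e.g. "Message100"), split it into a
# base name and a count so that Message, Message2, ..., MessageN are all
# referenced below; otherwise only the single named message is used.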
m = re.search(r'(.*\D)(\d+)$', sys.argv[2]) |
||||
if m: |
||||
msg_basename = m.group(1) |
||||
count = int(m.group(2)) |
||||
|
||||
print(''' |
||||
#include "{include}" |
||||
|
||||
char buf[1]; |
||||
|
||||
int main() {{ |
||||
'''.format(include=include)) |
||||
|
||||
def RefMessage(name): |
||||
print(''' |
||||
{{ |
||||
{name} proto; |
||||
proto.ParseFromArray(buf, 0); |
||||
proto.SerializePartialToArray(&buf[0], 0); |
||||
}} |
||||
'''.format(name=name)) |
||||
|
||||
RefMessage(msg_basename) |
||||
|
||||
for i in range(2, count + 1): |
||||
RefMessage(msg_basename + str(i)) |
||||
|
||||
print(''' |
||||
return 0; |
||||
}''') |
@ -0,0 +1,123 @@ |
||||
#!/usr/bin/python3 |
||||
# |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2023 Google LLC. All rights reserved. |
||||
# https://developers.google.com/protocol-buffers/ |
||||
# |
||||
# Redistribution and use in source and binary forms, with or without |
||||
# modification, are permitted provided that the following conditions are |
||||
# met: |
||||
# |
||||
# * Redistributions of source code must retain the above copyright |
||||
# notice, this list of conditions and the following disclaimer. |
||||
# * Redistributions in binary form must reproduce the above |
||||
# copyright notice, this list of conditions and the following disclaimer |
||||
# in the documentation and/or other materials provided with the |
||||
# distribution. |
||||
# * Neither the name of Google LLC nor the names of its |
||||
# contributors may be used to endorse or promote products derived from |
||||
# this software without specific prior written permission. |
||||
# |
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
|
||||
import sys |
||||
import random |
||||
|
||||
base = sys.argv[1] |
||||
|
||||
field_freqs = [ |
||||
(('bool', 'optional'), 8.321), |
||||
(('bool', 'repeated'), 0.033), |
||||
(('bytes', 'optional'), 0.809), |
||||
(('bytes', 'repeated'), 0.065), |
||||
(('double', 'optional'), 2.845), |
||||
(('double', 'repeated'), 0.143), |
||||
(('fixed32', 'optional'), 0.084), |
||||
(('fixed32', 'repeated'), 0.012), |
||||
(('fixed64', 'optional'), 0.204), |
||||
(('fixed64', 'repeated'), 0.027), |
||||
(('float', 'optional'), 2.355), |
||||
(('float', 'repeated'), 0.132), |
||||
(('int32', 'optional'), 6.717), |
||||
(('int32', 'repeated'), 0.366), |
||||
(('int64', 'optional'), 9.678), |
||||
(('int64', 'repeated'), 0.425), |
||||
(('sfixed32', 'optional'), 0.018), |
||||
(('sfixed32', 'repeated'), 0.005), |
||||
(('sfixed64', 'optional'), 0.022), |
||||
(('sfixed64', 'repeated'), 0.005), |
||||
(('sint32', 'optional'), 0.026), |
||||
(('sint32', 'repeated'), 0.009), |
||||
(('sint64', 'optional'), 0.018), |
||||
(('sint64', 'repeated'), 0.006), |
||||
(('string', 'optional'), 25.461), |
||||
(('string', 'repeated'), 2.606), |
||||
(('Enum', 'optional'), 6.16), |
||||
(('Enum', 'repeated'), 0.576), |
||||
(('Message', 'optional'), 22.472), |
||||
(('Message', 'repeated'), 7.766), |
||||
(('uint32', 'optional'), 1.289), |
||||
(('uint32', 'repeated'), 0.051), |
||||
(('uint64', 'optional'), 1.044), |
||||
(('uint64', 'repeated'), 0.079), |
||||
] |
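# The weights read as approximate percentages: each (type, label) pair's
# share of fields observed in some corpus of real .proto files (the survey
# behind these numbers is not documented here).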
||||
|
||||
population = [item[0] for item in field_freqs] |
||||
weights = [item[1] for item in field_freqs] |
||||
|
||||
def choices(k): |
||||
if sys.version_info >= (3, 6): |
||||
return random.choices(population=population, weights=weights, k=k) |
||||
else: |
||||
print("WARNING: old Python version, field types are not properly weighted!") |
||||
return [random.choice(population) for _ in range(k)] |
||||
|
||||
with open(base + "/100_msgs.proto", "w") as f: |
||||
f.write('syntax = "proto3";\n') |
||||
f.write('package upb_benchmark;\n') |
||||
f.write('message Message {}\n') |
||||
for i in range(2, 101): |
||||
f.write('message Message{i} {{}}\n'.format(i=i)) |
||||
|
||||
with open(base + "/200_msgs.proto", "w") as f: |
||||
f.write('syntax = "proto3";\n') |
||||
f.write('package upb_benchmark;\n') |
||||
f.write('message Message {}\n') |
||||
for i in range(2, 201): |
||||
f.write('message Message{i} {{}}\n'.format(i=i)) |
||||
|
||||
with open(base + "/100_fields.proto", "w") as f: |
||||
f.write('syntax = "proto2";\n') |
||||
f.write('package upb_benchmark;\n') |
||||
f.write('enum Enum { ZERO = 0; }\n') |
||||
f.write('message Message {\n') |
||||
i = 1 |
||||
random.seed(a=0, version=2) |
||||
for field in choices(100): |
||||
field_type, label = field |
||||
f.write(' {label} {field_type} field{i} = {i};\n'.format(i=i, label=label, field_type=field_type)) |
||||
i += 1 |
||||
f.write('}\n') |
||||
|
||||
with open(base + "/200_fields.proto", "w") as f: |
||||
f.write('syntax = "proto2";\n') |
||||
f.write('package upb_benchmark;\n') |
||||
f.write('enum Enum { ZERO = 0; }\n') |
||||
f.write('message Message {\n') |
||||
i = 1 |
||||
random.seed(a=0, version=2) |
||||
for field in choices(200): |
||||
field_type, label = field |
||||
f.write(' {label} {field_type} field{i} = {i};\n'.format(i=i, label=label, field_type=field_type)) |
||||
i += 1 |
||||
f.write('}\n') |
@ -0,0 +1,70 @@ |
||||
#!/usr/bin/python3 |
||||
# |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2023 Google LLC. All rights reserved. |
||||
# https://developers.google.com/protocol-buffers/ |
||||
# |
||||
# Redistribution and use in source and binary forms, with or without |
||||
# modification, are permitted provided that the following conditions are |
||||
# met: |
||||
# |
||||
# * Redistributions of source code must retain the above copyright |
||||
# notice, this list of conditions and the following disclaimer. |
||||
# * Redistributions in binary form must reproduce the above |
||||
# copyright notice, this list of conditions and the following disclaimer |
||||
# in the documentation and/or other materials provided with the |
||||
# distribution. |
||||
# * Neither the name of Google LLC nor the names of its |
||||
# contributors may be used to endorse or promote products derived from |
||||
# this software without specific prior written permission. |
||||
# |
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
||||
|
||||
import sys |
||||
import re |
||||
|
||||
include = sys.argv[1] |
||||
msg_basename = sys.argv[2] |
||||
count = 1 |
||||
|
||||
m = re.search(r'(.*\D)(\d+)$', sys.argv[2]) |
||||
if m: |
||||
msg_basename = m.group(1) |
||||
count = int(m.group(2)) |
||||
|
||||
print(''' |
||||
#include "{include}" |
||||
|
||||
char buf[1]; |
||||
|
||||
int main() {{ |
||||
upb_Arena *arena = upb_Arena_New(); |
||||
size_t size; |
||||
'''.format(include=include)) |
||||
|
||||
def RefMessage(name): |
||||
print(''' |
||||
{{ |
||||
{name} *proto = {name}_parse(buf, 1, arena); |
||||
{name}_serialize(proto, arena, &size); |
||||
}} |
||||
'''.format(name=name)) |
||||
|
||||
RefMessage(msg_basename) |
||||
|
||||
for i in range(2, count + 1): |
||||
RefMessage(msg_basename + str(i)) |
||||
|
||||
print(''' |
||||
return 0; |
||||
}''') |
@ -0,0 +1,82 @@ |
||||
"""Java options and protobuf-specific java build rules with those options.""" |
||||
|
||||
load("@rules_java//java:defs.bzl", "java_library") |
||||
load("@rules_jvm_external//:defs.bzl", "java_export") |
||||
load("//:protobuf_version.bzl", "PROTOBUF_JAVA_VERSION") |
||||
load("//java/osgi:osgi.bzl", "osgi_java_library") |
||||
|
||||
JAVA_OPTS = [ |
||||
"-source 8", |
||||
"-target 8", |
||||
"-Xep:Java8ApiChecker:ERROR", |
||||
] |
||||
|
||||
BUNDLE_DOC_URL = "https://developers.google.com/protocol-buffers/" |
||||
BUNDLE_LICENSE = "https://opensource.org/licenses/BSD-3-Clause" |
||||
|
||||
def protobuf_java_export(**kwargs): |
||||
java_export( |
||||
javacopts = JAVA_OPTS, |
||||
**kwargs |
||||
) |
||||
|
||||
def protobuf_java_library(**kwargs): |
||||
java_library( |
||||
javacopts = JAVA_OPTS, |
||||
**kwargs |
||||
) |
||||
|
||||
def protobuf_versioned_java_library( |
||||
automatic_module_name, |
||||
bundle_description, |
||||
bundle_name, |
||||
bundle_symbolic_name, |
||||
bundle_additional_imports = [], |
||||
bundle_additional_exports = [], |
||||
**kwargs): |
||||
"""Extends `java_library` to add OSGi headers to the MANIFEST.MF using bndlib |
||||
|
||||
This macro should be usable as a drop-in replacement for java_library. |
||||
|
||||
The additional arguments are given to the bndlib tool to generate an OSGi-compliant manifest file. |
||||
See the [bnd documentation](https://bnd.bndtools.org/chapters/110-introduction.html). |
||||
|
||||
Takes all the args that are standard for a java_library target plus the following. |
||||
Args: |
||||
bundle_description: (required) The Bundle-Description header defines a short |
||||
description of this bundle. |
||||
automatic_module_name: (required) The Automatic-Module-Name header that represents |
||||
the name of the module when this bundle is used as an automatic |
||||
module. |
||||
bundle_name: (required) The Bundle-Name header defines a readable name for this |
||||
bundle. This should be a short, human-readable name that can |
||||
contain spaces. |
||||
bundle_symbolic_name: (required) The Bundle-SymbolicName header specifies a |
||||
non-localizable name for this bundle. The bundle symbolic name |
||||
together with a version must identify a unique bundle, though it can |
||||
be installed multiple times in a framework. The bundle symbolic |
||||
name should be based on the reverse domain name convention. |
||||
bundle_additional_exports: The Export-Package header contains a |
||||
declaration of exported packages. These are additional export |
||||
package statements to be added before the default wildcard export |
||||
"*;version={$Bundle-Version}". |
||||
bundle_additional_imports: The Import-Package header declares the |
||||
imported packages for this bundle. These are additional import |
||||
package statements to be added before the default wildcard import |
||||
"*". |
||||
**kwargs: Additional key-word arguments that are passed to the internal |
||||
java_library target. |
||||
""" |
||||
osgi_java_library( |
||||
javacopts = JAVA_OPTS, |
||||
automatic_module_name = automatic_module_name, |
||||
bundle_doc_url = BUNDLE_DOC_URL, |
||||
bundle_license = BUNDLE_LICENSE, |
||||
bundle_version = PROTOBUF_JAVA_VERSION, |
||||
bundle_description = bundle_description, |
||||
bundle_name = bundle_name, |
||||
bundle_symbolic_name = bundle_symbolic_name, |
||||
bundle_additional_exports = bundle_additional_exports, |
||||
bundle_additional_imports = bundle_additional_imports + ["sun.misc;resolution:=optional"], |
||||
**kwargs |
||||
) |
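
For illustration, here is a minimal BUILD-file sketch of how `protobuf_versioned_java_library` might be called, per the docstring above; the load path, target name, sources, and OSGi metadata are hypothetical, not taken from the real protobuf build:

```starlark
# Hypothetical load path; use the actual location of this .bzl file.
load("//java:build_defs.bzl", "protobuf_versioned_java_library")

protobuf_versioned_java_library(
    name = "example_util",  # hypothetical target name
    srcs = glob(["src/main/java/**/*.java"]),
    automatic_module_name = "com.example.util",
    bundle_description = "Example utility classes",
    bundle_name = "Example Util",
    bundle_symbolic_name = "com.example.util",
)
```
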
@ -1,11 +0,0 @@
--- upbc/bootstrap_compiler.bzl
+++ upbc/bootstrap_compiler.bzl
@@ -20,7 +20,7 @@ _upbc_base = "//upbc:protoc-gen-upb"

# begin:github_only
_is_google3 = False
-_extra_proto_path = "-Iexternal/com_google_protobuf/src "
+_extra_proto_path = "-Isrc "
# end:github_only

def _upbc(stage):
@ -0,0 +1,3 @@
import common.bazelrc

build --cxxopt=-std=c++14 --host_cxxopt=-std=c++14
@ -0,0 +1,17 @@
This directory contains CI-specific tooling.

# Clang wrappers

CMake allows compiler wrappers such as ccache to be injected by specifying
`CMAKE_C_COMPILER_LAUNCHER` and `CMAKE_CXX_COMPILER_LAUNCHER` during CMake's
configure step; the wrapper then intercepts compiler calls and short-circuits
on cache hits. Unfortunately, Xcode doesn't provide anything like this, so we
use basic wrapper scripts to invoke ccache + clang, as sketched below.
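
A minimal sketch of such a configure step (the source and build paths are placeholders, not the exact invocation our CI uses):

```sh
# Route all C/C++ compiler invocations through ccache during a CMake build.
cmake -S . -B build \
  -DCMAKE_C_COMPILER_LAUNCHER=ccache \
  -DCMAKE_CXX_COMPILER_LAUNCHER=ccache
cmake --build build
```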

# Bazelrc files

In order to allow platform-specific `.bazelrc` flags during testing, we keep
three platform-specific versions here, along with a shared `common.bazelrc`
that they all include. Our GHA infrastructure selects the appropriate file for
each test and overwrites the default `.bazelrc` in our workspace, which is
intended for development only.
@ -0,0 +1,5 @@
import common.bazelrc

# Workaround for maximum path length issues
startup --output_user_root=C:/tmp --windows_enable_symlinks
common --enable_runfiles
@ -0,0 +1,3 @@
#!/bin/sh
# This file wraps clang with ccache to enable faster runs of xcodebuild.
ccache clang "$@"
@ -0,0 +1,3 @@
#!/bin/sh
# This file wraps clang++ with ccache to enable faster runs of xcodebuild.
ccache clang++ "$@"
@ -0,0 +1,80 @@
build:dbg --compilation_mode=dbg

build:opt --compilation_mode=opt

build:san-common --config=dbg --strip=never --copt=-O0 --copt=-fno-omit-frame-pointer

build:asan --config=san-common --copt=-fsanitize=address --linkopt=-fsanitize=address
# ASAN hits ODR violations with shared linkage due to rules_proto.
build:asan --dynamic_mode=off

build:msan --config=san-common --copt=-fsanitize=memory --linkopt=-fsanitize=memory
build:msan --copt=-fsanitize-memory-track-origins
build:msan --copt=-fsanitize-memory-use-after-dtor
build:msan --action_env=MSAN_OPTIONS=poison_in_dtor=1

# Use our instrumented LLVM libc++ in Kokoro.
build:docker-msan --config=msan
build:docker-msan --linkopt=-L/opt/libcxx_msan/lib --linkopt=-lc++abi
build:docker-msan --linkopt=-Wl,-rpath,/opt/libcxx_msan/lib
build:docker-msan --cxxopt=-stdlib=libc++ --linkopt=-stdlib=libc++

build:tsan --config=san-common --copt=-fsanitize=thread --linkopt=-fsanitize=thread

build:ubsan --config=san-common --copt=-fsanitize=undefined --linkopt=-fsanitize=undefined
build:ubsan --action_env=UBSAN_OPTIONS=halt_on_error=1:print_stacktrace=1
# Workaround for the fact that Bazel links with $CC, not $CXX
# https://github.com/bazelbuild/bazel/issues/11122#issuecomment-613746748
build:ubsan --copt=-fno-sanitize=function --copt=-fno-sanitize=vptr

# Work around Bazel 7 remote cache issues.
# See https://github.com/bazelbuild/bazel/issues/20161
build --experimental_remote_cache_eviction_retries=5
build --remote_download_outputs=all

# Build with all --incompatible flags that we can. This helps us prepare for
# upcoming breaking changes in Bazel. This list was generated for Bazel 6 by
# running bazelisk with the --migrate flag and filtering out all flags that
# default to true or are deprecated.
build --incompatible_check_sharding_support
build --incompatible_default_to_explicit_init_py
build --incompatible_disable_native_android_rules
build --incompatible_disable_target_provider_fields
build --incompatible_disallow_empty_glob
build --incompatible_dont_use_javasourceinfoprovider
build --incompatible_enable_android_toolchain_resolution
build --incompatible_enable_apple_toolchain_resolution
build --incompatible_exclusive_test_sandboxed
build --incompatible_remote_output_paths_relative_to_input_root
build --incompatible_remote_use_new_exit_code_for_lost_inputs
build --incompatible_sandbox_hermetic_tmp
build --incompatible_struct_has_no_methods
build --incompatible_top_level_aspects_require_providers
build --incompatible_use_cc_configure_from_rules_cc
build --incompatible_use_host_features

# We cannot yet build successfully with the following flags:
# --incompatible_check_testonly_for_output_files
# --incompatible_config_setting_private_default_visibility
# --incompatible_disable_starlark_host_transitions
# --incompatible_disallow_struct_provider_syntax
# --incompatible_no_implicit_file_export
# --incompatible_no_rule_outputs_param
# --incompatible_stop_exporting_language_modules
# --incompatible_strict_action_env
# --incompatible_visibility_private_attributes_at_definition

# We might be compatible with these flags, but they are not available in all
# Bazel versions we are currently using:
# --incompatible_disable_objc_library_transition
# --incompatible_fail_on_unknown_attributes
# --incompatible_merge_fixed_and_default_shell_env

# TODO: migrate all dependencies from WORKSPACE to MODULE.bazel
# https://github.com/protocolbuffers/protobuf/issues/14313
common --noenable_bzlmod

# Important: this flag ensures that we remain compliant with the C++ layering
# check.
build --features=layering_check
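
As a usage sketch, the named configs above are selected with `--config` on the command line; the target pattern below is illustrative, not a prescribed CI invocation:

```sh
# Run tests under AddressSanitizer using the asan config defined above.
bazel test --config=asan //src/...

# MSan variant that links the instrumented libc++ (Docker image only).
bazel test --config=docker-msan //src/...
```
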
@ -0,0 +1,5 @@
import common.bazelrc

build --cxxopt=-std=c++14 --host_cxxopt=-std=c++14
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1
common --xcode_version_config=@com_google_protobuf//.github:host_xcodes
@ -1,9 +0,0 @@
cmake_minimum_required(VERSION 3.5)

message(WARNING "Calling of cmake with source directory set to \"cmake\" subdirectory of Protocol Buffers project is deprecated. Top-level directory of Protocol Buffers project should be used instead.")

project(protobuf C CXX)

set(protobuf_DEPRECATED_CMAKE_SUBDIRECTORY_USAGE TRUE)

include(../CMakeLists.txt)