Merge branch 'main' into do_not_store_binary_data_inside_php_files

pull/13911/head
Mikhail Galanin, 1 year ago, committed by GitHub
commit 490958e165
Changed files (number of changed lines in parentheses):
  1. .bazelrc (8)
  2. .clang-format (0)
  3. .github/BUILD.bazel (13)
  4. .github/workflows/clear_caches.yml (2)
  5. .github/workflows/janitor.yml (48)
  6. .github/workflows/scorecard.yml (60)
  7. .github/workflows/staleness_check.yml (17)
  8. .github/workflows/staleness_refresh.yml (2)
  9. .github/workflows/test_cpp.yml (114)
  10. .github/workflows/test_csharp.yml (8)
  11. .github/workflows/test_java.yml (6)
  12. .github/workflows/test_objectivec.yml (6)
  13. .github/workflows/test_php.yml (15)
  14. .github/workflows/test_php_ext.yml (36)
  15. .github/workflows/test_python.yml (14)
  16. .github/workflows/test_ruby.yml (17)
  17. .github/workflows/test_runner.yml (5)
  18. .github/workflows/test_rust.yml (2)
  19. .github/workflows/test_upb.yml (70)
  20. .github/workflows/update_php_repo.yml (4)
  21. BUILD.bazel (49)
  22. CMakeLists.txt (2)
  23. Cargo.bazel.lock (927)
  24. Cargo.lock (139)
  25. MODULE.bazel (2)
  26. Protobuf-C++.podspec (2)
  27. Protobuf.podspec (2)
  28. README.md (2)
  29. WORKSPACE (72)
  30. bazel/BUILD (20)
  31. bazel/amalgamate.py (0)
  32. bazel/amalgamation.bzl (2)
  33. bazel/build_defs.bzl (8)
  34. bazel/lua.BUILD (0)
  35. bazel/py_proto_library.bzl (1)
  36. bazel/python_downloads.bzl (4)
  37. bazel/system_python.bzl (0)
  38. bazel/upb_c_proto_library.bzl (26)
  39. bazel/upb_minitable_proto_library.bzl (24)
  40. bazel/upb_proto_library.bzl (6)
  41. bazel/upb_proto_library_internal/aspect.bzl (151)
  42. bazel/upb_proto_library_internal/cc_library_func.bzl (0)
  43. bazel/upb_proto_library_internal/copts.bzl (0)
  44. bazel/upb_proto_library_internal/rule.bzl (0)
  45. bazel/upb_proto_reflection_library.bzl (27)
  46. benchmarks/BUILD (9)
  47. benchmarks/BUILD.googleapis (0)
  48. benchmarks/benchmark.cc (47)
  49. benchmarks/build_defs.bzl (0)
  50. benchmarks/compare.py (0)
  51. benchmarks/descriptor.proto (0)
  52. benchmarks/descriptor_sv.proto (0)
  53. benchmarks/empty.proto (12)
  54. benchmarks/gen_protobuf_binary_cc.py (0)
  55. benchmarks/gen_synthetic_protos.py (0)
  56. benchmarks/gen_upb_binary_c.py (0)
  57. build_defs/BUILD.bazel (4)
  58. build_defs/cpp_opts.bzl (1)
  59. build_defs/java_opts.bzl (5)
  60. ci/common.bazelrc (49)
  61. cmake/abseil-cpp.cmake (1)
  62. cmake/conformance.cmake (74)
  63. cmake/install.cmake (15)
  64. cmake/protobuf-generate.cmake (6)
  65. cmake/tests.cmake (76)
  66. conformance/BUILD.bazel (29)
  67. conformance/bazel_conformance_test_runner.sh (12)
  68. conformance/binary_json_conformance_suite.cc (1345)
  69. conformance/binary_json_conformance_suite.h (101)
  70. conformance/conformance_cpp.cc (8)
  71. conformance/conformance_dart.dart (8)
  72. conformance/conformance_objc.m (29)
  73. conformance/conformance_python.py (22)
  74. conformance/conformance_test.cc (94)
  75. conformance/conformance_test.h (18)
  76. conformance/conformance_test_runner.cc (16)
  77. conformance/defs.bzl (3)
  78. conformance/failure_list_cpp.txt (33)
  79. conformance/failure_list_csharp.txt (4)
  80. conformance/failure_list_java.txt (6)
  81. conformance/failure_list_jruby.txt (6)
  82. conformance/failure_list_python.txt (3)
  83. conformance/failure_list_python_cpp.txt (3)
  84. conformance/failure_list_python_upb.txt (6)
  85. conformance/text_format_conformance_suite.cc (277)
  86. conformance/text_format_conformance_suite.h (59)
  87. conformance/text_format_failure_list_cpp.txt (20)
  88. conformance/text_format_failure_list_python.txt (31)
  89. conformance/text_format_failure_list_python_cpp.txt (28)
  90. conformance/text_format_failure_list_python_upb.txt (11)
  91. csharp/Google.Protobuf.Tools.nuspec (2)
  92. csharp/compatibility_tests/v3.0.0/BUILD.bazel (4)
  93. csharp/src/Google.Protobuf.Test/Collections/MapFieldTest.cs (53)
  94. csharp/src/Google.Protobuf.Test/FieldMaskTreeTest.cs (6)
  95. csharp/src/Google.Protobuf.Test/GeneratedMessageTest.cs (73)
  96. csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj (8)
  97. csharp/src/Google.Protobuf.Test/JsonParserTest.cs (17)
  98. csharp/src/Google.Protobuf.Test/testprotos.pb (BIN)
  99. csharp/src/Google.Protobuf/ByteString.cs (16)
  100. csharp/src/Google.Protobuf/CodedOutputStream.cs (27)
Some files were not shown because too many files have changed in this diff.

@ -7,7 +7,6 @@ build:opt --compilation_mode=opt
build:san-common --config=dbg --strip=never --copt=-O0 --copt=-fno-omit-frame-pointer
build:asan --config=san-common --copt=-fsanitize=address --linkopt=-fsanitize=address
build:asan --copt=-DADDRESS_SANITIZER=1
# ASAN hits ODR violations with shared linkage due to rules_proto.
build:asan --dynamic_mode=off
@ -15,14 +14,15 @@ build:msan --config=san-common --copt=-fsanitize=memory --linkopt=-fsanitize=mem
build:msan --copt=-fsanitize-memory-track-origins
build:msan --copt=-fsanitize-memory-use-after-dtor
build:msan --action_env=MSAN_OPTIONS=poison_in_dtor=1
build:msan --copt=-DMEMORY_SANITIZER=1
build:tsan --config=san-common --copt=-fsanitize=thread --linkopt=-fsanitize=thread
build:tsan --copt=-DTHREAD_SANITIZER=1
build:ubsan --config=san-common --copt=-fsanitize=undefined --linkopt=-fsanitize=undefined
build:ubsan --action_env=UBSAN_OPTIONS=halt_on_error=1:print_stacktrace=1
build:ubsan --copt=-DUNDEFINED_SANITIZER=1
# Workaround for the fact that Bazel links with $CC, not $CXX
# https://github.com/bazelbuild/bazel/issues/11122#issuecomment-613746748
build:ubsan --copt=-fno-sanitize=function --copt=-fno-sanitize=vptr
# TODO: migrate all dependencies from WORKSPACE to MODULE.bazel
# https://github.com/protocolbuffers/protobuf/issues/14313
common --noenable_bzlmod
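
For context, each of the names above ("opt", "dbg", "asan", "msan", "tsan", "ubsan") is a named Bazel config selected with --config. A minimal local invocation of the sanitizer builds defined in this file might look like the sketch below; the //src/... target pattern is taken from elsewhere in this diff and is illustrative, not part of this hunk:

    # Run the C++ tests under AddressSanitizer using the config defined above
    bazel test --config=asan //src/...
    # Same tests under UBSan; halt_on_error/print_stacktrace come from the config's UBSAN_OPTIONS
    bazel test --config=ubsan //src/...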

@ -6,26 +6,29 @@
# of Xcode.
xcode_version(
name = "version14_2_14C18",
version = "14.2.14C18",
aliases = ["14C18"],
default_ios_sdk_version = "16.2",
default_tvos_sdk_version = "16.1",
default_macos_sdk_version = "13.1",
default_tvos_sdk_version = "16.1",
default_watchos_sdk_version = "9.1",
version = "14.2.14C18",
)
xcode_version(
name = "version14_1_0_14B47b",
version = "14.1.0.14B47b",
aliases = ["14B47b"],
default_ios_sdk_version = "16.1",
default_tvos_sdk_version = "16.1",
default_macos_sdk_version = "13.0",
default_tvos_sdk_version = "16.1",
default_watchos_sdk_version = "9.1",
version = "14.1.0.14B47b",
)
xcode_config(
name = "host_xcodes",
versions = [":version14_2_14C18", ":version14_1_0_14B47b"],
default = ":version14_1_0_14B47b",
versions = [
":version14_2_14C18",
":version14_1_0_14B47b",
],
)
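
A hedged sketch of how these targets are usually consumed: Bazel is pointed at the xcode_config rule with --xcode_version_config, and a specific version (or one of its aliases) can be requested with --xcode_version. The //.github:host_xcodes label below is an assumption based on this file's location (.github/BUILD.bazel), not something stated in the hunk:

    # Select the Xcode 14.2 toolchain declared above by its alias
    bazel build --xcode_version_config=//.github:host_xcodes --xcode_version=14C18 //src/...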

@ -17,7 +17,7 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Clear Bazel repository cache ${{ runner.os }}
name: Clear Bazel repository cache ${{ matrix.os }}
runs-on: ${{ matrix.os }}
permissions:
actions: write # permission is required to delete caches

@ -28,3 +28,51 @@ jobs:
echo "Closing #$pr..."
gh pr close --comment "Auto-closing Copybara pull request" --delete-branch "$pr"
done
stale-others:
name: Close stale non-copybara PRs and issues
runs-on: ubuntu-latest
permissions:
issues: write # allow the action to comment on, add labels to, and close issues
pull-requests: write # allow the action to comment on, add labels to, and close PRs
steps:
- uses: actions/stale@b69b346013879cedbf50c69f572cd85439a41936
with:
stale-issue-message: >
We triage inactive PRs and issues in order to make it easier to find
active work. If this issue should remain active or becomes active
again, please add a comment.
This issue is labeled `inactive` because the last activity was over
90 days ago.
close-issue-message: >
We triage inactive PRs and issues in order to make it easier to find
active work. If this issue should remain active or becomes active
again, please reopen it.
This issue was closed and archived because there has been no new
activity in the 14 days since the `inactive` label was added.
stale-pr-message: >
We triage inactive PRs and issues in order to make it easier to find
active work. If this PR should remain active, please add a comment.
This PR is labeled `inactive` because the last activity was over 90
days ago. This PR will be closed and archived after 14 additional
days without activity.
close-pr-message: >
We triage inactive PRs and issues in order to make it easier to find
active work. If this PR should remain active or becomes active
again, please reopen it.
This PR was closed and archived because there has been no new
activity in the 14 days since the `inactive` label was added.
stale-issue-label: 'inactive'
stale-pr-label: 'inactive'
exempt-issue-labels: 'help wanted'
days-before-stale: 90
days-before-close: 14
operations-per-run: 100

@ -0,0 +1,60 @@
# This workflow uses actions that are not certified by GitHub. They are provided
# by a third-party and are governed by separate terms of service, privacy
# policy, and support documentation.
name: Scorecard supply-chain security
on:
# For Branch-Protection check. Only the default branch is supported. See
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
branch_protection_rule:
# To guarantee Maintained check is occasionally updated. See
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
schedule:
- cron: '20 5 * * 2'
push:
branches: [ "main" ]
# Declare default permissions as read only.
permissions: read-all
jobs:
analysis:
name: Scorecard analysis
runs-on: ubuntu-latest
permissions:
security-events: write # to upload the results to code-scanning dashboard
id-token: write # to publish results and get a badge
steps:
- name: "Checkout code"
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
persist-credentials: false
- name: "Run analysis"
uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
with:
results_file: results.sarif
results_format: sarif
# (Optional) "write" PAT token. Uncomment the `repo_token` line below if
# you want to enable the Branch-Protection check on a *public* repository.
# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-fine-grained-pat-optional.
# repo_token: ${{ secrets.SCORECARD_TOKEN }}
# Allows the repository to include the Scorecard badge.
publish_results: true
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
with:
name: SARIF file
path: results.sarif
retention-days: 5
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4
with:
sarif_file: results.sarif

@ -18,7 +18,7 @@ jobs:
strategy:
fail-fast: false
matrix:
branch: [main, 22.x, 23.x, 24.x]
branch: [main, 22.x, 23.x, 24.x, 25.x]
os: [{ name: Linux, value: ubuntu-latest}]
name: Test staleness ${{ matrix.os.name }} ${{ github.head_ref && 'PR' || matrix.branch }}
@ -26,7 +26,7 @@ jobs:
if: ${{ github.event.repository.full_name == 'protocolbuffers/protobuf' }}
steps:
- name: Checkout ${{ github.head_ref && 'PR' || matrix.branch }}
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout || github.head_ref || matrix.branch }}
@ -49,10 +49,15 @@ jobs:
# In branches where automatic updates work as post-submits, we don't want to run staleness
# tests along with user changes. Any stale files will be automatically fixed in a follow-up
# commit.
run: |
uses: protocolbuffers/protobuf-ci/bazel@v2
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: staleness
bash: >
set -ex;
if [[ -z $COMMIT_TRIGGERED_RUN || -z $MAIN_RUN ]]; then
bazel query 'attr(tags, "staleness_test", //...)' | xargs bazel test $BAZEL_FLAGS || \
echo "Please run ./regenerate_stale_files.sh to regenerate stale files"
bazel query 'attr(tags, "staleness_test", //...)' | xargs bazel test $BAZEL_FLAGS ||
echo "Please run ./regenerate_stale_files.sh to regenerate stale files";
else
bazel query 'attr(tags, "staleness_test", //...)'
bazel query 'attr(tags, "staleness_test", //...)';
fi
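
The wrapped command can also be reproduced locally; a rough equivalent of the non-main-branch path above, with BAZEL_FLAGS left empty for a plain local run, is:

    # Find every target tagged staleness_test and run it
    bazel query 'attr(tags, "staleness_test", //...)' | xargs bazel test
    # If any of them fail, regenerate the checked-in files as the failure message suggests
    ./regenerate_stale_files.sh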

@ -24,7 +24,7 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
# Note: this token has an expiration date, so if the workflow starts
# failing then you may need to generate a fresh token.

@ -19,28 +19,31 @@ jobs:
config:
- { name: Optimized, flags: --config=opt }
- { name: Debug, flags: --config=dbg }
- { name: ASAN, flags: --config=asan }
- { name: MSAN, flags: --config=docker-msan }
- { name: ASAN, flags: --config=asan, runner: ubuntu-22-large }
- { name: MSAN, flags: --config=docker-msan, runner: ubuntu-22-large }
- { name: TSAN, flags: --config=tsan }
- { name: UBSAN, flags: --config=ubsan }
- { name: No-RTTI, flags: --cxxopt=-fno-rtti }
include:
# Set defaults
- image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize@sha256:04cd765285bc52cbbf51d66c8c66d8603579cf0f19cc42df26b09d2c270541fb
- targets: //pkg/... //src/... @com_google_protobuf_examples//...
- targets: //pkg/... //src/... @com_google_protobuf_examples//... //third_party/utf8_range/...
# Override cases with custom images
- config: { name: "Bazel7" }
image: "us-docker.pkg.dev/protobuf-build/containers/common/linux/bazel:7.0.0-a04396cc76704d4b7c722789e9c08df18f47df53"
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "TCMalloc" }
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/tcmalloc@sha256:bd39119d74b8a3fad4ae335d4cf5294e70384676331b7e19949459fc7a8d8328"
targets: "//src/..."
targets: "//src/... //third_party/utf8_range/..."
- config: { name: "aarch64" }
targets: "//src/... //src/google/protobuf/compiler:protoc_aarch64_test"
targets: "//src/... //src/google/protobuf/compiler:protoc_aarch64_test //third_party/utf8_range/..."
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/emulation:aarch64-63dd26c0c7a808d92673a3e52e848189d4ab0f17"
name: Linux ${{ matrix.config.name }}
runs-on: ubuntu-latest
runs-on: ${{ matrix.config.runner || 'ubuntu-latest' }}
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
@ -60,7 +63,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
@ -69,7 +72,7 @@ jobs:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:${{ matrix.version }}-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: cpp_linux/gcc-${{ matrix.version }}
bazel: test //pkg/... //src/... @com_google_protobuf_examples//...
bazel: test //pkg/... //src/... @com_google_protobuf_examples//... //third_party/utf8_range/...
linux-release:
strategy:
@ -80,7 +83,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
@ -130,7 +133,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
@ -155,7 +158,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
@ -172,7 +175,7 @@ jobs:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/cmake:3.13.3-63dd26c0c7a808d92673a3e52e848189d4ab0f17
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
command: >-
/install.sh -DCMAKE_CXX_STANDARD=14 ${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_USE_EXTERNAL_GTEST=ON -Dprotobuf_ABSL_PROVIDER=package \&\&
/install.sh -DCMAKE_CXX_STANDARD=14 ${{ env.SCCACHE_CMAKE_FLAGS }} -Dprotobuf_USE_EXTERNAL_GTEST=ON -Dprotobuf_ABSL_PROVIDER=package -Dprotobuf_BUILD_SHARED_LIBS=ON \&\&
/test.sh
${{ env.SCCACHE_CMAKE_FLAGS }}
-Dprotobuf_REMOVE_INSTALLED_HEADERS=ON
@ -186,7 +189,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
@ -226,7 +229,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
@ -257,7 +260,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
@ -282,7 +285,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
@ -313,25 +316,36 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
- name: MacOS
- name: MacOS Bazel
os: macos-12
cache_key: macos-12
bazel: test //src/...
- name: MacOS Apple Silicon (build only)
bazel: test //src/... //third_party/utf8_range/...
# TODO Enable these once mac setup is working for Bazel 7
#- name: MacOS Bazel 7
# os: macos-12
# cache_key: macos-12-bazel7
# bazel: test //src/... //third_party/utf8_range/...
# bazel_version: '7.0.0'
- name: MacOS Apple Silicon (build only) Bazel
os: macos-12
cache_key: macos-12-arm
# Current github runners are all Intel based, so just build/compile
# for Apple Silicon to detect issues there.
bazel: build --cpu=darwin_arm64 //src/...
- name: Windows
os: windows-2019
cache_key: windows-2019
bazel: build --cpu=darwin_arm64 //src/... //third_party/utf8_range/...
- name: Windows Bazel
os: windows-2022
cache_key: windows-2022
bazel: test //src/... @com_google_protobuf_examples//... --test_tag_filters=-conformance --build_tag_filters=-conformance
- name: Windows Bazel 7
os: windows-2022
cache_key: windows-2022-bazel7
bazel: test //src/... @com_google_protobuf_examples//... --test_tag_filters=-conformance --build_tag_filters=-conformance
name: ${{ matrix.name }} Bazel
bazel_version: '7.0.0'
name: ${{ matrix.name }}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
@ -340,6 +354,7 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel: ${{ matrix.bazel }}
bazel-cache: cpp_${{ matrix.cache_key }}
version: ${{ matrix.bazel_version || '6.3.0' }}
non-linux-cmake:
strategy:
@ -351,14 +366,6 @@ jobs:
flags: -DCMAKE_CXX_STANDARD=14
cache-prefix: macos-cmake
- name: Windows CMake
os: windows-2019
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
-Dprotobuf_BUILD_SHARED_LIBS=OFF
-Dprotobuf_BUILD_EXAMPLES=ON
vsversion: '2019'
cache-prefix: windows-2019-cmake
- name: Windows CMake 2022
os: windows-2022
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
@ -366,34 +373,44 @@ jobs:
-Dprotobuf_BUILD_EXAMPLES=ON
vsversion: '2022'
cache-prefix: windows-2022-cmake
- name: Windows CMake 32-bit
- name: Windows CMake 2019
os: windows-2019
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
-Dprotobuf_BUILD_SHARED_LIBS=OFF
-Dprotobuf_BUILD_EXAMPLES=ON
vsversion: '2019'
cache-prefix: windows-2019-cmake
# windows-2019 has python3.7 installed, which is incompatible with the latest gcloud
python-version: '3.8'
- name: Windows CMake 32-bit
os: windows-2022
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
vsversion: '2022'
windows-arch: 'win32'
cache-prefix: windows-2019-win32-cmake
cache-prefix: windows-2022-win32-cmake
- name: Windows CMake Shared
os: windows-2019
os: windows-2022
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
-Dprotobuf_BUILD_SHARED_LIBS=ON
vsversion: '2019'
cache-prefix: windows-2019-cmake
vsversion: '2022'
cache-prefix: windows-2022-cmake
- name: Windows CMake Install
os: windows-2019
os: windows-2022
install-flags: -G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF -Dprotobuf_BUILD_TESTS=OFF
flags: >-
-G Ninja -Dprotobuf_WITH_ZLIB=OFF -Dprotobuf_BUILD_CONFORMANCE=OFF
-Dprotobuf_REMOVE_INSTALLED_HEADERS=ON
-Dprotobuf_BUILD_PROTOBUF_BINARIES=OFF
vsversion: '2019'
cache-prefix: windows-2019-cmake
vsversion: '2022'
cache-prefix: windows-2022-cmake
name: ${{ matrix.name }}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
@ -405,6 +422,17 @@ jobs:
arch: ${{ matrix.windows-arch || 'x64' }}
vsversion: ${{ matrix.vsversion }}
# Workaround for incompatibility between gcloud and windows-2019 runners.
- name: Install Python
if: ${{ matrix.python-version }}
uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: ${{ matrix.python-version }}
- name: Use custom python for gcloud
if: ${{ matrix.python-version }}
run: echo "CLOUDSDK_PYTHON=${Python3_ROOT_DIR}\\python3" >> $GITHUB_ENV
shell: bash
- name: Setup sccache
uses: protocolbuffers/protobuf-ci/sccache@v2
with:

@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
@ -33,12 +33,12 @@ jobs:
runs-on: windows-2019
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Setup dotnet
uses: actions/setup-dotnet@607fce577a46308457984d59e4954e075820f10a # v3.0.3
uses: actions/setup-dotnet@3447fd6a9f9e57506b15f895c5b76d3b197dc7c2 # v3.2.0
with:
dotnet-version: '6.0.x'
@ -53,7 +53,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}

@ -38,7 +38,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
@ -54,7 +54,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run Linkage Monitor test
@ -70,7 +70,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Build protoc

@ -34,7 +34,7 @@ jobs:
DEVELOPER_DIR: /Applications/Xcode_14.1.app/Contents/Developer
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
@ -77,7 +77,7 @@ jobs:
DEVELOPER_DIR: /Applications/Xcode_14.1.app/Contents/Developer
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Pod lib lint
@ -118,7 +118,7 @@ jobs:
runs-on: macos-12
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: bazel ${{ matrix.config.bazel_action }}

@ -39,10 +39,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Setup composer
uses: protocolbuffers/protobuf-ci/composer-setup@v2
with:
@ -78,10 +77,9 @@ jobs:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/32bit@sha256:8275360dc5d676f3470872d79087901c0e4153453976bea908a92c82e8d209ea
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Cross compile protoc for i386
id: cross-compile
@ -116,7 +114,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
@ -152,16 +150,15 @@ jobs:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
version: ['8.0']
version: ['8.2']
name: MacOS PHP ${{ matrix.version }}
runs-on: macos-12
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Uninstall problematic libgd
run: brew uninstall --ignore-dependencies gd
@ -170,7 +167,7 @@ jobs:
run: brew install coreutils gd
- name: Pin PHP version
uses: shivammathur/setup-php@d30ad8b1843ace22e6698ab99bbafaa747b6bd0d # 2.24.0
uses: shivammathur/setup-php@7fdd3ece872ec7ec4c098ae5ab7637d5e0a96067 # 2.26.0
with:
php-version: ${{ matrix.version }}

@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
@ -41,34 +41,18 @@ jobs:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
include:
- php-image: php:7.4-cli
version: "7.4"
- php-image: php:8.1-cli
version: "8.1"
- php-image: php:8.2-cli
version: "8.2"
version: ["8.0", "8.1", "8.2"]
name: Build ${{ matrix.version }}
runs-on: ubuntu-latest
container: ${{ matrix.php-image }}
steps:
- uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a
with:
name: protobuf-php-release
- name: Compile extension
run: |
cd /tmp
MAKE="make -j$(nproc)" pecl install $GITHUB_WORKSPACE/protobuf-*.tgz
- name: Enable extension
run: docker-php-ext-enable protobuf
- name: Check for PHP startup warnings
run: |
php -d display_errors=stderr -d display_startup_errors=1 -d error_reporting=-1 -r ';' 2>/tmp/protobuf-warnings
if [ -s /tmp/protobuf-warnings ]; then
echo 'The PHP extension was successfully installed, but PHP raised these warnings:' >&2
cat /tmp/protobuf-warnings >&2
exit 1
fi
echo "PHP didn't raise any warnings at startup."
- name: Inspect extension
run: php --ri protobuf
- name: Run tests
uses: protocolbuffers/protobuf-ci/docker@v2
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/php-extension:${{ matrix.version }}-a48f26c08d9a803dd0177dda63563f6ea6f7b2d4
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
skip-staleness-check: true
command: protobuf-*.tgz
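
The inline steps removed above (pecl install, extension enable, startup-warning check) can still be reproduced by hand. This sketch is assembled from the deleted lines and assumes a local PHP with pecl and docker-php-ext-enable available:

    # Build and enable the extension from the release tarball
    MAKE="make -j$(nproc)" pecl install protobuf-*.tgz
    docker-php-ext-enable protobuf
    # Fail if PHP emits any startup warnings with the extension loaded
    php -d display_errors=stderr -d display_startup_errors=1 -d error_reporting=-1 -r ';' 2>/tmp/protobuf-warnings
    if [ -s /tmp/protobuf-warnings ]; then cat /tmp/protobuf-warnings >&2; exit 1; fi
    # Confirm the extension is visible to PHP
    php --ri protobuf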

@ -17,10 +17,11 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
type: [ Pure, C++]
# TODO: b/309627662 - Add coverage for Python 3.12.
version: ["3.8", "3.9", "3.10", "3.11"]
include:
- type: Pure
targets: //python/... //upb/python/... //python:python_version_test
targets: //python/... //python:python_version_test
flags: --define=use_fast_cpp_protos=false
- type: C++
targets: //python/... //python:python_version_test
@ -36,7 +37,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
@ -46,6 +47,7 @@ jobs:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: python_linux/${{ matrix.type }}_${{ matrix.version }}
bazel: test ${{ matrix.targets }} ${{ matrix.flags }} --test_env=KOKORO_PYTHON_VERSION
exclude-targets: -//python/pb_unit_tests/...
macos:
@ -54,10 +56,10 @@ jobs:
matrix:
type: [ Pure, C++]
# TODO Consider expanding this set of versions.
version: [ "3.11" ]
version: [ "3.12" ]
include:
- type: Pure
targets: //python/... //upb/python/... //python:python_version_test
targets: //python/... //python:python_version_test
- type: C++
targets: //python/... //python:python_version_test
flags: --define=use_fast_cpp_protos=true
@ -66,7 +68,7 @@ jobs:
runs-on: macos-12
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
@ -75,6 +77,7 @@ jobs:
with:
python-version: ${{ matrix.version }}
cache: pip
cache-dependency-path: 'python/requirements.txt'
- name: Validate version
run: python3 --version | grep ${{ matrix.version }} || (echo "Invalid Python version - $(python3 --version)" && exit 1)
@ -95,3 +98,4 @@ jobs:
test ${{ matrix.targets }} ${{ matrix.flags }}
--test_env=KOKORO_PYTHON_VERSION=${{ matrix.version }}
--macos_minimum_os=10.9
exclude-targets: -//python/pb_unit_tests/...

@ -34,7 +34,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
@ -50,10 +50,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
submodules: recursive
- name: Cross compile protoc for i386
id: cross-compile
@ -68,9 +67,10 @@ jobs:
with:
image: i386/ruby:2.7.3-buster
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
# Pin to Ruby 2.7 compatible bundler version.
command: >-
/bin/bash -cex '
gem install bundler;
gem install bundler -v 2.4.22;
cd /workspace/ruby;
bundle;
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake;
@ -82,7 +82,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
@ -99,9 +99,10 @@ jobs:
with:
image: arm64v8/ruby:2.7.3-buster
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
# Pin to Ruby 2.7 compatible bundler version.
command: >-
/bin/bash -cex '
gem install bundler;
gem install bundler -v 2.4.22;
cd /workspace/ruby;
bundle;
PROTOC=/workspace/${{ steps.cross-compile.outputs.protoc }} rake;
@ -129,7 +130,7 @@ jobs:
runs-on: macos-12
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
@ -167,7 +168,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests

@ -12,9 +12,8 @@ name: Tests
on:
# continuous
schedule:
# TODO Run daily at 10 AM UTC (2 AM PDT)
# Run every hour for now to gather statistics
- cron: 0 * * * *
# Run every hour
- cron: "0 * * * *"
# postsubmit
push:

@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests

@ -17,12 +17,14 @@ jobs:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
config:
- { name: "Bazel 7", bazel_version: "7.0.0" }
- { name: "Fastbuild" }
- { name: "Optimized", flags: "-c opt" }
- { name: "FastTable", flags: "--//upb:fasttable_enabled=true" }
- { name: "ASAN", flags: "--config=asan -c dbg", exclude-targets: "-//upb/benchmarks:benchmark -//upb/python/..." }
- { name: "UBSAN", flags: "--config=ubsan -c dbg", exclude-targets: "-//upb/benchmarks:benchmark -//upb/python/... -//upb/lua/..." }
- { name: "32-bit", flags: "--copt=-m32 --linkopt=-m32", exclude-targets: "-//upb/benchmarks:benchmark -//upb/python/..." }
- { name: "FastTable ASAN", flags: "--//upb:fasttable_enabled=true --config=asan", exclude-targets: "-//benchmarks:benchmark -//python/..." }
- { name: "ASAN", flags: "--config=asan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/..." }
- { name: "UBSAN", flags: "--config=ubsan -c dbg", exclude-targets: "-//benchmarks:benchmark -//python/... -//lua/..." }
- { name: "32-bit", flags: "--copt=-m32 --linkopt=-m32", exclude-targets: "-//benchmarks:benchmark -//python/..." }
# TODO: b/297027295 - Add 32-bit ASAN test
name: ${{ matrix.config.name }}
@ -30,16 +32,16 @@ jobs:
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel-docker@v2
with:
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize@sha256:04cd765285bc52cbbf51d66c8c66d8603579cf0f19cc42df26b09d2c270541fb
image: us-docker.pkg.dev/protobuf-build/containers/test/linux/sanitize:${{ matrix.config.bazel_version || '6.3.0' }}-d07b7d649401d147e71e7182d2832cc8344f1f35
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: upb-bazel
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //upb/... ${{ matrix.config.flags }}
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //bazel/... //benchmarks/... //lua/... //protos/... //protos_generator/... //python/... //upb/... //upb_generator/... ${{ matrix.config.flags }}
exclude-targets: ${{ matrix.config.exclude-targets }}
linux-gcc:
@ -49,7 +51,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
@ -58,24 +60,30 @@ jobs:
image: "us-docker.pkg.dev/protobuf-build/containers/test/linux/gcc:12.2-6.3.0-63dd26c0c7a808d92673a3e52e848189d4ab0f17"
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-gcc"
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 -c opt //upb/...
bazel: test --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 -c opt //bazel/... //benchmarks/... //lua/... //protos/... //protos_generator/... //python/... //upb/... //upb_generator/...
windows:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
name: Windows
runs-on: windows-2019
runs-on: windows-2022
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
cache: pip
cache-dependency-path: 'python/requirements.txt'
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel@v2
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-windows"
bazel: test --cxxopt=/std:c++17 --host_cxxopt=/std:c++17 //upb/upb/... //upb/upbc/... //upb/python/... //upb/protos/... //upb/protos_generator/...
bazel: test --cxxopt=/std:c++17 --host_cxxopt=/std:c++17 //upb/... //upb_generator/... //python/... //protos/... //protos_generator/...
version: 6.3.0
exclude-targets: -//python:conformance_test -//upb/reflection:def_builder_test
macos:
strategy:
@ -88,15 +96,20 @@ jobs:
runs-on: macos-12
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
cache: pip
cache-dependency-path: 'python/requirements.txt'
- name: Run tests
uses: protocolbuffers/protobuf-ci/bazel@v2
with:
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: "upb-bazel-macos"
bazel: ${{ matrix.config.bazel-command }} --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 ${{ matrix.config.flags }} //upb/...
bazel: ${{ matrix.config.bazel-command }} --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 ${{ matrix.config.flags }} //bazel/... //benchmarks/... //lua/... //protos/... //protos_generator/... //python/... //upb/... //upb_generator/...
version: 6.3.0
no-python:
strategy:
@ -105,7 +118,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Run tests
@ -118,7 +131,7 @@ jobs:
which python3 &&
mv `which python3` /tmp &&
! which python3 &&
bazel test $BAZEL_FLAGS --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //upb/python/... -- -//upb/python/dist:source_wheel
bazel test $BAZEL_FLAGS --cxxopt=-std=c++17 --host_cxxopt=-std=c++17 //python/... -- -//python/dist:source_wheel
build_wheels:
name: Build Wheels
@ -126,16 +139,16 @@ jobs:
if: ${{ github.event_name != 'pull_request_target' }}
steps:
- name: Checkout pending changes
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: protocolbuffers/protobuf-ci/checkout@v2
with:
ref: ${{ inputs.safe-checkout }}
- name: Build Wheels
uses: protocolbuffers/protobuf-ci/bazel-docker@v2
with:
image: us-docker.pkg.dev/protobuf-build/release-containers/linux/apple@sha256:bb1d14738449916d489c1cbb062508c4dca5bd265ea3e67a2628ae40912b9b00
image: us-docker.pkg.dev/protobuf-build/release-containers/linux/apple@sha256:b3dc9b75d8e599b0e95ed245d89f44b5a4231112f975da89dd02006a484a58df
credentials: ${{ secrets.GAR_SERVICE_ACCOUNT }}
bazel-cache: upb-bazel-python
bazel: build --crosstool_top=//toolchain:clang_suite --//toolchain:release=true --symlink_prefix=/ -c dbg //upb/python/dist //upb/python/dist:test_wheel //upb/python/dist:source_wheel
bazel: build --crosstool_top=//toolchain:clang_suite --//toolchain:release=true --symlink_prefix=/ -c dbg //python/dist //python/dist:test_wheel //python/dist:source_wheel
- name: Move Wheels
run: mkdir wheels && find _build/out \( -name 'protobuf*.whl' -o -name 'protobuf-*.tar.gz' \) -exec mv '{}' wheels ';'
- uses: actions/upload-artifact@v3
@ -146,7 +159,7 @@ jobs:
with:
name: requirements
# Tests shouldn't have access to the whole upb repo, upload the one file we need
path: upb/python/requirements.txt
path: python/requirements.txt
test_wheels:
name: Test Wheels
@ -161,23 +174,24 @@ jobs:
# coverage.
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: macos-11, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.11", architecture: x64, type: 'binary' }
- { os: macos-12, python-version: "3.11", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'binary' }
- { os: macos-12, python-version: "3.12", architecture: x64, type: 'binary' }
- { os: ubuntu-latest, python-version: "3.8", architecture: x64, type: 'source' }
- { os: macos-11, python-version: "3.8", architecture: x64, type: 'source' }
- { os: ubuntu-latest, python-version: "3.11", architecture: x64, type: 'source' }
- { os: macos-12, python-version: "3.11", architecture: x64, type: 'source' }
- { os: ubuntu-latest, python-version: "3.12", architecture: x64, type: 'source' }
- { os: macos-12, python-version: "3.12", architecture: x64, type: 'source' }
# Windows uses the full API up until Python 3.10, so each of these
# jobs tests a distinct binary wheel.
# Windows uses the full API up until Python 3.10.
- { os: windows-2019, python-version: "3.8", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.9", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.10", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.11", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.12", architecture: x86, type: 'binary' }
- { os: windows-2019, python-version: "3.8", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.9", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.10", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.11", architecture: x64, type: 'binary' }
- { os: windows-2019, python-version: "3.12", architecture: x64, type: 'binary' }
runs-on: ${{ matrix.os }}
if: ${{ github.event_name != 'pull_request_target' }}
defaults:
@ -194,7 +208,7 @@ jobs:
with:
name: requirements
path: requirements
- uses: actions/setup-python@v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: ${{ matrix.python-version }}
architecture: ${{ matrix.architecture }}
@ -238,7 +252,7 @@ jobs:
strategy:
fail-fast: false # Don't cancel all jobs if one fails.
matrix:
python-version: ["3.8", "3.11"]
python-version: ["3.8", "3.12"]
runs-on: ubuntu-latest
if: ${{ github.event_name != 'pull_request_target' }}
steps:
@ -249,7 +263,7 @@ jobs:
path: wheels
- name: Delete Binary Wheels
run: find wheels -type f | grep -v none-any | xargs rm
- uses: actions/setup-python@v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # v4.7.0
with:
python-version: ${{ matrix.python-version }}
- name: Setup Python venv

@ -15,12 +15,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout protobuf-php
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
repository: protocolbuffers/protobuf-php
token: ${{ secrets.BOT_ACCESS_TOKEN }}
- name: Clone protobuf
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0
with:
path: protobuf
- name: Configure Git Bot

@ -1,11 +1,11 @@
# Bazel (https://bazel.build/) BUILD file for Protobuf.
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_proto_library")
load("@rules_java//java:defs.bzl", "java_lite_proto_library", "java_proto_library")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("@rules_proto//proto:defs.bzl", "proto_lang_toolchain", "proto_library")
load("@rules_java//java:defs.bzl", "java_lite_proto_library", "java_proto_library")
load("//build_defs:cpp_opts.bzl", "COPTS", "LINK_OPTS")
load(":protobuf.bzl", "internal_objc_proto_library", "internal_php_proto_library", "internal_py_proto_library", "internal_ruby_proto_library")
load(":protobuf.bzl", "internal_objc_proto_library", "internal_php_proto_library", "internal_py_proto_library")
licenses(["notice"])
@ -150,17 +150,6 @@ filegroup(
visibility = ["//visibility:public"],
)
internal_ruby_proto_library(
name = "well_known_ruby_protos",
srcs = [":well_known_protos"],
includes = ["src"],
default_runtime = "",
visibility = [
"//conformance:__pkg__",
"//ruby:__subpackages__",
],
)
################################################################################
# Protocol Buffers Compiler
################################################################################
@ -182,7 +171,6 @@ cc_binary(
cc_binary(
name = "protoc_static",
copts = COPTS,
linkopts = LINK_OPTS,
features = select({
# This isn't possible on mac because there is no static library for lcrt0.o
"@platforms//os:osx": [],
@ -190,6 +178,7 @@ cc_binary(
# When cross-compiling we need to statically link all C++ libraries.
"//conditions:default": ["fully_static_link"],
}),
linkopts = LINK_OPTS,
visibility = ["//visibility:public"],
deps = ["//src/google/protobuf/compiler:protoc_lib"],
)
@ -224,7 +213,6 @@ alias(
cc_library(
name = "protobuf",
copts = COPTS,
include_prefix = "google/protobuf/io",
linkopts = LINK_OPTS,
visibility = ["//visibility:public"],
deps = [
@ -302,13 +290,13 @@ alias(
alias(
name = "python_srcs",
actual = "//python:python_srcs",
visibility = ["//upb:__subpackages__"],
visibility = ["//python:__subpackages__"],
)
alias(
name = "python_test_srcs",
actual = "//python:python_test_srcs",
visibility = ["//upb:__subpackages__"],
visibility = ["//python:__subpackages__"],
)
alias(
@ -525,33 +513,6 @@ internal_php_proto_library(
],
)
internal_ruby_proto_library(
name = "test_messages_proto2_ruby_proto",
testonly = 1,
srcs = ["//src/google/protobuf:test_messages_proto2.proto"],
includes = ["src/google/protobuf"],
visibility = [
"//conformance:__pkg__",
"//ruby:__subpackages__",
],
)
internal_ruby_proto_library(
name = "test_messages_proto3_ruby_proto",
testonly = 1,
srcs = ["//src/google/protobuf:test_messages_proto3.proto"],
includes = [
"src/google/protobuf",
# The above must come first.
"src",
],
deps = [":well_known_ruby_protos"],
visibility = [
"//conformance:__pkg__",
"//ruby:__subpackages__",
],
)
filegroup(
name = "bzl_srcs",
srcs = glob(["**/*.bzl"]),

@ -79,7 +79,7 @@ if (protobuf_BUILD_SHARED_LIBS)
endif ()
# Version metadata
set(protobuf_VERSION_STRING "4.24.0")
set(protobuf_VERSION_STRING "4.25.0")
set(protobuf_DESCRIPTION "Protocol Buffers")
set(protobuf_CONTACT "protobuf@googlegroups.com")

@ -0,0 +1,927 @@
{
"checksum": "f93f5d1848bc00c6384273f9fb5273cc1b7fc0cb4dbc2afd776d2feb7b37f3ae",
"crates": {
"aho-corasick 1.1.2": {
"name": "aho-corasick",
"version": "1.1.2",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/aho-corasick/1.1.2/download",
"sha256": "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
}
},
"targets": [
{
"Library": {
"crate_name": "aho_corasick",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "aho_corasick",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"default",
"perf-literal",
"std"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "memchr 2.6.4",
"target": "memchr"
}
],
"selects": {}
},
"edition": "2021",
"version": "1.1.2"
},
"license": "Unlicense OR MIT"
},
"autocfg 1.1.0": {
"name": "autocfg",
"version": "1.1.0",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/autocfg/1.1.0/download",
"sha256": "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
}
},
"targets": [
{
"Library": {
"crate_name": "autocfg",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "autocfg",
"common_attrs": {
"compile_data_glob": [
"**"
],
"edition": "2015",
"version": "1.1.0"
},
"license": "Apache-2.0 OR MIT"
},
"direct-cargo-bazel-deps 0.0.1": {
"name": "direct-cargo-bazel-deps",
"version": "0.0.1",
"repository": null,
"targets": [
{
"Library": {
"crate_name": "direct_cargo_bazel_deps",
"crate_root": ".direct_cargo_bazel_deps.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "direct_cargo_bazel_deps",
"common_attrs": {
"compile_data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "googletest 0.10.0",
"target": "googletest"
}
],
"selects": {}
},
"edition": "2018",
"proc_macro_deps": {
"common": [
{
"id": "paste 1.0.14",
"target": "paste"
}
],
"selects": {}
},
"version": "0.0.1"
},
"license": null
},
"googletest 0.10.0": {
"name": "googletest",
"version": "0.10.0",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/googletest/0.10.0/download",
"sha256": "09213705c85aa0e4b4fff44a3a826a556979a34a266df6bcda703a49c69fb61e"
}
},
"targets": [
{
"Library": {
"crate_name": "googletest",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "googletest",
"common_attrs": {
"compile_data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "num-traits 0.2.17",
"target": "num_traits"
},
{
"id": "regex 1.10.0",
"target": "regex"
}
],
"selects": {}
},
"edition": "2021",
"proc_macro_deps": {
"common": [
{
"id": "googletest_macro 0.10.0",
"target": "googletest_macro"
},
{
"id": "rustversion 1.0.14",
"target": "rustversion"
}
],
"selects": {}
},
"version": "0.10.0"
},
"license": "Apache-2.0"
},
"googletest_macro 0.10.0": {
"name": "googletest_macro",
"version": "0.10.0",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/googletest_macro/0.10.0/download",
"sha256": "005e4cb962c56efd249bdeeb4ac232b11e1c45a2e49793bba2b2982dcc3f2e9d"
}
},
"targets": [
{
"ProcMacro": {
"crate_name": "googletest_macro",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "googletest_macro",
"common_attrs": {
"compile_data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "quote 1.0.33",
"target": "quote"
},
{
"id": "syn 2.0.38",
"target": "syn"
}
],
"selects": {}
},
"edition": "2021",
"version": "0.10.0"
},
"license": "Apache-2.0"
},
"memchr 2.6.4": {
"name": "memchr",
"version": "2.6.4",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/memchr/2.6.4/download",
"sha256": "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
}
},
"targets": [
{
"Library": {
"crate_name": "memchr",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "memchr",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"alloc",
"default",
"std"
],
"selects": {}
},
"edition": "2021",
"version": "2.6.4"
},
"license": "Unlicense OR MIT"
},
"num-traits 0.2.17": {
"name": "num-traits",
"version": "0.2.17",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/num-traits/0.2.17/download",
"sha256": "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c"
}
},
"targets": [
{
"Library": {
"crate_name": "num_traits",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
},
{
"BuildScript": {
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "num_traits",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"default",
"std"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "num-traits 0.2.17",
"target": "build_script_build"
}
],
"selects": {}
},
"edition": "2018",
"version": "0.2.17"
},
"build_script_attrs": {
"data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "autocfg 1.1.0",
"target": "autocfg"
}
],
"selects": {}
}
},
"license": "MIT OR Apache-2.0"
},
"paste 1.0.14": {
"name": "paste",
"version": "1.0.14",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/paste/1.0.14/download",
"sha256": "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
}
},
"targets": [
{
"ProcMacro": {
"crate_name": "paste",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
},
{
"BuildScript": {
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "paste",
"common_attrs": {
"compile_data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "paste 1.0.14",
"target": "build_script_build"
}
],
"selects": {}
},
"edition": "2018",
"version": "1.0.14"
},
"build_script_attrs": {
"data_glob": [
"**"
]
},
"license": "MIT OR Apache-2.0"
},
"proc-macro2 1.0.69": {
"name": "proc-macro2",
"version": "1.0.69",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/proc-macro2/1.0.69/download",
"sha256": "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
}
},
"targets": [
{
"Library": {
"crate_name": "proc_macro2",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
},
{
"BuildScript": {
"crate_name": "build_script_build",
"crate_root": "build.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "proc_macro2",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"proc-macro"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "proc-macro2 1.0.69",
"target": "build_script_build"
},
{
"id": "unicode-ident 1.0.12",
"target": "unicode_ident"
}
],
"selects": {}
},
"edition": "2021",
"version": "1.0.69"
},
"build_script_attrs": {
"data_glob": [
"**"
]
},
"license": "MIT OR Apache-2.0"
},
"quote 1.0.33": {
"name": "quote",
"version": "1.0.33",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/quote/1.0.33/download",
"sha256": "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
}
},
"targets": [
{
"Library": {
"crate_name": "quote",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "quote",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"default",
"proc-macro"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "proc-macro2 1.0.69",
"target": "proc_macro2"
}
],
"selects": {}
},
"edition": "2018",
"version": "1.0.33"
},
"license": "MIT OR Apache-2.0"
},
"regex 1.10.0": {
"name": "regex",
"version": "1.10.0",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/regex/1.10.0/download",
"sha256": "d119d7c7ca818f8a53c300863d4f87566aac09943aef5b355bb83969dae75d87"
}
},
"targets": [
{
"Library": {
"crate_name": "regex",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "regex",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"default",
"perf",
"perf-backtrack",
"perf-cache",
"perf-dfa",
"perf-inline",
"perf-literal",
"perf-onepass",
"std",
"unicode",
"unicode-age",
"unicode-bool",
"unicode-case",
"unicode-gencat",
"unicode-perl",
"unicode-script",
"unicode-segment"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "aho-corasick 1.1.2",
"target": "aho_corasick"
},
{
"id": "memchr 2.6.4",
"target": "memchr"
},
{
"id": "regex-automata 0.4.1",
"target": "regex_automata"
},
{
"id": "regex-syntax 0.8.1",
"target": "regex_syntax"
}
],
"selects": {}
},
"edition": "2021",
"version": "1.10.0"
},
"license": "MIT OR Apache-2.0"
},
"regex-automata 0.4.1": {
"name": "regex-automata",
"version": "0.4.1",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/regex-automata/0.4.1/download",
"sha256": "465c6fc0621e4abc4187a2bda0937bfd4f722c2730b29562e19689ea796c9a4b"
}
},
"targets": [
{
"Library": {
"crate_name": "regex_automata",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "regex_automata",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"alloc",
"dfa-onepass",
"hybrid",
"meta",
"nfa-backtrack",
"nfa-pikevm",
"nfa-thompson",
"perf-inline",
"perf-literal",
"perf-literal-multisubstring",
"perf-literal-substring",
"std",
"syntax",
"unicode",
"unicode-age",
"unicode-bool",
"unicode-case",
"unicode-gencat",
"unicode-perl",
"unicode-script",
"unicode-segment",
"unicode-word-boundary"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "aho-corasick 1.1.2",
"target": "aho_corasick"
},
{
"id": "memchr 2.6.4",
"target": "memchr"
},
{
"id": "regex-syntax 0.8.1",
"target": "regex_syntax"
}
],
"selects": {}
},
"edition": "2021",
"version": "0.4.1"
},
"license": "MIT OR Apache-2.0"
},
"regex-syntax 0.8.1": {
"name": "regex-syntax",
"version": "0.8.1",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/regex-syntax/0.8.1/download",
"sha256": "56d84fdd47036b038fc80dd333d10b6aab10d5d31f4a366e20014def75328d33"
}
},
"targets": [
{
"Library": {
"crate_name": "regex_syntax",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "regex_syntax",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"default",
"std",
"unicode",
"unicode-age",
"unicode-bool",
"unicode-case",
"unicode-gencat",
"unicode-perl",
"unicode-script",
"unicode-segment"
],
"selects": {}
},
"edition": "2021",
"version": "0.8.1"
},
"license": "MIT OR Apache-2.0"
},
"rustversion 1.0.14": {
"name": "rustversion",
"version": "1.0.14",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/rustversion/1.0.14/download",
"sha256": "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
}
},
"targets": [
{
"ProcMacro": {
"crate_name": "rustversion",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
},
{
"BuildScript": {
"crate_name": "build_script_build",
"crate_root": "build/build.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "rustversion",
"common_attrs": {
"compile_data_glob": [
"**"
],
"deps": {
"common": [
{
"id": "rustversion 1.0.14",
"target": "build_script_build"
}
],
"selects": {}
},
"edition": "2018",
"version": "1.0.14"
},
"build_script_attrs": {
"data_glob": [
"**"
]
},
"license": "MIT OR Apache-2.0"
},
"syn 2.0.38": {
"name": "syn",
"version": "2.0.38",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/syn/2.0.38/download",
"sha256": "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"
}
},
"targets": [
{
"Library": {
"crate_name": "syn",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "syn",
"common_attrs": {
"compile_data_glob": [
"**"
],
"crate_features": {
"common": [
"clone-impls",
"default",
"derive",
"full",
"parsing",
"printing",
"proc-macro",
"quote"
],
"selects": {}
},
"deps": {
"common": [
{
"id": "proc-macro2 1.0.69",
"target": "proc_macro2"
},
{
"id": "quote 1.0.33",
"target": "quote"
},
{
"id": "unicode-ident 1.0.12",
"target": "unicode_ident"
}
],
"selects": {}
},
"edition": "2021",
"version": "2.0.38"
},
"license": "MIT OR Apache-2.0"
},
"unicode-ident 1.0.12": {
"name": "unicode-ident",
"version": "1.0.12",
"repository": {
"Http": {
"url": "https://crates.io/api/v1/crates/unicode-ident/1.0.12/download",
"sha256": "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
}
},
"targets": [
{
"Library": {
"crate_name": "unicode_ident",
"crate_root": "src/lib.rs",
"srcs": [
"**/*.rs"
]
}
}
],
"library_target_name": "unicode_ident",
"common_attrs": {
"compile_data_glob": [
"**"
],
"edition": "2018",
"version": "1.0.12"
},
"license": "(MIT OR Apache-2.0) AND Unicode-DFS-2016"
}
},
"binary_crates": [],
"workspace_members": {
"direct-cargo-bazel-deps 0.0.1": ""
},
"conditions": {
"aarch64-apple-darwin": [
"aarch64-apple-darwin"
],
"aarch64-apple-ios": [
"aarch64-apple-ios"
],
"aarch64-apple-ios-sim": [
"aarch64-apple-ios-sim"
],
"aarch64-fuchsia": [
"aarch64-fuchsia"
],
"aarch64-linux-android": [
"aarch64-linux-android"
],
"aarch64-pc-windows-msvc": [
"aarch64-pc-windows-msvc"
],
"aarch64-unknown-linux-gnu": [
"aarch64-unknown-linux-gnu"
],
"arm-unknown-linux-gnueabi": [
"arm-unknown-linux-gnueabi"
],
"armv7-linux-androideabi": [
"armv7-linux-androideabi"
],
"armv7-unknown-linux-gnueabi": [
"armv7-unknown-linux-gnueabi"
],
"i686-apple-darwin": [
"i686-apple-darwin"
],
"i686-linux-android": [
"i686-linux-android"
],
"i686-pc-windows-msvc": [
"i686-pc-windows-msvc"
],
"i686-unknown-freebsd": [
"i686-unknown-freebsd"
],
"i686-unknown-linux-gnu": [
"i686-unknown-linux-gnu"
],
"powerpc-unknown-linux-gnu": [
"powerpc-unknown-linux-gnu"
],
"riscv32imc-unknown-none-elf": [
"riscv32imc-unknown-none-elf"
],
"riscv64gc-unknown-none-elf": [
"riscv64gc-unknown-none-elf"
],
"s390x-unknown-linux-gnu": [
"s390x-unknown-linux-gnu"
],
"thumbv7em-none-eabi": [
"thumbv7em-none-eabi"
],
"thumbv8m.main-none-eabi": [
"thumbv8m.main-none-eabi"
],
"wasm32-unknown-unknown": [
"wasm32-unknown-unknown"
],
"wasm32-wasi": [
"wasm32-wasi"
],
"x86_64-apple-darwin": [
"x86_64-apple-darwin"
],
"x86_64-apple-ios": [
"x86_64-apple-ios"
],
"x86_64-fuchsia": [
"x86_64-fuchsia"
],
"x86_64-linux-android": [
"x86_64-linux-android"
],
"x86_64-pc-windows-msvc": [
"x86_64-pc-windows-msvc"
],
"x86_64-unknown-freebsd": [
"x86_64-unknown-freebsd"
],
"x86_64-unknown-linux-gnu": [
"x86_64-unknown-linux-gnu"
],
"x86_64-unknown-none": [
"x86_64-unknown-none"
]
}
}

139
Cargo.lock generated

@ -0,0 +1,139 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
dependencies = [
"memchr",
]
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "direct-cargo-bazel-deps"
version = "0.0.1"
dependencies = [
"googletest",
"paste",
]
[[package]]
name = "googletest"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09213705c85aa0e4b4fff44a3a826a556979a34a266df6bcda703a49c69fb61e"
dependencies = [
"googletest_macro",
"num-traits",
"regex",
"rustversion",
]
[[package]]
name = "googletest_macro"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "005e4cb962c56efd249bdeeb4ac232b11e1c45a2e49793bba2b2982dcc3f2e9d"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "memchr"
version = "2.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
[[package]]
name = "num-traits"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c"
dependencies = [
"autocfg",
]
[[package]]
name = "paste"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
[[package]]
name = "proc-macro2"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d119d7c7ca818f8a53c300863d4f87566aac09943aef5b355bb83969dae75d87"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "465c6fc0621e4abc4187a2bda0937bfd4f722c2730b29562e19689ea796c9a4b"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56d84fdd47036b038fc80dd333d10b6aab10d5d31f4a366e20014def75328d33"
[[package]]
name = "rustversion"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
[[package]]
name = "syn"
version = "2.0.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"

@ -0,0 +1,2 @@
# TODO: migrate all dependencies from WORKSPACE to MODULE.bazel
# https://github.com/protocolbuffers/protobuf/issues/14313

@ -1,6 +1,6 @@
Pod::Spec.new do |s|
s.name = 'Protobuf-C++'
s.version = '4.24.0'
s.version = '4.25.0'
s.summary = 'Protocol Buffers v3 runtime library for C++.'
s.homepage = 'https://github.com/google/protobuf'
s.license = 'BSD-3-Clause'

@ -5,7 +5,7 @@
# dependent projects use the :git notation to refer to the library.
Pod::Spec.new do |s|
s.name = 'Protobuf'
s.version = '3.24.0'
s.version = '3.25.0'
s.summary = 'Protocol Buffers v.3 runtime library for Objective-C.'
s.homepage = 'https://github.com/protocolbuffers/protobuf'
s.license = 'BSD-3-Clause'

@ -1,6 +1,8 @@
Protocol Buffers - Google's data interchange format
===================================================
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/protocolbuffers/protobuf/badge)](https://securityscorecards.dev/viewer/?uri=github.com/protocolbuffers/protobuf)
Copyright 2023 Google LLC
Overview

@ -1,5 +1,10 @@
workspace(name = "com_google_protobuf")
# An explicit self-reference to work around changes in Bazel 7.0
# See https://github.com/bazelbuild/bazel/issues/19973#issuecomment-1787814450
# buildifier: disable=duplicated-name
local_repository(name = "com_google_protobuf", path = ".")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
local_repository(
@ -12,6 +17,10 @@ load("//:protobuf_deps.bzl", "PROTOBUF_MAVEN_ARTIFACTS", "protobuf_deps")
protobuf_deps()
load("@rules_python//python:repositories.bzl", "py_repositories")
py_repositories()
# Bazel platform rules.
http_archive(
name = "platforms",
@ -82,6 +91,15 @@ load("@io_bazel_rules_kotlin//kotlin:core.bzl", "kt_register_toolchains")
kt_register_toolchains()
http_archive(
name = "rules_ruby",
urls = [
"https://github.com/protocolbuffers/rules_ruby/archive/b7f3e9756f3c45527be27bc38840d5a1ba690436.zip"
],
strip_prefix = "rules_ruby-b7f3e9756f3c45527be27bc38840d5a1ba690436",
sha256 = "347927fd8de6132099fcdc58e8f7eab7bde4eb2fd424546b9cd4f1c6f8f8bad8",
)
load("@rules_ruby//ruby:defs.bzl", "ruby_runtime")
ruby_runtime("system_ruby")
@ -107,13 +125,9 @@ ruby_bundle(
gemfile = "//ruby:Gemfile",
)
load("//upb/bazel:workspace_deps.bzl", "upb_deps")
upb_deps()
http_archive(
name = "lua",
build_file = "//upb/bazel:lua.BUILD",
build_file = "//bazel:lua.BUILD",
sha256 = "b9e2e4aad6789b3b63a056d442f7b39f0ecfca3ae0f1fc0ae4e9614401b69f4b",
strip_prefix = "lua-5.2.4",
urls = [
@ -134,11 +148,11 @@ http_archive(
urls = ["https://github.com/googleapis/googleapis/archive/30ed2662a85403cbdeb9ea38df1e414a2a276b83.zip"],
strip_prefix = "googleapis-30ed2662a85403cbdeb9ea38df1e414a2a276b83",
sha256 = "4dfc28101127d22abd6f0f6308d915d490c4594c0cfcf7643769c446d6763a46",
build_file = "//upb/benchmarks:BUILD.googleapis",
build_file = "//benchmarks:BUILD.googleapis",
patch_cmds = ["find google -type f -name BUILD.bazel -delete"],
)
load("//upb/bazel:system_python.bzl", "system_python")
load("//bazel:system_python.bzl", "system_python")
system_python(
name = "system_python",
@ -149,28 +163,34 @@ load("@system_python//:pip.bzl", "pip_parse")
pip_parse(
name = "pip_deps",
requirements = "//upb/python:requirements.txt",
requirements = "//python:requirements.txt",
)
load("@pip_deps//:requirements.bzl", "install_deps")
install_deps()
load("@utf8_range//:workspace_deps.bzl", "utf8_range_deps")
utf8_range_deps()
http_archive(
name = "rules_fuzzing",
sha256 = "d9002dd3cd6437017f08593124fdd1b13b3473c7b929ceb0e60d317cb9346118",
strip_prefix = "rules_fuzzing-0.3.2",
urls = ["https://github.com/bazelbuild/rules_fuzzing/archive/v0.3.2.zip"],
sha256 = "ff52ef4845ab00e95d29c02a9e32e9eff4e0a4c9c8a6bcf8407a2f19eb3f9190",
strip_prefix = "rules_fuzzing-0.4.1",
urls = ["https://github.com/bazelbuild/rules_fuzzing/releases/download/v0.4.1/rules_fuzzing-0.4.1.zip"],
patches = ["//third_party:rules_fuzzing.patch"],
patch_args = ["-p1"],
)
load("@rules_fuzzing//fuzzing:repositories.bzl", "rules_fuzzing_dependencies")
rules_fuzzing_dependencies()
load("@rules_fuzzing//fuzzing:init.bzl", "rules_fuzzing_init")
rules_fuzzing_init()
load("@fuzzing_py_deps//:requirements.bzl", fuzzing_py_deps_install_deps = "install_deps")
fuzzing_py_deps_install_deps()
bind(
name = "python_headers",
actual = "@system_python//:python_headers",
@ -178,8 +198,8 @@ bind(
http_archive(
name = "rules_rust",
sha256 = "4a9cb4fda6ccd5b5ec393b2e944822a62e050c7c06f1ea41607f14c4fdec57a2",
urls = ["https://github.com/bazelbuild/rules_rust/releases/download/0.25.1/rules_rust-v0.25.1.tar.gz"],
sha256 = "9ecd0f2144f0a24e6bc71ebcc50a1ee5128cedeceb32187004532c9710cb2334",
urls = ["https://github.com/bazelbuild/rules_rust/releases/download/0.29.1/rules_rust-v0.29.1.tar.gz"],
)
load("@rules_rust//rust:repositories.bzl", "rules_rust_dependencies", "rust_register_toolchains")
@ -187,3 +207,21 @@ load("@rules_rust//rust:repositories.bzl", "rules_rust_dependencies", "rust_regi
rules_rust_dependencies()
rust_register_toolchains(edition = "2021")
load("@rules_rust//crate_universe:defs.bzl", "crate", "crates_repository")
# to repin, invoke `CARGO_BAZEL_REPIN=1 bazel sync --only=crate_index`
crates_repository(
name = "crate_index",
cargo_lockfile = "//:Cargo.lock",
lockfile = "//:Cargo.bazel.lock",
packages = {
"googletest": crate.spec(
version = ">0.0.0",
),
"paste": crate.spec(
version = ">=1",
),
},
)
load("@crate_index//:defs.bzl", "crate_repositories")
crate_repositories()
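The crates_repository above pins exactly the crates recorded in the Cargo.lock / Cargo.bazel.lock pair shown earlier (googletest, paste, and their transitive dependencies). Rust targets then consume them through the generated @crate_index repository; a minimal sketch under that assumption — the target and file names below are illustrative, not part of this change:

    load("@rules_rust//rust:defs.bzl", "rust_test")
    load("@crate_index//:defs.bzl", "all_crate_deps")

    rust_test(
        name = "matchers_test",                # hypothetical target
        srcs = ["matchers_test.rs"],           # hypothetical source
        deps = all_crate_deps(normal = True),  # expands to the pinned googletest/paste crates
    )

After editing the packages dict, the pins are refreshed with the repin command noted in the comment above (CARGO_BAZEL_REPIN=1 bazel sync --only=crate_index).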

@ -7,6 +7,7 @@
load("@rules_python//python:defs.bzl", "py_binary")
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
load("@bazel_skylib//lib:selects.bzl", "selects")
# begin:google_only
# package(default_applicable_licenses = ["//upb:license"])
@ -14,10 +15,15 @@ load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
licenses(["notice"])
exports_files(
["workspace_deps.bzl"],
visibility = ["//upb/cmake:__pkg__"],
)
# begin:google_only
# selects.config_setting_group(
# name = "android_opt",
# match_all = [
# "//tools/cc_target_os:android",
# "//tools/compilation_mode:opt",
# ],
# )
# end:google_only
py_binary(
name = "amalgamate",
@ -26,11 +32,14 @@ py_binary(
)
# py_proto_library() is private rule, only intended for internal use by upb.
# Hopefully py_proto_library() will eventually be availble in rules_proto or
# Hopefully py_proto_library() will eventually be available in rules_proto or
# another upstream package.
bzl_library(
name = "py_proto_library_bzl",
srcs = ["py_proto_library.bzl"],
deps = [
"@rules_python//python:py_info_bzl",
],
)
bzl_library(
@ -43,6 +52,7 @@ bzl_library(
],
deps = [
"@bazel_skylib//lib:paths",
"@rules_proto//proto:defs",
"@bazel_tools//tools/cpp:toolchain_utils.bzl",
],
)

@ -54,7 +54,7 @@ upb_amalgamation = rule(
"_amalgamator": attr.label(
executable = True,
cfg = "exec",
default = "//upb/bazel:amalgamate",
default = "//bazel:amalgamate",
),
"prefix": attr.string(
default = "",

@ -30,12 +30,20 @@ _DEFAULT_COPTS.extend([
UPB_DEFAULT_CPPOPTS = select({
"//upb:windows": [],
# begin:google_only
# # Override default -Oz for release builds on Android.
# "//bazel:android_opt": _DEFAULT_CPPOPTS + ["-O2"],
# end:google_only
"//conditions:default": _DEFAULT_CPPOPTS,
})
UPB_DEFAULT_COPTS = select({
"//upb:windows": [],
"//upb:fasttable_enabled_setting": ["-std=gnu99", "-DUPB_ENABLE_FASTTABLE"],
# begin:google_only
# # Override default -Oz for release builds on Android.
# "//bazel:android_opt": _DEFAULT_COPTS + ["-O2"],
# end:google_only
"//conditions:default": _DEFAULT_COPTS,
})

@ -15,6 +15,7 @@ But it hasn't been deeply tested or reviewed, and upb should not be in the
business of vending py_proto_library(), so we keep it private to upb.
"""
load("@rules_python//python:py_info.bzl", "PyInfo")
load("@bazel_skylib//lib:paths.bzl", "paths")
# begin:github_only

@ -39,7 +39,7 @@ cc_import(
hdrs = glob(["**/*.h"]),
shared_library = "python{0}.dll",
interface_library = "libs/python{0}.lib",
visibility = ["@com_google_protobuf//upb/python:__pkg__"],
visibility = ["@com_google_protobuf//python:__pkg__"],
)
cc_import(
@ -47,7 +47,7 @@ cc_import(
hdrs = glob(["**/*.h"]),
shared_library = "python{1}.dll",
interface_library = "libs/python{1}.lib",
visibility = ["@com_google_protobuf//upb/python:__pkg__"],
visibility = ["@com_google_protobuf//python:__pkg__"],
)
"""

@ -1,9 +1,9 @@
"""upb_c_proto_library() exposes upb's generated C API for protobuf (foo.upb.h)"""
load("//upb/bazel:upb_minitable_proto_library.bzl", "UpbMinitableCcInfo", "upb_minitable_proto_library_aspect")
load("//upb/bazel:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//upb/bazel:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//upb/bazel:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")
load("//bazel:upb_minitable_proto_library.bzl", "UpbMinitableCcInfo", "upb_minitable_proto_library_aspect")
load("//bazel:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//bazel:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//bazel:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")
UpbWrappedCcInfo = provider(
"Provider for cc_info for protos",
@ -31,22 +31,12 @@ upb_c_proto_library_aspect = aspect(
"_copts": attr.label(
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use",
),
"_gen_upb": attr.label(
executable = True,
cfg = "exec",
default = "//upb/upbc:protoc-gen-upb_stage1",
),
"_protoc": attr.label(
executable = True,
cfg = "exec",
default = "//:protoc",
"_upb_toolchain": attr.label(
default = Label("//upb_generator:protoc-gen-upb_toolchain"),
),
"_cc_toolchain": attr.label(
default = "@bazel_tools//tools/cpp:current_cc_toolchain",
),
"_upb": attr.label_list(default = [
"//upb:generated_code_support__only_for_generated_code_do_not_use__i_give_permission_to_break_me",
]),
},
implementation = _upb_c_proto_library_aspect_impl,
requires = [upb_minitable_proto_library_aspect],
@ -58,7 +48,9 @@ upb_c_proto_library_aspect = aspect(
attr_aspects = ["deps"],
fragments = ["cpp"],
toolchains = upb_use_cpp_toolchain(),
incompatible_use_toolchain_transition = True,
exec_groups = {
"proto_compiler": exec_group(),
},
)
def _upb_c_proto_library_rule_impl(ctx):
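The hunk above replaces the hard-coded _protoc/_gen_upb attributes with a single _upb_toolchain label pointing at a proto_lang_toolchain target. The diff does not show how //upb_generator:protoc-gen-upb_toolchain itself is defined; a rough sketch of what such a toolchain declaration typically looks like — every attribute value below is an assumption, not taken from this change:

    load("@rules_proto//proto:defs.bzl", "proto_lang_toolchain")

    proto_lang_toolchain(
        name = "protoc-gen-upb_toolchain",
        command_line = "--upb_out=%s",                       # assumed output flag
        plugin = ":protoc-gen-upb",                          # assumed generator binary
        plugin_format_flag = "--plugin=protoc-gen-upb=%s",
        runtime = "//upb:generated_code_support__only_for_generated_code_do_not_use__i_give_permission_to_break_me",
        visibility = ["//visibility:public"],
    )

With a toolchain target like this in place, the aspect can hand code generation to proto_common.compile instead of assembling protoc command lines by hand, which is what the aspect.bzl changes further down do.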

@ -1,8 +1,8 @@
"""upb_minitable_proto_library() exposes upb's generated minitables (foo.upb_minitable.h)"""
load("//upb/bazel:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//upb/bazel:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//upb/bazel:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")
load("//bazel:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//bazel:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//bazel:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")
UpbMinitableCcInfo = provider(
"Provider for cc_info for protos",
@ -43,22 +43,12 @@ upb_minitable_proto_library_aspect = aspect(
"_copts": attr.label(
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use",
),
"_gen_upb_minitable": attr.label(
executable = True,
cfg = "exec",
default = "//upb/upbc:protoc-gen-upb_minitable_stage1",
),
"_protoc": attr.label(
executable = True,
cfg = "exec",
default = "//:protoc",
"_upb_minitable_toolchain": attr.label(
default = Label("//upb_generator:protoc-gen-upb_minitable_toolchain"),
),
"_cc_toolchain": attr.label(
default = "@bazel_tools//tools/cpp:current_cc_toolchain",
),
"_upb_minitable": attr.label_list(default = [
"//upb:generated_code_support__only_for_generated_code_do_not_use__i_give_permission_to_break_me",
]),
"_fasttable_enabled": attr.label(default = "//upb:fasttable_enabled"),
},
implementation = _upb_minitable_proto_library_aspect_impl,
@ -66,7 +56,9 @@ upb_minitable_proto_library_aspect = aspect(
attr_aspects = ["deps"],
fragments = ["cpp"],
toolchains = upb_use_cpp_toolchain(),
incompatible_use_toolchain_transition = True,
exec_groups = {
"proto_compiler": exec_group(),
},
)
def _upb_minitable_proto_library_rule_impl(ctx):

@ -11,17 +11,17 @@
"""
load(
"//upb/bazel:upb_c_proto_library.bzl",
"//bazel:upb_c_proto_library.bzl",
_UpbWrappedCcInfo = "UpbWrappedCcInfo",
_upb_c_proto_library = "upb_c_proto_library",
_upb_c_proto_library_aspect = "upb_c_proto_library_aspect",
)
load(
"//upb/bazel:upb_proto_library_internal/aspect.bzl",
"//bazel:upb_proto_library_internal/aspect.bzl",
_GeneratedSrcsInfo = "GeneratedSrcsInfo",
)
load(
"//upb/bazel:upb_proto_reflection_library.bzl",
"//bazel:upb_proto_reflection_library.bzl",
_upb_proto_reflection_library = "upb_proto_reflection_library",
)

@ -1,8 +1,8 @@
"""Implementation of the aspect that powers the upb_*_proto_library() rules."""
load("@bazel_skylib//lib:paths.bzl", "paths")
load("//upb/bazel:upb_proto_library_internal/cc_library_func.bzl", "cc_library_func")
load("//upb/bazel:upb_proto_library_internal/copts.bzl", "UpbProtoLibraryCoptsInfo")
load("//bazel:upb_proto_library_internal/cc_library_func.bzl", "cc_library_func")
load("//bazel:upb_proto_library_internal/copts.bzl", "UpbProtoLibraryCoptsInfo")
load("@rules_proto//proto:defs.bzl", "proto_common")
# begin:github_only
_is_google3 = False
@ -12,64 +12,35 @@ _is_google3 = False
# _is_google3 = True
# end:google_only
def _get_real_short_path(file):
# For some reason, files from other archives have short paths that look like:
# ../com_google_protobuf/google/protobuf/descriptor.proto
short_path = file.short_path
if short_path.startswith("../"):
second_slash = short_path.index("/", 3)
short_path = short_path[second_slash + 1:]
# Sometimes it has another few prefixes like:
# _virtual_imports/any_proto/google/protobuf/any.proto
# benchmarks/_virtual_imports/100_msgs_proto/benchmarks/100_msgs.proto
# We want just google/protobuf/any.proto.
virtual_imports = "_virtual_imports/"
if virtual_imports in short_path:
short_path = short_path.split(virtual_imports)[1].split("/", 1)[1]
return short_path
def _get_real_root(ctx, file):
real_short_path = _get_real_short_path(file)
root = file.path[:-len(real_short_path) - 1]
if not _is_google3 and ctx.rule.attr.strip_import_prefix:
root = paths.join(root, ctx.rule.attr.strip_import_prefix[1:])
return root
def _generate_output_file(ctx, src, extension):
package = ctx.label.package
if not _is_google3:
strip_import_prefix = ctx.rule.attr.strip_import_prefix
if strip_import_prefix and strip_import_prefix != "/":
if not package.startswith(strip_import_prefix[1:]):
fail("%s does not begin with prefix %s" % (package, strip_import_prefix))
package = package[len(strip_import_prefix):]
real_short_path = _get_real_short_path(src)
real_short_path = paths.relativize(real_short_path, package)
output_filename = paths.replace_extension(real_short_path, extension)
ret = ctx.actions.declare_file(output_filename)
return ret
def _generate_include_path(src, out, extension):
short_path = _get_real_short_path(src)
short_path = paths.replace_extension(short_path, extension)
if not out.path.endswith(short_path):
fail("%s does not end with %s" % (out.path, short_path))
return out.path[:-len(short_path)]
GeneratedSrcsInfo = provider(
"Provides generated headers and sources",
fields = {
"srcs": "list of srcs",
"hdrs": "list of hdrs",
"thunks": "Experimental, do not use. List of srcs defining C API. Incompatible with hdrs.",
"includes": "list of extra includes",
},
)
def output_dir(ctx, proto_info):
"""Returns the output directory where generated proto files will be placed.
Args:
ctx: Rule context.
proto_info: ProtoInfo provider.
Returns:
A string specifying the output directory
"""
proto_root = proto_info.proto_source_root
if proto_root.startswith(ctx.bin_dir.path):
path = proto_root
else:
path = ctx.bin_dir.path + "/" + proto_root
if proto_root == ".":
path = ctx.bin_dir.path
return path
def _concat_lists(lists):
ret = []
for lst in lists:
@ -81,49 +52,35 @@ def _merge_generated_srcs(srcs):
srcs = _concat_lists([s.srcs for s in srcs]),
hdrs = _concat_lists([s.hdrs for s in srcs]),
thunks = _concat_lists([s.thunks for s in srcs]),
includes = _concat_lists([s.includes for s in srcs]),
)
def _generate_upb_protos(ctx, generator, proto_info, proto_sources):
if len(proto_sources) == 0:
def _generate_upb_protos(ctx, generator, proto_info):
if len(proto_info.direct_sources) == 0:
return GeneratedSrcsInfo(srcs = [], hdrs = [], thunks = [], includes = [])
ext = "." + generator
tool = getattr(ctx.executable, "_gen_" + generator)
srcs = []
thunks = []
hdrs = proto_common.declare_generated_files(
ctx.actions,
extension = ext + ".h",
proto_info = proto_info,
)
if not (generator == "upb" and _is_google3):
# TODO: The OSS build should also exclude this file for the upb generator,
# as it is empty and unnecessary. We only added it to make the OSS build happy on
# Windows and macOS.
srcs += [_generate_output_file(ctx, name, ext + ".c") for name in proto_sources]
hdrs = [_generate_output_file(ctx, name, ext + ".h") for name in proto_sources]
thunks = []
if generator == "upb":
thunks = [_generate_output_file(ctx, name, ext + ".thunks.c") for name in proto_sources]
transitive_sets = proto_info.transitive_descriptor_sets.to_list()
args = ctx.actions.args()
args.use_param_file(param_file_arg = "@%s")
args.set_param_file_format("multiline")
args.add("--" + generator + "_out=" + _get_real_root(ctx, hdrs[0]))
args.add("--plugin=protoc-gen-" + generator + "=" + tool.path)
args.add("--descriptor_set_in=" + ctx.configuration.host_path_separator.join([f.path for f in transitive_sets]))
args.add_all(proto_sources, map_each = _get_real_short_path)
ctx.actions.run(
inputs = depset(
direct = [proto_info.direct_descriptor_set],
transitive = [proto_info.transitive_descriptor_sets],
),
tools = [tool],
outputs = srcs + hdrs,
executable = ctx.executable._protoc,
arguments = [args],
progress_message = "Generating upb protos for :" + ctx.label.name,
mnemonic = "GenUpbProtos",
srcs += proto_common.declare_generated_files(
ctx.actions,
extension = ext + ".c",
proto_info = proto_info,
)
if generator == "upb":
thunks = proto_common.declare_generated_files(
ctx.actions,
extension = ext + ".thunks.c",
proto_info = proto_info,
)
ctx.actions.run_shell(
inputs = hdrs,
outputs = thunks,
@ -134,11 +91,19 @@ def _generate_upb_protos(ctx, generator, proto_info, proto_sources):
progress_message = "Generating thunks for upb protos API for: " + ctx.label.name,
mnemonic = "GenUpbProtosThunks",
)
proto_common.compile(
actions = ctx.actions,
proto_info = proto_info,
proto_lang_toolchain_info = _get_lang_toolchain(ctx, generator),
generated_files = srcs + hdrs,
experimental_exec_group = "proto_compiler",
)
return GeneratedSrcsInfo(
srcs = srcs,
hdrs = hdrs,
thunks = thunks,
includes = [_generate_include_path(proto_sources[0], hdrs[0], ext + ".h")],
)
def _generate_name(ctx, generator, thunks = False):
@ -147,10 +112,8 @@ def _generate_name(ctx, generator, thunks = False):
return ctx.rule.attr.name + "." + generator
def _get_dep_cc_infos(target, ctx, generator, cc_provider, dep_cc_provider):
aspect_deps = getattr(ctx.attr, "_" + generator)
rule_deps = ctx.rule.attr.deps
dep_ccinfos = [dep[CcInfo] for dep in aspect_deps]
dep_ccinfos += [dep[cc_provider].cc_info for dep in rule_deps]
dep_ccinfos = [dep[cc_provider].cc_info for dep in rule_deps]
if dep_cc_provider:
# This gives access to our direct sibling. eg. foo.upb.h can #include "foo.upb_minitable.h"
dep_ccinfos.append(target[dep_cc_provider].cc_info)
@ -161,13 +124,17 @@ def _get_dep_cc_infos(target, ctx, generator, cc_provider, dep_cc_provider):
return dep_ccinfos
def _compile_upb_protos(ctx, files, generator, dep_ccinfos, cc_provider):
def _get_lang_toolchain(ctx, generator):
lang_toolchain_name = "_" + generator + "_toolchain"
return getattr(ctx.attr, lang_toolchain_name)[proto_common.ProtoLangToolchainInfo]
def _compile_upb_protos(ctx, files, generator, dep_ccinfos, cc_provider, proto_info):
cc_info = cc_library_func(
ctx = ctx,
name = _generate_name(ctx, generator),
hdrs = files.hdrs,
srcs = files.srcs,
includes = files.includes,
includes = [output_dir(ctx, proto_info)],
copts = ctx.attr._copts[UpbProtoLibraryCoptsInfo].copts,
dep_ccinfos = dep_ccinfos,
)
@ -178,7 +145,7 @@ def _compile_upb_protos(ctx, files, generator, dep_ccinfos, cc_provider):
name = _generate_name(ctx, generator, files.thunks),
hdrs = [],
srcs = files.thunks,
includes = files.includes,
includes = [output_dir(ctx, proto_info)],
copts = ctx.attr._copts[UpbProtoLibraryCoptsInfo].copts,
dep_ccinfos = dep_ccinfos + [cc_info],
)
@ -254,14 +221,14 @@ def upb_proto_aspect_impl(
ctx,
generator,
proto_info,
proto_info.direct_sources,
)
wrapped_cc_info = _compile_upb_protos(
ctx,
files,
generator,
dep_ccinfos,
dep_ccinfos + [_get_lang_toolchain(ctx, generator).runtime[CcInfo]],
cc_provider,
proto_info,
)
hints = _get_hint_providers(ctx, generator) if provide_cc_shared_library_hints else []
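The rewritten aspect above now declares its outputs with proto_common.declare_generated_files and delegates the codegen action to proto_common.compile, driven by the language toolchain. A minimal standalone sketch of that same pattern, to make the flow easier to follow in isolation — the rule, its attributes, and the toolchain label are hypothetical:

    load("@rules_proto//proto:defs.bzl", "ProtoInfo", "proto_common")

    def _upb_gen_impl(ctx):
        proto_info = ctx.attr.proto[ProtoInfo]
        # Declare foo.upb.h / foo.upb.c next to each foo.proto in the output tree.
        hdrs = proto_common.declare_generated_files(
            ctx.actions, proto_info = proto_info, extension = ".upb.h")
        srcs = proto_common.declare_generated_files(
            ctx.actions, proto_info = proto_info, extension = ".upb.c")
        # One protoc invocation, configured entirely by the proto_lang_toolchain.
        proto_common.compile(
            actions = ctx.actions,
            proto_info = proto_info,
            proto_lang_toolchain_info =
                ctx.attr._toolchain[proto_common.ProtoLangToolchainInfo],
            generated_files = srcs + hdrs,
        )
        return [DefaultInfo(files = depset(srcs + hdrs))]

    upb_gen = rule(
        implementation = _upb_gen_impl,
        attrs = {
            "proto": attr.label(providers = [ProtoInfo]),
            "_toolchain": attr.label(
                default = "//upb_generator:protoc-gen-upb_toolchain",  # assumed label
            ),
        },
    )

The real aspect additionally routes the action through the proto_compiler exec group and feeds the generated sources into cc_library_func, as shown in the hunks above.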

@ -1,9 +1,9 @@
"""upb_c_proto_reflection_library() exposes upb reflection for protobuf (foo.upbdefs.h)"""
load("//upb/bazel:upb_minitable_proto_library.bzl", "UpbMinitableCcInfo", "upb_minitable_proto_library_aspect")
load("//upb/bazel:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//upb/bazel:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//upb/bazel:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")
load("//bazel:upb_minitable_proto_library.bzl", "UpbMinitableCcInfo", "upb_minitable_proto_library_aspect")
load("//bazel:upb_proto_library_internal/aspect.bzl", "upb_proto_aspect_impl")
load("//bazel:upb_proto_library_internal/cc_library_func.bzl", "upb_use_cpp_toolchain")
load("//bazel:upb_proto_library_internal/rule.bzl", "upb_proto_rule_impl")
_UpbDefsWrappedCcInfo = provider("Provider for cc_info for protos", fields = ["cc_info"])
@ -28,22 +28,13 @@ _upb_proto_reflection_library_aspect = aspect(
"_copts": attr.label(
default = "//upb:upb_proto_library_copts__for_generated_code_only_do_not_use",
),
"_gen_upbdefs": attr.label(
executable = True,
cfg = "exec",
default = "//upb/upbc:protoc-gen-upbdefs",
),
"_protoc": attr.label(
executable = True,
cfg = "exec",
default = "//:protoc",
"_upbdefs_toolchain": attr.label(
default = Label("//upb_generator:protoc-gen-upbdefs_toolchain"),
cfg = getattr(proto_common, "proto_lang_toolchain_cfg", "target"),
),
"_cc_toolchain": attr.label(
default = "@bazel_tools//tools/cpp:current_cc_toolchain",
),
"_upbdefs": attr.label_list(default = [
"//upb:generated_reflection_support__only_for_generated_code_do_not_use__i_give_permission_to_break_me",
]),
},
implementation = _upb_proto_reflection_library_aspect_impl,
requires = [upb_minitable_proto_library_aspect],
@ -55,7 +46,9 @@ _upb_proto_reflection_library_aspect = aspect(
attr_aspects = ["deps"],
fragments = ["cpp"],
toolchains = upb_use_cpp_toolchain(),
incompatible_use_toolchain_transition = True,
exec_groups = {
"proto_compiler": exec_group(),
},
)
def _upb_proto_reflection_library_rule_impl(ctx):

@ -12,8 +12,8 @@ load("@rules_python//python:defs.bzl", "py_binary")
# end:google_only
load(
"//upb/bazel:upb_proto_library.bzl",
"upb_proto_library",
"//bazel:upb_proto_library.bzl",
"upb_c_proto_library",
"upb_proto_reflection_library",
)
load(
@ -35,7 +35,7 @@ proto_library(
srcs = ["descriptor.proto"],
)
upb_proto_library(
upb_c_proto_library(
name = "benchmark_descriptor_upb_proto",
deps = [":descriptor_proto"],
)
@ -76,6 +76,7 @@ cc_test(
":benchmark_descriptor_upb_proto",
":benchmark_descriptor_upb_proto_reflection",
"//:protobuf",
"@com_google_googletest//:gtest_main",
"//upb:base",
"//upb:base_internal",
"//upb:descriptor_upb_proto",
@ -153,7 +154,7 @@ proto_library(
)
[(
upb_proto_library(
upb_c_proto_library(
name = k + "_upb_proto",
deps = [":" + k + "_proto"],
),

@ -1,32 +1,9 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2023 Google LLC. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google LLC nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include <benchmark/benchmark.h>
@ -38,15 +15,15 @@
#include "google/protobuf/descriptor.pb.h"
#include "absl/container/flat_hash_set.h"
#include "google/protobuf/dynamic_message.h"
#include "upb/benchmarks/descriptor.pb.h"
#include "upb/benchmarks/descriptor.upb.h"
#include "upb/benchmarks/descriptor.upbdefs.h"
#include "upb/benchmarks/descriptor_sv.pb.h"
#include "upb/upb/base/internal/log2.h"
#include "upb/upb/mem/arena.h"
#include "upb/upb/reflection/def.hpp"
upb_StringView descriptor = upb_benchmarks_descriptor_proto_upbdefinit.descriptor;
#include "benchmarks/descriptor.pb.h"
#include "benchmarks/descriptor.upb.h"
#include "benchmarks/descriptor.upbdefs.h"
#include "benchmarks/descriptor_sv.pb.h"
#include "upb/base/internal/log2.h"
#include "upb/mem/arena.h"
#include "upb/reflection/def.hpp"
upb_StringView descriptor = benchmarks_descriptor_proto_upbdefinit.descriptor;
namespace protobuf = ::google::protobuf;
// A buffer big enough to parse descriptor.proto without going to heap.

@ -0,0 +1,12 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2023 Google LLC. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
syntax = "proto3";
package upb_benchmark;
message Empty {}

@ -89,7 +89,7 @@ selects.config_setting_group(
match_any = [
":config_win32",
":config_win64",
]
],
)
config_setting(
@ -107,7 +107,7 @@ selects.config_setting_group(
match_any = [
":config_osx_aarch64",
":config_osx_x86_64",
]
],
)
# Internal testing:

@ -35,6 +35,7 @@ LINK_OPTS = select({
"//build_defs:config_msvc": [
# Suppress linker warnings about files with no symbols defined.
"-ignore:4221",
"/utf-8",
],
"@platforms//os:macos": [
"-lpthread",

@ -27,6 +27,7 @@ def protobuf_java_library(**kwargs):
)
def protobuf_versioned_java_library(
automatic_module_name,
bundle_description,
bundle_name,
bundle_symbolic_name,
@ -44,6 +45,9 @@ def protobuf_versioned_java_library(
Args:
bundle_description: (required) The Bundle-Description header defines a short
description of this bundle.
automatic_module_name: (required) The Automatic-Module-Name header that represents
the name of the module when this bundle is used as an automatic
module.
bundle_name: (required) The Bundle-Name header defines a readable name for this
bundle. This should be a short, human-readable name that can
contain spaces.
@ -65,6 +69,7 @@ def protobuf_versioned_java_library(
"""
osgi_java_library(
javacopts = JAVA_OPTS,
automatic_module_name = automatic_module_name,
bundle_doc_url = BUNDLE_DOC_URL,
bundle_license = BUNDLE_LICENSE,
bundle_version = PROTOBUF_JAVA_VERSION,
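For reference, a hypothetical call site showing the new automatic_module_name parameter alongside the other documented bundle arguments — the target name, sources, and dependency below are made up for illustration:

    load("//build_defs:java_opts.bzl", "protobuf_versioned_java_library")

    protobuf_versioned_java_library(
        name = "util",                                         # hypothetical target
        automatic_module_name = "com.google.protobuf.util",
        bundle_description = "Utilities for Protocol Buffers",
        bundle_name = "Protocol Buffers [Util]",
        bundle_symbolic_name = "com.google.protobuf.util",
        srcs = glob(["src/main/java/**/*.java"]),              # hypothetical sources
        deps = ["//java/core"],                                # hypothetical dependency
    )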

@ -1,6 +1,3 @@
# Fail if a glob doesn't match anything (https://github.com/bazelbuild/bazel/issues/8195)
build --incompatible_disallow_empty_glob
build:dbg --compilation_mode=dbg
build:opt --compilation_mode=opt
@ -8,7 +5,6 @@ build:opt --compilation_mode=opt
build:san-common --config=dbg --strip=never --copt=-O0 --copt=-fno-omit-frame-pointer
build:asan --config=san-common --copt=-fsanitize=address --linkopt=-fsanitize=address
build:asan --copt=-DADDRESS_SANITIZER=1
# ASAN hits ODR violations with shared linkage due to rules_proto.
build:asan --dynamic_mode=off
@ -16,7 +12,6 @@ build:msan --config=san-common --copt=-fsanitize=memory --linkopt=-fsanitize=mem
build:msan --copt=-fsanitize-memory-track-origins
build:msan --copt=-fsanitize-memory-use-after-dtor
build:msan --action_env=MSAN_OPTIONS=poison_in_dtor=1
build:msan --copt=-DMEMORY_SANITIZER=1
# Use our instrumented LLVM libc++ in Kokoro.
build:docker-msan --config=msan
@ -26,11 +21,51 @@ build:docker-msan --cxxopt=-stdlib=libc++ --linkopt=-stdlib=libc++
build:tsan --config=san-common --copt=-fsanitize=thread --linkopt=-fsanitize=thread
build:tsan --copt=-DTHREAD_SANITIZER=1
build:ubsan --config=san-common --copt=-fsanitize=undefined --linkopt=-fsanitize=undefined
build:ubsan --action_env=UBSAN_OPTIONS=halt_on_error=1:print_stacktrace=1
build:ubsan --copt=-DUNDEFINED_SANITIZER=1
# Workaround for the fact that Bazel links with $CC, not $CXX
# https://github.com/bazelbuild/bazel/issues/11122#issuecomment-613746748
build:ubsan --copt=-fno-sanitize=function --copt=-fno-sanitize=vptr
# Build with all --incompatible flags that we can. This helps us prepare for
# upcoming breaking changes in Bazel. This list was generated for Bazel 6 by
# running bazelisk with the --migrate flag and filtering out all flags that
# default to true or are deprecated.
build --incompatible_check_sharding_support
build --incompatible_default_to_explicit_init_py
build --incompatible_disable_native_android_rules
build --incompatible_disable_target_provider_fields
build --incompatible_disallow_empty_glob
build --incompatible_dont_use_javasourceinfoprovider
build --incompatible_enable_android_toolchain_resolution
build --incompatible_enable_apple_toolchain_resolution
build --incompatible_exclusive_test_sandboxed
build --incompatible_remote_output_paths_relative_to_input_root
build --incompatible_remote_use_new_exit_code_for_lost_inputs
build --incompatible_sandbox_hermetic_tmp
build --incompatible_struct_has_no_methods
build --incompatible_top_level_aspects_require_providers
build --incompatible_use_cc_configure_from_rules_cc
build --incompatible_use_host_features
# We cannot yet build successfully with the following flags:
# --incompatible_check_testonly_for_output_files
# --incompatible_config_setting_private_default_visibility
# --incompatible_disable_starlark_host_transitions
# --incompatible_disallow_struct_provider_syntax
# --incompatible_no_implicit_file_export
# --incompatible_no_rule_outputs_param
# --incompatible_stop_exporting_language_modules
# --incompatible_strict_action_env
# --incompatible_visibility_private_attributes_at_definition
# We might be compatible with these flags, but they are not available in all
# Bazel versions we are currently using:
# --incompatible_disable_objc_library_transition
# --incompatible_fail_on_unknown_attributes
# --incompatible_merge_fixed_and_default_shell_env
# TODO: migrate all dependencies from WORKSPACE to MODULE.bazel
# https://github.com/protocolbuffers/protobuf/issues/14313
common --noenable_bzlmod

@ -72,6 +72,7 @@ else()
absl::flat_hash_set
absl::function_ref
absl::hash
absl::if_constexpr
absl::layout
absl::log_initialize
absl::log_severity

@ -12,6 +12,11 @@ elseif(protobuf_JSONCPP_PROVIDER STREQUAL "package")
find_package(jsoncpp REQUIRED)
endif()
set(protoc_cpp_args)
if (protobuf_BUILD_SHARED_LIBS)
set(protoc_cpp_args "dllexport_decl=PROTOBUF_TEST_EXPORTS:")
endif ()
add_custom_command(
OUTPUT
${protobuf_SOURCE_DIR}/conformance/conformance.pb.h
@ -19,7 +24,7 @@ add_custom_command(
DEPENDS ${protobuf_PROTOC_EXE} ${protobuf_SOURCE_DIR}/conformance/conformance.proto
COMMAND ${protobuf_PROTOC_EXE} ${protobuf_SOURCE_DIR}/conformance/conformance.proto
--proto_path=${protobuf_SOURCE_DIR}/conformance
--cpp_out=${protobuf_SOURCE_DIR}/conformance
--cpp_out=${protoc_cpp_args}${protobuf_SOURCE_DIR}/conformance
)
add_custom_command(
@ -28,38 +33,58 @@ add_custom_command(
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.cc
DEPENDS ${protobuf_PROTOC_EXE} ${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.proto
${protobuf_PROTOC_EXE} ${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.proto
COMMAND ${protobuf_PROTOC_EXE} ${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.proto
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto3_editions.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto3_editions.pb.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto2_editions.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto2_editions.pb.cc
DEPENDS ${protobuf_PROTOC_EXE}
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.proto
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.proto
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto3_editions.proto
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto2_editions.proto
COMMAND ${protobuf_PROTOC_EXE}
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.proto
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.proto
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto3_editions.proto
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto2_editions.proto
--proto_path=${protobuf_SOURCE_DIR}/src
--cpp_out=${protobuf_SOURCE_DIR}/src
--cpp_out=${protoc_cpp_args}${protobuf_SOURCE_DIR}/src
)
add_library(libconformance_common ${protobuf_SHARED_OR_STATIC}
${protobuf_SOURCE_DIR}/conformance/conformance.pb.h
${protobuf_SOURCE_DIR}/conformance/conformance.pb.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto3_editions.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto3_editions.pb.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto2_editions.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/editions/golden/test_messages_proto2_editions.pb.cc
)
target_link_libraries(libconformance_common
${protobuf_LIB_PROTOBUF}
${protobuf_ABSL_USED_TARGETS}
)
if(protobuf_BUILD_SHARED_LIBS)
target_compile_definitions(libconformance_common
PUBLIC PROTOBUF_USE_DLLS
PRIVATE LIBPROTOBUF_TEST_EXPORTS)
endif()
add_executable(conformance_test_runner
${protobuf_SOURCE_DIR}/conformance/binary_json_conformance_suite.cc
${protobuf_SOURCE_DIR}/conformance/binary_json_conformance_suite.h
${protobuf_SOURCE_DIR}/conformance/conformance.pb.h
${protobuf_SOURCE_DIR}/conformance/conformance.pb.cc
${protobuf_SOURCE_DIR}/conformance/conformance_test.cc
${protobuf_SOURCE_DIR}/conformance/conformance_test_runner.cc
${protobuf_SOURCE_DIR}/conformance/conformance_test_main.cc
${protobuf_SOURCE_DIR}/conformance/text_format_conformance_suite.cc
${protobuf_SOURCE_DIR}/conformance/text_format_conformance_suite.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.cc
)
add_executable(conformance_cpp
${protobuf_SOURCE_DIR}/conformance/conformance.pb.h
${protobuf_SOURCE_DIR}/conformance/conformance.pb.cc
${protobuf_SOURCE_DIR}/conformance/conformance_cpp.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto2.pb.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/test_messages_proto3.pb.cc
)
target_include_directories(
@ -73,16 +98,23 @@ target_include_directories(
target_include_directories(conformance_test_runner PRIVATE ${ABSL_ROOT_DIR})
target_include_directories(conformance_cpp PRIVATE ${ABSL_ROOT_DIR})
target_link_libraries(conformance_test_runner ${protobuf_LIB_PROTOBUF})
target_link_libraries(conformance_test_runner ${protobuf_ABSL_USED_TARGETS})
target_link_libraries(conformance_cpp ${protobuf_LIB_PROTOBUF})
target_link_libraries(conformance_cpp ${protobuf_ABSL_USED_TARGETS})
target_link_libraries(conformance_test_runner
libconformance_common
${protobuf_LIB_PROTOBUF}
${protobuf_ABSL_USED_TARGETS}
)
target_link_libraries(conformance_cpp
libconformance_common
${protobuf_LIB_PROTOBUF}
${protobuf_ABSL_USED_TARGETS}
)
add_test(NAME conformance_cpp_test
COMMAND ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/conformance_test_runner
--failure_list ${protobuf_SOURCE_DIR}/conformance/failure_list_cpp.txt
--text_format_failure_list ${protobuf_SOURCE_DIR}/conformance/text_format_failure_list_cpp.txt
--output_dir ${protobuf_TEST_XML_OUTDIR}
--maximum_edition 2023
${CMAKE_CURRENT_BINARY_DIR}/conformance_cpp
DEPENDS conformance_test_runner conformance_cpp)

@ -68,10 +68,21 @@ set(protobuf_HEADERS
${cpp_features_proto_proto_srcs}
${descriptor_proto_proto_srcs}
${plugin_proto_proto_srcs}
${java_features_proto_proto_srcs}
)
foreach(_header ${protobuf_HEADERS})
string(REPLACE "${protobuf_SOURCE_DIR}/src" "" _header ${_header})
get_filename_component(_extract_from "${protobuf_SOURCE_DIR}/src/${_header}" ABSOLUTE)
string(FIND ${_header} "${protobuf_SOURCE_DIR}/src" _find_src)
string(FIND ${_header} "${protobuf_SOURCE_DIR}" _find_nosrc)
if (_find_src GREATER -1)
set(_from_dir "${protobuf_SOURCE_DIR}/src")
elseif (_find_nosrc GREATER -1)
set(_from_dir "${protobuf_SOURCE_DIR}")
endif()
# On some platforms `_from_dir` ends up being just "protobuf", which can
# easily match multiple times in our paths. We force it to only replace
# prefixes to avoid this case.
string(REGEX REPLACE "^${_from_dir}" "" _header ${_header})
get_filename_component(_extract_from "${_from_dir}/${_header}" ABSOLUTE)
get_filename_component(_extract_name ${_header} NAME)
get_filename_component(_extract_to "${CMAKE_INSTALL_INCLUDEDIR}/${_header}" DIRECTORY)
install(FILES "${_extract_from}"

@ -108,6 +108,12 @@ function(protobuf_generate)
foreach(DIR ${_protobuf_include_path})
if(NOT DIR STREQUAL "-I")
file(RELATIVE_PATH _rel_dir ${DIR} ${_abs_dir})
if(_rel_dir STREQUAL _abs_dir)
# When there is no relative path from DIR to _abs_dir (e.g. due to
# different drive letters on Windows), _rel_dir is equal to _abs_dir.
# Therefore, DIR is not a suitable include path and must be skipped.
continue()
endif()
string(FIND "${_rel_dir}" "../" _is_in_parent_folder)
if (NOT ${_is_in_parent_folder} EQUAL 0)
set(_suitable_include_found TRUE)

@ -15,9 +15,12 @@ set(tests_protos
${protobuf_test_protos_files}
${compiler_test_protos_files}
${util_test_protos_files}
${lite_test_protos}
)
set(protoc_cpp_args)
if (protobuf_BUILD_SHARED_LIBS)
set(protoc_cpp_args "dllexport_decl=PROTOBUF_TEST_EXPORTS:")
endif ()
macro(compile_proto_file filename)
string(REPLACE .proto .pb.h pb_hdr ${filename})
string(REPLACE .proto .pb.cc pb_src ${filename})
@ -26,7 +29,7 @@ macro(compile_proto_file filename)
DEPENDS ${protobuf_PROTOC_EXE} ${filename}
COMMAND ${protobuf_PROTOC_EXE} ${filename}
--proto_path=${protobuf_SOURCE_DIR}/src
--cpp_out=${protobuf_SOURCE_DIR}/src
--cpp_out=${protoc_cpp_args}${protobuf_SOURCE_DIR}/src
--experimental_allow_proto3_optional
)
endmacro(compile_proto_file)
@ -39,6 +42,10 @@ endforeach(proto_file)
set(tests_proto_files)
foreach(proto_file ${tests_protos})
if (MSVC AND protobuf_BUILD_SHARED_LIBS AND ${proto_file} MATCHES ".*enormous.*")
# Our enormous protos are too big for windows DLLs.
continue()
endif ()
compile_proto_file(${proto_file})
set(tests_proto_files ${tests_proto_files} ${pb_src} ${pb_hdr})
endforeach(proto_file)
@ -55,6 +62,7 @@ set(tests_files
${protobuf_test_files}
${compiler_test_files}
${annotation_test_util_srcs}
${editions_test_files}
${io_test_files}
${util_test_files}
${stubs_test_files}
@ -85,17 +93,34 @@ else()
set(protobuf_GTEST_ARGS)
endif()
add_executable(tests
${tests_files}
${common_test_files}
add_library(libtest_common ${protobuf_SHARED_OR_STATIC}
${tests_proto_files}
)
target_link_libraries(libtest_common
${protobuf_LIB_PROTOC}
${protobuf_LIB_PROTOBUF}
${protobuf_ABSL_USED_TARGETS}
${protobuf_ABSL_USED_TEST_TARGETS}
GTest::gmock
)
if (MSVC)
target_compile_options(libtest_common PRIVATE /bigobj)
endif ()
if(protobuf_BUILD_SHARED_LIBS)
target_compile_definitions(libtest_common
PUBLIC PROTOBUF_USE_DLLS
PRIVATE LIBPROTOBUF_TEST_EXPORTS)
endif()
add_executable(tests ${tests_files} ${common_test_files})
if (MSVC)
target_compile_options(tests PRIVATE
/wd4146 # unary minus operator applied to unsigned type, result still unsigned
)
endif()
target_link_libraries(tests
libtest_common
libtest_common_lite
${protobuf_LIB_PROTOC}
${protobuf_LIB_PROTOBUF}
${protobuf_ABSL_USED_TARGETS}
@ -103,22 +128,11 @@ target_link_libraries(tests
GTest::gmock_main
)
set(fake_plugin_files
${fake_plugin_files}
${common_test_hdrs}
${common_test_srcs}
${tests_proto_files}
)
set(test_plugin_files
${test_plugin_files}
${common_test_hdrs}
${common_test_srcs}
${tests_proto_files}
)
add_executable(fake_plugin ${fake_plugin_files})
add_executable(fake_plugin ${fake_plugin_files} ${common_test_files})
target_include_directories(fake_plugin PRIVATE ${ABSL_ROOT_DIR})
target_link_libraries(fake_plugin
libtest_common
libtest_common_lite
${protobuf_LIB_PROTOC}
${protobuf_LIB_PROTOBUF}
${protobuf_ABSL_USED_TARGETS}
@ -126,9 +140,11 @@ target_link_libraries(fake_plugin
GTest::gmock
)
add_executable(test_plugin ${test_plugin_files})
add_executable(test_plugin ${test_plugin_files} ${common_test_files})
target_include_directories(test_plugin PRIVATE ${ABSL_ROOT_DIR})
target_link_libraries(test_plugin
libtest_common
libtest_common_lite
${protobuf_LIB_PROTOC}
${protobuf_LIB_PROTOBUF}
${protobuf_ABSL_USED_TARGETS}
@ -136,12 +152,27 @@ target_link_libraries(test_plugin
GTest::gmock
)
add_library(libtest_common_lite ${protobuf_SHARED_OR_STATIC}
${lite_test_proto_files}
)
target_link_libraries(libtest_common_lite
${protobuf_LIB_PROTOBUF_LITE}
${protobuf_ABSL_USED_TARGETS}
GTest::gmock
)
if(protobuf_BUILD_SHARED_LIBS)
target_compile_definitions(libtest_common_lite
PUBLIC PROTOBUF_USE_DLLS
PRIVATE LIBPROTOBUF_TEST_EXPORTS)
endif()
add_executable(lite-test
${protobuf_lite_test_files}
${lite_test_util_hdrs}
${lite_test_util_srcs}
${lite_test_proto_files}
)
target_link_libraries(lite-test
libtest_common_lite
${protobuf_LIB_PROTOBUF_LITE}
${protobuf_ABSL_USED_TARGETS}
${protobuf_ABSL_USED_TEST_TARGETS}
@ -176,7 +207,8 @@ file(GLOB_RECURSE _local_hdrs
set(_exclude_hdrs
"${protobuf_SOURCE_DIR}/src/google/protobuf/cpp_features.pb.h"
"${protobuf_SOURCE_DIR}/src/google/protobuf/descriptor.pb.h"
"${protobuf_SOURCE_DIR}/src/google/protobuf/compiler/plugin.pb.h")
"${protobuf_SOURCE_DIR}/src/google/protobuf/compiler/plugin.pb.h"
"${protobuf_SOURCE_DIR}/src/google/protobuf/compiler/java/java_features.pb.h")
# Exclude test library headers.
list(APPEND _exclude_hdrs ${test_util_hdrs} ${lite_test_util_hdrs} ${common_test_hdrs}

@ -2,7 +2,8 @@
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_proto_library", "objc_library")
load("@rules_ruby//ruby:defs.bzl", "ruby_binary")
load("//:protobuf.bzl", "internal_csharp_proto_library", "internal_objc_proto_library", "internal_php_proto_library", "internal_py_proto_library", "internal_ruby_proto_library")
load("//ruby:defs.bzl", "internal_ruby_proto_library")
load("//:protobuf.bzl", "internal_csharp_proto_library", "internal_objc_proto_library", "internal_php_proto_library", "internal_py_proto_library")
load("//build_defs:internal_shell.bzl", "inline_sh_binary")
load(
"@rules_pkg//:mappings.bzl",
@ -22,6 +23,7 @@ exports_files([
"failure_list_php_c.txt",
"failure_list_python.txt",
"failure_list_python_cpp.txt",
"failure_list_python_upb.txt",
"failure_list_ruby.txt",
"failure_list_jruby.txt",
"failure_list_jruby_ffi.txt",
@ -33,6 +35,7 @@ exports_files([
"text_format_failure_list_php_c.txt",
"text_format_failure_list_python.txt",
"text_format_failure_list_python_cpp.txt",
"text_format_failure_list_python_upb.txt",
"text_format_failure_list_ruby.txt",
"text_format_failure_list_jruby.txt",
"text_format_failure_list_jruby_ffi.txt",
@ -130,6 +133,7 @@ internal_ruby_proto_library(
cc_library(
name = "conformance_test",
testonly = 1,
srcs = [
"conformance_test.cc",
"conformance_test_runner.cc",
@ -140,7 +144,6 @@ cc_library(
includes = ["."],
deps = [
":conformance_cc_proto",
"//src/google/protobuf:descriptor_legacy",
"//src/google/protobuf/util:differencer",
"//src/google/protobuf/util:json_util",
"//src/google/protobuf/util:type_resolver_util",
@ -151,12 +154,16 @@ cc_library(
cc_library(
name = "binary_json_conformance_suite",
testonly = 1,
srcs = ["binary_json_conformance_suite.cc"],
hdrs = ["binary_json_conformance_suite.h"],
deps = [
":conformance_test",
":test_messages_proto2_proto_cc",
":test_messages_proto3_proto_cc",
"//src/google/protobuf/editions:test_messages_proto2_editions_cc_proto",
"//src/google/protobuf/editions:test_messages_proto3_editions_cc_proto",
"@com_google_absl//absl/log:die_if_null",
"@com_google_absl//absl/status",
"@jsoncpp",
],
@ -164,17 +171,24 @@ cc_library(
cc_library(
name = "text_format_conformance_suite",
testonly = 1,
srcs = ["text_format_conformance_suite.cc"],
hdrs = ["text_format_conformance_suite.h"],
deps = [
":conformance_test",
":test_messages_proto2_proto_cc",
":test_messages_proto3_proto_cc",
"//src/google/protobuf/editions:test_messages_proto2_editions_cc_proto",
"//src/google/protobuf/editions:test_messages_proto3_editions_cc_proto",
"@com_google_absl//absl/log:absl_log",
"@com_google_absl//absl/log:die_if_null",
"@com_google_absl//absl/strings",
],
)
cc_binary(
name = "conformance_test_runner",
testonly = 1,
srcs = ["conformance_test_main.cc"],
visibility = ["//visibility:public"],
deps = [
@ -195,6 +209,8 @@ cc_binary(
"//:protobuf",
"//:test_messages_proto2_cc_proto",
"//:test_messages_proto3_cc_proto",
"//src/google/protobuf/editions:test_messages_proto2_editions_cc_proto",
"//src/google/protobuf/editions:test_messages_proto3_editions_cc_proto",
"@com_google_absl//absl/status",
"@com_google_absl//absl/status:statusor",
],
@ -247,8 +263,8 @@ py_binary(
deps = [
":conformance_py_proto",
"//:protobuf_python",
"//python:test_messages_proto2_py_proto",
"//python:test_messages_proto3_py_proto",
"//python:_message", # Make upb visible if we need it.
"//python:conformance_test_py_proto",
],
)
@ -312,6 +328,8 @@ objc_library(
":conformance_objc_proto",
"//:test_messages_proto2_objc_proto",
"//:test_messages_proto3_objc_proto",
"//src/google/protobuf/editions:test_messages_proto2_editions_objc_proto",
"//src/google/protobuf/editions:test_messages_proto3_editions_objc_proto",
],
)
@ -331,8 +349,7 @@ ruby_binary(
visibility = ["//ruby:__subpackages__"],
deps = [
":conformance_ruby_proto",
"//:test_messages_proto2_ruby_proto",
"//:test_messages_proto3_ruby_proto",
"//ruby:conformance_test_ruby_proto",
],
)

@ -28,9 +28,10 @@ else
fi
# --- end runfiles.bash initialization ---
TESTEE=unset
FAILURE_LIST=unset
TEXT_FORMAT_FAILURE_LIST=unset
TESTEE=
FAILURE_LIST=
TEXT_FORMAT_FAILURE_LIST=
MAXIMUM_EDITION=
while [[ -n "$@" ]]; do
arg="$1"; shift
@ -39,6 +40,7 @@ while [[ -n "$@" ]]; do
"--testee") TESTEE="$val" ;;
"--failure_list") FAILURE_LIST="$val" ;;
"--text_format_failure_list") TEXT_FORMAT_FAILURE_LIST="$val" ;;
"--maximum_edition") MAXIMUM_EDITION="$val" ;;
*) echo "Flag $arg is not recognized." && exit 1 ;;
esac
done
@ -57,4 +59,8 @@ if [ -n "$text_format_failure_list" ]; then
args+=(--text_format_failure_list $text_format_failure_list)
fi
if [ -n "$MAXIMUM_EDITION" ]; then
args+=(--maximum_edition $MAXIMUM_EDITION)
fi
$conformance_test_runner "${args[@]}" $conformance_testee
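The wrapper now also forwards a --maximum_edition flag to the runner. A hedged sketch of how a Bazel conformance target might pass these flags through the wrapper via conformance/defs.bzl — the macro's exact attribute names are not shown in this diff, so treat them as assumptions:

    load("//conformance:defs.bzl", "conformance_test")

    conformance_test(
        name = "conformance_cpp",                                            # hypothetical target
        testee = "//conformance:conformance_cpp",
        failure_list = "//conformance:failure_list_cpp.txt",
        text_format_failure_list = "//conformance:text_format_failure_list_cpp.txt",
        maximum_edition = "2023",                                            # matches the new flag above
    )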

File diff suppressed because it is too large

@ -8,19 +8,58 @@
#ifndef CONFORMANCE_BINARY_JSON_CONFORMANCE_SUITE_H
#define CONFORMANCE_BINARY_JSON_CONFORMANCE_SUITE_H
#include <functional>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/strings/string_view.h"
#include "json/json.h"
#include "conformance_test.h"
#include "google/protobuf/descriptor.h"
#include "google/protobuf/util/type_resolver.h"
namespace google {
namespace protobuf {
class BinaryAndJsonConformanceSuite : public ConformanceTestSuite {
public:
BinaryAndJsonConformanceSuite() {}
BinaryAndJsonConformanceSuite() = default;
private:
void RunSuiteImpl() override;
bool ParseJsonResponse(const conformance::ConformanceResponse& response,
Message* test_message);
bool ParseResponse(const conformance::ConformanceResponse& response,
const ConformanceRequestSetting& setting,
Message* test_message) override;
void SetTypeUrl(absl::string_view type_url) {
type_url_ = std::string(type_url);
}
template <typename MessageType>
friend class BinaryAndJsonConformanceSuiteImpl;
std::unique_ptr<google::protobuf::util::TypeResolver> type_resolver_;
std::string type_url_;
};
template <typename MessageType>
class BinaryAndJsonConformanceSuiteImpl {
public:
explicit BinaryAndJsonConformanceSuiteImpl(
BinaryAndJsonConformanceSuite* suite, bool run_proto3_tests);
private:
using ConformanceRequestSetting =
BinaryAndJsonConformanceSuite::ConformanceRequestSetting;
using ConformanceLevel = BinaryAndJsonConformanceSuite::ConformanceLevel;
constexpr static ConformanceLevel RECOMMENDED = ConformanceLevel::RECOMMENDED;
constexpr static ConformanceLevel REQUIRED = ConformanceLevel::REQUIRED;
void RunAllTests();
void RunBinaryPerformanceTests();
void RunJsonPerformanceTests();
void RunJsonTests();
@ -37,10 +76,6 @@ class BinaryAndJsonConformanceSuite : public ConformanceTestSuite {
void RunValidJsonTest(const std::string& test_name, ConformanceLevel level,
const std::string& input_json,
const std::string& equivalent_text_format);
void RunValidJsonTest(const std::string& test_name, ConformanceLevel level,
const std::string& input_json,
const std::string& equivalent_text_format,
bool is_proto3);
void RunValidJsonTestWithMessage(const std::string& test_name,
ConformanceLevel level,
const std::string& input_json,
@ -48,8 +83,7 @@ class BinaryAndJsonConformanceSuite : public ConformanceTestSuite {
const Message& prototype);
void RunValidJsonTestWithProtobufInput(
const std::string& test_name, ConformanceLevel level,
const protobuf_test_messages::proto3::TestAllTypesProto3& input,
const std::string& equivalent_text_format);
const MessageType& input, const std::string& equivalent_text_format);
void RunValidJsonIgnoreUnknownTest(const std::string& test_name,
ConformanceLevel level,
const std::string& input_json,
@ -57,38 +91,26 @@ class BinaryAndJsonConformanceSuite : public ConformanceTestSuite {
void RunValidProtobufTest(const std::string& test_name,
ConformanceLevel level,
const std::string& input_protobuf,
const std::string& equivalent_text_format,
bool is_proto3);
const std::string& equivalent_text_format);
void RunValidBinaryProtobufTest(const std::string& test_name,
ConformanceLevel level,
const std::string& input_protobuf,
bool is_proto3);
const std::string& input_protobuf);
void RunValidBinaryProtobufTest(const std::string& test_name,
ConformanceLevel level,
const std::string& input_protobuf,
const std::string& expected_protobuf,
bool is_proto3);
void RunBinaryPerformanceMergeMessageWithField(const std::string& test_name,
const std::string& field_proto,
bool is_proto3);
const std::string& expected_protobuf);
void RunBinaryPerformanceMergeMessageWithField(
const std::string& test_name, const std::string& field_proto);
void RunValidProtobufTestWithMessage(
const std::string& test_name, ConformanceLevel level,
const Message* input, const std::string& equivalent_text_format,
bool is_proto3);
bool ParseJsonResponse(const conformance::ConformanceResponse& response,
Message* test_message);
bool ParseResponse(const conformance::ConformanceResponse& response,
const ConformanceRequestSetting& setting,
Message* test_message) override;
const Message* input, const std::string& equivalent_text_format);
typedef std::function<bool(const Json::Value&)> Validator;
void RunValidJsonTestWithValidator(const std::string& test_name,
ConformanceLevel level,
const std::string& input_json,
const Validator& validator,
bool is_proto3);
const Validator& validator);
void ExpectParseFailureForJson(const std::string& test_name,
ConformanceLevel level,
const std::string& input_json);
@ -97,8 +119,7 @@ class BinaryAndJsonConformanceSuite : public ConformanceTestSuite {
const std::string& text_format);
void ExpectParseFailureForProtoWithProtoVersion(const std::string& proto,
const std::string& test_name,
ConformanceLevel level,
bool is_proto3);
ConformanceLevel level);
void ExpectParseFailureForProto(const std::string& proto,
const std::string& test_name,
ConformanceLevel level);
@ -107,10 +128,8 @@ class BinaryAndJsonConformanceSuite : public ConformanceTestSuite {
ConformanceLevel level);
void TestPrematureEOFForType(google::protobuf::FieldDescriptor::Type type);
void TestIllegalTags();
template <class MessageType>
void TestOneofMessage(MessageType& message, bool is_proto3);
template <class MessageType>
void TestUnknownMessage(MessageType& message, bool is_proto3);
void TestOneofMessage();
void TestUnknownMessage();
void TestValidDataForType(
google::protobuf::FieldDescriptor::Type,
std::vector<std::pair<std::string, std::string>> values);
@ -128,8 +147,22 @@ class BinaryAndJsonConformanceSuite : public ConformanceTestSuite {
void TestJsonPerformanceMergeMessageWithRepeatedFieldForType(
google::protobuf::FieldDescriptor::Type, std::string field_value);
std::unique_ptr<google::protobuf::util::TypeResolver> type_resolver_;
std::string type_url_;
enum class Packed {
kUnspecified = 0,
kTrue = 1,
kFalse = 2,
};
const FieldDescriptor* GetFieldForType(
FieldDescriptor::Type type, bool repeated,
Packed packed = Packed::kUnspecified) const;
const FieldDescriptor* GetFieldForMapType(
FieldDescriptor::Type key_type, FieldDescriptor::Type value_type) const;
const FieldDescriptor* GetFieldForOneofType(FieldDescriptor::Type type,
bool exclusive = false) const;
std::string SyntaxIdentifier() const;
BinaryAndJsonConformanceSuite& suite_;
bool run_proto3_tests_;
};
} // namespace protobuf
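The implementation diff for binary_json_conformance_suite.cc is suppressed above because of its size. As a rough, hedged sketch only (the exact constructor arguments, edition gating, and editions type aliases are assumptions), the new templated BinaryAndJsonConformanceSuiteImpl is presumably driven from the parent suite's RunSuiteImpl in the same way the text-format suite later in this diff instantiates its impl once per message type:

// Hypothetical sketch, not the suppressed implementation: instantiate the
// templated impl once per message type; the constructor runs the tests.
void BinaryAndJsonConformanceSuite::RunSuiteImpl() {
  BinaryAndJsonConformanceSuiteImpl<TestAllTypesProto2>(
      this, /*run_proto3_tests=*/false);
  BinaryAndJsonConformanceSuiteImpl<TestAllTypesProto3>(
      this, /*run_proto3_tests=*/true);
  // Assumed gating on the configured maximum edition, mirroring the
  // text-format suite below.
  if (maximum_edition_ >= Edition::EDITION_2023) {
    BinaryAndJsonConformanceSuiteImpl<TestAllTypesProto2Editions>(
        this, /*run_proto3_tests=*/false);
    BinaryAndJsonConformanceSuiteImpl<TestAllTypesProto3Editions>(
        this, /*run_proto3_tests=*/true);
  }
}

Here TestAllTypesProto2Editions and TestAllTypesProto3Editions stand for the editions golden message types, aliased the same way conformance_cpp.cc does below.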

@ -21,6 +21,8 @@
#include "absl/status/statusor.h"
#include "conformance/conformance.pb.h"
#include "conformance/conformance.pb.h"
#include "google/protobuf/editions/golden/test_messages_proto2_editions.pb.h"
#include "google/protobuf/editions/golden/test_messages_proto3_editions.pb.h"
#include "google/protobuf/endian.h"
#include "google/protobuf/message.h"
#include "google/protobuf/test_messages_proto2.pb.h"
@ -45,6 +47,10 @@ using ::google::protobuf::util::NewTypeResolverForDescriptorPool;
using ::google::protobuf::util::TypeResolver;
using ::protobuf_test_messages::proto2::TestAllTypesProto2;
using ::protobuf_test_messages::proto3::TestAllTypesProto3;
using TestAllTypesProto2Editions =
::protobuf_test_messages::editions::proto2::TestAllTypesProto2;
using TestAllTypesProto3Editions =
::protobuf_test_messages::editions::proto3::TestAllTypesProto3;
absl::Status ReadFd(int fd, char* buf, size_t len) {
while (len > 0) {
@ -76,6 +82,8 @@ class Harness {
Harness() {
google::protobuf::LinkMessageReflection<TestAllTypesProto2>();
google::protobuf::LinkMessageReflection<TestAllTypesProto3>();
google::protobuf::LinkMessageReflection<TestAllTypesProto2Editions>();
google::protobuf::LinkMessageReflection<TestAllTypesProto3Editions>();
resolver_.reset(NewTypeResolverForDescriptorPool(
"type.googleapis.com", DescriptorPool::generated_pool()));

@ -1,6 +1,5 @@
import 'dart:io';
import 'package:pb_runtime/ffi/bytes.dart';
import 'package:pb_runtime/pb_runtime.dart' as pb;
import 'package:third_party.protobuf/test_messages_proto2.upb.dart';
import 'package:third_party.protobuf/test_messages_proto3.upb.dart';
@ -50,8 +49,8 @@ ConformanceResponse doTest(ConformanceRequest request) {
case ConformanceRequest_payload.protobufPayload:
try {
testMessage = isProto3
? TestAllTypesProto3.fromBinary(request.protobufPayload.data)
: TestAllTypesProto2.fromBinary(request.protobufPayload.data);
? TestAllTypesProto3.fromBinary(request.protobufPayload)
: TestAllTypesProto2.fromBinary(request.protobufPayload);
} catch (e) {
final parseErrorResponse = ConformanceResponse();
parseErrorResponse.parseError = '$e';
@ -66,8 +65,7 @@ ConformanceResponse doTest(ConformanceRequest request) {
switch (request.requestedOutputFormat) {
case WireFormat.PROTOBUF:
try {
response.protobufPayload =
Bytes(pb.GeneratedMessage.toBinary(testMessage));
response.protobufPayload = pb.GeneratedMessage.toBinary(testMessage);
} catch (e) {
response.serializeError = '$e';
}

@ -10,6 +10,8 @@
#import "Conformance.pbobjc.h"
#import "google/protobuf/TestMessagesProto2.pbobjc.h"
#import "google/protobuf/TestMessagesProto3.pbobjc.h"
#import "google/protobuf/editions/golden/TestMessagesProto2Editions.pbobjc.h"
#import "google/protobuf/editions/golden/TestMessagesProto3Editions.pbobjc.h"
static void Die(NSString *format, ...) __dead2;
@ -49,23 +51,26 @@ static ConformanceResponse *DoTest(ConformanceRequest *request) {
break;
case ConformanceRequest_Payload_OneOfCase_ProtobufPayload: {
Class msgClass = nil;
if ([request.messageType isEqual:@"protobuf_test_messages.proto3.TestAllTypesProto3"]) {
msgClass = [Proto3TestAllTypesProto3 class];
} else if ([request.messageType
isEqual:@"protobuf_test_messages.proto2.TestAllTypesProto2"]) {
msgClass = [Proto2TestAllTypesProto2 class];
} else {
response.runtimeError =
[NSString stringWithFormat:@"Protobuf request had an unknown message_type: %@",
request.messageType];
break;
}
NSDictionary *mappings = @{
@"protobuf_test_messages.proto2.TestAllTypesProto2" : [Proto2TestAllTypesProto2 class],
@"protobuf_test_messages.proto3.TestAllTypesProto3" : [Proto3TestAllTypesProto3 class],
@"protobuf_test_messages.editions.proto2.TestAllTypesProto2" :
[EditionsProto2TestAllTypesProto2 class],
@"protobuf_test_messages.editions.proto3.TestAllTypesProto3" :
[EditionsProto3TestAllTypesProto3 class],
};
Class msgClass = mappings[request.messageType];
if (msgClass) {
NSError *error = nil;
testMessage = [msgClass parseFromData:request.protobufPayload error:&error];
if (!testMessage) {
response.parseError = [NSString stringWithFormat:@"Parse error: %@", error];
}
} else {
response.runtimeError =
[NSString stringWithFormat:@"Protobuf request had an unknown message_type: %@",
request.messageType];
}
break;
}

@ -19,6 +19,8 @@ from google.protobuf import text_format
from google.protobuf import test_messages_proto2_pb2
from google.protobuf import test_messages_proto3_pb2
from conformance import conformance_pb2
from google.protobuf.editions.golden import test_messages_proto2_editions_pb2
from google.protobuf.editions.golden import test_messages_proto3_editions_pb2
test_count = 0
verbose = False
@ -28,6 +30,18 @@ class ProtocolError(Exception):
pass
def _create_test_message(type):
if type == "protobuf_test_messages.proto2.TestAllTypesProto2":
return test_messages_proto2_pb2.TestAllTypesProto2()
if type == "protobuf_test_messages.proto3.TestAllTypesProto3":
return test_messages_proto3_pb2.TestAllTypesProto3()
if type == "protobuf_test_messages.editions.proto2.TestAllTypesProto2":
return test_messages_proto2_editions_pb2.TestAllTypesProto2()
if type == "protobuf_test_messages.editions.proto3.TestAllTypesProto3":
return test_messages_proto3_editions_pb2.TestAllTypesProto3()
return None
def do_test(request):
response = conformance_pb2.ConformanceResponse()
@ -85,16 +99,12 @@ def do_test(request):
response.protobuf_payload = failure_set.SerializeToString()
return response
isProto3 = (request.message_type == "protobuf_test_messages.proto3.TestAllTypesProto3")
isJson = (request.WhichOneof('payload') == 'json_payload')
isProto2 = (request.message_type == "protobuf_test_messages.proto2.TestAllTypesProto2")
test_message = _create_test_message(request.message_type)
if (not isProto3) and (not isJson) and (not isProto2):
if (not isJson) and (test_message is None):
raise ProtocolError("Protobuf request doesn't have specific payload type")
test_message = test_messages_proto2_pb2.TestAllTypesProto2() if isProto2 else \
test_messages_proto3_pb2.TestAllTypesProto3()
try:
if request.WhichOneof('payload') == 'protobuf_payload':
try:

@ -9,34 +9,35 @@
#include <stdarg.h>
#include <cstddef>
#include <cstdint>
#include <fstream>
#include <memory>
#include <string>
#include "google/protobuf/util/field_comparator.h"
#include "google/protobuf/util/json_util.h"
#include "google/protobuf/util/message_differencer.h"
#include "absl/log/absl_check.h"
#include "absl/log/absl_log.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "absl/strings/string_view.h"
#include "conformance/conformance.pb.h"
#include "conformance/conformance.pb.h"
#include "google/protobuf/descriptor_legacy.h"
#include "google/protobuf/message.h"
#include "google/protobuf/text_format.h"
using conformance::ConformanceRequest;
using conformance::ConformanceResponse;
using conformance::WireFormat;
using google::protobuf::TextFormat;
using google::protobuf::util::DefaultFieldComparator;
using google::protobuf::util::MessageDifferencer;
using std::string;
namespace {
static string ToOctString(const string& binary_string) {
string oct_string;
static std::string ToOctString(const std::string& binary_string) {
std::string oct_string;
for (size_t i = 0; i < binary_string.size(); i++) {
uint8_t c = binary_string.at(i);
uint8_t high = c / 64;
@ -96,7 +97,7 @@ ConformanceTestSuite::ConformanceRequestSetting::ConformanceRequestSetting(
ConformanceLevel level, conformance::WireFormat input_format,
conformance::WireFormat output_format,
conformance::TestCategory test_category, const Message& prototype_message,
const string& test_name, const string& input)
const std::string& test_name, const std::string& input)
: level_(level),
input_format_(input_format),
output_format_(output_format),
@ -139,26 +140,34 @@ ConformanceTestSuite::ConformanceRequestSetting::NewTestMessage() const {
return std::unique_ptr<Message>(prototype_message_for_compare_->New());
}
string ConformanceTestSuite::ConformanceRequestSetting::GetTestName() const {
string rname;
switch (FileDescriptorLegacy(prototype_message_.GetDescriptor()->file())
.syntax()) {
case FileDescriptorLegacy::Syntax::SYNTAX_PROTO3:
rname = ".Proto3.";
break;
case FileDescriptorLegacy::Syntax::SYNTAX_PROTO2:
rname = ".Proto2.";
break;
default:
break;
std::string
ConformanceTestSuite::ConformanceRequestSetting::GetSyntaxIdentifier() const {
switch (prototype_message_.GetDescriptor()->file()->edition()) {
case Edition::EDITION_PROTO3:
return "Proto3";
case Edition::EDITION_PROTO2:
return "Proto2";
default: {
std::string id = "Editions";
if (prototype_message_.GetDescriptor()->name() == "TestAllTypesProto2") {
absl::StrAppend(&id, "_Proto2");
} else if (prototype_message_.GetDescriptor()->name() ==
"TestAllTypesProto3") {
absl::StrAppend(&id, "_Proto3");
}
return id;
}
}
}
return absl::StrCat(ConformanceLevelToString(level_), rname,
string ConformanceTestSuite::ConformanceRequestSetting::GetTestName() const {
return absl::StrCat(ConformanceLevelToString(level_), ".",
GetSyntaxIdentifier(), ".",
InputFormatString(input_format_), ".", test_name_, ".",
OutputFormatString(output_format_));
}
string
std::string
ConformanceTestSuite::ConformanceRequestSetting::ConformanceLevelToString(
ConformanceLevel level) const {
switch (level) {
@ -171,7 +180,7 @@ ConformanceTestSuite::ConformanceRequestSetting::ConformanceLevelToString(
return "";
}
string ConformanceTestSuite::ConformanceRequestSetting::InputFormatString(
std::string ConformanceTestSuite::ConformanceRequestSetting::InputFormatString(
conformance::WireFormat format) const {
switch (format) {
case conformance::PROTOBUF:
@ -186,7 +195,7 @@ string ConformanceTestSuite::ConformanceRequestSetting::InputFormatString(
return "";
}
string ConformanceTestSuite::ConformanceRequestSetting::OutputFormatString(
std::string ConformanceTestSuite::ConformanceRequestSetting::OutputFormatString(
conformance::WireFormat format) const {
switch (format) {
case conformance::PROTOBUF:
@ -208,7 +217,7 @@ void ConformanceTestSuite::TruncateDebugPayload(string* payload) {
}
}
const ConformanceRequest ConformanceTestSuite::TruncateRequest(
ConformanceRequest ConformanceTestSuite::TruncateRequest(
const ConformanceRequest& request) {
ConformanceRequest debug_request(request);
switch (debug_request.payload_case()) {
@ -231,7 +240,7 @@ const ConformanceRequest ConformanceTestSuite::TruncateRequest(
return debug_request;
}
const ConformanceResponse ConformanceTestSuite::TruncateResponse(
ConformanceResponse ConformanceTestSuite::TruncateResponse(
const ConformanceResponse& response) {
ConformanceResponse debug_response(response);
switch (debug_response.result_case()) {
@ -254,7 +263,7 @@ const ConformanceResponse ConformanceTestSuite::TruncateResponse(
return debug_response;
}
void ConformanceTestSuite::ReportSuccess(const string& test_name) {
void ConformanceTestSuite::ReportSuccess(const std::string& test_name) {
if (expected_to_fail_.erase(test_name) != 0) {
absl::StrAppendFormat(
&output_,
@ -266,7 +275,7 @@ void ConformanceTestSuite::ReportSuccess(const string& test_name) {
successes_++;
}
void ConformanceTestSuite::ReportFailure(const string& test_name,
void ConformanceTestSuite::ReportFailure(const std::string& test_name,
ConformanceLevel level,
const ConformanceRequest& request,
const ConformanceResponse& response,
@ -286,7 +295,7 @@ void ConformanceTestSuite::ReportFailure(const string& test_name,
TruncateResponse(response).ShortDebugString());
}
void ConformanceTestSuite::ReportSkip(const string& test_name,
void ConformanceTestSuite::ReportSkip(const std::string& test_name,
const ConformanceRequest& request,
const ConformanceResponse& response) {
if (verbose_) {
@ -299,19 +308,20 @@ void ConformanceTestSuite::ReportSkip(const string& test_name,
void ConformanceTestSuite::RunValidInputTest(
const ConformanceRequestSetting& setting,
const string& equivalent_text_format) {
const std::string& equivalent_text_format) {
std::unique_ptr<Message> reference_message(setting.NewTestMessage());
ABSL_CHECK(TextFormat::ParseFromString(equivalent_text_format,
reference_message.get()))
<< "Failed to parse data for test case: " << setting.GetTestName()
<< ", data: " << equivalent_text_format;
const string equivalent_wire_format = reference_message->SerializeAsString();
const std::string equivalent_wire_format =
reference_message->SerializeAsString();
RunValidBinaryInputTest(setting, equivalent_wire_format);
}
void ConformanceTestSuite::RunValidBinaryInputTest(
const ConformanceRequestSetting& setting,
const string& equivalent_wire_format, bool require_same_wire_format) {
const std::string& equivalent_wire_format, bool require_same_wire_format) {
const ConformanceRequest& request = setting.GetRequest();
ConformanceResponse response;
RunTest(setting.GetTestName(), request, &response);
@ -321,11 +331,12 @@ void ConformanceTestSuite::RunValidBinaryInputTest(
void ConformanceTestSuite::VerifyResponse(
const ConformanceRequestSetting& setting,
const string& equivalent_wire_format, const ConformanceResponse& response,
bool need_report_success, bool require_same_wire_format) {
const std::string& equivalent_wire_format,
const ConformanceResponse& response, bool need_report_success,
bool require_same_wire_format) {
std::unique_ptr<Message> test_message(setting.NewTestMessage());
const ConformanceRequest& request = setting.GetRequest();
const string& test_name = setting.GetTestName();
const std::string& test_name = setting.GetTestName();
ConformanceLevel level = setting.GetLevel();
std::unique_ptr<Message> reference_message = setting.NewTestMessage();
@ -358,7 +369,7 @@ void ConformanceTestSuite::VerifyResponse(
DefaultFieldComparator field_comparator;
field_comparator.set_treat_nan_as_equal(true);
differencer.set_field_comparator(&field_comparator);
string differences;
std::string differences;
differencer.ReportDifferencesToString(&differences);
bool check = false;
@ -366,7 +377,7 @@ void ConformanceTestSuite::VerifyResponse(
if (require_same_wire_format) {
ABSL_DCHECK_EQ(response.result_case(),
ConformanceResponse::kProtobufPayload);
const string& protobuf_payload = response.protobuf_payload();
const std::string& protobuf_payload = response.protobuf_payload();
check = equivalent_wire_format == protobuf_payload;
differences = absl::StrCat("Expect: ", ToOctString(equivalent_wire_format),
", but got: ", ToOctString(protobuf_payload));
@ -386,15 +397,15 @@ void ConformanceTestSuite::VerifyResponse(
}
}
void ConformanceTestSuite::RunTest(const string& test_name,
void ConformanceTestSuite::RunTest(const std::string& test_name,
const ConformanceRequest& request,
ConformanceResponse* response) {
if (test_names_.insert(test_name).second == false) {
ABSL_LOG(FATAL) << "Duplicated test name: " << test_name;
}
string serialized_request;
string serialized_response;
std::string serialized_request;
std::string serialized_response;
request.SerializeToString(&serialized_request);
runner_->RunTest(test_name, serialized_request, &serialized_response);
@ -412,7 +423,7 @@ void ConformanceTestSuite::RunTest(const string& test_name,
}
}
string ConformanceTestSuite::WireFormatToString(WireFormat wire_format) {
std::string ConformanceTestSuite::WireFormatToString(WireFormat wire_format) {
switch (wire_format) {
case conformance::PROTOBUF:
return "PROTOBUF";
@ -435,7 +446,8 @@ void ConformanceTestSuite::AddExpectedFailedTest(const std::string& test_name) {
}
bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
std::string* output, const string& filename,
std::string* output,
const std::string& filename,
conformance::FailureSet* failure_list) {
runner_ = runner;
successes_ = 0;
@ -449,7 +461,7 @@ bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
failure_list_filename_ = filename;
expected_to_fail_.clear();
for (const string& failure : failure_list->failure()) {
for (const std::string& failure : failure_list->failure()) {
AddExpectedFailedTest(failure);
}
RunSuiteImpl();

@ -14,13 +14,16 @@
#ifndef CONFORMANCE_CONFORMANCE_TEST_H
#define CONFORMANCE_CONFORMANCE_TEST_H
#include <functional>
#include <cstddef>
#include <memory>
#include <string>
#include <vector>
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/util/type_resolver.h"
#include "absl/container/btree_set.h"
#include "absl/container/flat_hash_set.h"
#include "absl/strings/string_view.h"
#include "conformance/conformance.pb.h"
#include "google/protobuf/descriptor.h"
#include "google/protobuf/wire_format_lite.h"
@ -129,6 +132,7 @@ class ConformanceTestSuite {
: verbose_(false),
performance_(false),
enforce_recommended_(false),
maximum_edition_(Edition::EDITION_PROTO3),
failure_list_flag_name_("--failure_list") {}
virtual ~ConformanceTestSuite() {}
@ -145,6 +149,9 @@ class ConformanceTestSuite {
// difference between REQUIRED and RECOMMENDED test cases.
void SetEnforceRecommended(bool value) { enforce_recommended_ = value; }
// Sets the maximum edition (inclusive) that should be tested for conformance.
void SetMaximumEdition(Edition edition) { maximum_edition_ = edition; }
// Gets the flag name for the failure list file.
// By default, this returns --failure_list.
std::string GetFailureListFlagName() { return failure_list_flag_name_; }
@ -198,13 +205,15 @@ class ConformanceTestSuite {
std::unique_ptr<Message> NewTestMessage() const;
std::string GetSyntaxIdentifier() const;
std::string GetTestName() const;
const conformance::ConformanceRequest& GetRequest() const {
return request_;
}
const ConformanceLevel GetLevel() const { return level_; }
ConformanceLevel GetLevel() const { return level_; }
std::string ConformanceLevelToString(ConformanceLevel level) const;
@ -245,9 +254,9 @@ class ConformanceTestSuite {
bool need_report_success, bool require_same_wire_format);
void TruncateDebugPayload(std::string* payload);
const conformance::ConformanceRequest TruncateRequest(
conformance::ConformanceRequest TruncateRequest(
const conformance::ConformanceRequest& request);
const conformance::ConformanceResponse TruncateResponse(
conformance::ConformanceResponse TruncateResponse(
const conformance::ConformanceResponse& response);
void ReportSuccess(const std::string& test_name);
@ -279,6 +288,7 @@ class ConformanceTestSuite {
bool verbose_;
bool performance_;
bool enforce_recommended_;
Edition maximum_edition_;
std::string output_;
std::string output_dir_;
std::string failure_list_flag_name_;

@ -37,12 +37,15 @@
#include <unistd.h>
#include <algorithm>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <fstream>
#include <future>
#include <vector>
#include "absl/log/absl_log.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/str_format.h"
#include "conformance/conformance.pb.h"
#include "conformance_test.h"
@ -119,6 +122,11 @@ void UsageError() {
fprintf(stderr,
" strictly conforming to protobuf\n");
fprintf(stderr, " spec.\n");
fprintf(stderr,
" --maximum_edition Only run conformance tests up to \n");
fprintf(stderr,
" and including the specified\n");
fprintf(stderr, " edition.\n");
fprintf(stderr,
" --output_dir <dirname> Directory to write\n"
" output files.\n");
@ -200,6 +208,14 @@ int ForkPipeRunner::Run(int argc, char *argv[],
suite->SetVerbose(true);
} else if (strcmp(argv[arg], "--enforce_recommended") == 0) {
suite->SetEnforceRecommended(true);
} else if (strcmp(argv[arg], "--maximum_edition") == 0) {
if (++arg == argc) UsageError();
Edition edition = EDITION_UNKNOWN;
if (!Edition_Parse(absl::StrCat("EDITION_", argv[arg]), &edition)) {
fprintf(stderr, "Unknown edition: %s\n", argv[arg]);
UsageError();
}
suite->SetMaximumEdition(edition);
} else if (strcmp(argv[arg], "--output_dir") == 0) {
if (++arg == argc) UsageError();
suite->SetOutputDir(argv[arg]);
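Note that the value passed to --maximum_edition is the bare edition name without the EDITION_ prefix; the parsing above prepends "EDITION_" before handing it to the generated enum parser. A small standalone illustration (assuming the generated Edition enum and its helpers from descriptor.pb.h):

#include <iostream>

#include "absl/strings/str_cat.h"
#include "google/protobuf/descriptor.pb.h"

int main() {
  google::protobuf::Edition edition = google::protobuf::EDITION_UNKNOWN;
  // "--maximum_edition 2023" on the command line becomes EDITION_2023 here;
  // an unrecognized value would fail to parse and trigger UsageError().
  bool ok = google::protobuf::Edition_Parse(absl::StrCat("EDITION_", "2023"),
                                            &edition);
  std::cout << ok << " " << google::protobuf::Edition_Name(edition)
            << std::endl;  // prints: 1 EDITION_2023
  return 0;
}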

@ -8,6 +8,7 @@ def conformance_test(
testee,
failure_list = None,
text_format_failure_list = None,
maximum_edition = None,
**kwargs):
"""Conformance test runner.
@ -27,6 +28,8 @@ def conformance_test(
if text_format_failure_list:
args = args + ["--text_format_failure_list %s" % _strip_bazel(text_format_failure_list)]
failure_lists = failure_lists + [text_format_failure_list]
if maximum_edition:
args = args + ["--maximum_edition %s" % maximum_edition]
native.sh_test(
name = name,

@ -7,30 +7,57 @@
# TODO: insert links to corresponding bugs tracking the issue.
# Should we use GitHub issues or the Google-internal bug tracker?
Recommended.FieldMaskNumbersDontRoundTrip.JsonOutput
Recommended.FieldMaskPathsDontRoundTrip.JsonOutput
Recommended.FieldMaskTooManyUnderscore.JsonOutput
Recommended.Proto3.FieldMaskNumbersDontRoundTrip.JsonOutput
Recommended.Editions_Proto3.FieldMaskNumbersDontRoundTrip.JsonOutput
Recommended.Proto3.FieldMaskPathsDontRoundTrip.JsonOutput
Recommended.Editions_Proto3.FieldMaskPathsDontRoundTrip.JsonOutput
Recommended.Proto3.FieldMaskTooManyUnderscore.JsonOutput
Recommended.Editions_Proto3.FieldMaskTooManyUnderscore.JsonOutput
Recommended.Proto3.JsonInput.BoolFieldDoubleQuotedFalse
Recommended.Editions_Proto3.JsonInput.BoolFieldDoubleQuotedFalse
Recommended.Proto3.JsonInput.BoolFieldDoubleQuotedTrue
Recommended.Editions_Proto3.JsonInput.BoolFieldDoubleQuotedTrue
Recommended.Proto3.JsonInput.FieldMaskInvalidCharacter
Recommended.Editions_Proto3.JsonInput.FieldMaskInvalidCharacter
Recommended.Proto3.JsonInput.FieldNameDuplicate
Recommended.Editions_Proto3.JsonInput.FieldNameDuplicate
Recommended.Proto3.JsonInput.FieldNameDuplicateDifferentCasing1
Recommended.Editions_Proto3.JsonInput.FieldNameDuplicateDifferentCasing1
Recommended.Proto3.JsonInput.FieldNameDuplicateDifferentCasing2
Recommended.Editions_Proto3.JsonInput.FieldNameDuplicateDifferentCasing2
Recommended.Proto3.JsonInput.FieldNameNotQuoted
Recommended.Editions_Proto3.JsonInput.FieldNameNotQuoted
Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInMapValue.ProtobufOutput
Recommended.Editions_Proto3.JsonInput.IgnoreUnknownEnumStringValueInMapValue.ProtobufOutput
Recommended.Proto3.JsonInput.MapFieldValueIsNull
Recommended.Editions_Proto3.JsonInput.MapFieldValueIsNull
Recommended.Proto3.JsonInput.RepeatedFieldMessageElementIsNull
Recommended.Editions_Proto3.JsonInput.RepeatedFieldMessageElementIsNull
Recommended.Proto3.JsonInput.RepeatedFieldPrimitiveElementIsNull
Recommended.Editions_Proto3.JsonInput.RepeatedFieldPrimitiveElementIsNull
Recommended.Proto3.JsonInput.RepeatedFieldTrailingComma
Recommended.Editions_Proto3.JsonInput.RepeatedFieldTrailingComma
Recommended.Proto3.JsonInput.RepeatedFieldTrailingCommaWithNewlines
Recommended.Editions_Proto3.JsonInput.RepeatedFieldTrailingCommaWithNewlines
Recommended.Proto3.JsonInput.RepeatedFieldTrailingCommaWithSpace
Recommended.Editions_Proto3.JsonInput.RepeatedFieldTrailingCommaWithSpace
Recommended.Proto3.JsonInput.RepeatedFieldTrailingCommaWithSpaceCommaSpace
Recommended.Editions_Proto3.JsonInput.RepeatedFieldTrailingCommaWithSpaceCommaSpace
Recommended.Proto3.JsonInput.StringFieldSingleQuoteBoth
Recommended.Editions_Proto3.JsonInput.StringFieldSingleQuoteBoth
Recommended.Proto3.JsonInput.StringFieldSingleQuoteKey
Recommended.Editions_Proto3.JsonInput.StringFieldSingleQuoteKey
Recommended.Proto3.JsonInput.StringFieldSingleQuoteValue
Recommended.Editions_Proto3.JsonInput.StringFieldSingleQuoteValue
Recommended.Proto3.JsonInput.StringFieldUppercaseEscapeLetter
Recommended.Editions_Proto3.JsonInput.StringFieldUppercaseEscapeLetter
Recommended.Proto3.JsonInput.TrailingCommaInAnObject
Recommended.Editions_Proto3.JsonInput.TrailingCommaInAnObject
Recommended.Proto3.JsonInput.TrailingCommaInAnObjectWithNewlines
Recommended.Editions_Proto3.JsonInput.TrailingCommaInAnObjectWithNewlines
Recommended.Proto3.JsonInput.TrailingCommaInAnObjectWithSpace
Recommended.Editions_Proto3.JsonInput.TrailingCommaInAnObjectWithSpace
Recommended.Proto3.JsonInput.TrailingCommaInAnObjectWithSpaceCommaSpace
Recommended.Editions_Proto3.JsonInput.TrailingCommaInAnObjectWithSpaceCommaSpace
Recommended.Proto2.JsonInput.FieldNameExtension.Validator
Recommended.Editions_Proto2.JsonInput.FieldNameExtension.Validator

@ -5,5 +5,5 @@ Required.Proto3.JsonInput.OneofFieldNullFirst.JsonOutput
Required.Proto3.JsonInput.OneofFieldNullFirst.ProtobufOutput
Required.Proto3.JsonInput.OneofFieldNullSecond.JsonOutput
Required.Proto3.JsonInput.OneofFieldNullSecond.ProtobufOutput
Recommended.ValueRejectInfNumberValue.JsonOutput
Recommended.ValueRejectNanNumberValue.JsonOutput
Recommended.Proto3.ValueRejectInfNumberValue.JsonOutput
Recommended.Proto3.ValueRejectNanNumberValue.JsonOutput

@ -4,9 +4,9 @@
# By listing them here we can keep tabs on which ones are failing and be sure
# that we don't introduce regressions in other tests.
Recommended.FieldMaskNumbersDontRoundTrip.JsonOutput
Recommended.FieldMaskPathsDontRoundTrip.JsonOutput
Recommended.FieldMaskTooManyUnderscore.JsonOutput
Recommended.Proto3.FieldMaskNumbersDontRoundTrip.JsonOutput
Recommended.Proto3.FieldMaskPathsDontRoundTrip.JsonOutput
Recommended.Proto3.FieldMaskTooManyUnderscore.JsonOutput
Recommended.Proto3.JsonInput.BoolFieldAllCapitalFalse
Recommended.Proto3.JsonInput.BoolFieldAllCapitalTrue
Recommended.Proto3.JsonInput.BoolFieldCamelCaseFalse

@ -1,6 +1,6 @@
Recommended.FieldMaskNumbersDontRoundTrip.JsonOutput
Recommended.FieldMaskPathsDontRoundTrip.JsonOutput
Recommended.FieldMaskTooManyUnderscore.JsonOutput
Recommended.Proto3.FieldMaskNumbersDontRoundTrip.JsonOutput
Recommended.Proto3.FieldMaskPathsDontRoundTrip.JsonOutput
Recommended.Proto3.FieldMaskTooManyUnderscore.JsonOutput
Recommended.Proto2.JsonInput.FieldNameExtension.Validator
Recommended.Proto3.JsonInput.BoolFieldAllCapitalFalse
Recommended.Proto3.JsonInput.BoolFieldAllCapitalTrue

@ -1,3 +1,6 @@
Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInMapValue.ProtobufOutput
Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInOptionalField.ProtobufOutput
Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInRepeatedField.ProtobufOutput
Recommended.Editions_Proto3.JsonInput.IgnoreUnknownEnumStringValueInMapValue.ProtobufOutput
Recommended.Editions_Proto3.JsonInput.IgnoreUnknownEnumStringValueInOptionalField.ProtobufOutput
Recommended.Editions_Proto3.JsonInput.IgnoreUnknownEnumStringValueInRepeatedField.ProtobufOutput

@ -9,3 +9,6 @@
Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInMapValue.ProtobufOutput
Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInOptionalField.ProtobufOutput
Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInRepeatedField.ProtobufOutput
Recommended.Editions_Proto3.JsonInput.IgnoreUnknownEnumStringValueInMapValue.ProtobufOutput
Recommended.Editions_Proto3.JsonInput.IgnoreUnknownEnumStringValueInOptionalField.ProtobufOutput
Recommended.Editions_Proto3.JsonInput.IgnoreUnknownEnumStringValueInRepeatedField.ProtobufOutput

@ -0,0 +1,6 @@
Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInMapValue.ProtobufOutput
Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInOptionalField.ProtobufOutput
Recommended.Proto3.JsonInput.IgnoreUnknownEnumStringValueInRepeatedField.ProtobufOutput
Recommended.Editions_Proto3.JsonInput.IgnoreUnknownEnumStringValueInMapValue.ProtobufOutput
Recommended.Editions_Proto3.JsonInput.IgnoreUnknownEnumStringValueInOptionalField.ProtobufOutput
Recommended.Editions_Proto3.JsonInput.IgnoreUnknownEnumStringValueInRepeatedField.ProtobufOutput

@ -7,25 +7,30 @@
#include "text_format_conformance_suite.h"
#include "google/protobuf/any.pb.h"
#include <cstddef>
#include <string>
#include <vector>
#include "absl/log/absl_log.h"
#include "absl/log/die_if_null.h"
#include "absl/strings/str_cat.h"
#include "conformance_test.h"
#include "google/protobuf/editions/golden/test_messages_proto2_editions.pb.h"
#include "google/protobuf/editions/golden/test_messages_proto3_editions.pb.h"
#include "google/protobuf/test_messages_proto2.pb.h"
#include "google/protobuf/test_messages_proto3.pb.h"
#include "google/protobuf/text_format.h"
namespace proto2_messages = protobuf_test_messages::proto2;
using conformance::ConformanceRequest;
using conformance::ConformanceResponse;
using conformance::WireFormat;
using google::protobuf::Message;
using google::protobuf::TextFormat;
using proto2_messages::TestAllTypesProto2;
using proto2_messages::UnknownToTestAllTypes;
using protobuf_test_messages::proto2::TestAllTypesProto2;
using protobuf_test_messages::proto2::UnknownToTestAllTypes;
using protobuf_test_messages::proto3::TestAllTypesProto3;
using std::string;
using TestAllTypesProto2Editions =
protobuf_test_messages::editions::proto2::TestAllTypesProto2;
using TestAllTypesProto3Editions =
protobuf_test_messages::editions::proto3::TestAllTypesProto3;
namespace google {
namespace protobuf {
@ -61,7 +66,7 @@ bool TextFormatConformanceTestSuite::ParseResponse(
const ConformanceRequestSetting& setting, Message* test_message) {
const ConformanceRequest& request = setting.GetRequest();
WireFormat requested_output = request.requested_output_format();
const string& test_name = setting.GetTestName();
const std::string& test_name = setting.GetTestName();
ConformanceLevel level = setting.GetLevel();
switch (response.result_case()) {
@ -111,10 +116,39 @@ bool TextFormatConformanceTestSuite::ParseResponse(
return true;
}
void TextFormatConformanceTestSuite::ExpectParseFailure(const string& test_name,
ConformanceLevel level,
const string& input) {
TestAllTypesProto3 prototype;
void TextFormatConformanceTestSuite::RunSuiteImpl() {
TextFormatConformanceTestSuiteImpl<TestAllTypesProto2>(this);
TextFormatConformanceTestSuiteImpl<TestAllTypesProto3>(this);
if (maximum_edition_ >= Edition::EDITION_2023) {
TextFormatConformanceTestSuiteImpl<TestAllTypesProto2Editions>(this);
TextFormatConformanceTestSuiteImpl<TestAllTypesProto3Editions>(this);
}
}
template <typename MessageType>
TextFormatConformanceTestSuiteImpl<MessageType>::
TextFormatConformanceTestSuiteImpl(TextFormatConformanceTestSuite* suite)
: suite_(*ABSL_DIE_IF_NULL(suite)) {
// Performance tests are gated behind a flag to keep them internal and opt-in only.
if (suite_.performance_) {
RunTextFormatPerformanceTests();
} else {
if (MessageType::GetDescriptor()->name() == "TestAllTypesProto2") {
RunGroupTests();
}
if (MessageType::GetDescriptor()->name() == "TestAllTypesProto3") {
RunAnyTests();
// TODO Run these over proto2 also.
RunAllTests();
}
}
}
template <typename MessageType>
void TextFormatConformanceTestSuiteImpl<MessageType>::ExpectParseFailure(
const std::string& test_name, ConformanceLevel level,
const std::string& input) {
MessageType prototype;
// We don't expect output, but if the program erroneously accepts the protobuf
// we let it send its response as this. We must not leave it unspecified.
ConformanceRequestSetting setting(
@ -122,86 +156,75 @@ void TextFormatConformanceTestSuite::ExpectParseFailure(const string& test_name,
conformance::TEXT_FORMAT_TEST, prototype, test_name, input);
const ConformanceRequest& request = setting.GetRequest();
ConformanceResponse response;
string effective_test_name =
absl::StrCat(setting.ConformanceLevelToString(level),
".Proto3.TextFormatInput.", test_name);
std::string effective_test_name = absl::StrCat(
setting.ConformanceLevelToString(level), ".",
setting.GetSyntaxIdentifier(), ".TextFormatInput.", test_name);
RunTest(effective_test_name, request, &response);
suite_.RunTest(effective_test_name, request, &response);
if (response.result_case() == ConformanceResponse::kParseError) {
ReportSuccess(effective_test_name);
suite_.ReportSuccess(effective_test_name);
} else if (response.result_case() == ConformanceResponse::kSkipped) {
ReportSkip(effective_test_name, request, response);
suite_.ReportSkip(effective_test_name, request, response);
} else {
ReportFailure(effective_test_name, level, request, response,
suite_.ReportFailure(effective_test_name, level, request, response,
"Should have failed to parse, but didn't.");
}
}
void TextFormatConformanceTestSuite::RunValidTextFormatTest(
const string& test_name, ConformanceLevel level, const string& input_text) {
TestAllTypesProto3 prototype;
RunValidTextFormatTestWithMessage(test_name, level, input_text, prototype);
}
void TextFormatConformanceTestSuite::RunValidTextFormatTestProto2(
const string& test_name, ConformanceLevel level, const string& input_text) {
TestAllTypesProto2 prototype;
template <typename MessageType>
void TextFormatConformanceTestSuiteImpl<MessageType>::RunValidTextFormatTest(
const std::string& test_name, ConformanceLevel level,
const std::string& input_text) {
MessageType prototype;
RunValidTextFormatTestWithMessage(test_name, level, input_text, prototype);
}
void TextFormatConformanceTestSuite::RunValidTextFormatTestWithExpected(
const string& test_name, ConformanceLevel level, const string& input_text,
const string& expected_text) {
TestAllTypesProto3 prototype;
RunValidTextFormatTestWithMessage(test_name, level, input_text, expected_text,
prototype);
}
void TextFormatConformanceTestSuite::RunValidTextFormatTestProto2WithExpected(
const string& test_name, ConformanceLevel level, const string& input_text,
const string& expected_text) {
TestAllTypesProto2 prototype;
RunValidTextFormatTestWithMessage(test_name, level, input_text, expected_text,
prototype);
}
void TextFormatConformanceTestSuite::RunValidTextFormatTestWithMessage(
const string& test_name, ConformanceLevel level, const string& input_text,
const Message& prototype) {
template <typename MessageType>
void TextFormatConformanceTestSuiteImpl<MessageType>::
RunValidTextFormatTestWithMessage(const std::string& test_name,
ConformanceLevel level,
const std::string& input_text,
const Message& message) {
ConformanceRequestSetting setting1(
level, conformance::TEXT_FORMAT, conformance::PROTOBUF,
conformance::TEXT_FORMAT_TEST, prototype, test_name, input_text);
RunValidInputTest(setting1, input_text);
conformance::TEXT_FORMAT_TEST, message, test_name, input_text);
suite_.RunValidInputTest(setting1, input_text);
ConformanceRequestSetting setting2(
level, conformance::TEXT_FORMAT, conformance::TEXT_FORMAT,
conformance::TEXT_FORMAT_TEST, prototype, test_name, input_text);
RunValidInputTest(setting2, input_text);
conformance::TEXT_FORMAT_TEST, message, test_name, input_text);
suite_.RunValidInputTest(setting2, input_text);
}
void TextFormatConformanceTestSuite::RunValidTextFormatTestWithMessage(
const string& test_name, ConformanceLevel level, const string& input_text,
const string& expected_text, const Message& prototype) {
template <typename MessageType>
void TextFormatConformanceTestSuiteImpl<MessageType>::
RunValidTextFormatTestWithExpected(const std::string& test_name,
ConformanceLevel level,
const std::string& input_text,
const std::string& expected_text) {
MessageType prototype;
ConformanceRequestSetting setting1(
level, conformance::TEXT_FORMAT, conformance::PROTOBUF,
conformance::TEXT_FORMAT_TEST, prototype, test_name, input_text);
RunValidInputTest(setting1, expected_text);
suite_.RunValidInputTest(setting1, expected_text);
ConformanceRequestSetting setting2(
level, conformance::TEXT_FORMAT, conformance::TEXT_FORMAT,
conformance::TEXT_FORMAT_TEST, prototype, test_name, input_text);
RunValidInputTest(setting2, expected_text);
suite_.RunValidInputTest(setting2, expected_text);
}
void TextFormatConformanceTestSuite::RunValidUnknownTextFormatTest(
const string& test_name, const Message& message) {
string serialized_input;
template <typename MessageType>
void TextFormatConformanceTestSuiteImpl<
MessageType>::RunValidUnknownTextFormatTest(const std::string& test_name,
const Message& message) {
std::string serialized_input;
message.SerializeToString(&serialized_input);
TestAllTypesProto3 prototype;
MessageType prototype;
ConformanceRequestSetting setting1(
RECOMMENDED, conformance::PROTOBUF, conformance::TEXT_FORMAT,
conformance::TEXT_FORMAT_TEST, prototype,
absl::StrCat(test_name, "_Drop"), serialized_input);
setting1.SetPrototypeMessageForCompare(message);
RunValidBinaryInputTest(setting1, "");
suite_.RunValidBinaryInputTest(setting1, "");
ConformanceRequestSetting setting2(
RECOMMENDED, conformance::PROTOBUF, conformance::TEXT_FORMAT,
@ -209,11 +232,20 @@ void TextFormatConformanceTestSuite::RunValidUnknownTextFormatTest(
absl::StrCat(test_name, "_Print"), serialized_input);
setting2.SetPrototypeMessageForCompare(message);
setting2.SetPrintUnknownFields(true);
RunValidBinaryInputTest(setting2, serialized_input);
suite_.RunValidBinaryInputTest(setting2, serialized_input);
}
void TextFormatConformanceTestSuite::RunSuiteImpl() {
if (!performance_) {
template <typename MessageType>
void TextFormatConformanceTestSuiteImpl<MessageType>::RunGroupTests() {
RunValidTextFormatTest("GroupFieldNoColon", REQUIRED,
"Data { group_int32: 1 }");
RunValidTextFormatTest("GroupFieldWithColon", REQUIRED,
"Data: { group_int32: 1 }");
RunValidTextFormatTest("GroupFieldEmpty", REQUIRED, "Data {}");
}
template <typename MessageType>
void TextFormatConformanceTestSuiteImpl<MessageType>::RunAllTests() {
RunValidTextFormatTest("HelloWorld", REQUIRED,
"optional_string: 'Hello, World!'");
// Integer fields.
@ -252,8 +284,7 @@ void TextFormatConformanceTestSuite::RunSuiteImpl() {
"optional_float: 3.4028235e+38");
RunValidTextFormatTest("FloatFieldMinValue", REQUIRED,
"optional_float: 1.17549e-38");
RunValidTextFormatTest("FloatFieldNaNValue", REQUIRED,
"optional_float: NaN");
RunValidTextFormatTest("FloatFieldNaNValue", REQUIRED, "optional_float: NaN");
RunValidTextFormatTest("FloatFieldPosInfValue", REQUIRED,
"optional_float: inf");
RunValidTextFormatTest("FloatFieldNegInfValue", REQUIRED,
@ -282,8 +313,8 @@ void TextFormatConformanceTestSuite::RunSuiteImpl() {
RunValidTextFormatTest(
absl::StrCat("StringLiteralOctalEscapes", field_type), REQUIRED,
absl::StrCat(field_name, ": '\\341\\210\\264'"));
RunValidTextFormatTest(
absl::StrCat("StringLiteralHexEscapes", field_type), REQUIRED,
RunValidTextFormatTest(absl::StrCat("StringLiteralHexEscapes", field_type),
REQUIRED,
absl::StrCat(field_name, ": '\\xe1\\x88\\xb4'"));
RunValidTextFormatTest(
absl::StrCat("StringLiteralShortUnicodeEscape", field_type),
@ -292,8 +323,8 @@ void TextFormatConformanceTestSuite::RunSuiteImpl() {
absl::StrCat("StringLiteralLongUnicodeEscapes", field_type),
RECOMMENDED, absl::StrCat(field_name, ": '\\U00001234\\U00010437'"));
// String literals don't include line feeds.
ExpectParseFailure(
absl::StrCat("StringLiteralIncludesLF", field_type), REQUIRED,
ExpectParseFailure(absl::StrCat("StringLiteralIncludesLF", field_type),
REQUIRED,
absl::StrCat(field_name, ": 'first line\nsecond line'"));
// Unicode escapes don't include code points that lie beyond the planes
// (> 0x10ffff).
@ -322,8 +353,7 @@ void TextFormatConformanceTestSuite::RunSuiteImpl() {
field_type),
RECOMMENDED, absl::StrCat(field_name, ": '\\U0000dc00'"));
ExpectParseFailure(
absl::StrCat("StringLiteralLongUnicodeEscapeSurrogatePair",
field_type),
absl::StrCat("StringLiteralLongUnicodeEscapeSurrogatePair", field_type),
RECOMMENDED, absl::StrCat(field_name, ": '\\U0000d801\\U00000dc37'"));
ExpectParseFailure(
absl::StrCat("StringLiteralUnicodeEscapeSurrogatePairLongShort",
@ -338,23 +368,16 @@ void TextFormatConformanceTestSuite::RunSuiteImpl() {
// validation.
const auto test_method =
field_type == "String"
? &TextFormatConformanceTestSuite::ExpectParseFailure
: &TextFormatConformanceTestSuite::RunValidTextFormatTest;
? &TextFormatConformanceTestSuiteImpl::ExpectParseFailure
: &TextFormatConformanceTestSuiteImpl::RunValidTextFormatTest;
// String fields reject invalid UTF-8 byte sequences; bytes fields don't.
(this->*test_method)(absl::StrCat(field_type, "FieldBadUTF8Octal"),
REQUIRED, absl::StrCat(field_name, ": '\\300'"));
(this->*test_method)(absl::StrCat(field_type, "FieldBadUTF8Hex"),
REQUIRED, absl::StrCat(field_name, ": '\\xc0'"));
(this->*test_method)(absl::StrCat(field_type, "FieldBadUTF8Hex"), REQUIRED,
absl::StrCat(field_name, ": '\\xc0'"));
}
// Group fields
RunValidTextFormatTestProto2("GroupFieldNoColon", REQUIRED,
"Data { group_int32: 1 }");
RunValidTextFormatTestProto2("GroupFieldWithColon", REQUIRED,
"Data: { group_int32: 1 }");
RunValidTextFormatTestProto2("GroupFieldEmpty", REQUIRED, "Data {}");
// Unknown Fields
UnknownToTestAllTypes message;
// Unable to print unknown Fixed32/Fixed64 fields as if they are known.
@ -377,33 +400,8 @@ void TextFormatConformanceTestSuite::RunSuiteImpl() {
message.add_repeated_int32(3);
RunValidUnknownTextFormatTest("RepeatedUnknownFields", message);
// Any fields
RunValidTextFormatTest("AnyField", REQUIRED,
R"(
optional_any: {
[type.googleapis.com/protobuf_test_messages.proto3.TestAllTypesProto3] {
optional_int32: 12345
}
}
)");
RunValidTextFormatTest("AnyFieldWithRawBytes", REQUIRED,
R"(
optional_any: {
type_url: "type.googleapis.com/protobuf_test_messages.proto3.TestAllTypesProto3"
value: "\b\271`"
}
)");
ExpectParseFailure("AnyFieldWithInvalidType", REQUIRED,
R"(
optional_any: {
[type.googleapis.com/unknown] {
optional_int32: 12345
}
}
)");
// Map fields
TestAllTypesProto3 prototype;
MessageType prototype;
(*prototype.mutable_map_string_string())["c"] = "value";
(*prototype.mutable_map_string_string())["b"] = "value";
(*prototype.mutable_map_string_string())["a"] = "value";
@ -429,8 +427,7 @@ void TextFormatConformanceTestSuite::RunSuiteImpl() {
(*prototype.mutable_map_int32_int32())[3] = 0;
(*prototype.mutable_map_int32_int32())[2] = 0;
(*prototype.mutable_map_int32_int32())[1] = 0;
RunValidTextFormatTestWithMessage("AlphabeticallySortedMapIntKeys",
REQUIRED,
RunValidTextFormatTestWithMessage("AlphabeticallySortedMapIntKeys", REQUIRED,
R"(
map_int32_int32 {
key: 1
@ -450,8 +447,7 @@ void TextFormatConformanceTestSuite::RunSuiteImpl() {
prototype.Clear();
(*prototype.mutable_map_bool_bool())[true] = false;
(*prototype.mutable_map_bool_bool())[false] = false;
RunValidTextFormatTestWithMessage("AlphabeticallySortedMapBoolKeys",
REQUIRED,
RunValidTextFormatTestWithMessage("AlphabeticallySortedMapBoolKeys", REQUIRED,
R"(
map_bool_bool {
key: false
@ -478,20 +474,46 @@ void TextFormatConformanceTestSuite::RunSuiteImpl() {
}
)");
// The last-specified value will be retained in a parsed map
RunValidInputTest(setting_map, R"(
suite_.RunValidInputTest(setting_map, R"(
map_string_nested_message {
key: "duplicate"
value: { corecursive: {} }
}
)");
}
// Flag control performance tests to keep them internal and opt-in only
if (performance_) {
RunTextFormatPerformanceTests();
template <typename MessageType>
void TextFormatConformanceTestSuiteImpl<MessageType>::RunAnyTests() {
// Any fields
RunValidTextFormatTest("AnyField", REQUIRED,
R"(
optional_any: {
[type.googleapis.com/protobuf_test_messages.proto3.TestAllTypesProto3]
{ optional_int32: 12345
}
}
)");
RunValidTextFormatTest("AnyFieldWithRawBytes", REQUIRED,
R"(
optional_any: {
type_url:
"type.googleapis.com/protobuf_test_messages.proto3.TestAllTypesProto3" value:
"\b\271`"
}
)");
ExpectParseFailure("AnyFieldWithInvalidType", REQUIRED,
R"(
optional_any: {
[type.googleapis.com/unknown] {
optional_int32: 12345
}
}
)");
}
void TextFormatConformanceTestSuite::RunTextFormatPerformanceTests() {
template <typename MessageType>
void TextFormatConformanceTestSuiteImpl<
MessageType>::RunTextFormatPerformanceTests() {
TestTextFormatPerformanceMergeMessageWithRepeatedField("Bool",
"repeated_bool: true");
TestTextFormatPerformanceMergeMessageWithRepeatedField(
@ -507,30 +529,27 @@ void TextFormatConformanceTestSuite::RunTextFormatPerformanceTests() {
}
// This is currently considered valid input by some languages but not others
void TextFormatConformanceTestSuite::
template <typename MessageType>
void TextFormatConformanceTestSuiteImpl<MessageType>::
TestTextFormatPerformanceMergeMessageWithRepeatedField(
const string& test_type_name, const string& message_field) {
string recursive_message =
const std::string& test_type_name, const std::string& message_field) {
std::string recursive_message =
absl::StrCat("recursive_message { ", message_field, " }");
string input;
std::string input;
for (size_t i = 0; i < kPerformanceRepeatCount; i++) {
absl::StrAppend(&input, recursive_message);
}
string expected = "recursive_message { ";
std::string expected = "recursive_message { ";
for (size_t i = 0; i < kPerformanceRepeatCount; i++) {
absl::StrAppend(&expected, message_field, " ");
}
absl::StrAppend(&expected, "}");
RunValidTextFormatTestProto2WithExpected(
absl::StrCat("TestTextFormatPerformanceMergeMessageWithRepeatedField",
test_type_name, "Proto2"),
RECOMMENDED, input, expected);
RunValidTextFormatTestWithExpected(
absl::StrCat("TestTextFormatPerformanceMergeMessageWithRepeatedField",
test_type_name, "Proto3"),
test_type_name),
RECOMMENDED, input, expected);
}

@ -8,7 +8,10 @@
#ifndef TEXT_FORMAT_CONFORMANCE_SUITE_H_
#define TEXT_FORMAT_CONFORMANCE_SUITE_H_
#include <string>
#include "conformance_test.h"
#include "google/protobuf/message.h"
namespace google {
namespace protobuf {
@ -19,41 +22,55 @@ class TextFormatConformanceTestSuite : public ConformanceTestSuite {
private:
void RunSuiteImpl() override;
bool ParseTextFormatResponse(const conformance::ConformanceResponse& response,
const ConformanceRequestSetting& setting,
Message* test_message);
bool ParseResponse(const conformance::ConformanceResponse& response,
const ConformanceRequestSetting& setting,
Message* test_message) override;
template <typename MessageType>
friend class TextFormatConformanceTestSuiteImpl;
};
template <typename MessageType>
class TextFormatConformanceTestSuiteImpl {
public:
explicit TextFormatConformanceTestSuiteImpl(
TextFormatConformanceTestSuite* suite);
private:
using ConformanceRequestSetting =
TextFormatConformanceTestSuite::ConformanceRequestSetting;
using ConformanceLevel = TextFormatConformanceTestSuite::ConformanceLevel;
constexpr static ConformanceLevel RECOMMENDED = ConformanceLevel::RECOMMENDED;
constexpr static ConformanceLevel REQUIRED = ConformanceLevel::REQUIRED;
void RunAllTests();
void RunGroupTests();
void RunAnyTests();
void RunTextFormatPerformanceTests();
void RunValidTextFormatTest(const std::string& test_name,
ConformanceLevel level, const std::string& input);
void RunValidTextFormatTestProto2(const std::string& test_name,
ConformanceLevel level,
const std::string& input);
void RunValidTextFormatTestWithExpected(const std::string& test_name,
ConformanceLevel level,
const std::string& input,
const std::string& expected);
void RunValidTextFormatTestProto2WithExpected(const std::string& test_name,
ConformanceLevel level,
const std::string& input,
const std::string& expected);
void RunValidTextFormatTestWithMessage(const std::string& test_name,
ConformanceLevel level,
const std::string& input_text,
const Message& prototype);
const std::string& expected_text);
void RunValidUnknownTextFormatTest(const std::string& test_name,
const Message& message);
void RunValidTextFormatTestWithMessage(const std::string& test_name,
ConformanceLevel level,
const std::string& input_text,
const std::string& expected_text,
const Message& prototype);
void RunValidUnknownTextFormatTest(const std::string& test_name,
const Message& message);
void ExpectParseFailure(const std::string& test_name, ConformanceLevel level,
const std::string& input);
bool ParseTextFormatResponse(const conformance::ConformanceResponse& response,
const ConformanceRequestSetting& setting,
Message* test_message);
bool ParseResponse(const conformance::ConformanceResponse& response,
const ConformanceRequestSetting& setting,
Message* test_message) override;
void TestTextFormatPerformanceMergeMessageWithRepeatedField(
const std::string& test_type_name, const std::string& message_field);
TextFormatConformanceTestSuite& suite_;
};
} // namespace protobuf

@ -1,20 +1,40 @@
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateFirstOnlyBytes
Recommended.Editions_Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateFirstOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateFirstOnlyString
Recommended.Editions_Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateFirstOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogatePairBytes
Recommended.Editions_Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogatePairBytes
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogatePairString
Recommended.Editions_Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogatePairString
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateSecondOnlyBytes
Recommended.Editions_Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateSecondOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateSecondOnlyString
Recommended.Editions_Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateSecondOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateFirstOnlyBytes
Recommended.Editions_Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateFirstOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateFirstOnlyString
Recommended.Editions_Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateFirstOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogatePairBytes
Recommended.Editions_Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogatePairBytes
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogatePairString
Recommended.Editions_Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogatePairString
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateSecondOnlyBytes
Recommended.Editions_Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateSecondOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateSecondOnlyString
Recommended.Editions_Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateSecondOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairLongShortBytes
Recommended.Editions_Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairLongShortBytes
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairLongShortString
Recommended.Editions_Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairLongShortString
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairShortLongBytes
Recommended.Editions_Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairShortLongBytes
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairShortLongString
Recommended.Editions_Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairShortLongString
Required.Proto3.TextFormatInput.StringFieldBadUTF8Hex
Required.Editions_Proto3.TextFormatInput.StringFieldBadUTF8Hex
Required.Proto3.TextFormatInput.StringFieldBadUTF8Octal
Required.Editions_Proto3.TextFormatInput.StringFieldBadUTF8Octal
Required.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeTooLargeBytes
Required.Editions_Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeTooLargeBytes
Required.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeTooLargeString
Required.Editions_Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeTooLargeString

@@ -3,32 +3,13 @@
# TODO: These should be fixed.
Required.Proto3.TextFormatInput.FloatFieldMaxValue.ProtobufOutput
Required.Proto3.TextFormatInput.FloatFieldMaxValue.TextFormatOutput
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateFirstOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateFirstOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogatePairBytes
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogatePairString
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateSecondOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateSecondOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapesBytes.ProtobufOutput
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapesBytes.TextFormatOutput
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapesString.ProtobufOutput
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapesString.TextFormatOutput
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeBytes.ProtobufOutput
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeBytes.TextFormatOutput
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeString.ProtobufOutput
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeString.TextFormatOutput
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateFirstOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateFirstOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogatePairBytes
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogatePairString
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateSecondOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateSecondOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairLongShortBytes
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairLongShortString
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairShortLongBytes
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairShortLongString
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.ProtobufOutput
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.TextFormatOutput
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesString.ProtobufOutput
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesString.TextFormatOutput
Required.Editions_Proto3.TextFormatInput.FloatFieldMaxValue.ProtobufOutput
Required.Editions_Proto3.TextFormatInput.FloatFieldMaxValue.TextFormatOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.ProtobufOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.TextFormatOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesString.ProtobufOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesString.TextFormatOutput

@@ -1,28 +1,8 @@
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateFirstOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateFirstOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogatePairBytes
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogatePairString
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateSecondOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapeSurrogateSecondOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapesBytes.ProtobufOutput
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapesBytes.TextFormatOutput
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapesString.ProtobufOutput
Recommended.Proto3.TextFormatInput.StringLiteralLongUnicodeEscapesString.TextFormatOutput
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeBytes.ProtobufOutput
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeBytes.TextFormatOutput
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeString.ProtobufOutput
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeString.TextFormatOutput
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateFirstOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateFirstOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogatePairBytes
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogatePairString
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateSecondOnlyBytes
Recommended.Proto3.TextFormatInput.StringLiteralShortUnicodeEscapeSurrogateSecondOnlyString
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairLongShortBytes
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairLongShortString
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairShortLongBytes
Recommended.Proto3.TextFormatInput.StringLiteralUnicodeEscapeSurrogatePairShortLongString
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.ProtobufOutput
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.TextFormatOutput
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesString.ProtobufOutput
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesString.TextFormatOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.ProtobufOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.TextFormatOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesString.ProtobufOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesString.TextFormatOutput

@@ -0,0 +1,11 @@
# This is the list of text format conformance tests that are known to fail right
# now.
# TODO: These should be fixed.
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.ProtobufOutput
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.TextFormatOutput
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesString.ProtobufOutput
Required.Proto3.TextFormatInput.StringLiteralBasicEscapesString.TextFormatOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.ProtobufOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesBytes.TextFormatOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesString.ProtobufOutput
Required.Editions_Proto3.TextFormatInput.StringLiteralBasicEscapesString.TextFormatOutput

@@ -5,7 +5,7 @@
<title>Google Protocol Buffers tools</title>
<summary>Tools for Protocol Buffers - Google's data interchange format.</summary>
<description>See project site for more info.</description>
<version>3.24.0</version>
<version>3.25.0</version>
<authors>Google Inc.</authors>
<owners>protobuf-packages</owners>
<licenseUrl>https://github.com/protocolbuffers/protobuf/blob/main/LICENSE</licenseUrl>

@@ -4,13 +4,13 @@ load("//build_defs:internal_shell.bzl", "inline_sh_test")
inline_sh_test(
name = "tests",
srcs = ["test.sh"],
cmd = "$(location test.sh) 3.0.0 $(rootpath //:protoc)",
tools = ["//:protoc"],
deps = glob([
"src/**/*.cs*", # .cs and .csproj
"protos/**/*.proto",
]) + [
"//csharp:srcs",
"//:well_known_protos",
"//csharp:srcs",
],
cmd = "$(location test.sh) 3.0.0 $(rootpath //:protoc)"
)

@@ -632,6 +632,59 @@ namespace Google.Protobuf.Collections
CollectionAssert.AreEquivalent(((IDictionary<string, string>)map).Values, ((IReadOnlyDictionary<string, string>)map).Values);
}
[Test]
public void SortIntKeys_RandomOrder()
{
var map = new MapField<int, string>() { { 1, "val" }, { -1, "val"}, { 0, "val" } };
var sortedList = map.GetSortedListCopy(map.ToList()).ToList();
var sortedKeys = sortedList.Select(kvp => kvp.Key);
CollectionAssert.AreEqual(new[] { -1, 0, 1 }, sortedKeys);
}
[Test]
public void SortIntKeys_Empty()
{
var map = new MapField<int, string> { };
var sortedList = map.GetSortedListCopy(map.ToList()).ToList();
var sortedKeys = sortedList.Select(kvp => kvp.Key);
Assert.IsEmpty(sortedKeys);
}
[Test]
public void SortStringKeys_RandomOrder()
{
var map = new MapField<string, string> { { "a", "val" }, { "c", "val" }, { "b", "val" } };
var sortedList = map.GetSortedListCopy(map.ToList()).ToList();
var sortedKeys = sortedList.Select(kvp => kvp.Key);
CollectionAssert.AreEqual(new[] { "a", "b", "c" }, sortedKeys);
}
[Test]
public void SortStringKeys_EnsureOrdinalSort()
{
var map = new MapField<string, string>
{
{ "i", "val" } , { "I", "val" }, { "ı", "val" }, { "İ", "val" }
};
var sortedList = map.GetSortedListCopy(map.ToList());
var sortedKeys = sortedList.Select(kvp => kvp.Key);
// Assert Ordinal sort I, i, İ, ı (Non-ordinal sort returns i, I, İ, ı)
// I == 0x49 , i == 0x69 , İ == 0x130 , ı == 0x131
CollectionAssert.AreEqual(new[] { "I", "i", "İ", "ı" }, sortedKeys);
}
[Test]
public void SortBoolKeys()
{
var map = new MapField<bool, string>
{
{ true, "val" } , { false, "val" }
};
var sortedList = map.GetSortedListCopy(map.ToList());
var sortedKeys = sortedList.Select(kvp => kvp.Key);
CollectionAssert.AreEqual(new[] { false, true }, sortedKeys);
}
private static KeyValuePair<TKey, TValue> NewKeyValuePair<TKey, TValue>(TKey key, TValue value)
{
return new KeyValuePair<TKey, TValue>(key, value);
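
The ordinal-sort assertion above depends on comparing raw UTF-16 code units rather than culture-aware collation. A minimal standalone sketch of that comparison rule (illustrative only, independent of MapField's internal GetSortedListCopy helper):

using System;
using System.Collections.Generic;

class OrdinalSortSketch
{
    static void Main()
    {
        // I == U+0049, i == U+0069, İ == U+0130, ı == U+0131
        var keys = new List<string> { "i", "I", "ı", "İ" };

        // Ordinal comparison orders strings by raw UTF-16 code unit values, so the
        // ASCII letters sort first and the dotted/dotless I variants sort last.
        keys.Sort(StringComparer.Ordinal);

        Console.WriteLine(string.Join(", ", keys)); // I, i, İ, ı
    }
}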

@@ -406,6 +406,12 @@ namespace Google.Protobuf
Merge(new FieldMaskTree().AddFieldPath("payload.single_int32"),
sourceWithPayloadInt32Unset, destination, options, useDynamicMessage);
Assert.IsNotNull(destination.Payload);
// Clear unset primitive fields even if source payload is cleared
destination = source.Clone();
Merge(new FieldMaskTree().AddFieldPath("payload.single_int32"),
clearedSource, destination, options, useDynamicMessage);
Assert.AreEqual(0, destination.Payload.SingleInt32);
}
[Test]

@@ -668,6 +668,79 @@ namespace Google.Protobuf
});
}
[Test]
public void MapStringString_DeterministicTrue_ThenBytesIdentical()
{
// Define three strings consisting of different versions of the letter I.
// LATIN CAPITAL LETTER I (U+0049)
string capitalLetterI = "I";
// LATIN SMALL LETTER I (U+0069)
string smallLetterI = "i";
// LATIN SMALL LETTER DOTLESS I (U+0131)
string smallLetterDotlessI = "\u0131";
var testMap1 = new TestMap();
testMap1.MapStringString.Add(smallLetterDotlessI, "value_"+smallLetterDotlessI);
testMap1.MapStringString.Add(smallLetterI, "value_"+smallLetterI);
testMap1.MapStringString.Add(capitalLetterI, "content_"+capitalLetterI);
var bytes1 = SerializeTestMap(testMap1, true);
var testMap2 = new TestMap();
testMap2.MapStringString.Add(capitalLetterI, "content_"+capitalLetterI);
testMap2.MapStringString.Add(smallLetterI, "value_"+smallLetterI);
testMap2.MapStringString.Add(smallLetterDotlessI, "value_"+smallLetterDotlessI);
var bytes2 = SerializeTestMap(testMap2, true);
var parsedBytes2 = TestMap.Parser.ParseFrom(bytes2);
var parsedBytes1 = TestMap.Parser.ParseFrom(bytes1);
Assert.IsTrue(bytes1.SequenceEqual(bytes2));
}
[Test]
public void MapInt32Bytes_DeterministicTrue_ThenBytesIdentical()
{
var testMap1 = new TestMap();
testMap1.MapInt32Bytes.Add(1, ByteString.CopyFromUtf8("test1"));
testMap1.MapInt32Bytes.Add(2, ByteString.CopyFromUtf8("test2"));
var bytes1 = SerializeTestMap(testMap1, true);
var testMap2 = new TestMap();
testMap2.MapInt32Bytes.Add(2, ByteString.CopyFromUtf8("test2"));
testMap2.MapInt32Bytes.Add(1, ByteString.CopyFromUtf8("test1"));
var bytes2 = SerializeTestMap(testMap2, true);
Assert.IsTrue(bytes1.SequenceEqual(bytes2));
}
[Test]
public void MapInt32Bytes_DeterministicFalse_ThenBytesDifferent()
{
var testMap1 = new TestMap();
testMap1.MapInt32Bytes.Add(1, ByteString.CopyFromUtf8("test1"));
testMap1.MapInt32Bytes.Add(2, ByteString.CopyFromUtf8("test2"));
var bytes1 = SerializeTestMap(testMap1, false);
var testMap2 = new TestMap();
testMap2.MapInt32Bytes.Add(2, ByteString.CopyFromUtf8("test2"));
testMap2.MapInt32Bytes.Add(1, ByteString.CopyFromUtf8("test1"));
var bytes2 = SerializeTestMap(testMap2, false);
Assert.IsFalse(bytes1.SequenceEqual(bytes2));
}
private byte[] SerializeTestMap(TestMap testMap, bool deterministic)
{
using var memoryStream = new MemoryStream();
var codedOutputStream = new CodedOutputStream(memoryStream);
codedOutputStream.Deterministic = deterministic;
testMap.WriteTo(codedOutputStream);
codedOutputStream.Flush();
memoryStream.Seek(0, SeekOrigin.Begin);
return memoryStream.ToArray();
}
[Test]
public void DiscardUnknownFields_RealDataStillRead()
{

@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net462;net6.0</TargetFrameworks>
@@ -14,14 +14,14 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.5.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.7.2" />
<PackageReference Include="NUnit" Version="3.13.3" />
<PackageReference Include="NUnit3TestAdapter" Version="4.2.2" />
<PackageReference Include="NUnit3TestAdapter" Version="4.5.0" />
</ItemGroup>
<!-- Needed for the netfx build to work on Unix. See https://github.com/dotnet/designs/pull/33 -->
<ItemGroup>
<PackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.2" PrivateAssets="All" />
<PackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" PrivateAssets="All" />
</ItemGroup>
<ItemGroup>

@@ -1,4 +1,4 @@
#region Copyright notice and license
#region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
//
@@ -471,11 +471,11 @@ namespace Google.Protobuf
// Assume that anything non-bounds-related is covered in the Int32 case
[Test]
[TestCase("9223372036854775808")]
// Theoretical bound would be -9223372036854775809, but when that is parsed to a double
// we end up with the exact value of long.MinValue due to lack of precision. The value here
// is the "next double down".
[TestCase("-9223372036854780000")]
// Runtime implementation differences produce different results for values just outside
// (long.MinValue, long.MaxValue) which cannot be exactly represented as a double. Use the
// next values exactly representable as doubles to ensure consistency.
[TestCase("9223372036854777856")]
[TestCase("-9223372036854777856")]
public void NumberToInt64_Invalid(string jsonValue)
{
string json = "{ \"singleInt64\": " + jsonValue + "}";
@@ -498,7 +498,10 @@ namespace Google.Protobuf
// Assume that anything non-bounds-related is covered in the Int32 case
[Test]
[TestCase("-1")]
[TestCase("18446744073709551616")]
// Runtime implementation differences produce different results for values just beyond
// ulong.MaxValue which cannot be exactly represented as a double. Use the next value
// exactly representable as a double to ensure consistency.
[TestCase("18446744073709555712")]
public void NumberToUInt64_Invalid(string jsonValue)
{
string json = "{ \"singleUint64\": " + jsonValue + "}";
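
The revised test values follow from double precision: adjacent doubles are 2048 apart near 2^63 and 4096 apart near 2^64, so 9223372036854777856 (2^63 + 2048) and 18446744073709555712 (2^64 + 4096) are the smallest exactly representable doubles beyond the signed and unsigned 64-bit ranges. A small sketch verifying that arithmetic (Math.BitIncrement is available on .NET Core 3.0 and later):

using System;

class DoubleBoundarySketch
{
    static void Main()
    {
        // long.MaxValue (2^63 - 1) is not representable as a double; the literal
        // below rounds to exactly 2^63 during compilation.
        Console.WriteLine(9223372036854775807d == 9223372036854775808d); // True

        // The next double above 2^63 is 2^63 + 2048, the new Int64 test value.
        Console.WriteLine(Math.BitIncrement(9223372036854775808d) == 9223372036854777856d); // True

        // The next double above 2^64 is 2^64 + 4096, the new UInt64 test value.
        Console.WriteLine(Math.BitIncrement(18446744073709551616d) == 18446744073709555712d); // True
    }
}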

@@ -10,6 +10,7 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Security;
@@ -23,6 +24,8 @@ namespace Google.Protobuf
/// Immutable array of bytes.
/// </summary>
[SecuritySafeCritical]
[DebuggerDisplay("Length = {Length}")]
[DebuggerTypeProxy(typeof(ByteStringDebugView))]
public sealed class ByteString : IEnumerable<byte>, IEquatable<ByteString>
{
private static readonly ByteString empty = new ByteString(new byte[0]);
@@ -400,5 +403,18 @@ namespace Google.Protobuf
outputStream.Write(array, 0, array.Length);
}
}
private sealed class ByteStringDebugView
{
private readonly ByteString data;
public ByteStringDebugView(ByteString data)
{
this.data = data;
}
[DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
public byte[] Items => data.bytes.ToArray();
}
}
}
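
The DebuggerDisplay/DebuggerTypeProxy pair added above is a standard BCL debugging pattern: the display string appears in the watch window, and the proxy's RootHidden member replaces the raw private fields when the value is expanded. A self-contained sketch of the same pattern on a hypothetical type (not protobuf code):

using System.Diagnostics;
using System.Linq;

[DebuggerDisplay("Count = {Count}")]
[DebuggerTypeProxy(typeof(PacketDebugView))]
public sealed class Packet
{
    private readonly byte[] payload;

    public Packet(byte[] payload) => this.payload = payload;

    public int Count => payload.Length;

    // Instantiated only by the debugger; has no effect on runtime behaviour.
    private sealed class PacketDebugView
    {
        private readonly Packet packet;

        public PacketDebugView(Packet packet) => this.packet = packet;

        // RootHidden makes the debugger show the bytes directly rather than
        // nesting them under an "Items" node.
        [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
        public byte[] Items => packet.payload.ToArray();
    }
}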

@@ -137,6 +137,33 @@ namespace Google.Protobuf
}
}
/// <summary>
/// Configures whether or not serialization is deterministic.
/// </summary>
/// <remarks>
/// Deterministic serialization guarantees that for a given binary, equal messages (defined by the
/// equals methods in protos) will always be serialized to the same bytes. This implies:
/// <list type="bullet">
/// <item><description>Repeated serialization of a message will return the same bytes.</description></item>
/// <item><description>Different processes of the same binary (which may be executing on different machines)
/// will serialize equal messages to the same bytes.</description></item>
/// </list>
/// Note the deterministic serialization is NOT canonical across languages; it is also unstable
/// across different builds with schema changes due to unknown fields. Users who need canonical
/// serialization, e.g. persistent storage in a canonical form, fingerprinting, etc, should define
/// their own canonicalization specification and implement the serializer using reflection APIs
/// rather than relying on this API.
/// Once set, the serializer will: (Note this is an implementation detail and may be subject to
/// change in the future)
/// <list type="bullet">
/// <item><description>Sort map entries by keys in lexicographical order or numerical order. Note: For string
/// keys, the order is based on comparing the UTF-16 code unit value of each character in the strings.
/// The order may be different from the deterministic serialization in other languages where
/// maps are sorted on the lexicographical order of the UTF8 encoded keys.</description></item>
/// </list>
/// </remarks>
public bool Deterministic { get; set; }
#region Writing of values (not including tags)
/// <summary>
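
As a usage note for the property documented above, the SerializeTestMap helper in the tests generalizes to any generated message. A minimal sketch, assuming a generated message type such as the TestMap used in the tests (the Google.Protobuf.TestProtos namespace is an assumption here):

using System.IO;
using Google.Protobuf;
using Google.Protobuf.TestProtos;   // assumed namespace of the generated TestMap type

static class DeterministicSerializationSketch
{
    // Serializes with map entries sorted by key so that equal messages produce
    // byte-for-byte identical output within a given binary.
    public static byte[] Serialize(TestMap message)
    {
        using var stream = new MemoryStream();
        var output = new CodedOutputStream(stream);
        output.Deterministic = true;   // the property introduced in this change
        message.WriteTo(output);
        output.Flush();
        return stream.ToArray();
    }
}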
