commit
77bea54bdc
1116 changed files with 78357 additions and 53565 deletions
@ -0,0 +1,67 @@ |
||||
"""Validate the YAML files for GitHub Actions workflows. |
||||
|
||||
TODO: b/359303016 - convert to use unittest |
||||
""" |
||||
|
||||
import os |
||||
import re |
||||
|
||||
import yaml |
||||
|
||||
# Ensure every job is in the list of blocking jobs. |
||||
with open( |
||||
os.path.join(os.path.dirname(__file__), '../workflows/test_runner.yml'), 'r' |
||||
) as f: |
||||
data = yaml.safe_load(f) |
||||
|
||||
# List of all YAML files that are used by jobs in the test_runner.yml file. |
||||
yaml_files = [] |
||||
|
||||
# Get a list of all jobs in the test_runner, except for the blocking job and |
||||
# the tag removal job, which is not always run. |
||||
all_jobs = list(data['jobs'].keys()) |
||||
all_jobs.remove('all-blocking-tests') |
||||
all_jobs.remove('remove-tag') |
||||
|
||||
passed = True |
||||
blocking_jobs = data['jobs']['all-blocking-tests']['needs'] |
||||
|
||||
for job in all_jobs: |
||||
if 'uses' in data['jobs'][job]: |
||||
yaml_files.append( |
||||
os.path.join( |
||||
os.path.dirname(__file__), |
||||
'../workflows', |
||||
os.path.basename(data['jobs'][job]['uses']), |
||||
) |
||||
) |
||||
if job not in blocking_jobs: |
||||
passed = False |
||||
raise ValueError('Job %s is not in the list of blocking jobs.' % job) |
||||
|
||||
print('PASSED: All jobs are in the list of blocking jobs.') |
||||
|
||||
# Ensure every job with a continuous prefix conditions every step on whether we |
||||
# are in a continuous run. |
||||
for file in yaml_files: |
||||
with open(file, 'r') as f: |
||||
data = yaml.safe_load(f) |
||||
jobs = data['jobs'] |
||||
for job in jobs: |
||||
if 'steps' not in jobs[job]: |
||||
continue |
||||
continuous_condition = 'inputs.continuous-prefix' in jobs[job]['name'] |
||||
steps = jobs[job]['steps'] |
||||
for step in steps: |
||||
if continuous_condition and 'continuous-run' not in step.get('if', ''): |
||||
raise ValueError( |
||||
'Step %s in job %s does not check the continuous-run condition' |
||||
% (step['name'], job) |
||||
) |
||||
if not continuous_condition and 'continuous-run' in step.get('if', ''): |
||||
raise ValueError( |
||||
'Step %s in job %s checks the continuous-run condition but ' |
||||
'the job does not contain the continuous-prefix' |
||||
% (step['name'], job) |
||||
) |
||||
print('PASSED: All steps in all jobs check the continuous-run condition.') |
@ -0,0 +1,9 @@ |
||||
#!/bin/bash

# Test wrapper: runs the YAML validation script and fails the test if the
# script exits non-zero.

source googletest.sh || exit 1

# Path to the built validate_yaml binary inside the test's runfiles tree.
script=${TEST_SRCDIR}/google3/third_party/protobuf/github/validate_yaml

$script || die "Failed to execute $script"

echo "PASS"
@ -0,0 +1,25 @@ |
||||
name: Release Branch Tests

on:
  schedule:
    # Run daily at 10 AM UTC (2 AM PDT)
    - cron: 0 10 * * *
  workflow_dispatch:

permissions: {}

jobs:
  releases:
    strategy:
      fail-fast: false
      matrix:
        # Release branches to exercise; update when branches are cut/retired.
        branch: [25.x, 28.x, 29.x]
    runs-on: ubuntu-latest
    permissions:
      # Needed by `gh workflow run` to dispatch test_runner.yml.
      actions: write
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      GH_REPO: ${{ github.repository }}
    name: Run Tests on ${{ matrix.branch }}
    steps:
      - run: gh workflow run test_runner.yml --ref ${{ matrix.branch }}
@ -0,0 +1,25 @@ |
||||
name: Validate YAML

on:
  workflow_call:
    inputs:
      safe-checkout:
        required: true
        description: "The SHA key for the commit we want to run over"
        type: string

permissions:
  contents: read

jobs:
  validate-yaml:
    name: Validate YAML
    runs-on: ubuntu-latest
    steps:
      - name: Checkout pending changes
        uses: protocolbuffers/protobuf-ci/checkout@v3
        with:
          ref: ${{ inputs.safe-checkout }}
      # Runs the checks in .github/scripts/validate_yaml.py over the
      # checked-out workflow files.
      - name: Run python validation script
        run: |
          python .github/scripts/validate_yaml.py
@ -0,0 +1,45 @@ |
||||
# This is a WORKSPACE file used by bzlmod in combination with MODULE.bazel.
# It's used for a gradual migration and it should be empty.
# Don't remove this file. If the file doesn't exist, bzlmod falls back to WORKSPACE file.
#
# NOTE(review): contrary to the note above, this file currently carries
# dependencies that have not yet migrated to MODULE.bazel (see TODOs below);
# confirm each entry is deleted once its migration lands.

load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

# TODO: either replace rules_ruby with a maintained version on BCR
# or use bzlmod extensions to depend on this specific repo
http_archive(
    name = "rules_ruby",
    urls = [
        "https://github.com/protocolbuffers/rules_ruby/archive/b7f3e9756f3c45527be27bc38840d5a1ba690436.zip"
    ],
    strip_prefix = "rules_ruby-b7f3e9756f3c45527be27bc38840d5a1ba690436",
    sha256 = "347927fd8de6132099fcdc58e8f7eab7bde4eb2fd424546b9cd4f1c6f8f8bad8",
)

load("@rules_ruby//ruby:defs.bzl", "ruby_runtime")

ruby_runtime("system_ruby")

register_toolchains("@system_ruby//:toolchain")

# Following are just needed to run conformance tests, not really needed to support them via MODULE.bazel

# For testing runtime against old gencode from a previous major version.
http_archive(
    name = "com_google_protobuf_v25.0",
    strip_prefix = "protobuf-25.0",
    url = "https://github.com/protocolbuffers/protobuf/releases/download/v25.0/protobuf-25.0.tar.gz",
)

# Needed as a dependency of @com_google_protobuf_v25.0
load("@com_google_protobuf_v25.0//:protobuf_deps.bzl", protobuf_v25_deps = "protobuf_deps")

protobuf_v25_deps()

# Needed for checking breaking changes from the previous release version.
load("//:protobuf_version.bzl", "PROTOBUF_PREVIOUS_RELEASE")

http_archive(
    name = "com_google_protobuf_previous_release",
    strip_prefix = "protobuf-" + PROTOBUF_PREVIOUS_RELEASE,
    url = "https://github.com/protocolbuffers/protobuf/releases/download/v{0}/protobuf-{0}.tar.gz".format(PROTOBUF_PREVIOUS_RELEASE),
)
@ -1,3 +1,10 @@ |
||||
"""cc_proto_library rule""" |
||||
|
||||
cc_proto_library = native.cc_proto_library |
||||
load("//bazel/private:bazel_cc_proto_library.bzl", _cc_proto_library = "cc_proto_library") # buildifier: disable=bzl-visibility |
||||
|
||||
def cc_proto_library(**kwattrs): |
||||
# Only use Starlark rules when they are removed from Bazel |
||||
if not hasattr(native, "cc_proto_library"): |
||||
_cc_proto_library(**kwattrs) |
||||
else: |
||||
native.cc_proto_library(**kwattrs) # buildifier: disable=native-cc-proto |
||||
|
@ -1,5 +1,355 @@ |
||||
"""proto_common""" |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
"""Definition of proto_common module, together with bazel providers for proto rules.""" |
||||
|
||||
load("@proto_bazel_features//:features.bzl", "bazel_features") |
||||
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo") |
||||
load("//bazel/private:native.bzl", "native_proto_common") |
||||
load("//bazel/private:toolchain_helpers.bzl", "toolchains") |
||||
|
||||
proto_common = native_proto_common |
||||
def _import_virtual_proto_path(path):
    """Returns an -I flag for a virtual-imports root, or None otherwise.

    Virtual import roots look like:
      'bazel-out/k8-fastbuild/bin/external/foo/e/_virtual_imports/e' or
      'bazel-out/foo/k8-fastbuild/bin/e/_virtual_imports/e'
    i.e. they contain more than four path separators.
    """
    if path.count("/") <= 4:
        return None
    return "-I%s" % path
||||
|
||||
def _import_repo_proto_path(path):
    """Returns an -I flag for a generated-files root in an external repo, or None.

    Matching paths look like:
      'bazel-out/k8-fastbuild/bin/external/foo' or
      'bazel-out/foo/k8-fastbuild/bin'
    i.e. they contain three or four path separators.
    """
    separators = path.count("/")
    if separators > 2 and separators <= 4:
        return "-I%s" % path
    return None
||||
|
||||
def _import_main_output_proto_path(path):
    """Returns an -I flag for generated or source roots, or None otherwise.

    Matching paths look like:
      'bazel-out/k8-fastbuild/bin'
      'external/foo'
      '../foo'
    """
    is_shallow_root = path.count("/") <= 2 and path != "."
    return "-I%s" % path if is_shallow_root else None
||||
|
||||
def _remove_repo(file):
    """Removes `../repo/` prefix from path, e.g. `../repo/package/path -> package/path`"""
    short_path = file.short_path
    repo_root = file.owner.workspace_root
    if not repo_root:
        # Main-repository file: short_path is already repo-relative.
        return short_path
    if repo_root.startswith("external/"):
        repo_root = "../" + repo_root.removeprefix("external/")
    return short_path.removeprefix(repo_root + "/")
||||
|
||||
def _get_import_path(proto_file):
    """Returns the import path of a .proto file

    This is the path as used for the file that can be used in an `import` statement in another
    .proto file.

    Args:
      proto_file: (File) The .proto file
    Returns:
      (str) import path
    """
    path = _remove_repo(proto_file)
    marker = "_virtual_imports/"
    start = path.find(marker)
    if start >= 0:
        # Drop everything up to and including the virtual-imports root dir.
        end = path.find("/", start + len(marker))
        path = path[end + 1:]
    return path
||||
|
||||
def _output_directory(proto_info, root):
    """Returns the output directory under `root` for this proto_library's sources."""
    source_root = proto_info.proto_source_root
    if source_root.startswith(root.path):
        #TODO: remove this branch when bin_dir is removed from proto_source_root
        source_root = source_root.removeprefix(root.path).removeprefix("/")

    if source_root in ("", "."):
        return root.path

    return root.path + "/" + source_root
||||
|
||||
def _check_collocated(label, proto_info, proto_lang_toolchain_info):
    """Checks if lang_proto_library is collocated with proto_library.

    Exceptions are allowed by an allowlist defined on `proto_lang_toolchain` and
    on an allowlist defined on `proto_library`'s `allow_exports` attribute.

    If checks are not successful the function fails.

    Args:
      label: (Label) The label of lang_proto_library
      proto_info: (ProtoInfo) The ProtoInfo from the proto_library dependency.
      proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
        Obtained from a `proto_lang_toolchain` target.
    """
    _PackageSpecificationInfo = bazel_features.globals.PackageSpecificationInfo
    if not _PackageSpecificationInfo:
        # Without PackageSpecificationInfo (pre-6.4.0 Bazel, per the message
        # below) allowlists cannot be evaluated at all, so their mere presence
        # is an error; absent any allowlist there is nothing to check.
        if proto_lang_toolchain_info.allowlist_different_package or getattr(proto_info, "allow_exports", None):
            fail("Allowlist checks not supported before Bazel 6.4.0")
        return

    # Toolchain-level allowlist: cross-package use must be explicitly allowed.
    if (proto_info.direct_descriptor_set.owner.package != label.package and
        proto_lang_toolchain_info.allowlist_different_package):
        if not proto_lang_toolchain_info.allowlist_different_package[_PackageSpecificationInfo].contains(label):
            fail(("lang_proto_library '%s' may only be created in the same package " +
                  "as proto_library '%s'") % (label, proto_info.direct_descriptor_set.owner))

    # proto_library-level allowlist (`allow_exports`), when present, must also
    # contain the label for cross-package use.
    if (proto_info.direct_descriptor_set.owner.package != label.package and
        hasattr(proto_info, "allow_exports")):
        if not proto_info.allow_exports[_PackageSpecificationInfo].contains(label):
            fail(("lang_proto_library '%s' may only be created in the same package " +
                  "as proto_library '%s'") % (label, proto_info.direct_descriptor_set.owner))
||||
|
||||
def _compile(
        actions,
        proto_info,
        proto_lang_toolchain_info,
        generated_files,
        plugin_output = None,
        additional_args = None,
        additional_tools = [],
        additional_inputs = depset(),
        additional_proto_lang_toolchain_info = None,
        resource_set = None,
        experimental_exec_group = None,
        experimental_progress_message = None,
        experimental_output_files = "legacy"):
    """Creates proto compile action for compiling *.proto files to language specific sources.

    Args:
      actions: (ActionFactory) Obtained by ctx.actions, used to register the actions.
      proto_info: (ProtoInfo) The ProtoInfo from proto_library to generate the sources for.
      proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
        Obtained from a `proto_lang_toolchain` target or constructed ad-hoc..
      generated_files: (list[File]) The output files generated by the proto compiler.
        Callee needs to declare files using `ctx.actions.declare_file`.
        See also: `proto_common.declare_generated_files`.
      plugin_output: (File|str) Deprecated: Set `proto_lang_toolchain.output_files`
        and remove the parameter.
        For backwards compatibility, when the proto_lang_toolchain isn't updated
        the value is used.
      additional_args: (Args) Additional arguments to add to the action.
        Accepts a ctx.actions.args() object that is added at the beginning
        of the command line.
      additional_tools: (list[File]) Additional tools to add to the action.
      additional_inputs: (Depset[File]) Additional input files to add to the action.
      additional_proto_lang_toolchain_info: Unused by this implementation;
        presumably kept for call-site compatibility — TODO confirm and remove.
      resource_set: (func) A callback function that is passed to the created action.
        See `ctx.actions.run`, `resource_set` parameter for full definition of
        the callback.
      experimental_exec_group: (str) Sets `exec_group` on proto compile action.
        Avoid using this parameter.
      experimental_progress_message: Overrides progress_message from the toolchain.
        Don't use this parameter. It's only intended for the transition.
      experimental_output_files: (str) Overwrites output_files from the toolchain.
        Don't use this parameter. It's only intended for the transition.
    """
    if type(generated_files) != type([]):
        fail("generated_files is expected to be a list of Files")
    if not generated_files:
        return  # nothing to do
    if experimental_output_files not in ["single", "multiple", "legacy"]:
        fail('experimental_output_files expected to be one of ["single", "multiple", "legacy"]')

    args = actions.args()
    args.use_param_file(param_file_arg = "@%s")
    args.set_param_file_format("multiline")
    tools = list(additional_tools)

    # The experimental override wins over the toolchain's own output_files
    # setting; toolchains without the attribute behave as "legacy".
    if experimental_output_files != "legacy":
        output_files = experimental_output_files
    else:
        output_files = getattr(proto_lang_toolchain_info, "output_files", "legacy")
    if output_files != "legacy":
        if proto_lang_toolchain_info.out_replacement_format_flag:
            if output_files == "single":
                if len(generated_files) > 1:
                    fail("generated_files only expected a single file")
                plugin_output = generated_files[0]
            else:
                plugin_output = _output_directory(proto_info, generated_files[0].root)

    if plugin_output:
        args.add(plugin_output, format = proto_lang_toolchain_info.out_replacement_format_flag)
    if proto_lang_toolchain_info.plugin:
        tools.append(proto_lang_toolchain_info.plugin)
        args.add(proto_lang_toolchain_info.plugin.executable, format = proto_lang_toolchain_info.plugin_format_flag)

    # Protoc searches for .protos -I paths in order they are given and then
    # uses the path within the directory as the package.
    # This requires ordering the paths from most specific (longest) to least
    # specific ones, so that no path in the list is a prefix of any of the
    # following paths in the list.
    # For example: 'bazel-out/k8-fastbuild/bin/external/foo' needs to be listed
    # before 'bazel-out/k8-fastbuild/bin'. If not, protoc will discover file under
    # the shorter path and use 'external/foo/...' as its package path.
    args.add_all(proto_info.transitive_proto_path, map_each = _import_virtual_proto_path)
    args.add_all(proto_info.transitive_proto_path, map_each = _import_repo_proto_path)
    args.add_all(proto_info.transitive_proto_path, map_each = _import_main_output_proto_path)
    args.add("-I.")  # Needs to come last

    args.add_all(proto_lang_toolchain_info.protoc_opts)

    args.add_all(proto_info.direct_sources)

    # Caller-supplied args are spilled to a param file with the same format as
    # the main args object.
    if additional_args:
        additional_args.use_param_file(param_file_arg = "@%s")
        additional_args.set_param_file_format("multiline")

    actions.run(
        mnemonic = proto_lang_toolchain_info.mnemonic,
        progress_message = experimental_progress_message if experimental_progress_message else proto_lang_toolchain_info.progress_message,
        executable = proto_lang_toolchain_info.proto_compiler,
        arguments = [args, additional_args] if additional_args else [args],
        inputs = depset(transitive = [proto_info.transitive_sources, additional_inputs]),
        outputs = generated_files,
        tools = tools,
        use_default_shell_env = True,
        resource_set = resource_set,
        exec_group = experimental_exec_group,
        toolchain = _toolchain_type(proto_lang_toolchain_info),
    )
||||
|
||||
_BAZEL_TOOLS_PREFIX = "external/bazel_tools/"

def _experimental_filter_sources(proto_info, proto_lang_toolchain_info):
    """Splits direct sources into (included, excluded) by toolchain-provided protos."""
    if not proto_info.direct_sources:
        return [], []

    # Collect a set of provided protos.
    # For listed protos bundled with the Bazel tools repository, their exec
    # paths start with external/bazel_tools/. This prefix needs to be removed
    # first, because the protos in user repositories will not have that prefix.
    provided_paths = {}
    for provided in proto_lang_toolchain_info.provided_proto_sources:
        provided_paths[provided.path.removeprefix(_BAZEL_TOOLS_PREFIX)] = None

    # Partition the proto files by whether the toolchain already provides them.
    included = []
    excluded = []
    for proto_file in proto_info._direct_proto_sources:
        bucket = excluded if proto_file.path in provided_paths else included
        bucket.append(proto_file)
    return included, excluded
||||
|
||||
def _experimental_should_generate_code(
        proto_info,
        proto_lang_toolchain_info,
        rule_name,
        target_label):
    """Checks if the code should be generated for the given proto_library.

    The code shouldn't be generated only when the toolchain already provides it
    to the language through its runtime dependency.

    It fails when the proto_library contains mixed proto files, that should and
    shouldn't generate code.

    Args:
      proto_info: (ProtoInfo) The ProtoInfo from proto_library to check the generation for.
      proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
        Obtained from a `proto_lang_toolchain` target or constructed ad-hoc.
      rule_name: (str) Name of the rule used in the failure message.
      target_label: (Label) The label of the target used in the failure message.

    Returns:
      (bool) True when the code should be generated.
    """
    included, excluded = _experimental_filter_sources(proto_info, proto_lang_toolchain_info)

    # A single proto_library must not mix sources the toolchain already ships
    # with sources that still need generated code.
    if included and excluded:
        fail(("The 'srcs' attribute of '%s' contains protos for which '%s' " +
              "shouldn't generate code (%s), in addition to protos for which it should (%s).\n" +
              "Separate '%s' into 2 proto_library rules.") % (
            target_label,
            rule_name,
            ", ".join([f.short_path for f in excluded]),
            ", ".join([f.short_path for f in included]),
            target_label,
        ))

    return bool(included)
||||
|
||||
def _declare_generated_files(
        actions,
        proto_info,
        extension,
        name_mapper = None):
    """Declares generated files with a specific extension.

    Use this in lang_proto_library-es when protocol compiler generates files
    that correspond to .proto file names.

    The function removes ".proto" extension with given one (e.g. ".pb.cc") and
    declares new output files.

    Args:
      actions: (ActionFactory) Obtained by ctx.actions, used to declare the files.
      proto_info: (ProtoInfo) The ProtoInfo to declare the files for.
      extension: (str) The extension to use for generated files.
      name_mapper: (str->str) A function mapped over the base filename without
        the extension. Used it to replace characters in the name that
        cause problems in a specific programming language.

    Returns:
      (list[File]) The list of declared files.
    """
    declared = []
    for src in proto_info.direct_sources:
        # Drop the source extension plus its dot.
        base = src.basename[:-(len(src.extension) + 1)]
        if name_mapper:
            base = name_mapper(base)

        # Note that two proto_library rules can have the same source file, so this is actually a
        # shared action. NB: This can probably result in action conflicts if the proto_library rules
        # are not the same.
        declared.append(actions.declare_file(base + extension, sibling = src))
    return declared
||||
|
||||
def _toolchain_type(proto_lang_toolchain_info):
    """Returns the toolchain type, or None when toolchain resolution is disabled."""
    if not toolchains.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        return None
    return getattr(proto_lang_toolchain_info, "toolchain_type", None)
||||
|
||||
# Public API of this module: the Starlark proto_common implementation.
proto_common = struct(
    compile = _compile,
    declare_generated_files = _declare_generated_files,
    check_collocated = _check_collocated,
    experimental_should_generate_code = _experimental_should_generate_code,
    experimental_filter_sources = _experimental_filter_sources,
    get_import_path = _get_import_path,
    ProtoLangToolchainInfo = ProtoLangToolchainInfo,
    INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = toolchains.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION,
    # True when the native proto_common either sets the flag itself or lacks
    # ProtoLangToolchainInfo entirely (i.e. the Starlark provider is in use).
    INCOMPATIBLE_PASS_TOOLCHAIN_TYPE = (
        getattr(native_proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False) or
        not hasattr(native_proto_common, "ProtoLangToolchainInfo")
    ),
)
||||
|
@ -1,5 +1,7 @@ |
||||
"""ProtoInfo""" |
||||
|
||||
load("//bazel/private:native.bzl", "NativeProtoInfo") |
||||
load("@proto_bazel_features//:features.bzl", "bazel_features") |
||||
load("//bazel/private:proto_info.bzl", _ProtoInfo = "ProtoInfo") # buildifier: disable=bzl-visibility |
||||
|
||||
ProtoInfo = NativeProtoInfo |
||||
# This resolves to Starlark ProtoInfo in Bazel 8 or with --incompatible_enable_autoload flag |
||||
ProtoInfo = getattr(bazel_features.globals, "ProtoInfo", None) or _ProtoInfo |
||||
|
@ -1,5 +1,26 @@ |
||||
"""ProtoLangToolchainInfo""" |
||||
|
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load("//bazel/private:native.bzl", "native_proto_common") # buildifier: disable=bzl-visibility |
||||
|
||||
ProtoLangToolchainInfo = proto_common.ProtoLangToolchainInfo |
||||
# Use Starlark implementation only if native_proto_common.ProtoLangToolchainInfo doesn't exist |
||||
ProtoLangToolchainInfo = getattr(native_proto_common, "ProtoLangToolchainInfo", provider( |
||||
doc = """Specifies how to generate language-specific code from .proto files. |
||||
Used by LANG_proto_library rules.""", |
||||
fields = dict( |
||||
out_replacement_format_flag = """(str) Format string used when passing output to the plugin |
||||
used by proto compiler.""", |
||||
output_files = """("single","multiple","legacy") Format out_replacement_format_flag with |
||||
a path to single file or a directory in case of multiple files.""", |
||||
plugin_format_flag = "(str) Format string used when passing plugin to proto compiler.", |
||||
plugin = "(FilesToRunProvider) Proto compiler plugin.", |
||||
runtime = "(Target) Runtime.", |
||||
provided_proto_sources = "(list[File]) Proto sources provided by the toolchain.", |
||||
proto_compiler = "(FilesToRunProvider) Proto compiler.", |
||||
protoc_opts = "(list[str]) Options to pass to proto compiler.", |
||||
progress_message = "(str) Progress message to set on the proto compiler action.", |
||||
mnemonic = "(str) Mnemonic to set on the proto compiler action.", |
||||
allowlist_different_package = """(Target) Allowlist to create lang_proto_library in a |
||||
different package than proto_library""", |
||||
toolchain_type = """(Label) Toolchain type that was used to obtain this info""", |
||||
), |
||||
)) |
||||
|
@ -1,3 +1,16 @@ |
||||
# Copyright (c) 2009-2024, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""java_lite_proto_library rule"""

load("//bazel/private:java_lite_proto_library.bzl", _java_lite_proto_library = "java_lite_proto_library")  # buildifier: disable=bzl-visibility

def java_lite_proto_library(**kwattrs):
    """Forwards to the Starlark or native java_lite_proto_library rule.

    Args:
      **kwattrs: keyword attributes forwarded unchanged to the underlying rule.
    """
    # Only use Starlark rules when they are removed from Bazel
    if not hasattr(native, "java_lite_proto_library"):
        _java_lite_proto_library(**kwattrs)
    else:
        native.java_lite_proto_library(**kwattrs)
||||
|
@ -1,3 +1,16 @@ |
||||
# Copyright (c) 2009-2024, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""java_proto_library rule"""

load("//bazel/private:bazel_java_proto_library_rule.bzl", _java_proto_library = "java_proto_library")  # buildifier: disable=bzl-visibility

def java_proto_library(**kwattrs):
    """Forwards to the Starlark or native java_proto_library rule.

    Args:
      **kwattrs: keyword attributes forwarded unchanged to the underlying rule.
    """
    # Only use Starlark rules when they are removed from Bazel
    if not hasattr(native, "java_proto_library"):
        _java_proto_library(**kwattrs)
    else:
        native.java_proto_library(**kwattrs)
||||
|
@ -0,0 +1,140 @@ |
||||
load("@bazel_skylib//:bzl_library.bzl", "bzl_library") |
||||
load("//bazel/private:native_bool_flag.bzl", "native_bool_flag") |
||||
|
||||
package(default_applicable_licenses = ["//:license"]) |
||||
|
||||
toolchain_type( |
||||
name = "proto_toolchain_type", |
||||
visibility = ["//visibility:public"], |
||||
) |
||||
|
||||
toolchain_type( |
||||
name = "cc_toolchain_type", |
||||
visibility = ["//visibility:public"], |
||||
) |
||||
|
||||
toolchain_type( |
||||
name = "java_toolchain_type", |
||||
visibility = ["//visibility:public"], |
||||
) |
||||
|
||||
toolchain_type( |
||||
name = "javalite_toolchain_type", |
||||
visibility = ["//visibility:public"], |
||||
) |
||||
|
||||
toolchain_type( |
||||
name = "python_toolchain_type", |
||||
visibility = ["//visibility:public"], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "upb_proto_library_internal_bzl", |
||||
srcs = [ |
||||
"upb_proto_library_internal/aspect.bzl", |
||||
"upb_proto_library_internal/cc_library_func.bzl", |
||||
"upb_proto_library_internal/copts.bzl", |
||||
"upb_proto_library_internal/rule.bzl", |
||||
], |
||||
visibility = ["//bazel:__pkg__"], |
||||
deps = [ |
||||
"//bazel/common:proto_common_bzl", |
||||
"@bazel_skylib//lib:paths", |
||||
"@bazel_tools//tools/cpp:toolchain_utils.bzl", |
||||
], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "native_bzl", |
||||
srcs = [ |
||||
"native.bzl", |
||||
], |
||||
visibility = ["//bazel:__subpackages__"], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "bazel_proto_library_rule_bzl", |
||||
srcs = [ |
||||
"bazel_proto_library_rule.bzl", |
||||
], |
||||
visibility = ["//bazel:__subpackages__"], |
||||
deps = [ |
||||
"//bazel/common:proto_common_bzl", |
||||
"//bazel/common:proto_info_bzl", |
||||
"//bazel/private:toolchain_helpers_bzl", |
||||
"@bazel_skylib//lib:paths", |
||||
"@bazel_skylib//rules:common_settings", |
||||
"@proto_bazel_features//:features", |
||||
], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "proto_toolchain_rule_bzl", |
||||
srcs = [ |
||||
"proto_toolchain_rule.bzl", |
||||
], |
||||
visibility = ["//bazel:__subpackages__"], |
||||
deps = [ |
||||
"//bazel/common:proto_common_bzl", |
||||
"//bazel/common:proto_lang_toolchain_info_bzl", |
||||
"//bazel/private:toolchain_helpers_bzl", |
||||
], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "proto_lang_toolchain_rule_bzl", |
||||
srcs = [ |
||||
"proto_lang_toolchain_rule.bzl", |
||||
], |
||||
visibility = ["//bazel:__subpackages__"], |
||||
deps = [ |
||||
":toolchain_helpers_bzl", |
||||
"//bazel/common:proto_common_bzl", |
||||
"//bazel/common:proto_info_bzl", |
||||
"//bazel/common:proto_lang_toolchain_info_bzl", |
||||
"@proto_bazel_features//:features", |
||||
], |
||||
) |
||||
|
||||
bzl_library( |
||||
name = "toolchain_helpers_bzl", |
||||
srcs = [ |
||||
"toolchain_helpers.bzl", |
||||
], |
||||
visibility = ["//bazel:__subpackages__"], |
||||
deps = [ |
||||
":native_bzl", |
||||
"//bazel/common:proto_lang_toolchain_info_bzl", |
||||
], |
||||
) |
||||
|
||||
native_bool_flag( |
||||
name = "experimental_proto_descriptor_sets_include_source_info", |
||||
flag = "experimental_proto_descriptor_sets_include_source_info", |
||||
match_value = "true", |
||||
visibility = ["//bazel:__subpackages__"], |
||||
) |
||||
|
||||
native_bool_flag( |
||||
name = "strict_proto_deps", |
||||
flag = "strict_proto_deps", |
||||
match_value = "off", |
||||
result = False, |
||||
visibility = ["//bazel:__subpackages__"], |
||||
) |
||||
|
||||
native_bool_flag( |
||||
name = "strict_public_imports", |
||||
flag = "strict_public_imports", |
||||
match_value = "off", |
||||
result = False, |
||||
visibility = ["//bazel:__subpackages__"], |
||||
) |
||||
|
||||
filegroup( |
||||
name = "bazel_osx_p4deps", |
||||
srcs = glob(["**"]), |
||||
visibility = [ |
||||
"//bazel:__pkg__", |
||||
], |
||||
) |
@ -1,42 +0,0 @@ |
||||
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd

load("@bazel_skylib//:bzl_library.bzl", "bzl_library")

licenses(["notice"])

# Starlark source libraries for dependency tracking of the .bzl files below.
bzl_library(
    name = "upb_proto_library_internal_bzl",
    srcs = [
        "upb_proto_library_internal/aspect.bzl",
        "upb_proto_library_internal/cc_library_func.bzl",
        "upb_proto_library_internal/copts.bzl",
        "upb_proto_library_internal/rule.bzl",
    ],
    visibility = ["//bazel:__pkg__"],
    deps = [
        "//bazel/common:proto_common_bzl",
        "@bazel_skylib//lib:paths",
        "@bazel_tools//tools/cpp:toolchain_utils.bzl",
    ],
)

bzl_library(
    name = "native_bzl",
    srcs = [
        "native.bzl",
    ],
    visibility = ["//bazel:__subpackages__"],
)

bzl_library(
    name = "proto_toolchain_rule_bzl",
    srcs = [
        "proto_toolchain_rule.bzl",
    ],
    visibility = ["//bazel:__subpackages__"],
)
@ -0,0 +1,197 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
"""Bazel's implementation of cc_proto_library""" |
||||
|
||||
load("@rules_cc//cc:find_cc_toolchain.bzl", "use_cc_toolchain") |
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load("//bazel/common:proto_info.bzl", "ProtoInfo") |
||||
load("//bazel/private:cc_proto_support.bzl", "cc_proto_compile_and_link") |
||||
load("//bazel/private:toolchain_helpers.bzl", "toolchains") |
||||
|
||||
# Toolchain type label for C++ proto compilation, resolved via the `toolchains` helper.
_CC_PROTO_TOOLCHAIN = "//bazel/private:cc_toolchain_type"

# Internal provider carrying all files generated for a proto_library (sources,
# headers and libraries); the rule forwards it as DefaultInfo.files.
_ProtoCcFilesInfo = provider(fields = ["files"], doc = "Provide cc proto files.")

# Internal provider carrying generated headers so they can be propagated
# through source-less proto_libraries.
_ProtoCcHeaderInfo = provider(fields = ["headers"], doc = "Provide cc proto headers.")
||||
|
||||
def _get_output_files(actions, proto_info, suffixes):
    """Declares the generated files for every suffix in `suffixes`.

    Returns a single flat list of the declared File objects.
    """
    declared = []
    for extension in suffixes:
        declared += proto_common.declare_generated_files(
            actions = actions,
            proto_info = proto_info,
            extension = extension,
        )
    return declared
||||
|
||||
# TODO: Make this code actually work.
def _get_strip_include_prefix(ctx, proto_info):
    """Computes the include prefix to strip for headers generated from `proto_info`.

    Returns "" when the proto source root is "." or equals the workspace root;
    otherwise returns the source root — made relative to bin_dir/genfiles_dir and
    the workspace root where applicable — prefixed with "//" so it is interpreted
    as repository-relative.
    """
    proto_root = proto_info.proto_source_root
    if proto_root == "." or proto_root == ctx.label.workspace_root:
        return ""

    # Strip the output-directory prefix if the sources live under bin/genfiles.
    if proto_root.startswith(ctx.bin_dir.path):
        proto_root = proto_root[len(ctx.bin_dir.path) + 1:]
    elif proto_root.startswith(ctx.genfiles_dir.path):
        proto_root = proto_root[len(ctx.genfiles_dir.path) + 1:]

    if proto_root.startswith(ctx.label.workspace_root):
        proto_root = proto_root[len(ctx.label.workspace_root):]

    # Cleanup: the original initialized an unused `strip_include_prefix = ""`
    # local (dead store); the value is returned directly instead.
    return "//" + proto_root
||||
|
||||
def _aspect_impl(target, ctx):
    """Aspect implementation: generates and compiles C++ code for one proto_library.

    Args:
      target: (Target) The visited `proto_library` target (provides `ProtoInfo`).
      ctx: (AspectContext) The aspect context.

    Returns:
      [CcInfo, _ProtoCcFilesInfo, OutputGroupInfo, _ProtoCcHeaderInfo]
    """
    proto_info = target[ProtoInfo]
    proto_configuration = ctx.fragments.proto

    sources = []
    headers = []
    textual_hdrs = []

    proto_toolchain = toolchains.find_toolchain(ctx, "_aspect_cc_proto_toolchain", _CC_PROTO_TOOLCHAIN)
    should_generate_code = proto_common.experimental_should_generate_code(proto_info, proto_toolchain, "cc_proto_library", target.label)

    if should_generate_code:
        if len(proto_info.direct_sources) != 0:
            # Bazel 7 didn't expose cc_proto_library_source_suffixes used by Kythe
            # gradually falling back to .pb.cc
            if type(proto_configuration.cc_proto_library_source_suffixes) == "builtin_function_or_method":
                source_suffixes = [".pb.cc"]
                header_suffixes = [".pb.h"]
            else:
                source_suffixes = proto_configuration.cc_proto_library_source_suffixes
                header_suffixes = proto_configuration.cc_proto_library_header_suffixes
            sources = _get_output_files(ctx.actions, proto_info, source_suffixes)
            headers = _get_output_files(ctx.actions, proto_info, header_suffixes)
            header_provider = _ProtoCcHeaderInfo(headers = depset(headers))
        else:
            # If this proto_library doesn't have sources, it provides the combined headers of all its
            # direct dependencies. Thus, if a direct dependency does have sources, the generated files
            # are also provided by this library. If a direct dependency does not have sources, it will
            # do the same thing, so that effectively this library looks through all source-less
            # proto_libraries and provides all generated headers of the proto_libraries with sources
            # that it depends on.
            transitive_headers = []
            for dep in getattr(ctx.rule.attr, "deps", []):
                if _ProtoCcHeaderInfo in dep:
                    textual_hdrs.extend(dep[_ProtoCcHeaderInfo].headers.to_list())
                    transitive_headers.append(dep[_ProtoCcHeaderInfo].headers)
            header_provider = _ProtoCcHeaderInfo(headers = depset(transitive = transitive_headers))

    else:  # shouldn't generate code
        header_provider = _ProtoCcHeaderInfo(headers = depset())

    # Runs protoc; `generated_files` is empty when no code should be generated.
    proto_common.compile(
        actions = ctx.actions,
        proto_info = proto_info,
        proto_lang_toolchain_info = proto_toolchain,
        generated_files = sources + headers,
        experimental_output_files = "multiple",
    )

    # The toolchain runtime (if any) comes first, then the proto_library's deps.
    deps = []
    if proto_toolchain.runtime:
        deps = [proto_toolchain.runtime]
    deps.extend(getattr(ctx.rule.attr, "deps", []))

    cc_info, libraries, temps = cc_proto_compile_and_link(
        ctx = ctx,
        deps = deps,
        sources = sources,
        headers = headers,
        textual_hdrs = textual_hdrs,
        strip_include_prefix = _get_strip_include_prefix(ctx, proto_info),
    )

    return [
        cc_info,
        _ProtoCcFilesInfo(files = depset(sources + headers + libraries)),
        OutputGroupInfo(temp_files_INTERNAL_ = temps),
        header_provider,
    ]
||||
|
||||
# Aspect attached to cc_proto_library's `deps`; propagates along `deps` of the
# visited proto_libraries and produces CcInfo (see _aspect_impl).
cc_proto_aspect = aspect(
    implementation = _aspect_impl,
    attr_aspects = ["deps"],
    fragments = ["cpp", "proto"],
    required_providers = [ProtoInfo],
    provides = [CcInfo],
    # Legacy (non-toolchain-resolution) lookup of the proto toolchain.
    attrs = toolchains.if_legacy_toolchain({"_aspect_cc_proto_toolchain": attr.label(
        default = configuration_field(fragment = "proto", name = "proto_toolchain_for_cc"),
    )}),
    toolchains = use_cc_toolchain() + toolchains.use_toolchain(_CC_PROTO_TOOLCHAIN),
)
||||
|
||||
def _cc_proto_library_impl(ctx):
    """Rule implementation: re-exports the providers produced by `cc_proto_aspect`.

    Requires exactly one entry in `deps`, checks it is collocated with the proto
    toolchain, and forwards the aspect-generated files, CcInfo and output groups.
    """
    if len(ctx.attr.deps) != 1:
        fail(
            "'deps' attribute must contain exactly one label " +
            "(we didn't name it 'dep' for consistency). " +
            "The main use-case for multiple deps is to create a rule that contains several " +
            # Fixed: the message previously read "...harderto remove" because the
            # trailing space was missing at this concatenation boundary.
            "other targets. This makes dependency bloat more likely. It also makes it harder " +
            "to remove unused deps.",
            attr = "deps",
        )
    dep = ctx.attr.deps[0]

    proto_toolchain = toolchains.find_toolchain(ctx, "_aspect_cc_proto_toolchain", _CC_PROTO_TOOLCHAIN)
    proto_common.check_collocated(ctx.label, dep[ProtoInfo], proto_toolchain)

    return [DefaultInfo(files = dep[_ProtoCcFilesInfo].files), dep[CcInfo], dep[OutputGroupInfo]]
||||
|
||||
# Public rule: generates and compiles C++ code for exactly one proto_library dep
# by applying cc_proto_aspect and forwarding its providers.
cc_proto_library = rule(
    implementation = _cc_proto_library_impl,
    doc = """
<p>
<code>cc_proto_library</code> generates C++ code from <code>.proto</code> files.
</p>

<p>
<code>deps</code> must point to <a href="protocol-buffer.html#proto_library"><code>proto_library
</code></a> rules.
</p>

<p>
Example:
</p>

<pre>
<code class="lang-starlark">
cc_library(
    name = "lib",
    deps = [":foo_cc_proto"],
)

cc_proto_library(
    name = "foo_cc_proto",
    deps = [":foo_proto"],
)

proto_library(
    name = "foo_proto",
)
</code>
</pre>
""",
    attrs = {
        "deps": attr.label_list(
            aspects = [cc_proto_aspect],
            allow_rules = ["proto_library"],
            allow_files = False,
            doc = """
The list of <a href="protocol-buffer.html#proto_library"><code>proto_library</code></a>
rules to generate C++ code for.""",
        ),
    } | toolchains.if_legacy_toolchain({
        # Legacy (non-toolchain-resolution) lookup of the proto toolchain.
        "_aspect_cc_proto_toolchain": attr.label(
            default = configuration_field(fragment = "proto", name = "proto_toolchain_for_cc"),
        ),
    }),
    provides = [CcInfo],
    toolchains = toolchains.use_toolchain(_CC_PROTO_TOOLCHAIN),
)
@ -0,0 +1,164 @@ |
||||
# Copyright (c) 2009-2024, Google LLC |
||||
# All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
"""The implementation of the `java_proto_library` rule and its aspect.""" |
||||
|
||||
load("@rules_java//java/common:java_info.bzl", "JavaInfo") |
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load("//bazel/common:proto_info.bzl", "ProtoInfo") |
||||
load("//bazel/private:java_proto_support.bzl", "JavaProtoAspectInfo", "java_compile_for_protos", "java_info_merge_for_protos") |
||||
load("//bazel/private:toolchain_helpers.bzl", "toolchains") |
||||
|
||||
# Toolchain type label for Java proto compilation, resolved via the `toolchains` helper.
_JAVA_PROTO_TOOLCHAIN = "//bazel/private:java_toolchain_type"
||||
|
||||
def _filter_provider(provider, *attrs):
    """Collects `provider` from every target in the given attribute lists.

    Targets that do not advertise the provider are skipped.
    """
    collected = []
    for attr_list in attrs:
        for dep in attr_list:
            if provider in dep:
                collected.append(dep[provider])
    return collected
||||
|
||||
def _bazel_java_proto_aspect_impl(target, ctx):
    """Generates and compiles Java code for a proto_library.

    The function runs protobuf compiler on the `proto_library` target using
    `proto_lang_toolchain` specified by `--proto_toolchain_for_java` flag.
    This generates a source jar.

    After that the source jar is compiled, respecting `deps` and `exports` of
    the `proto_library`.

    Args:
      target: (Target) The `proto_library` target (any target providing `ProtoInfo`).
      ctx: (RuleContext) The rule context.

    Returns:
      ([JavaInfo, JavaProtoAspectInfo]) A JavaInfo describing compiled Java
      version of `proto_library` and `JavaProtoAspectInfo` with all source and
      runtime jars.
    """

    proto_toolchain_info = toolchains.find_toolchain(ctx, "_aspect_java_proto_toolchain", _JAVA_PROTO_TOOLCHAIN)
    source_jar = None
    if proto_common.experimental_should_generate_code(target[ProtoInfo], proto_toolchain_info, "java_proto_library", target.label):
        # Generate source jar using proto compiler.
        source_jar = ctx.actions.declare_file(ctx.label.name + "-speed-src.jar")
        proto_common.compile(
            ctx.actions,
            target[ProtoInfo],
            proto_toolchain_info,
            [source_jar],
            experimental_output_files = "single",
        )

    # Compile Java sources (or just merge if there aren't any)
    deps = _filter_provider(JavaInfo, ctx.rule.attr.deps)
    exports = _filter_provider(JavaInfo, ctx.rule.attr.exports)
    # The toolchain runtime is only needed when code was actually generated.
    if source_jar and proto_toolchain_info.runtime:
        deps.append(proto_toolchain_info.runtime[JavaInfo])
    java_info, jars = java_compile_for_protos(
        ctx,
        "-speed.jar",
        source_jar,
        deps,
        exports,
    )

    # Accumulate jars produced by this aspect on the transitive deps.
    transitive_jars = [dep[JavaProtoAspectInfo].jars for dep in ctx.rule.attr.deps if JavaProtoAspectInfo in dep]
    return [
        java_info,
        JavaProtoAspectInfo(jars = depset(jars, transitive = transitive_jars)),
    ]
||||
|
||||
# Aspect attached to java_proto_library's `deps`; propagates along `deps` and
# `exports` of the visited proto_libraries (see _bazel_java_proto_aspect_impl).
bazel_java_proto_aspect = aspect(
    implementation = _bazel_java_proto_aspect_impl,
    # Legacy (non-toolchain-resolution) lookup of the proto toolchain.
    attrs = toolchains.if_legacy_toolchain({
        "_aspect_java_proto_toolchain": attr.label(
            default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java"),
        ),
    }),
    toolchains = ["@bazel_tools//tools/jdk:toolchain_type"] + toolchains.use_toolchain(_JAVA_PROTO_TOOLCHAIN),
    attr_aspects = ["deps", "exports"],
    required_providers = [ProtoInfo],
    provides = [JavaInfo, JavaProtoAspectInfo],
    fragments = ["java"],
)
||||
|
||||
def bazel_java_proto_library_rule(ctx):
    """Merges results of `java_proto_aspect` in `deps`.

    Args:
      ctx: (RuleContext) The rule context.
    Returns:
      ([JavaInfo, DefaultInfo, OutputGroupInfo])
    """
    proto_toolchain = toolchains.find_toolchain(ctx, "_aspect_java_proto_toolchain", _JAVA_PROTO_TOOLCHAIN)
    for dep in ctx.attr.deps:
        proto_common.check_collocated(ctx.label, dep[ProtoInfo], proto_toolchain)

    # Merge the aspect-produced JavaInfos without merging their java outputs.
    java_info = java_info_merge_for_protos([dep[JavaInfo] for dep in ctx.attr.deps], merge_java_outputs = False)

    transitive_src_and_runtime_jars = depset(transitive = [dep[JavaProtoAspectInfo].jars for dep in ctx.attr.deps])
    transitive_runtime_jars = depset(transitive = [java_info.transitive_runtime_jars])

    return [
        java_info,
        DefaultInfo(
            files = transitive_src_and_runtime_jars,
            runfiles = ctx.runfiles(transitive_files = transitive_runtime_jars),
        ),
        OutputGroupInfo(default = depset()),
    ]
||||
|
||||
# Public rule: generates Java code for its proto_library deps by applying
# bazel_java_proto_aspect and merging the resulting providers.
java_proto_library = rule(
    implementation = bazel_java_proto_library_rule,
    doc = """
<p>
<code>java_proto_library</code> generates Java code from <code>.proto</code> files.
</p>

<p>
<code>deps</code> must point to <a href="protocol-buffer.html#proto_library"><code>proto_library
</code></a> rules.
</p>

<p>
Example:
</p>

<pre class="code">
<code class="lang-starlark">
java_library(
    name = "lib",
    runtime_deps = [":foo_java_proto"],
)

java_proto_library(
    name = "foo_java_proto",
    deps = [":foo_proto"],
)

proto_library(
    name = "foo_proto",
)
</code>
</pre>
""",
    attrs = {
        "deps": attr.label_list(
            providers = [ProtoInfo],
            aspects = [bazel_java_proto_aspect],
            doc = """
The list of <a href="protocol-buffer.html#proto_library"><code>proto_library</code></a>
rules to generate Java code for.
""",
        ),
        # buildifier: disable=attr-license (calling attr.license())
        "licenses": attr.license() if hasattr(attr, "license") else attr.string_list(),
    } | toolchains.if_legacy_toolchain({
        # Legacy (non-toolchain-resolution) lookup of the proto toolchain.
        "_aspect_java_proto_toolchain": attr.label(
            default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java"),
        ),
    }),  # buildifier: disable=attr-licenses (attribute called licenses)
    provides = [JavaInfo],
    toolchains = toolchains.use_toolchain(_JAVA_PROTO_TOOLCHAIN),
)
@ -0,0 +1,356 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2008 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
""" |
||||
Implementation of proto_library rule. |
||||
""" |
||||
|
||||
load("@bazel_skylib//lib:paths.bzl", "paths") |
||||
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo") |
||||
load("@proto_bazel_features//:features.bzl", "bazel_features") |
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load("//bazel/common:proto_info.bzl", "ProtoInfo") |
||||
load("//bazel/private:toolchain_helpers.bzl", "toolchains") |
||||
|
||||
# Template for the proto compiler's --direct_dependencies_violation_msg flag.
# "%s" is filled with the consuming proto_library's label here; "%%s" is left
# for the compiler to substitute with the offending import.
STRICT_DEPS_FLAG_TEMPLATE = (
    #
    "--direct_dependencies_violation_msg=" +
    "%%s is imported, but %s doesn't directly depend on a proto_library that 'srcs' it."
)
||||
|
||||
def _check_srcs_package(target_package, srcs):
    """Verifies every .proto source lives in the consuming rule's package.

    Keeping sources and rule in one package avoids clashes with the generated
    sources.
    """

    # TODO: this does not work with filegroups that contain files that are not in the package
    for src in srcs:
        if src.label.package == target_package:
            continue
        fail("Proto source with label '%s' must be in same package as consuming rule." % src.label)
||||
|
||||
def _get_import_prefix(ctx):
    """Returns the validated `import_prefix` attribute.

    Fails if the prefix is not normalized or is an absolute path.
    """
    prefix = ctx.attr.import_prefix

    if not paths.is_normalized(prefix):
        fail("should be normalized (without uplevel references or '.' path segments)", attr = "import_prefix")
    if paths.is_absolute(prefix):
        fail("should be a relative path", attr = "import_prefix")

    return prefix
||||
|
||||
def _get_strip_import_prefix(ctx):
    """Returns the validated `strip_import_prefix`, made repository-relative.

    Fails if the prefix is not normalized. Absolute values are interpreted as
    repository-relative (leading "/" dropped); relative values are resolved
    against the current package. A trailing "/" is removed.
    """
    prefix = ctx.attr.strip_import_prefix

    if not paths.is_normalized(prefix):
        fail("should be normalized (without uplevel references or '.' path segments)", attr = "strip_import_prefix")

    if paths.is_absolute(prefix):
        prefix = prefix[1:]
    else:  # Relative to current package
        prefix = _join(ctx.label.package, prefix)

    return prefix.removesuffix("/")
||||
|
||||
def _proto_library_impl(ctx):
    """Rule implementation: validates attributes and constructs ProtoInfo.

    Declares the descriptor-set output, optionally symlinks sources into
    _virtual_imports, and writes the descriptor set.

    Returns:
      [ProtoInfo, DefaultInfo] — DefaultInfo.files carries the descriptor set.
    """
    # Verifies attributes.
    _check_srcs_package(ctx.label.package, ctx.attr.srcs)
    srcs = ctx.files.srcs
    deps = [dep[ProtoInfo] for dep in ctx.attr.deps]
    exports = [dep[ProtoInfo] for dep in ctx.attr.exports]
    import_prefix = _get_import_prefix(ctx)
    strip_import_prefix = _get_strip_import_prefix(ctx)

    # A source-less library re-exports its deps as well, so both deps and
    # exports are checked against the allowlist; otherwise only exports.
    check_for_reexport = deps + exports if not srcs else exports
    _PackageSpecificationInfo = bazel_features.globals.PackageSpecificationInfo
    for proto in check_for_reexport:
        if getattr(proto, "allow_exports", None):
            if not _PackageSpecificationInfo:
                fail("Allowlist checks not supported before Bazel 6.4.0")
            if not proto.allow_exports[_PackageSpecificationInfo].contains(ctx.label):
                fail("proto_library '%s' can't be reexported in package '//%s'" % (proto.direct_descriptor_set.owner, ctx.label.package))

    proto_path, virtual_srcs = _process_srcs(ctx, srcs, import_prefix, strip_import_prefix)
    descriptor_set = ctx.actions.declare_file(ctx.label.name + "-descriptor-set.proto.bin")
    proto_info = ProtoInfo(
        srcs = virtual_srcs,
        deps = deps,
        descriptor_set = descriptor_set,
        proto_path = proto_path,
        workspace_root = ctx.label.workspace_root,
        bin_dir = ctx.bin_dir.path,
        allow_exports = ctx.attr.allow_exports,
    )

    _write_descriptor_set(ctx, proto_info, deps, exports, descriptor_set)

    # We assume that the proto sources will not have conflicting artifacts
    # with the same root relative path
    data_runfiles = ctx.runfiles(
        files = [proto_info.direct_descriptor_set],
        transitive_files = depset(transitive = [proto_info.transitive_sources]),
    )
    return [
        proto_info,
        DefaultInfo(
            files = depset([proto_info.direct_descriptor_set]),
            default_runfiles = ctx.runfiles(),  # empty
            data_runfiles = data_runfiles,
        ),
    ]
||||
|
||||
def _process_srcs(ctx, srcs, import_prefix, strip_import_prefix):
    """Returns proto_path and sources, optionally symlinking them to _virtual_imports.

    Returns:
      (str, [File]) A pair of proto_path and virtual_sources.
    """
    if import_prefix == "" and strip_import_prefix == "":
        # No virtual source roots
        return "", srcs

    # Use virtual source roots
    return _symlink_to_virtual_imports(ctx, srcs, import_prefix, strip_import_prefix)
||||
|
||||
def _join(*path):
    """Joins path segments with "/", skipping empty segments."""
    segments = []
    for segment in path:
        if segment:
            segments.append(segment)
    return "/".join(segments)
||||
|
||||
def _symlink_to_virtual_imports(ctx, srcs, import_prefix, strip_import_prefix):
    """Symlinks srcs to _virtual_imports.

    Each source's path has `strip_import_prefix` removed and `import_prefix`
    prepended, and the result is declared under
    `<package>/_virtual_imports/<name>/`.

    Returns:
      A pair proto_path, directs_sources.
    """
    virtual_imports = _join("_virtual_imports", ctx.label.name)
    proto_path = _join(ctx.label.package, virtual_imports)

    # In external repositories short_path starts with "../<repo>/", so the
    # strip prefix must account for that.
    if ctx.label.workspace_name == "":
        full_strip_import_prefix = strip_import_prefix
    else:
        full_strip_import_prefix = _join("..", ctx.label.workspace_name, strip_import_prefix)
    if full_strip_import_prefix:
        full_strip_import_prefix += "/"

    virtual_srcs = []
    for src in srcs:
        # Remove strip_import_prefix
        if not src.short_path.startswith(full_strip_import_prefix):
            fail(".proto file '%s' is not under the specified strip prefix '%s'" %
                 (src.short_path, full_strip_import_prefix))
        import_path = src.short_path[len(full_strip_import_prefix):]

        # Add import_prefix
        virtual_src = ctx.actions.declare_file(_join(virtual_imports, import_prefix, import_path))
        ctx.actions.symlink(
            output = virtual_src,
            target_file = src,
            progress_message = "Symlinking virtual .proto sources for %{label}",
        )
        virtual_srcs.append(virtual_src)
    return proto_path, virtual_srcs
||||
|
||||
def _write_descriptor_set(ctx, proto_info, deps, exports, descriptor_set):
    """Writes the serialized FileDescriptorSet for this proto_library.

    Args:
      ctx: (RuleContext) The rule context.
      proto_info: (ProtoInfo) The provider built for this library.
      deps: ([ProtoInfo]) Providers of direct dependencies.
      exports: ([ProtoInfo]) Providers of exported libraries.
      descriptor_set: (File) The declared output to write.
    """
    # A source-less library produces an empty descriptor set.
    if proto_info.direct_sources == []:
        ctx.actions.write(descriptor_set, "")
        return

    dependencies_descriptor_sets = depset(transitive = [dep.transitive_descriptor_sets for dep in deps])

    args = ctx.actions.args()

    if ctx.attr._experimental_proto_descriptor_sets_include_source_info[BuildSettingInfo].value:
        args.add("--include_source_info")
        args.add("--retain_options")

    strict_deps = ctx.attr._strict_proto_deps[BuildSettingInfo].value
    if strict_deps:
        if proto_info.direct_sources:
            strict_importable_sources = depset(
                direct = proto_info._direct_proto_sources,
                transitive = [dep._exported_sources for dep in deps],
            )
        else:
            strict_importable_sources = None
        if strict_importable_sources:
            args.add_joined(
                "--direct_dependencies",
                strict_importable_sources,
                map_each = proto_common.get_import_path,
                join_with = ":",
            )
            # Example: `--direct_dependencies a.proto:b.proto`

        else:
            # The proto compiler requires an empty list to turn on strict deps checking
            args.add("--direct_dependencies=")

        # Set `-direct_dependencies_violation_msg=`
        args.add(ctx.label, format = STRICT_DEPS_FLAG_TEMPLATE)

    strict_imports = ctx.attr._strict_public_imports[BuildSettingInfo].value
    if strict_imports:
        public_import_protos = depset(transitive = [export._exported_sources for export in exports])
        if not public_import_protos:
            # This line is necessary to trigger the check.
            args.add("--allowed_public_imports=")
        else:
            args.add_joined(
                "--allowed_public_imports",
                public_import_protos,
                map_each = proto_common.get_import_path,
                join_with = ":",
            )

    # Pick the toolchain: resolved proto toolchain when enabled, otherwise an
    # ad-hoc ProtoLangToolchainInfo driving protoc directly.
    if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        toolchain = ctx.toolchains[toolchains.PROTO_TOOLCHAIN]
        if not toolchain:
            fail("Protocol compiler toolchain could not be resolved.")
        proto_lang_toolchain_info = toolchain.proto
    else:
        proto_lang_toolchain_info = proto_common.ProtoLangToolchainInfo(
            out_replacement_format_flag = "--descriptor_set_out=%s",
            output_files = "single",
            mnemonic = "GenProtoDescriptorSet",
            progress_message = "Generating Descriptor Set proto_library %{label}",
            proto_compiler = ctx.executable._proto_compiler,
            protoc_opts = ctx.fragments.proto.experimental_protoc_opts,
            plugin = None,
        )

    proto_common.compile(
        ctx.actions,
        proto_info,
        proto_lang_toolchain_info,
        generated_files = [descriptor_set],
        additional_inputs = dependencies_descriptor_sets,
        additional_args = args,
    )
||||
|
||||
# Public rule: a library of .proto files producing ProtoInfo and a serialized
# FileDescriptorSet (see _proto_library_impl).
proto_library = rule(
    _proto_library_impl,
    # TODO: proto_common docs are missing
    # TODO: ProtoInfo link doesn't work and docs are missing
    doc = """
<p>If using Bazel, please load the rule from <a href="https://github.com/bazelbuild/rules_proto">
https://github.com/bazelbuild/rules_proto</a>.

<p>Use <code>proto_library</code> to define libraries of protocol buffers which
may be used from multiple languages. A <code>proto_library</code> may be listed
in the <code>deps</code> clause of supported rules, such as
<code>java_proto_library</code>.

<p>When compiled on the command-line, a <code>proto_library</code> creates a file
named <code>foo-descriptor-set.proto.bin</code>, which is the descriptor set for
the messages the rule srcs. The file is a serialized
<code>FileDescriptorSet</code>, which is described in
<a href="https://developers.google.com/protocol-buffers/docs/techniques#self-description">
https://developers.google.com/protocol-buffers/docs/techniques#self-description</a>.

<p>It only contains information about the <code>.proto</code> files directly
mentioned by a <code>proto_library</code> rule; the collection of transitive
descriptor sets is available through the
<code>[ProtoInfo].transitive_descriptor_sets</code> Starlark provider.
See documentation in <code>proto_info.bzl</code>.

<p>Recommended code organization:
<ul>
<li>One <code>proto_library</code> rule per <code>.proto</code> file.
<li>A file named <code>foo.proto</code> will be in a rule named <code>foo_proto</code>,
which is located in the same package.
<li>A <code>[language]_proto_library</code> that wraps a <code>proto_library</code>
named <code>foo_proto</code> should be called <code>foo_[language]_proto</code>,
and be located in the same package.
</ul>""",
    attrs = {
        "srcs": attr.label_list(
            allow_files = [".proto", ".protodevel"],
            flags = ["DIRECT_COMPILE_TIME_INPUT"],
            # TODO: Should .protodevel be advertised or deprecated?
            doc = """
The list of <code>.proto</code> and <code>.protodevel</code> files that are
processed to create the target. This is usually a non empty list. One usecase
where <code>srcs</code> can be empty is an <i>alias-library</i>. This is a
proto_library rule having one or more other proto_library in <code>deps</code>.
This pattern can be used to e.g. export a public api under a persistent name.""",
        ),
        "deps": attr.label_list(
            providers = [ProtoInfo],
            doc = """
The list of other <code>proto_library</code> rules that the target depends upon.
A <code>proto_library</code> may only depend on other <code>proto_library</code>
targets. It may not depend on language-specific libraries.""",
        ),
        "exports": attr.label_list(
            providers = [ProtoInfo],
            doc = """
List of proto_library targets that can be referenced via "import public" in the
proto source.
It's an error if you use "import public" but do not list the corresponding library
in the exports attribute.
Note that you have list the library both in deps and exports since not all
lang_proto_library implementations have been changed yet.""",
        ),
        "strip_import_prefix": attr.string(
            default = "/",
            doc = """
The prefix to strip from the paths of the .proto files in this rule.

<p>When set, .proto source files in the <code>srcs</code> attribute of this rule are
accessible at their path with this prefix cut off.

<p>If it's a relative path (not starting with a slash), it's taken as a package-relative
one. If it's an absolute one, it's understood as a repository-relative path.

<p>The prefix in the <code>import_prefix</code> attribute is added after this prefix is
stripped.""",
        ),
        "import_prefix": attr.string(
            doc = """
The prefix to add to the paths of the .proto files in this rule.

<p>When set, the .proto source files in the <code>srcs</code> attribute of this rule are
accessible at is the value of this attribute prepended to their repository-relative path.

<p>The prefix in the <code>strip_import_prefix</code> attribute is removed before this
prefix is added.""",
        ),
        "allow_exports": attr.label(
            cfg = "exec",
            providers = [bazel_features.globals.PackageSpecificationInfo] if bazel_features.globals.PackageSpecificationInfo else [],
            doc = """
An optional allowlist that prevents proto library to be reexported or used in
lang_proto_library that is not in one of the listed packages.""",
        ),
        "data": attr.label_list(
            allow_files = True,
            flags = ["SKIP_CONSTRAINTS_OVERRIDE"],
        ),
        # buildifier: disable=attr-license (calling attr.license())
        "licenses": attr.license() if hasattr(attr, "license") else attr.string_list(),
        # Build settings read by _write_descriptor_set.
        "_experimental_proto_descriptor_sets_include_source_info": attr.label(
            default = "//bazel/private:experimental_proto_descriptor_sets_include_source_info",
        ),
        "_strict_proto_deps": attr.label(
            default =
                "//bazel/private:strict_proto_deps",
        ),
        "_strict_public_imports": attr.label(
            default = "//bazel/private:strict_public_imports",
        ),
    } | toolchains.if_legacy_toolchain({
        # Legacy (non-toolchain-resolution) protoc binary.
        "_proto_compiler": attr.label(
            cfg = "exec",
            executable = True,
            allow_files = True,
            default = configuration_field("proto", "proto_compiler"),
        ),
    }),  # buildifier: disable=attr-licenses (attribute called licenses)
    fragments = ["proto"],
    provides = [ProtoInfo],
    toolchains = toolchains.use_toolchain(toolchains.PROTO_TOOLCHAIN),
)
@ -0,0 +1,141 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
"""Supporting C++ compilation of generated code""" |
||||
|
||||
load("@proto_bazel_features//:features.bzl", "bazel_features") |
||||
load("@rules_cc//cc:find_cc_toolchain.bzl", "find_cc_toolchain") |
||||
|
||||
def get_feature_configuration(ctx, has_sources, extra_requested_features = []):
    """Returns C++ feature configuration for compiling and linking generated C++ files.

    Args:
      ctx: (RuleCtx) rule context.
      has_sources: (bool) Has the proto_library sources.
      extra_requested_features: (list[str]) Additionally requested features.
    Returns:
      (FeatureConfiguration) C++ feature configuration
    """
    # NOTE: the [] default is safe here — Starlark default values are frozen.
    cc_toolchain = find_cc_toolchain(ctx)
    requested_features = ctx.features + extra_requested_features

    # TODO: Remove LAYERING_CHECK once we have verified that there are direct
    # dependencies for all generated #includes.
    unsupported_features = ctx.disabled_features + ["parse_headers", "layering_check"]
    # header_modules is only requested when this library compiles its own
    # sources; otherwise it is explicitly disabled.
    if has_sources:
        requested_features.append("header_modules")
    else:
        unsupported_features.append("header_modules")
    return cc_common.configure_features(
        ctx = ctx,
        cc_toolchain = cc_toolchain,
        requested_features = requested_features,
        unsupported_features = unsupported_features,
    )
||||
|
||||
def _get_libraries_from_linking_outputs(linking_outputs, feature_configuration):
    """Collects the library artifacts produced by a link action into a flat list."""
    lib = linking_outputs.library_to_link
    if not lib:
        return []

    files = [artifact for artifact in (lib.static_library, lib.pic_static_library) if artifact]

    # On Windows, dynamic library is not built by default, so don't add them to files_to_build.
    targets_windows = cc_common.is_enabled(
        feature_configuration = feature_configuration,
        feature_name = "targets_windows",
    )
    if not targets_windows:
        if lib.resolved_symlink_dynamic_library:
            files.append(lib.resolved_symlink_dynamic_library)
        elif lib.dynamic_library:
            files.append(lib.dynamic_library)
        if lib.resolved_symlink_interface_library:
            files.append(lib.resolved_symlink_interface_library)
        elif lib.interface_library:
            files.append(lib.interface_library)
    return files
||||
|
||||
def cc_proto_compile_and_link(ctx, deps, sources, headers, disallow_dynamic_library = None, feature_configuration = None, alwayslink = False, **kwargs):
    """Creates C++ compilation and linking actions for C++ proto sources.

    Args:
      ctx: rule context
      deps: (list[CcInfo]) List of libraries to be added as dependencies to compilation and linking
        actions.
      sources: (list[File]) List of C++ sources files.
      headers: (list[File]) List of C++ headers files.
      disallow_dynamic_library: (bool) Are dynamic libraries disallowed.
      feature_configuration: (FeatureConfiguration) feature configuration to use.
      alwayslink: (bool) Should the library be always linked.
      **kwargs: Additional arguments passed to the compilation. See cc_common.compile.

    Returns:
      (CcInfo, list[File], list[File])
      - CcInfo provider with compilation context and linking context
      - A list of linked libraries related to this proto
      - A list of temporary files generated during compilation
    """
    cc_toolchain = find_cc_toolchain(ctx)
    feature_configuration = feature_configuration or get_feature_configuration(ctx, bool(sources))
    if disallow_dynamic_library == None:
        # TODO: Configure output artifact with action_config
        # once proto compile action is configurable from the crosstool.
        disallow_dynamic_library = not cc_common.is_enabled(
            feature_name = "supports_dynamic_linker",
            feature_configuration = feature_configuration,
        )

    (compilation_context, compilation_outputs) = cc_common.compile(
        actions = ctx.actions,
        feature_configuration = feature_configuration,
        cc_toolchain = cc_toolchain,
        srcs = sources,
        public_hdrs = headers,
        compilation_contexts = [dep[CcInfo].compilation_context for dep in deps if CcInfo in dep],
        name = ctx.label.name,
        # Don't instrument the generated C++ files even when --collect_code_coverage is set.
        # If we actually start generating coverage instrumentation for .proto files based on coverage
        # data from the generated C++ files, this will have to be removed. Currently, the work done
        # to instrument those files and execute the instrumentation is all for nothing, and it can
        # be quite a bit of extra computation even when that's not made worse by performance bugs,
        # as in b/64963386.
        # code_coverage_enabled = False (cc_common.compile disables code_coverage by default)
        **kwargs
    )

    if sources:
        # There is generated code to link: produce libraries and collect their files.
        linking_context, linking_outputs = cc_common.create_linking_context_from_compilation_outputs(
            actions = ctx.actions,
            feature_configuration = feature_configuration,
            cc_toolchain = cc_toolchain,
            compilation_outputs = compilation_outputs,
            linking_contexts = [dep[CcInfo].linking_context for dep in deps if CcInfo in dep],
            name = ctx.label.name,
            disallow_dynamic_library = disallow_dynamic_library,
            alwayslink = alwayslink,
        )
        libraries = _get_libraries_from_linking_outputs(linking_outputs, feature_configuration)
    else:
        # Header-only proto: nothing to link, just forward deps' linking contexts.
        linking_context = cc_common.merge_linking_contexts(
            linking_contexts = [dep[CcInfo].linking_context for dep in deps if CcInfo in dep],
        )
        libraries = []

    debug_context = None
    temps = []
    if bazel_features.cc.protobuf_on_allowlist:
        # Only Bazel versions with protobuf on the cc_common allowlist expose
        # debug context / temps APIs (see bazel_features gate).
        debug_context = cc_common.merge_debug_context(
            [cc_common.create_debug_context(compilation_outputs)] +
            [dep[CcInfo].debug_context() for dep in deps if CcInfo in dep],
        )
        temps = compilation_outputs.temps()

    return CcInfo(
        compilation_context = compilation_context,
        linking_context = linking_context,
        debug_context = debug_context,
    ), libraries, temps
@ -0,0 +1,178 @@ |
||||
# Copyright (c) 2009-2024, Google LLC |
||||
# All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
"""A Starlark implementation of the java_lite_proto_library rule.""" |
||||
|
||||
load("@rules_java//java/common:java_common.bzl", "java_common") |
||||
load("@rules_java//java/common:java_info.bzl", "JavaInfo") |
||||
load("@rules_java//java/common:proguard_spec_info.bzl", "ProguardSpecInfo") |
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load("//bazel/common:proto_info.bzl", "ProtoInfo") |
||||
load("//bazel/private:java_proto_support.bzl", "JavaProtoAspectInfo", "java_compile_for_protos", "java_info_merge_for_protos") |
||||
load("//bazel/private:toolchain_helpers.bzl", "toolchains") |
||||
|
||||
# Name of the legacy (pre-toolchain-resolution) attribute that carries the
# javalite proto toolchain; used via toolchains.if_legacy_toolchain below.
_PROTO_TOOLCHAIN_ATTR = "_aspect_proto_toolchain_for_javalite"

# Toolchain type consulted when proto toolchain resolution is enabled.
_JAVA_LITE_PROTO_TOOLCHAIN = "//bazel/private:javalite_toolchain_type"
||||
|
||||
def _aspect_impl(target, ctx):
    """Generates and compiles Java code for a proto_library dependency graph.

    Args:
      target: (Target) The `proto_library` target.
      ctx: (RuleContext) The rule context.

    Returns:
      ([JavaInfo, JavaProtoAspectInfo]) A JavaInfo describing compiled Java
      version of `proto_library` and `JavaProtoAspectInfo` with all source and
      runtime jars.
    """

    deps = [dep[JavaInfo] for dep in ctx.rule.attr.deps]
    exports = [exp[JavaInfo] for exp in ctx.rule.attr.exports]

    # Use the module-level constant (instead of repeating the literal) so the
    # attribute name stays in sync with the aspect/rule attribute declarations.
    proto_toolchain_info = toolchains.find_toolchain(
        ctx,
        _PROTO_TOOLCHAIN_ATTR,
        _JAVA_LITE_PROTO_TOOLCHAIN,
    )
    source_jar = None

    if proto_common.experimental_should_generate_code(target[ProtoInfo], proto_toolchain_info, "java_lite_proto_library", target.label):
        source_jar = ctx.actions.declare_file(ctx.label.name + "-lite-src.jar")
        proto_common.compile(
            ctx.actions,
            target[ProtoInfo],
            proto_toolchain_info,
            [source_jar],
            experimental_output_files = "single",
        )
        # The generated code compiles against the toolchain's runtime library.
        runtime = proto_toolchain_info.runtime
        if runtime:
            deps.append(runtime[JavaInfo])

    java_info, jars = java_compile_for_protos(
        ctx,
        "-lite.jar",
        source_jar,
        deps,
        exports,
        injecting_rule_kind = "java_lite_proto_library",
    )
    transitive_jars = [dep[JavaProtoAspectInfo].jars for dep in ctx.rule.attr.deps]

    return [
        java_info,
        JavaProtoAspectInfo(jars = depset(jars, transitive = transitive_jars)),
    ]
||||
|
||||
# Aspect propagated along `deps`/`exports` of java_lite_proto_library; it
# generates and compiles JavaLite code for every proto_library in the graph.
_java_lite_proto_aspect = aspect(
    implementation = _aspect_impl,
    attr_aspects = ["deps", "exports"],
    # Legacy toolchain attribute; empty dict when toolchain resolution is on.
    attrs = toolchains.if_legacy_toolchain({
        _PROTO_TOOLCHAIN_ATTR: attr.label(
            default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java_lite"),
        ),
    }),
    fragments = ["java"],
    required_providers = [ProtoInfo],
    provides = [JavaInfo, JavaProtoAspectInfo],
    toolchains = ["@bazel_tools//tools/jdk:toolchain_type"] +
                 toolchains.use_toolchain(_JAVA_LITE_PROTO_TOOLCHAIN),
)
||||
|
||||
def _rule_impl(ctx):
    """Merges results of `java_proto_aspect` in `deps`.

    `java_lite_proto_library` is identical to `java_proto_library` in every respect, except it
    builds JavaLite protos.
    Implementation of this rule is built on the implementation of `java_proto_library`.

    Args:
      ctx: (RuleContext) The rule context.
    Returns:
      ([JavaInfo, DefaultInfo, OutputGroupInfo, ProguardSpecInfo])
    """

    # Use the module-level constant (instead of repeating the literal) so the
    # attribute name stays in sync with the rule's attribute declaration.
    proto_toolchain_info = toolchains.find_toolchain(
        ctx,
        _PROTO_TOOLCHAIN_ATTR,
        _JAVA_LITE_PROTO_TOOLCHAIN,
    )
    for dep in ctx.attr.deps:
        proto_common.check_collocated(ctx.label, dep[ProtoInfo], proto_toolchain_info)

    runtime = proto_toolchain_info.runtime

    # Proguard specs come from the runtime when present; otherwise empty.
    if runtime:
        proguard_provider_specs = runtime[ProguardSpecInfo]
    else:
        proguard_provider_specs = ProguardSpecInfo(specs = depset())

    java_info = java_info_merge_for_protos([dep[JavaInfo] for dep in ctx.attr.deps], merge_java_outputs = False)

    transitive_src_and_runtime_jars = depset(transitive = [dep[JavaProtoAspectInfo].jars for dep in ctx.attr.deps])
    transitive_runtime_jars = depset(transitive = [java_info.transitive_runtime_jars])

    # Mark the result as Android-compatible where the Java rules support it.
    if hasattr(java_common, "add_constraints"):
        java_info = java_common.add_constraints(java_info, constraints = ["android"])

    return [
        java_info,
        DefaultInfo(
            files = transitive_src_and_runtime_jars,
            runfiles = ctx.runfiles(transitive_files = transitive_runtime_jars),
        ),
        OutputGroupInfo(default = depset()),
        proguard_provider_specs,
    ]
||||
|
||||
# Public rule; the real work happens in _java_lite_proto_aspect applied to `deps`,
# while _rule_impl only merges the aspect's results.
java_lite_proto_library = rule(
    implementation = _rule_impl,
    doc = """
<p>
<code>java_lite_proto_library</code> generates Java code from <code>.proto</code> files.
</p>

<p>
<code>deps</code> must point to <a href="protocol-buffer.html#proto_library"><code>proto_library
</code></a> rules.
</p>

<p>
Example:
</p>

<pre class="code">
<code class="lang-starlark">
java_library(
    name = "lib",
    runtime_deps = [":foo"],
)

java_lite_proto_library(
    name = "foo",
    deps = [":bar"],
)

proto_library(
    name = "bar",
)
</code>
</pre>
""",
    attrs = {
        "deps": attr.label_list(providers = [ProtoInfo], aspects = [_java_lite_proto_aspect], doc = """
The list of <a href="protocol-buffer.html#proto_library"><code>proto_library</code></a>
rules to generate Java code for.
"""),
        # Legacy toolchain attribute; empty dict when toolchain resolution is on.
    } | toolchains.if_legacy_toolchain({
        _PROTO_TOOLCHAIN_ATTR: attr.label(
            default = configuration_field(fragment = "proto", name = "proto_toolchain_for_java_lite"),
        ),
    }),
    provides = [JavaInfo],
    toolchains = toolchains.use_toolchain(_JAVA_LITE_PROTO_TOOLCHAIN),
)
@ -0,0 +1,62 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2008 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
"""Support for compiling protoc generated Java code.""" |
||||
|
||||
load("@rules_java//java/private:proto_support.bzl", "compile", "merge") # buildifier: disable=bzl-visibility |
||||
|
||||
# The provider is used to collect source and runtime jars in the `proto_library` dependency graph.
JavaProtoAspectInfo = provider("JavaProtoAspectInfo", fields = ["jars"])

# Re-export of rules_java's `merge` for use by the java_*_proto_library implementations.
java_info_merge_for_protos = merge
||||
|
||||
def java_compile_for_protos(ctx, output_jar_suffix, source_jar = None, deps = [], exports = [], injecting_rule_kind = "java_proto_library"):
    """Compiles Java source jar returned by proto compiler.

    Use this call for java_xxx_proto_library. It uses java_common.compile with
    some checks disabled (via javacopts) and jspecify disabled, so that the
    generated code passes.

    It also takes care that input source jar is not repackaged with a different
    name.

    When `source_jar` is `None`, the function only merges `deps` and `exports`.

    Args:
      ctx: (RuleContext) Used to call `java_common.compile`
      output_jar_suffix: (str) How to name the output jar. For example: `-speed.jar`.
      source_jar: (File) Input source jar (may be `None`).
      deps: (list[JavaInfo]) `deps` of the `proto_library`.
      exports: (list[JavaInfo]) `exports` of the `proto_library`.
      injecting_rule_kind: (str) Rule kind requesting the compilation.
        It's embedded into META-INF of the produced runtime jar, for debugging.
    Returns:
      ((JavaInfo, list[File])) JavaInfo of this target and list containing source
      and runtime jar, when they are created.
    """
    if source_jar != None:
        # Keep any directory part of the label name and prefix the basename
        # with "lib", e.g. "a/b" -> "a/lib" + "b" + suffix.
        path, sep, filename = ctx.label.name.rpartition("/")
        output_jar = ctx.actions.declare_file(path + sep + "lib" + filename + output_jar_suffix)
        java_toolchain = ctx.toolchains["@bazel_tools//tools/jdk:toolchain_type"].java
        java_info = compile(
            ctx = ctx,
            output = output_jar,
            java_toolchain = java_toolchain,
            source_jars = [source_jar],
            deps = deps,
            exports = exports,
            # Reuse the input source jar as-is instead of repackaging it.
            output_source_jar = source_jar,
            injecting_rule_kind = injecting_rule_kind,
            # Proto-specific javacopts relax checks the generated code would trip.
            javac_opts = java_toolchain._compatible_javacopts.get("proto", depset()),
            enable_jspecify = False,
            include_compilation_info = False,
        )
        jars = [source_jar, output_jar]
    else:
        # If there are no proto sources just pass along the compilation dependencies.
        java_info = merge(deps + exports, merge_java_outputs = False, merge_source_jars = False)
        jars = []
    return java_info, jars
@ -1,5 +1,3 @@ |
||||
"""Renames toplevel symbols so they can be exported in Starlark under the same name"""

# Alias of Bazel's builtin ProtoInfo so the Starlark rules can re-export it
# under the plain `ProtoInfo` name.
NativeProtoInfo = ProtoInfo

# Alias of the builtin `proto_common_do_not_use` symbol for the same reason.
native_proto_common = proto_common_do_not_use
||||
|
@ -0,0 +1,35 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2008 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
""" |
||||
A helper rule that reads a native boolean flag. |
||||
""" |
||||
|
||||
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo") |
||||
|
||||
def _impl(ctx):
    """Exposes the rule's `value` attribute as a BuildSettingInfo provider."""
    setting_info = BuildSettingInfo(value = ctx.attr.value)
    return [setting_info]
||||
|
||||
# Rule that materializes the select()-ed boolean (see native_bool_flag below)
# as a BuildSettingInfo, so callers can read it like a regular skylib setting.
_native_bool_flag_rule = rule(
    implementation = _impl,
    attrs = {"value": attr.bool()},
)
||||
|
||||
def native_bool_flag(*, name, flag, match_value = "true", result = True, **kwargs):
    """Exposes the state of a native command-line flag as a BuildSettingInfo.

    Declares a private config_setting that matches `flag` against `match_value`
    and a `name` target whose value is `result` when it matches, `not result`
    otherwise.

    Args:
      name: name of the readable flag target.
      flag: the native flag to inspect (key in config_setting `values`).
      match_value: the flag value that counts as a match.
      result: value reported on a match.
      **kwargs: forwarded to the underlying rule.
    """
    setting_name = name + "_setting"

    _native_bool_flag_rule(
        name = name,
        value = select({
            setting_name: result,
            "//conditions:default": not result,
        }),
        **kwargs
    )

    native.config_setting(
        name = setting_name,
        values = {flag: match_value},
        visibility = ["//visibility:private"],
    )
@ -0,0 +1,59 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
"""Vendored version of bazel_features for protobuf, to keep a one-step setup""" |
||||
|
||||
# Template for the generated features.bzl; the {placeholders} are filled in by
# _proto_bazel_features_impl based on the running Bazel's version.
_PROTO_BAZEL_FEATURES = """bazel_features = struct(
    cc = struct(
        protobuf_on_allowlist = {protobuf_on_allowlist},
    ),
    proto = struct(
        starlark_proto_info = {starlark_proto_info},
    ),
    globals = struct(
        PackageSpecificationInfo = {PackageSpecificationInfo},
        ProtoInfo = getattr(getattr(native, 'legacy_globals', None), 'ProtoInfo', {ProtoInfo})
    ),
)
"""
||||
|
||||
def _proto_bazel_features_impl(rctx):
    """Repository rule impl: writes a features.bzl describing the host Bazel's capabilities."""

    # An empty string is treated as a "dev version", which is greater than anything.
    bazel_version = native.bazel_version or "999999.999999.999999"

    # Strip any "-prerelease" suffix before splitting into major.minor.patch.
    version_parts = bazel_version.split("-")[0].split(".")
    if len(version_parts) != 3:
        fail("invalid Bazel version '{}': got {} dot-separated segments, want 3".format(bazel_version, len(version_parts)))
    major_version_int = int(version_parts[0])
    minor_version_int = int(version_parts[1])

    # Per-version feature gates rendered into the template below.
    starlark_proto_info = major_version_int >= 7
    PackageSpecificationInfo = major_version_int > 6 or (major_version_int == 6 and minor_version_int >= 4)

    protobuf_on_allowlist = major_version_int > 7

    # On Bazel < 8 the global ProtoInfo symbol still exists; otherwise fall
    # back to None (the template also probes native.legacy_globals first).
    ProtoInfo = "ProtoInfo" if major_version_int < 8 else "None"

    rctx.file("BUILD.bazel", """
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
bzl_library(
    name = "features",
    srcs = ["features.bzl"],
    visibility = ["//visibility:public"],
)
exports_files(["features.bzl"])
""")
    rctx.file("features.bzl", _PROTO_BAZEL_FEATURES.format(
        starlark_proto_info = repr(starlark_proto_info),
        PackageSpecificationInfo = "PackageSpecificationInfo" if PackageSpecificationInfo else "None",
        protobuf_on_allowlist = repr(protobuf_on_allowlist),
        ProtoInfo = ProtoInfo,
    ))
||||
|
||||
# Repository rule that vendors a minimal bazel_features keyed off native.bazel_version.
proto_bazel_features = repository_rule(
    implementation = _proto_bazel_features_impl,
    # Force reruns on server restarts to keep native.bazel_version up-to-date.
    local = True,
)
@ -0,0 +1,186 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
""" |
||||
Definition of ProtoInfo provider. |
||||
""" |
||||
|
||||
# Shared suffix appended to the docs of ProtoInfo's internal fields below.
_warning = """ Don't use this field. It's intended for internal use and will be changed or removed
without warning."""
||||
|
||||
def _uniq(iterable):
    """Returns the elements of `iterable` deduplicated, preserving first-seen order."""
    seen = {}
    for element in iterable:
        seen[element] = None
    return list(seen)
||||
|
||||
def _join(*path):
    """Joins the non-empty segments of `path` with '/'."""
    segments = [segment for segment in path if segment != ""]
    return "/".join(segments)
||||
|
||||
def _empty_to_dot(path):
    """Returns `path`, or "." when it is empty."""
    if path:
        return path
    return "."
||||
|
||||
def _from_root(root, repo, relpath):
    """Constructs an exec path from root to relpath"""
    if root:
        # `relpath` is a directory with a generated file or an output directory:
        #  - when in main repo: `{root}/package/path`
        #  - when in an external repository: `{root}/external/repo/package/path`
        #  - with sibling layout: `{root}/package/path`
        repo_segment = "" if repo.startswith("../") else repo
        return _join(root, repo_segment, relpath)

    # `relpath` is a directory with an input source file, the exec path is one of:
    #  - when in main repo: `package/path`
    #  - when in a external repository: `external/repo/package/path`
    #  - with sibling layout: `../repo/package/path`
    return _join(repo, relpath)
||||
|
||||
def _create_proto_info(*, srcs, deps, descriptor_set, proto_path = "", workspace_root = "", bin_dir = None, allow_exports = None):
    """Constructs ProtoInfo.

    Args:
      srcs: ([File]) List of .proto files (possibly under _virtual path)
      deps: ([ProtoInfo]) List of dependencies
      descriptor_set: (File) Descriptor set for this Proto
      proto_path: (str) Path that should be stripped from files in srcs. When
        stripping is needed, the files should be symlinked into `_virtual_imports/target_name`
        directory. Only such paths are accepted.
      workspace_root: (str) Set to ctx.workspace_root if this is not the main repository.
      bin_dir: (str) Set to ctx.bin_dir if _virtual_imports are used.
      allow_exports: (Target) The packages where this proto_library can be exported.

    Returns:
      (ProtoInfo)
    """

    # Validate parameters
    src_prefix = _join(workspace_root.replace("external/", "../"), proto_path)
    for src in srcs:
        if type(src) != "File":
            fail("srcs parameter expects a list of Files")
        if src.owner.workspace_root != workspace_root:
            fail("srcs parameter expects all files to have the same workspace_root: ", workspace_root)
        if not src.short_path.startswith(src_prefix):
            fail("srcs parameter expects all files start with %s" % src_prefix)
    if type(descriptor_set) != "File":
        fail("descriptor_set parameter expected to be a File")
    if proto_path:
        if "_virtual_imports/" not in proto_path:
            fail("proto_path needs to contain '_virtual_imports' directory")
        if proto_path.split("/")[-2] != "_virtual_imports":
            fail("proto_path needs to be formed like '_virtual_imports/target_name'")
        if not bin_dir:
            fail("bin_dir parameter should be set when _virtual_imports are used")

    direct_proto_sources = srcs
    transitive_proto_sources = depset(
        direct = direct_proto_sources,
        transitive = [dep._transitive_proto_sources for dep in deps],
        order = "preorder",
    )
    transitive_sources = depset(
        direct = srcs,
        transitive = [dep.transitive_sources for dep in deps],
        order = "preorder",
    )

    # There can be more than 1 direct proto_paths, for example when there's
    # a generated and non-generated .proto file in srcs
    root_paths = _uniq([src.root.path for src in srcs])
    transitive_proto_path = depset(
        direct = [_empty_to_dot(_from_root(root, workspace_root, proto_path)) for root in root_paths],
        transitive = [dep.transitive_proto_path for dep in deps],
    )

    if srcs:
        check_deps_sources = depset(direct = srcs)
    else:
        # Proxy library with no sources: forward the deps' sources instead.
        check_deps_sources = depset(transitive = [dep.check_deps_sources for dep in deps])

    transitive_descriptor_sets = depset(
        direct = [descriptor_set],
        transitive = [dep.transitive_descriptor_sets for dep in deps],
    )

    # Layering checks.
    if srcs:
        exported_sources = depset(direct = direct_proto_sources)
    else:
        exported_sources = depset(transitive = [dep._exported_sources for dep in deps])

    if "_virtual_imports/" in proto_path:
        #TODO: remove bin_dir from proto_source_root (when users assuming it's there are migrated)
        proto_source_root = _empty_to_dot(_from_root(bin_dir, workspace_root, proto_path))
    elif workspace_root.startswith("../"):
        # Sibling-layout external repository: the proto_path already is the root.
        proto_source_root = proto_path
    else:
        proto_source_root = _empty_to_dot(_join(workspace_root, proto_path))

    proto_info = dict(
        direct_sources = srcs,
        transitive_sources = transitive_sources,
        direct_descriptor_set = descriptor_set,
        transitive_descriptor_sets = transitive_descriptor_sets,
        proto_source_root = proto_source_root,
        transitive_proto_path = transitive_proto_path,
        check_deps_sources = check_deps_sources,
        transitive_imports = transitive_sources,
        _direct_proto_sources = direct_proto_sources,
        _transitive_proto_sources = transitive_proto_sources,
        _exported_sources = exported_sources,
    )
    if allow_exports:
        # `allow_exports` is optional on the provider; only populate when given.
        proto_info["allow_exports"] = allow_exports
    return proto_info
||||
|
||||
# provider() with `init` returns (provider symbol, raw constructor); the raw
# constructor — which would bypass _create_proto_info's validation — is
# deliberately discarded here.
ProtoInfo, _ = provider(
    doc = "Encapsulates information provided by a `proto_library.`",
    fields = {
        "direct_sources": "(list[File]) The `.proto` source files from the `srcs` attribute.",
        "transitive_sources": """(depset[File]) The `.proto` source files from this rule and all
            its dependent protocol buffer rules.""",
        "direct_descriptor_set": """(File) The descriptor set of the direct sources. If no srcs,
            contains an empty file.""",
        "transitive_descriptor_sets": """(depset[File]) A set of descriptor set files of all
            dependent `proto_library` rules, and this one's. This is not the same as passing
            --include_imports to proto-compiler. Will be empty if no dependencies.""",
        "proto_source_root": """(str) The directory relative to which the `.proto` files defined in
            the `proto_library` are defined. For example, if this is `a/b` and the rule has the
            file `a/b/c/d.proto` as a source, that source file would be imported as
            `import c/d.proto`

            In principle, the `proto_source_root` directory itself should always
            be relative to the output directory (`ctx.bin_dir`).

            This is at the moment not true for `proto_libraries` using (additional and/or strip)
            import prefixes. `proto_source_root` is in this case prefixed with the output
            directory. For example, the value is similar to
            `bazel-out/k8-fastbuild/bin/a/_virtual_includes/b` for an input file in
            `a/_virtual_includes/b/c.proto` that should be imported as `c.proto`.

            When using the value please account for both cases in a general way.
            That is assume the value is either prefixed with the output directory or not.
            This will make it possible to fix `proto_library` in the future.
            """,
        "transitive_proto_path": """(depset(str) A set of `proto_source_root`s collected from the
            transitive closure of this rule.""",
        "check_deps_sources": """(depset[File]) The `.proto` sources from the 'srcs' attribute.
            If the library is a proxy library that has no sources, it contains the
            `check_deps_sources` from this library's direct deps.""",
        "allow_exports": """(Target) The packages where this proto_library can be exported.""",

        # Deprecated fields:
        "transitive_imports": """(depset[File]) Deprecated: use `transitive_sources` instead.""",

        # Internal fields:
        "_direct_proto_sources": """(list[File]) The `ProtoSourceInfo`s from the `srcs`
            attribute.""" + _warning,
        "_transitive_proto_sources": """(depset[File]) The `ProtoSourceInfo`s from this
            rule and all its dependent protocol buffer rules.""" + _warning,
        "_exported_sources": """(depset[File]) A set of `ProtoSourceInfo`s that may be
            imported by another `proto_library` depending on this one.""" + _warning,
    },
    # All construction goes through _create_proto_info (validation + derived fields).
    init = _create_proto_info,
)
@ -0,0 +1,155 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
"""Implementation of the proto_lang_toolchain rule.""" |
||||
|
||||
load("@proto_bazel_features//:features.bzl", "bazel_features") |
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load("//bazel/common:proto_info.bzl", "ProtoInfo") |
||||
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo") |
||||
load("//bazel/private:toolchain_helpers.bzl", "toolchains") |
||||
|
||||
def _rule_impl(ctx):
    """Implementation of proto_lang_toolchain: packages the attrs into ProtoLangToolchainInfo.

    Args:
      ctx: (RuleContext) The rule context.

    Returns:
      ([DefaultInfo, platform_common.ToolchainInfo, ProtoLangToolchainInfo])
    """

    # Sources of blacklisted_protos are already linked into the runtime; code
    # generation for them is skipped by the LANG_proto_library rules.
    provided_proto_sources = depset(transitive = [bp[ProtoInfo]._transitive_proto_sources for bp in ctx.attr.blacklisted_protos]).to_list()

    flag = ctx.attr.command_line
    if flag.find("$(PLUGIN_OUT)") > -1:
        fail("in attribute 'command_line': Placeholder '$(PLUGIN_OUT)' is not supported.")

    # $(OUT) becomes a %s format slot, filled in by each LANG_proto_library.
    flag = flag.replace("$(OUT)", "%s")

    plugin = None
    if ctx.attr.plugin != None:
        plugin = ctx.attr.plugin[DefaultInfo].files_to_run

    # protoc and its options come either from the resolved proto toolchain or
    # from the legacy attribute/fragment, depending on the incompatible flag.
    if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        proto_compiler = ctx.toolchains[toolchains.PROTO_TOOLCHAIN].proto.proto_compiler
        protoc_opts = ctx.toolchains[toolchains.PROTO_TOOLCHAIN].proto.protoc_opts
    else:
        proto_compiler = ctx.attr._proto_compiler.files_to_run
        protoc_opts = ctx.fragments.proto.experimental_protoc_opts

    # Escape hatch for protobuf's own bootstrap builds (b/305068148).
    if ctx.attr.protoc_minimal_do_not_use:
        proto_compiler = ctx.attr.protoc_minimal_do_not_use.files_to_run

    proto_lang_toolchain_info = ProtoLangToolchainInfo(
        out_replacement_format_flag = flag,
        output_files = ctx.attr.output_files,
        plugin_format_flag = ctx.attr.plugin_format_flag,
        plugin = plugin,
        runtime = ctx.attr.runtime,
        provided_proto_sources = provided_proto_sources,
        proto_compiler = proto_compiler,
        protoc_opts = protoc_opts,
        progress_message = ctx.attr.progress_message,
        mnemonic = ctx.attr.mnemonic,
        allowlist_different_package = ctx.attr.allowlist_different_package,
        toolchain_type = ctx.attr.toolchain_type.label if ctx.attr.toolchain_type else None,
    )
    return [
        DefaultInfo(files = depset(), runfiles = ctx.runfiles()),
        platform_common.ToolchainInfo(proto = proto_lang_toolchain_info),
        # TODO: remove when --incompatible_enable_proto_toolchains is flipped and removed
        proto_lang_toolchain_info,
    ]
||||
|
||||
# Fix: the `output_files` attr documentation omitted the "legacy" value, which
# the attribute both accepts and uses as its default.
proto_lang_toolchain = rule(
    _rule_impl,
    doc = """
<p>If using Bazel, please load the rule from <a href="https://github.com/bazelbuild/rules_proto">
https://github.com/bazelbuild/rules_proto</a>.

<p>Specifies how a LANG_proto_library rule (e.g., <code>java_proto_library</code>) should invoke the
proto-compiler.
Some LANG_proto_library rules allow specifying which toolchain to use using command-line flags;
consult their documentation.

<p>Normally you should not write those kind of rules unless you want to
tune your Java compiler.

<p>There's no compiler. The proto-compiler is taken from the proto_library rule we attach to. It is
passed as a command-line flag to Blaze.
Several features require a proto-compiler to be invoked on the proto_library rule itself.
It's beneficial to enforce the compiler that LANG_proto_library uses is the same as the one
<code>proto_library</code> does.

<h4>Examples</h4>

<p>A simple example would be:
<pre><code class="lang-starlark">
proto_lang_toolchain(
    name = "javalite_toolchain",
    command_line = "--javalite_out=shared,immutable:$(OUT)",
    plugin = ":javalite_plugin",
    runtime = ":protobuf_lite",
)
</code></pre>
""",
    attrs = {
        "progress_message": attr.string(default = "Generating proto_library %{label}", doc = """
This value will be set as the progress message on protoc action."""),
        "mnemonic": attr.string(default = "GenProto", doc = """
This value will be set as the mnemonic on protoc action."""),
        "command_line": attr.string(mandatory = True, doc = """
This value will be passed to proto-compiler to generate the code. Only include the parts
specific to this code-generator/plugin (e.g., do not include -I parameters)
<ul>
<li><code>$(OUT)</code> is LANG_proto_library-specific. The rules are expected to define
how they interpret this variable. For Java, for example, $(OUT) will be replaced with
the src-jar filename to create.</li>
</ul>"""),
        "output_files": attr.string(values = ["single", "multiple", "legacy"], default = "legacy", doc = """
Controls how <code>$(OUT)</code> in <code>command_line</code> is formatted, either by
a path to a single file or output directory in case of multiple files.
Possible values are: "single", "multiple", "legacy"."""),
        "plugin_format_flag": attr.string(doc = """
If provided, this value will be passed to proto-compiler to use the plugin.
The value must contain a single %s which is replaced with plugin executable.
<code>--plugin=protoc-gen-PLUGIN=<executable>.</code>"""),
        "plugin": attr.label(
            executable = True,
            cfg = "exec",
            doc = """
If provided, will be made available to the action that calls the proto-compiler, and will be
passed to the proto-compiler:
<code>--plugin=protoc-gen-PLUGIN=<executable>.</code>""",
        ),
        "runtime": attr.label(doc = """
A language-specific library that the generated code is compiled against.
The exact behavior is LANG_proto_library-specific.
Java, for example, should compile against the runtime."""),
        "blacklisted_protos": attr.label_list(
            providers = [ProtoInfo],
            doc = """
No code will be generated for files in the <code>srcs</code> attribute of
<code>blacklisted_protos</code>.
This is used for .proto files that are already linked into proto runtimes, such as
<code>any.proto</code>.""",
        ),
        # TODO: add doc
        "allowlist_different_package": attr.label(
            cfg = "exec",
            providers = [bazel_features.globals.PackageSpecificationInfo] if bazel_features.globals.PackageSpecificationInfo else [],
        ),
        # TODO: add doc
        "toolchain_type": attr.label(),
        # DO NOT USE. For Protobuf incremental changes only: b/305068148.
        "protoc_minimal_do_not_use": attr.label(
            cfg = "exec",
            executable = True,
        ),
        # The legacy _proto_compiler attribute is only needed when proto
        # toolchain resolution is disabled.
    } | ({} if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION else {
        "_proto_compiler": attr.label(
            cfg = "exec",
            executable = True,
            allow_files = True,
            default = configuration_field("proto", "proto_compiler"),
        ),
    }),
    provides = [ProtoLangToolchainInfo],
    fragments = ["proto"],
    toolchains = toolchains.use_toolchain(toolchains.PROTO_TOOLCHAIN),  # Used to obtain protoc
)
@ -0,0 +1,49 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
""" |
||||
Toolchain helpers. |
||||
|
||||
The helpers here should be used for a migration to toolchain in proto rules. |
||||
|
||||
Anybody that needs them in another repository should copy them, because after |
||||
the migration is finished, the helpers can be removed. |
||||
""" |
||||
|
||||
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo") |
||||
load("//bazel/private:native.bzl", "native_proto_common") |
||||
|
||||
_incompatible_toolchain_resolution = getattr(native_proto_common, "INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION", False) |
||||
|
||||
def _find_toolchain(ctx, legacy_attr, toolchain_type):
    """Looks up the proto lang toolchain for a rule.

    Args:
        ctx: the rule context.
        legacy_attr: name of the attribute carrying the toolchain when
            toolchain resolution is disabled.
        toolchain_type: label of the toolchain type consulted when toolchain
            resolution is enabled.

    Returns:
        A ProtoLangToolchainInfo provider.
    """
    if not _incompatible_toolchain_resolution:
        # Legacy path: the toolchain is wired directly through a rule attribute.
        return getattr(ctx.attr, legacy_attr)[ProtoLangToolchainInfo]
    resolved = ctx.toolchains[toolchain_type]
    if not resolved:
        fail("No toolchains registered for '%s'." % toolchain_type)
    return resolved.proto
||||
|
||||
def _use_toolchain(toolchain_type):
    """Returns the `toolchains` list for a rule definition.

    Empty while toolchain resolution is disabled; otherwise a single optional
    requirement on `toolchain_type`.
    """
    if not _incompatible_toolchain_resolution:
        return []
    return [config_common.toolchain_type(toolchain_type, mandatory = False)]
||||
|
||||
def _if_legacy_toolchain(legacy_attr_dict):
    """Returns `legacy_attr_dict` when attribute-based toolchains are in use, else an empty dict."""
    return {} if _incompatible_toolchain_resolution else legacy_attr_dict
||||
|
||||
# Public facade bundling the toolchain-migration helpers for proto rules.
toolchains = struct(
    use_toolchain = _use_toolchain,  # helper for a rule's `toolchains` attribute
    find_toolchain = _find_toolchain,  # implementation-time toolchain lookup
    if_legacy_toolchain = _if_legacy_toolchain,  # gates legacy attr dicts
    INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = _incompatible_toolchain_resolution,
    PROTO_TOOLCHAIN = "//bazel/private:proto_toolchain_type",
)
@ -0,0 +1,74 @@ |
||||
load("//bazel/toolchains:proto_lang_toolchain.bzl", "proto_lang_toolchain") |
||||
load("//bazel/toolchains:proto_toolchain.bzl", "proto_toolchain") |
||||
|
||||
# Keep this file as small as possible and free of any unnecessary loads |
||||
# It is loaded by every use of protobuf repository, and loads here can force |
||||
# fetching of additional external repositories |
||||
|
||||
# It's also intentionally using toolchain instead of proto_lang_toolchain, |
||||
# because the former does not resolve dependencies until toolchain resolution |
||||
# needs them |
||||
|
||||
# Default proto toolchain that builds protoc from source.
proto_toolchain(
    name = "protoc_sources",
    exec_compatible_with = [],
    proto_compiler = "//:protoc",
)

# C++ proto lang toolchain (source build).
toolchain(
    name = "cc_source_toolchain",
    exec_compatible_with = [],
    target_compatible_with = [],
    toolchain = "//:cc_toolchain",
    toolchain_type = "//bazel/private:cc_toolchain_type",
)

# Java proto lang toolchain (source build).
toolchain(
    name = "java_source_toolchain",
    exec_compatible_with = [],
    target_compatible_with = [],
    toolchain = "//java/core:toolchain",
    toolchain_type = "//bazel/private:java_toolchain_type",
)

# Java lite proto lang toolchain (source build).
toolchain(
    name = "javalite_source_toolchain",
    exec_compatible_with = [],
    target_compatible_with = [],
    toolchain = "//java/lite:toolchain",
    toolchain_type = "//bazel/private:javalite_toolchain_type",
)

# Python proto lang toolchain (source build).
toolchain(
    name = "python_source_toolchain",
    exec_compatible_with = [],
    target_compatible_with = [],
    toolchain = "//python:python_toolchain",
    toolchain_type = "//bazel/private:python_toolchain_type",
)

# Following toolchain registrations are for builtin Bazel 7 rules
# which defined them in other repositories.
toolchain(
    name = "cc_source_toolchain_bazel7",
    exec_compatible_with = [],
    target_compatible_with = [],
    toolchain = "//:cc_toolchain",
    toolchain_type = "@rules_cc//cc/proto:toolchain_type",
)

toolchain(
    name = "java_source_toolchain_bazel7",
    exec_compatible_with = [],
    target_compatible_with = [],
    toolchain = "//java/core:toolchain",
    toolchain_type = "@rules_java//java/proto:toolchain_type",
)

toolchain(
    name = "javalite_source_toolchain_bazel7",
    exec_compatible_with = [],
    target_compatible_with = [],
    toolchain = "//java/lite:toolchain",
    toolchain_type = "@rules_java//java/proto:lite_toolchain_type",
)
@ -1,3 +1,20 @@ |
||||
"""proto_library rule""" |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2008 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
""" |
||||
Macro of proto_library rule. |
||||
""" |
||||
|
||||
proto_library = native.proto_library |
||||
load("@proto_bazel_features//:features.bzl", "bazel_features") |
||||
load("//bazel/private:bazel_proto_library_rule.bzl", _proto_library = "proto_library") |
||||
|
||||
def proto_library(**kwattrs):
    """Macro dispatching to the Starlark or the native proto_library rule.

    Bazel >= 7.0.0 (detected via `bazel_features.proto.starlark_proto_info`)
    gets the Starlark implementation; older Bazel keeps the native rule so the
    ProtoInfo provider implementations do not diverge.
    """
    if not bazel_features.proto.starlark_proto_info:
        # Older Bazel: keep using native rules, so that mismatch in ProtoInfo doesn't happen.
        native.proto_library(**kwattrs)
    else:
        _proto_library(**kwattrs)
||||
|
@ -0,0 +1,5 @@ |
||||
load(":proto_common_compile_tests.bzl", "proto_common_compile_test_suite")

package(default_applicable_licenses = ["//:license"])

# Analysis-time test suite exercising proto_common.compile.
proto_common_compile_test_suite(name = "proto_common_compile_test_suite")
@ -0,0 +1,368 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
"""Tests for `proto_common.compile` function.""" |
||||
|
||||
load("@rules_testing//lib:analysis_test.bzl", "analysis_test", "test_suite") |
||||
load("@rules_testing//lib:truth.bzl", "matching") |
||||
load("@rules_testing//lib:util.bzl", "util") |
||||
load("//bazel:proto_library.bzl", "proto_library") |
||||
load("//bazel/tests/testdata:compile_rule.bzl", "compile_rule") |
||||
|
||||
# Expected suffix of the proto compiler executable path in action argv.
protocol_compiler = "/protoc"
||||
|
||||
def proto_common_compile_test_suite(name):
    """Declares the shared proto fixture and all `proto_common.compile` tests.

    Args:
        name: name of the generated test_suite target.
    """
    # Fixture proto_library shared by most tests below.
    util.helper_target(
        proto_library,
        name = "simple_proto",
        srcs = ["A.proto"],
    )
    test_suite(
        name = name,
        tests = [
            _test_compile_basic,
            _test_compile_noplugin,
            _test_compile_with_plugin_output,
            _test_compile_with_directory_plugin_output,
            _test_compile_additional_args,
            _test_compile_additional_tools,
            _test_compile_additional_tools_no_plugin,
            _test_compile_additional_inputs,
            _test_compile_resource_set,
            _test_compile_protoc_opts,
            _test_compile_direct_generated_protos,
            _test_compile_indirect_generated_protos,
        ],
    )
||||
|
||||
# Verifies basic usage of `proto_common.compile`.
def _test_compile_basic(name):
    """Declares a compile target over the shared fixture and analyzes it."""
    compile_target = name + "_compile"
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = ":simple_proto",
    )
    analysis_test(
        name = name,
        target = compile_target,
        impl = _test_compile_basic_impl,
    )
||||
|
||||
def _test_compile_basic_impl(env, target):
    """Asserts argv and mnemonic of the default compile action."""
    subject = env.expect.that_target(target).action_named("MyMnemonic")
    expected_argv = [
        matching.str_endswith(protocol_compiler),
        matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
        matching.equals_wrapper("-I."),
        matching.str_endswith("/A.proto"),
    ]
    subject.argv().contains_exactly_predicates(expected_argv)
    subject.mnemonic().equals("MyMnemonic")
||||
|
||||
# Verifies usage of proto_common.generate_code with no plugin specified by toolchain.
def _test_compile_noplugin(name):
    """Compiles against the plugin-less toolchain fixture."""
    compile_target = name + "_compile"
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = ":simple_proto",
        toolchain = "//bazel/tests/testdata:toolchain_noplugin",
    )
    analysis_test(
        name = name,
        target = compile_target,
        impl = _test_compile_noplugin_impl,
    )
||||
|
||||
def _test_compile_noplugin_impl(env, target):
    """Asserts the argv contains no --plugin flag when the toolchain has none."""
    subject = env.expect.that_target(target).action_named("MyMnemonic")
    expected_argv = [
        matching.str_endswith(protocol_compiler),
        matching.equals_wrapper("-I."),
        matching.str_endswith("/A.proto"),
    ]
    subject.argv().contains_exactly_predicates(expected_argv)
||||
|
||||
# Verifies usage of `proto_common.compile` with `plugin_output` parameter set to a file.
def _test_compile_with_plugin_output(name):
    """Requests a single-file plugin output from the compile rule."""
    compile_target = name + "_compile"
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = ":simple_proto",
        plugin_output = "single",
    )
    analysis_test(
        name = name,
        target = compile_target,
        impl = _test_compile_with_plugin_output_impl,
    )
||||
|
||||
def _test_compile_with_plugin_output_impl(env, target):
    """Asserts --java_out points at the single output file."""
    subject = env.expect.that_target(target).action_named("MyMnemonic")
    expected_argv = [
        matching.str_endswith(protocol_compiler),
        matching.str_matches("--java_out=param1,param2:b*-out/*/test_compile_with_plugin_output_compile"),
        matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
        matching.equals_wrapper("-I."),
        matching.str_endswith("/A.proto"),
    ]
    subject.argv().contains_exactly_predicates(expected_argv)
||||
|
||||
# Verifies usage of `proto_common.compile` with `plugin_output` parameter set to a directory.
def _test_compile_with_directory_plugin_output(name):
    """Requests a directory-valued plugin output from the compile rule."""
    compile_target = name + "_compile"
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = ":simple_proto",
        plugin_output = "multiple",
    )
    analysis_test(
        name = name,
        target = compile_target,
        impl = _test_compile_with_directory_plugin_output_impl,
    )
||||
|
||||
def _test_compile_with_directory_plugin_output_impl(env, target):
    """Asserts --java_out points at the bin directory."""
    subject = env.expect.that_target(target).action_named("MyMnemonic")
    expected_argv = [
        matching.str_endswith(protocol_compiler),
        matching.str_matches("--java_out=param1,param2:b*-out/*/bin"),
        matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
        matching.equals_wrapper("-I."),
        matching.str_endswith("/A.proto"),
    ]
    subject.argv().contains_exactly_predicates(expected_argv)
||||
|
||||
# Verifies usage of `proto_common.compile` with `additional_args` parameter.
def _test_compile_additional_args(name):
    """Passes two extra command-line flags through the compile rule."""
    compile_target = name + "_compile"
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = ":simple_proto",
        additional_args = ["--a", "--b"],
    )
    analysis_test(
        name = name,
        target = compile_target,
        impl = _test_compile_additional_args_impl,
    )
||||
|
||||
def _test_compile_additional_args_impl(env, target):
    """Asserts the extra flags appear on the protoc command line."""
    subject = env.expect.that_target(target).action_named("MyMnemonic")
    expected_argv = [
        matching.str_endswith(protocol_compiler),
        matching.equals_wrapper("--a"),
        matching.equals_wrapper("--b"),
        matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
        matching.equals_wrapper("-I."),
        matching.str_endswith("/A.proto"),
    ]
    subject.argv().contains_exactly_predicates(expected_argv)
||||
|
||||
# Verifies usage of `proto_common.compile` with `additional_tools` parameter.
def _test_compile_additional_tools(name):
    """Supplies two extra tool binaries to the compile action."""
    compile_target = name + "_compile"
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = ":simple_proto",
        additional_tools = [
            "//bazel/tests/testdata:_tool1",
            "//bazel/tests/testdata:_tool2",
        ],
    )
    analysis_test(
        name = name,
        target = compile_target,
        impl = _test_compile_additional_tools_impl,
    )
||||
|
||||
def _test_compile_additional_tools_impl(env, target):
    """Asserts both extra tools and the toolchain plugin are action inputs."""
    subject = env.expect.that_target(target).action_named("MyMnemonic")
    expected_inputs = [
        matching.file_basename_equals("_tool1"),
        matching.file_basename_equals("_tool2"),
        matching.file_basename_equals("plugin"),
    ]
    subject.inputs().contains_at_least_predicates(expected_inputs)
||||
|
||||
# Verifies usage of `proto_common.compile` with `additional_tools` parameter and no plugin on the toolchain.
def _test_compile_additional_tools_no_plugin(name):
    """Supplies extra tools while using the plugin-less toolchain fixture."""
    compile_target = name + "_compile"
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = ":simple_proto",
        additional_tools = [
            "//bazel/tests/testdata:_tool1",
            "//bazel/tests/testdata:_tool2",
        ],
        toolchain = "//bazel/tests/testdata:toolchain_noplugin",
    )
    analysis_test(
        name = name,
        target = compile_target,
        impl = _test_compile_additional_tools_no_plugin_impl,
    )
||||
|
||||
def _test_compile_additional_tools_no_plugin_impl(env, target):
    """Asserts the extra tools are inputs while no plugin binary is."""
    subject = env.expect.that_target(target).action_named("MyMnemonic")
    expected_inputs = [
        matching.file_basename_equals("_tool1"),
        matching.file_basename_equals("_tool2"),
    ]
    subject.inputs().contains_at_least_predicates(expected_inputs)
    subject.inputs().not_contains_predicate(matching.file_basename_equals("plugin"))
||||
|
||||
# Verifies usage of `proto_common.compile` with `additional_inputs` parameter.
def _test_compile_additional_inputs(name):
    """Supplies two extra input files to the compile action."""
    compile_target = name + "_compile"
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = ":simple_proto",
        additional_inputs = ["input1.txt", "input2.txt"],
    )
    analysis_test(
        name = name,
        target = compile_target,
        impl = _test_compile_additional_inputs_impl,
    )
||||
|
||||
def _test_compile_additional_inputs_impl(env, target):
    """Asserts both extra files are among the action inputs."""
    subject = env.expect.that_target(target).action_named("MyMnemonic")
    expected_inputs = [
        matching.file_basename_equals("input1.txt"),
        matching.file_basename_equals("input2.txt"),
    ]
    subject.inputs().contains_at_least_predicates(expected_inputs)
||||
|
||||
# Verifies usage of `proto_common.compile` with the `resource_set` parameter.
def _test_compile_resource_set(name):
    """Enables the resource_set callback on the compile rule."""
    compile_target = name + "_compile"
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = ":simple_proto",
        use_resource_set = True,
    )
    analysis_test(
        name = name,
        target = compile_target,
        impl = _test_compile_resource_set_impl,
    )
||||
|
||||
def _test_compile_resource_set_impl(env, target):
    """Only verifies analysis passes; the resource set spec itself is not inspectable."""
    _action = env.expect.that_target(target).action_named("MyMnemonic")  # @unused
||||
|
||||
# Verifies `--protocopt` flags are passed to the command line.
def _test_compile_protoc_opts(name):
    """Analyzes a compile target under custom --protocopt settings."""
    compile_target = name + "_compile"
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = ":simple_proto",
    )
    analysis_test(
        name = name,
        target = compile_target,
        config_settings = {"//command_line_option:protocopt": ["--foo", "--bar"]},
        impl = _test_compile_protoc_opts_impl,
    )
||||
|
||||
def _test_compile_protoc_opts_impl(env, target):
    """Asserts the --protocopt values appear on the protoc command line."""
    subject = env.expect.that_target(target).action_named("MyMnemonic")
    expected_argv = [
        matching.str_endswith(protocol_compiler),
        matching.equals_wrapper("--foo"),
        matching.equals_wrapper("--bar"),
        matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
        matching.equals_wrapper("-I."),
        matching.str_endswith("/A.proto"),
    ]
    subject.argv().contains_exactly_predicates(expected_argv)
||||
|
||||
# Verifies `proto_common.compile` correctly handles direct generated `.proto` files.
def _test_compile_direct_generated_protos(name):
    """Builds a proto_library mixing a checked-in and a generated source."""
    compile_target = name + "_compile"
    util.helper_target(native.genrule, name = name + "_generate_G", cmd = "", outs = ["G.proto"])
    util.helper_target(
        proto_library,
        name = name + "_directly_generated_proto",
        srcs = ["A.proto", "G.proto"],
    )
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = name + "_directly_generated_proto",
    )
    analysis_test(
        name = name,
        target = compile_target,
        impl = _test_compile_direct_generated_protos_impl,
    )
||||
|
||||
def _test_compile_direct_generated_protos_impl(env, target):
    """Asserts both source roots and both .proto files are on the command line."""
    subject = env.expect.that_target(target).action_named("MyMnemonic")
    expected_argv = [
        matching.str_endswith(protocol_compiler),
        matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
        matching.str_matches("-Ib*-out/*/*"),
        matching.equals_wrapper("-I."),
        matching.str_endswith("/A.proto"),
        matching.str_matches("*-out/*/*/*/G.proto"),
    ]
    subject.argv().contains_exactly_predicates(expected_argv)
||||
|
||||
# Verifies `proto_common.compile` correctly handles indirectly generated `.proto` files
# (a generated proto reached through `deps`).
def _test_compile_indirect_generated_protos(name):
    """Depends on a proto_library whose only source is generated."""
    compile_target = name + "_compile"
    util.helper_target(native.genrule, name = "_generate_h", srcs = ["A.txt"], cmd = "", outs = ["H.proto"])
    util.helper_target(proto_library, name = "_generated_proto", srcs = ["H.proto"])
    util.helper_target(
        proto_library,
        name = name + "_indirectly_generated_proto",
        srcs = ["A.proto"],
        deps = [":_generated_proto"],
    )
    util.helper_target(
        compile_rule,
        name = compile_target,
        proto_dep = name + "_indirectly_generated_proto",
    )
    analysis_test(
        name = name,
        target = compile_target,
        impl = _test_compile_indirect_generated_protos_impl,
    )
||||
|
||||
def _test_compile_indirect_generated_protos_impl(env, target):
    """Asserts the generated-root include path is present for the indirect dep."""
    subject = env.expect.that_target(target).action_named("MyMnemonic")
    expected_argv = [
        matching.str_endswith(protocol_compiler),
        matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
        matching.str_matches("-Ib*-out/*/*"),
        matching.equals_wrapper("-I."),
        matching.str_endswith("/A.proto"),
    ]
    subject.argv().contains_exactly_predicates(expected_argv)
@ -0,0 +1,135 @@ |
||||
load("//bazel/toolchains:proto_lang_toolchain.bzl", "proto_lang_toolchain") |
||||
|
||||
package( |
||||
default_applicable_licenses = ["//:license"], |
||||
default_visibility = ["//visibility:public"], |
||||
) |
||||
|
||||
# NOTE: all fixture targets carry manual/nobuilder/notap tags so they are only
# built when a test explicitly depends on them.

# Fully configured toolchain fixture: plugin, runtime and blacklisted protos.
proto_lang_toolchain(
    name = "toolchain",
    blacklisted_protos = [":denied"],
    command_line = "--java_out=param1,param2:$(OUT)",
    mnemonic = "MyMnemonic",
    plugin = ":plugin",
    plugin_format_flag = "--plugin=%s",
    progress_message = "Progress Message %{label}",
    runtime = ":runtime",
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

# Same toolchain fixture but without a protoc plugin configured.
proto_lang_toolchain(
    name = "toolchain_noplugin",
    blacklisted_protos = [":denied"],
    command_line = "--java_out=param1,param2:$(OUT)",
    mnemonic = "MyMnemonic",
    progress_message = "Progress Message %{label}",
    runtime = ":runtime",
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

# Stub protoc plugin binary referenced by :toolchain.
cc_binary(
    name = "plugin",
    srcs = ["plugin.cc"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

# Stub language runtime referenced by both toolchains.
cc_library(
    name = "runtime",
    srcs = ["runtime.cc"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

filegroup(
    name = "descriptors",
    srcs = [
        "descriptor.proto",
        "metadata.proto",
    ],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

filegroup(
    name = "any",
    srcs = ["any.proto"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

filegroup(
    name = "something",
    srcs = ["something.proto"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

proto_library(
    name = "mixed",
    srcs = [
        ":descriptors",
        ":something",
    ],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

# Protos excluded from code generation via blacklisted_protos above.
proto_library(
    name = "denied",
    srcs = [
        ":any",
        ":descriptors",
    ],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

# Extra tool binaries used by the additional_tools tests.
cc_binary(
    name = "_tool1",
    srcs = ["tool1.cc"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

cc_binary(
    name = "_tool2",
    srcs = ["tool2.cc"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)
@ -0,0 +1,57 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
"""Testing function for proto_common module""" |
||||
|
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
|
||||
def _resource_set_callback(_os, inputs_size):
    """Returns an action resource estimate whose memory scales with the input count."""
    base_memory_mb = 25
    memory_per_input_mb = 0.15
    return {"cpu": 1, "memory": base_memory_mb + memory_per_input_mb * inputs_size}
||||
|
||||
def _impl(ctx):
    """Rule implementation: calls `proto_common.compile` with the keyword
    arguments selected by the rule's attributes and returns the declared
    output file."""
    outfile = ctx.actions.declare_file(ctx.attr.name)
    kwargs = {}
    # Map the `plugin_output` attribute onto the `plugin_output` kwarg:
    # "single" -> the output file's path, "multiple" -> the bin directory,
    # "wrong" -> a deliberately malformed path for error-handling tests.
    if ctx.attr.plugin_output == "single":
        kwargs["plugin_output"] = outfile.path
    elif ctx.attr.plugin_output == "multiple":
        kwargs["plugin_output"] = ctx.bin_dir.path
    elif ctx.attr.plugin_output == "wrong":
        kwargs["plugin_output"] = ctx.bin_dir.path + "///"
    # Extra command-line arguments are forwarded through an Args object.
    if ctx.attr.additional_args:
        additional_args = ctx.actions.args()
        additional_args.add_all(ctx.attr.additional_args)
        kwargs["additional_args"] = additional_args
    if ctx.files.additional_tools:
        kwargs["additional_tools"] = ctx.files.additional_tools
    if ctx.files.additional_inputs:
        kwargs["additional_inputs"] = depset(ctx.files.additional_inputs)
    if ctx.attr.use_resource_set:
        # Exercises the resource_set callback plumbing.
        kwargs["resource_set"] = _resource_set_callback
    if ctx.attr.progress_message:
        kwargs["experimental_progress_message"] = ctx.attr.progress_message
    proto_common.compile(
        ctx.actions,
        ctx.attr.proto_dep[ProtoInfo],
        ctx.attr.toolchain[proto_common.ProtoLangToolchainInfo],
        [outfile],
        **kwargs
    )
    return [DefaultInfo(files = depset([outfile]))]
||||
|
||||
# Test-only rule wrapping `proto_common.compile`; each attribute toggles one
# keyword argument of the call (see `_impl` above).
compile_rule = rule(
    _impl,
    attrs = {
        "proto_dep": attr.label(),  # proto_library whose sources are compiled
        "plugin_output": attr.string(),  # "single", "multiple" or "wrong"
        "toolchain": attr.label(default = ":toolchain"),  # proto_lang_toolchain fixture
        "additional_args": attr.string_list(),
        "additional_tools": attr.label_list(cfg = "exec"),
        "additional_inputs": attr.label_list(allow_files = True),
        "use_resource_set": attr.bool(),  # enables the resource_set callback
        "progress_message": attr.string(),
    },
)
@ -1,3 +1,4 @@ |
||||
import common.bazelrc |
||||
|
||||
build --cxxopt=-std=c++14 --host_cxxopt=-std=c++14 |
||||
build --copt="-Werror" --copt="-Wno-sign-compare" --copt="-Wno-sign-conversion" --copt="-Wno-error=sign-conversion" --copt="-Wno-deprecated-declarations" |
||||
|
@ -1,5 +1,6 @@ |
||||
import common.bazelrc |
||||
|
||||
build --cxxopt=-std=c++14 --host_cxxopt=-std=c++14 |
||||
build --copt="-Werror" --copt="-Wno-sign-compare" --copt="-Wno-sign-conversion" --copt="-Wno-error=sign-conversion" --copt="-Wno-deprecated-declarations" |
||||
common --repo_env=BAZEL_NO_APPLE_CPP_TOOLCHAIN=1 |
||||
common --xcode_version_config=@com_google_protobuf//.github:host_xcodes |
@ -0,0 +1,10 @@ |
||||
prefix=@CMAKE_INSTALL_PREFIX@ |
||||
exec_prefix=@CMAKE_INSTALL_PREFIX@ |
||||
libdir=@CMAKE_INSTALL_FULL_LIBDIR@ |
||||
includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@ |
||||
|
||||
Name: Protocol Buffers |
||||
Description: Google's Data Interchange Format |
||||
Version: @protobuf_VERSION@ |
||||
Libs: -L${libdir} -lupb @CMAKE_THREAD_LIBS_INIT@ |
||||
Cflags: -I${includedir} |
@ -0,0 +1 @@ |
||||
version: v1 |
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue