Merge pull request #18339 from protocolbuffers/bazel-rules2

Cherry-pick changes related to new Bazel rules
revert-18339-bazel-rules2
Joshua Haberman 5 months ago committed by GitHub
commit 106f4a6aa6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 11
      MODULE.bazel
  2. 9
      WORKSPACE
  3. 1
      bazel/BUILD.bazel
  4. 14
      bazel/common/BUILD
  5. 352
      bazel/common/proto_common.bzl
  6. 4
      bazel/common/proto_lang_toolchain_info.bzl
  7. 122
      bazel/private/BUILD
  8. 42
      bazel/private/BUILD.bazel
  9. 357
      bazel/private/bazel_proto_library_rule.bzl
  10. 35
      bazel/private/native_bool_flag.bzl
  11. 50
      bazel/private/proto_bazel_features.bzl
  12. 155
      bazel/private/proto_lang_toolchain_rule.bzl
  13. 15
      bazel/private/proto_toolchain_rule.bzl
  14. 49
      bazel/private/toolchain_helpers.bzl
  15. 21
      bazel/proto_library.bzl
  16. 18
      bazel/py_proto_library.bzl
  17. 3
      bazel/tests/BUILD
  18. 368
      bazel/tests/proto_common_compile_tests.bzl
  19. 130
      bazel/tests/testdata/BUILD
  20. 57
      bazel/tests/testdata/compile_rule.bzl
  21. 11
      bazel/toolchains/BUILD
  22. 17
      bazel/toolchains/proto_lang_toolchain.bzl
  23. 10
      bazel/toolchains/proto_toolchain.bzl
  24. 38
      protobuf_deps.bzl

@ -13,7 +13,7 @@ module(
# https://bazel.build/versions/6.0.0/build/bzlmod#version-resolution
# Thus the highest version in their module graph is resolved.
bazel_dep(name = "abseil-cpp", version = "20230802.0.bcr.1", repo_name = "com_google_absl")
bazel_dep(name = "bazel_skylib", version = "1.4.1")
bazel_dep(name = "bazel_skylib", version = "1.7.0")
bazel_dep(name = "jsoncpp", version = "1.9.5")
bazel_dep(name = "rules_cc", version = "0.0.9")
bazel_dep(name = "rules_fuzzing", version = "0.5.2")
@ -24,9 +24,7 @@ bazel_dep(name = "rules_python", version = "0.28.0")
bazel_dep(name = "rules_rust", version = "0.45.1")
bazel_dep(name = "platforms", version = "0.0.8")
bazel_dep(name = "zlib", version = "1.3.1")
# TODO: remove after toolchain types are moved to protobuf
bazel_dep(name = "rules_proto", version = "4.0.0")
bazel_dep(name = "bazel_features", version = "1.13.0", repo_name = "proto_bazel_features")
SUPPORTED_PYTHON_VERSIONS = [
"3.8",
@ -70,3 +68,8 @@ crate.spec(
)
crate.from_specs()
use_repo(crate, crate_index = "crates")
# Development dependencies
bazel_dep(name = "rules_testing", version = "0.6.0", dev_dependency = True)
# rules_proto are needed for @com_google_protobuf_v25.0 used in //compatibility/... tests
bazel_dep(name = "rules_proto", version = "4.0.0", dev_dependency = True)

@ -241,3 +241,12 @@ http_archive(
# Needed as a dependency of @com_google_protobuf_v25.0
load("@com_google_protobuf_v25.0//:protobuf_deps.bzl", protobuf_v25_deps="protobuf_deps")
protobuf_v25_deps()
# Needed for testing only
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "rules_testing",
sha256 = "02c62574631876a4e3b02a1820cb51167bb9cdcdea2381b2fa9d9b8b11c407c4",
strip_prefix = "rules_testing-0.6.0",
url = "https://github.com/bazelbuild/rules_testing/releases/download/v0.6.0/rules_testing-v0.6.0.tar.gz",
)

@ -40,6 +40,7 @@ bzl_library(
deps = [
"//bazel/common:proto_common_bzl",
"//bazel/common:proto_info_bzl",
"//bazel/private:toolchain_helpers_bzl",
"@rules_python//python:py_info_bzl",
],
)

@ -7,7 +7,9 @@ bzl_library(
],
visibility = ["//visibility:public"],
deps = [
"//bazel/private:native_bzl",
":proto_lang_toolchain_info_bzl",
"//bazel/private:toolchain_helpers_bzl",
"@proto_bazel_features//:features",
],
)
@ -29,6 +31,14 @@ bzl_library(
],
visibility = ["//visibility:public"],
deps = [
":proto_common.bzl",
"//bazel/private:native_bzl",
],
)
filegroup(
name = "bazel_osx_p4deps",
srcs = glob(["**"]) + ["@proto_bazel_features//:features"],
visibility = [
"//bazel:__pkg__",
],
)

@ -1,5 +1,351 @@
"""proto_common"""
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Definition of proto_common module, together with bazel providers for proto rules."""
load("//bazel/private:native.bzl", "native_proto_common")
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
proto_common = native_proto_common
def _import_virtual_proto_path(path):
    """Maps a proto path to a protoc `-I` flag if it is a virtual-imports root.

    Virtual-import roots look like:
      'bazel-out/k8-fastbuild/bin/external/foo/e/_virtual_imports/e' or
      'bazel-out/foo/k8-fastbuild/bin/e/_virtual_imports/e'
    i.e. they contain more than four '/' separators. Returns None otherwise.
    """
    return "-I%s" % path if path.count("/") > 4 else None
def _import_repo_proto_path(path):
    """Maps a proto path to a protoc `-I` flag if it is an external/generated root.

    Such roots have three or four '/' separators, e.g.:
      'bazel-out/k8-fastbuild/bin/external/foo' or
      'bazel-out/foo/k8-fastbuild/bin'
    Returns None otherwise.
    """
    separators = path.count("/")
    if separators <= 2 or separators > 4:
        return None
    return "-I%s" % path
def _import_main_output_proto_path(path):
    """Maps a proto path to a protoc `-I` flag if it is a short (main-output) root.

    Matches paths with at most two '/' separators, e.g.:
      'bazel-out/k8-fastbuild/bin', 'external/foo', '../foo'.
    The current directory '.' is excluded ('-I.' is added separately, last).
    Returns None otherwise.
    """
    if path == "." or path.count("/") > 2:
        return None
    return "-I%s" % path
def _remove_repo(file):
    """Strips the repository prefix (`../repo/`) from a file's short path.

    E.g. `../repo/package/path -> package/path`. Files in the main repository
    are returned unchanged.
    """
    short_path = file.short_path
    workspace_root = file.owner.workspace_root
    if not workspace_root:
        return short_path
    if workspace_root.startswith("external/"):
        # Normalize `external/repo` to the `../repo` form used by short_path.
        workspace_root = "../" + workspace_root.removeprefix("external/")
    return short_path.removeprefix(workspace_root + "/")

def _get_import_path(proto_file):
    """Returns the import path of a .proto file.

    This is the path as used for the file that can be used in an `import`
    statement in another .proto file.

    Args:
      proto_file: (File) The .proto file

    Returns:
      (str) import path
    """
    repo_relative = _remove_repo(proto_file)
    marker = repo_relative.find("_virtual_imports/")
    if marker < 0:
        return repo_relative
    # Drop everything up to and including the virtual-imports root directory.
    root_end = repo_relative.find("/", marker + len("_virtual_imports/"))
    return repo_relative[root_end + 1:]
def _output_directory(proto_info, root):
    """Returns the protoc output directory for `proto_info` under output `root`."""
    source_root = proto_info.proto_source_root
    if source_root.startswith(root.path):
        # TODO: remove this branch when bin_dir is removed from proto_source_root
        source_root = source_root.removeprefix(root.path).removeprefix("/")

    if source_root in ("", "."):
        return root.path
    return root.path + "/" + source_root
def _check_collocated(label, proto_info, proto_lang_toolchain_info):
    """Checks if lang_proto_library is collocated with proto_library.

    Exceptions are allowed by an allowlist defined on `proto_lang_toolchain` and
    on an allowlist defined on `proto_library`'s `allow_exports` attribute.

    If checks are not successful the function fails.

    Args:
      label: (Label) The label of lang_proto_library
      proto_info: (ProtoInfo) The ProtoInfo from the proto_library dependency.
      proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
        Obtained from a `proto_lang_toolchain` target.
    """
    _PackageSpecificationInfo = bazel_features.globals.PackageSpecificationInfo
    if not _PackageSpecificationInfo:
        # Bazel versions without PackageSpecificationInfo (pre-6.4.0 per the
        # message below) cannot evaluate package_group allowlists. Only fail
        # when an allowlist is actually configured; otherwise the check is a
        # no-op.
        if proto_lang_toolchain_info.allowlist_different_package or getattr(proto_info, "allow_exports", None):
            fail("Allowlist checks not supported before Bazel 6.4.0")
        return

    # Toolchain-level allowlist: a cross-package lang_proto_library is only
    # allowed when its label is contained in the toolchain's package group.
    if (proto_info.direct_descriptor_set.owner.package != label.package and
        proto_lang_toolchain_info.allowlist_different_package):
        if not proto_lang_toolchain_info.allowlist_different_package[_PackageSpecificationInfo].contains(label):
            fail(("lang_proto_library '%s' may only be created in the same package " +
                  "as proto_library '%s'") % (label, proto_info.direct_descriptor_set.owner))

    # Library-level allowlist: same check, driven by the proto_library's own
    # `allow_exports` attribute (absent on older ProtoInfo, hence hasattr).
    if (proto_info.direct_descriptor_set.owner.package != label.package and
        hasattr(proto_info, "allow_exports")):
        if not proto_info.allow_exports[_PackageSpecificationInfo].contains(label):
            fail(("lang_proto_library '%s' may only be created in the same package " +
                  "as proto_library '%s'") % (label, proto_info.direct_descriptor_set.owner))
def _compile(
        actions,
        proto_info,
        proto_lang_toolchain_info,
        generated_files,
        plugin_output = None,
        additional_args = None,
        additional_tools = [],
        additional_inputs = depset(),
        additional_proto_lang_toolchain_info = None,
        resource_set = None,
        experimental_exec_group = None,
        experimental_progress_message = None,
        experimental_output_files = "legacy"):
    """Creates proto compile action for compiling *.proto files to language specific sources.

    Args:
      actions: (ActionFactory) Obtained by ctx.actions, used to register the actions.
      proto_info: (ProtoInfo) The ProtoInfo from proto_library to generate the sources for.
      proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
        Obtained from a `proto_lang_toolchain` target or constructed ad-hoc..
      generated_files: (list[File]) The output files generated by the proto compiler.
        Callee needs to declare files using `ctx.actions.declare_file`.
        See also: `proto_common.declare_generated_files`.
      plugin_output: (File|str) Deprecated: Set `proto_lang_toolchain.output_files`
        and remove the parameter.
        For backwards compatibility, when the proto_lang_toolchain isn't updated
        the value is used.
      additional_args: (Args) Additional arguments to add to the action.
        Accepts a ctx.actions.args() object that is added at the beginning
        of the command line.
      additional_tools: (list[File]) Additional tools to add to the action.
      additional_inputs: (Depset[File]) Additional input files to add to the action.
      additional_proto_lang_toolchain_info: Unused by this implementation;
        accepted for call-site compatibility. NOTE(review): confirm whether it
        can be removed.
      resource_set: (func) A callback function that is passed to the created action.
        See `ctx.actions.run`, `resource_set` parameter for full definition of
        the callback.
      experimental_exec_group: (str) Sets `exec_group` on proto compile action.
        Avoid using this parameter.
      experimental_progress_message: Overrides progress_message from the toolchain.
        Don't use this parameter. It's only intended for the transition.
      experimental_output_files: (str) Overwrites output_files from the toolchain.
        Don't use this parameter. It's only intended for the transition.
    """
    if type(generated_files) != type([]):
        fail("generated_files is expected to be a list of Files")
    if not generated_files:
        return  # nothing to do
    if experimental_output_files not in ["single", "multiple", "legacy"]:
        fail('experimental_output_files expected to be one of ["single", "multiple", "legacy"]')

    args = actions.args()
    args.use_param_file(param_file_arg = "@%s")
    args.set_param_file_format("multiline")
    tools = list(additional_tools)

    # Resolve the output-files mode: the explicit override wins, then the
    # toolchain's `output_files`, defaulting to "legacy" (caller-supplied
    # plugin_output).
    if experimental_output_files != "legacy":
        output_files = experimental_output_files
    else:
        output_files = getattr(proto_lang_toolchain_info, "output_files", "legacy")
    if output_files != "legacy":
        if proto_lang_toolchain_info.out_replacement_format_flag:
            if output_files == "single":
                if len(generated_files) > 1:
                    fail("generated_files only expected a single file")
                plugin_output = generated_files[0]
            else:
                # "multiple": point the plugin at the shared output directory.
                plugin_output = _output_directory(proto_info, generated_files[0].root)

    if plugin_output:
        args.add(plugin_output, format = proto_lang_toolchain_info.out_replacement_format_flag)
    if proto_lang_toolchain_info.plugin:
        tools.append(proto_lang_toolchain_info.plugin)
        args.add(proto_lang_toolchain_info.plugin.executable, format = proto_lang_toolchain_info.plugin_format_flag)

    # Protoc searches for .protos -I paths in order they are given and then
    # uses the path within the directory as the package.
    # This requires ordering the paths from most specific (longest) to least
    # specific ones, so that no path in the list is a prefix of any of the
    # following paths in the list.
    # For example: 'bazel-out/k8-fastbuild/bin/external/foo' needs to be listed
    # before 'bazel-out/k8-fastbuild/bin'. If not, protoc will discover file under
    # the shorter path and use 'external/foo/...' as its package path.
    args.add_all(proto_info.transitive_proto_path, map_each = _import_virtual_proto_path)
    args.add_all(proto_info.transitive_proto_path, map_each = _import_repo_proto_path)
    args.add_all(proto_info.transitive_proto_path, map_each = _import_main_output_proto_path)
    args.add("-I.")  # Needs to come last

    args.add_all(proto_lang_toolchain_info.protoc_opts)

    args.add_all(proto_info.direct_sources)

    if additional_args:
        additional_args.use_param_file(param_file_arg = "@%s")
        additional_args.set_param_file_format("multiline")

    actions.run(
        mnemonic = proto_lang_toolchain_info.mnemonic,
        progress_message = experimental_progress_message if experimental_progress_message else proto_lang_toolchain_info.progress_message,
        executable = proto_lang_toolchain_info.proto_compiler,
        arguments = [args, additional_args] if additional_args else [args],
        inputs = depset(transitive = [proto_info.transitive_sources, additional_inputs]),
        outputs = generated_files,
        tools = tools,
        use_default_shell_env = True,
        resource_set = resource_set,
        exec_group = experimental_exec_group,
        toolchain = _toolchain_type(proto_lang_toolchain_info),
    )
_BAZEL_TOOLS_PREFIX = "external/bazel_tools/"

def _experimental_filter_sources(proto_info, proto_lang_toolchain_info):
    """Splits the library's direct sources into (included, excluded) lists.

    A source is excluded when the toolchain already provides it (matched by
    file path); all remaining sources are included.
    """
    if not proto_info.direct_sources:
        return [], []

    # Build a set-like dict of toolchain-provided paths. For listed protos
    # bundled with the Bazel tools repository, their exec paths start with
    # external/bazel_tools/. This prefix needs to be removed first, because
    # the protos in user repositories will not have that prefix.
    provided_paths = {}
    for provided in proto_lang_toolchain_info.provided_proto_sources:
        provided_path = provided.path
        if provided_path.startswith(_BAZEL_TOOLS_PREFIX):
            provided_path = provided_path[len(_BAZEL_TOOLS_PREFIX):]
        provided_paths[provided_path] = None

    # Partition the direct sources against the provided set.
    included = []
    excluded = []
    for source in proto_info._direct_proto_sources:
        bucket = excluded if source.path in provided_paths else included
        bucket.append(source)
    return included, excluded

def _experimental_should_generate_code(
        proto_info,
        proto_lang_toolchain_info,
        rule_name,
        target_label):
    """Checks if the code should be generated for the given proto_library.

    The code shouldn't be generated only when the toolchain already provides it
    to the language through its runtime dependency.

    It fails when the proto_library contains mixed proto files, that should and
    shouldn't generate code.

    Args:
      proto_info: (ProtoInfo) The ProtoInfo from proto_library to check the generation for.
      proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
        Obtained from a `proto_lang_toolchain` target or constructed ad-hoc.
      rule_name: (str) Name of the rule used in the failure message.
      target_label: (Label) The label of the target used in the failure message.

    Returns:
      (bool) True when the code should be generated.
    """
    included, excluded = _experimental_filter_sources(proto_info, proto_lang_toolchain_info)
    if excluded and included:
        excluded_paths = ", ".join([f.short_path for f in excluded])
        included_paths = ", ".join([f.short_path for f in included])
        fail(("The 'srcs' attribute of '%s' contains protos for which '%s' " +
              "shouldn't generate code (%s), in addition to protos for which it should (%s).\n" +
              "Separate '%s' into 2 proto_library rules.") % (
            target_label,
            rule_name,
            excluded_paths,
            included_paths,
            target_label,
        ))
    return bool(included)
def _declare_generated_files(
        actions,
        proto_info,
        extension,
        name_mapper = None):
    """Declares generated files with a specific extension.

    Use this in lang_proto_library-es when protocol compiler generates files
    that correspond to .proto file names.

    The function removes ".proto" extension with given one (e.g. ".pb.cc") and
    declares new output files.

    Args:
      actions: (ActionFactory) Obtained by ctx.actions, used to declare the files.
      proto_info: (ProtoInfo) The ProtoInfo to declare the files for.
      extension: (str) The extension to use for generated files.
      name_mapper: (str->str) A function mapped over the base filename without
        the extension. Used it to replace characters in the name that
        cause problems in a specific programming language.

    Returns:
      (list[File]) The list of declared files.
    """
    sources = proto_info.direct_sources

    # Strip the source's own extension (plus its dot), then optionally remap.
    stems = [src.basename[:-(len(src.extension) + 1)] for src in sources]
    if name_mapper:
        stems = [name_mapper(stem) for stem in stems]

    # Note that two proto_library rules can have the same source file, so this
    # is actually a shared action. NB: This can probably result in action
    # conflicts if the proto_library rules are not the same.
    return [
        actions.declare_file(stem + extension, sibling = src)
        for stem, src in zip(stems, sources)
    ]
def _toolchain_type(proto_lang_toolchain_info):
    """Returns the toolchain type to set on the compile action, or None.

    Only meaningful when proto toolchain resolution is enabled; in legacy mode
    the action carries no toolchain.
    """
    if not toolchains.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        return None
    return getattr(proto_lang_toolchain_info, "toolchain_type", None)
# Public proto_common API, exported as an immutable struct.
proto_common = struct(
    compile = _compile,
    declare_generated_files = _declare_generated_files,
    check_collocated = _check_collocated,
    experimental_should_generate_code = _experimental_should_generate_code,
    experimental_filter_sources = _experimental_filter_sources,
    get_import_path = _get_import_path,
    ProtoLangToolchainInfo = ProtoLangToolchainInfo,
    INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = toolchains.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION,
    # Migration flag read by consumers; the toolchain type is computed in
    # _toolchain_type above.
    INCOMPATIBLE_PASS_TOOLCHAIN_TYPE = True,
)

@ -1,5 +1,5 @@
"""ProtoLangToolchainInfo"""
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/private:native.bzl", "native_proto_common")
ProtoLangToolchainInfo = proto_common.ProtoLangToolchainInfo
ProtoLangToolchainInfo = native_proto_common.ProtoLangToolchainInfo

@ -0,0 +1,122 @@
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
load("//bazel/private:native_bool_flag.bzl", "native_bool_flag")

# Toolchain types owned by this package.
toolchain_type(
    name = "proto_toolchain_type",
)

toolchain_type(
    name = "python_toolchain_type",
    visibility = ["//visibility:public"],
)

# bzl_library targets so dependents can build stardoc/deps graphs for the
# private .bzl sources.
bzl_library(
    name = "upb_proto_library_internal_bzl",
    srcs = [
        "upb_proto_library_internal/aspect.bzl",
        "upb_proto_library_internal/cc_library_func.bzl",
        "upb_proto_library_internal/copts.bzl",
        "upb_proto_library_internal/rule.bzl",
    ],
    visibility = ["//bazel:__pkg__"],
    deps = [
        "//bazel/common:proto_common_bzl",
        "@bazel_skylib//lib:paths",
        "@bazel_tools//tools/cpp:toolchain_utils.bzl",
    ],
)

bzl_library(
    name = "native_bzl",
    srcs = [
        "native.bzl",
    ],
    visibility = ["//bazel:__subpackages__"],
)

bzl_library(
    name = "bazel_proto_library_rule_bzl",
    srcs = [
        "bazel_proto_library_rule.bzl",
    ],
    visibility = ["//bazel:__subpackages__"],
    deps = [
        "//bazel/common:proto_common_bzl",
        "//bazel/common:proto_info_bzl",
        "//bazel/private:toolchain_helpers_bzl",
        "@bazel_skylib//lib:paths",
        "@bazel_skylib//rules:common_settings",
        "@proto_bazel_features//:features",
    ],
)

bzl_library(
    name = "proto_toolchain_rule_bzl",
    srcs = [
        "proto_toolchain_rule.bzl",
    ],
    visibility = ["//bazel:__subpackages__"],
    deps = [
        "//bazel/common:proto_common_bzl",
        "//bazel/common:proto_lang_toolchain_info_bzl",
        "//bazel/private:toolchain_helpers_bzl",
    ],
)

bzl_library(
    name = "proto_lang_toolchain_rule_bzl",
    srcs = [
        "proto_lang_toolchain_rule.bzl",
    ],
    visibility = ["//bazel:__subpackages__"],
    deps = [
        ":toolchain_helpers_bzl",
        "//bazel/common:proto_common_bzl",
        "//bazel/common:proto_info_bzl",
        "//bazel/common:proto_lang_toolchain_info_bzl",
        "@proto_bazel_features//:features",
    ],
)

bzl_library(
    name = "toolchain_helpers_bzl",
    srcs = [
        "toolchain_helpers.bzl",
    ],
    visibility = ["//bazel:__subpackages__"],
    deps = [
        ":native_bzl",
        "//bazel/common:proto_lang_toolchain_info_bzl",
    ],
)

# Readable build settings backed by native command-line flags
# (see native_bool_flag.bzl).
native_bool_flag(
    name = "experimental_proto_descriptor_sets_include_source_info",
    flag = "experimental_proto_descriptor_sets_include_source_info",
    match_value = "true",
    visibility = ["//bazel:__subpackages__"],
)

native_bool_flag(
    name = "strict_proto_deps",
    flag = "strict_proto_deps",
    match_value = "off",
    result = False,
    visibility = ["//bazel:__subpackages__"],
)

native_bool_flag(
    name = "strict_public_imports",
    flag = "strict_public_imports",
    match_value = "off",
    result = False,
    visibility = ["//bazel:__subpackages__"],
)

filegroup(
    name = "bazel_osx_p4deps",
    srcs = glob(["**"]),
    visibility = [
        "//bazel:__pkg__",
    ],
)

@ -1,42 +0,0 @@
# Copyright (c) 2009-2021, Google LLC
# All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
licenses(["notice"])
bzl_library(
name = "upb_proto_library_internal_bzl",
srcs = [
"upb_proto_library_internal/aspect.bzl",
"upb_proto_library_internal/cc_library_func.bzl",
"upb_proto_library_internal/copts.bzl",
"upb_proto_library_internal/rule.bzl",
],
visibility = ["//bazel:__pkg__"],
deps = [
"//bazel/common:proto_common_bzl",
"@bazel_skylib//lib:paths",
"@bazel_tools//tools/cpp:toolchain_utils.bzl",
],
)
bzl_library(
name = "native_bzl",
srcs = [
"native.bzl",
],
visibility = ["//bazel:__subpackages__"],
)
bzl_library(
name = "proto_toolchain_rule_bzl",
srcs = [
"proto_toolchain_rule.bzl",
],
visibility = ["//bazel:__subpackages__"],
)

@ -0,0 +1,357 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""
Implementation of proto_library rule.
"""
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
# Template for protoc's strict-deps violation message. The outer %%s is left
# for protoc to substitute with the offending import; the single %s is filled
# with the proto_library label (see args.add(ctx.label, format = ...) in
# _write_descriptor_set).
STRICT_DEPS_FLAG_TEMPLATE = (
    "--direct_dependencies_violation_msg=" +
    "%%s is imported, but %s doesn't directly depend on a proto_library that 'srcs' it."
)
def _check_srcs_package(target_package, srcs):
    """Fails unless every .proto source label lives in `target_package`.

    This is done to avoid clashes with the generated sources.
    """
    # TODO: this does not work with filegroups that contain files that are not in the package
    for source in srcs:
        if source.label.package != target_package:
            fail("Proto source with label '%s' must be in same package as consuming rule." % source.label)
def _get_import_prefix(ctx):
    """Returns the validated `import_prefix` attribute.

    The prefix must be a normalized, relative path.
    """
    prefix = ctx.attr.import_prefix
    if not paths.is_normalized(prefix):
        fail("should be normalized (without uplevel references or '.' path segments)", attr = "import_prefix")
    if paths.is_absolute(prefix):
        fail("should be a relative path", attr = "import_prefix")
    return prefix
def _get_strip_import_prefix(ctx):
    """Returns the validated `strip_import_prefix`, repository-relative, no trailing '/'."""
    prefix = ctx.attr.strip_import_prefix
    if not paths.is_normalized(prefix):
        fail("should be normalized (without uplevel references or '.' path segments)", attr = "strip_import_prefix")

    if paths.is_absolute(prefix):
        # Absolute prefixes are repository-relative; drop the leading slash.
        prefix = prefix[1:]
    else:
        # Relative prefixes are anchored at the rule's package.
        prefix = _join(ctx.label.package, prefix)
    return prefix.removesuffix("/")
def _proto_library_impl(ctx):
    """Implementation of proto_library.

    Validates attributes, optionally re-roots the sources under
    _virtual_imports, builds the ProtoInfo provider and registers the
    descriptor-set generating action.
    """
    # Verifies attributes.
    _check_srcs_package(ctx.label.package, ctx.attr.srcs)
    srcs = ctx.files.srcs
    deps = [dep[ProtoInfo] for dep in ctx.attr.deps]
    exports = [dep[ProtoInfo] for dep in ctx.attr.exports]
    import_prefix = _get_import_prefix(ctx)
    strip_import_prefix = _get_strip_import_prefix(ctx)

    # An alias library (no srcs) re-exports its deps as well as its exports;
    # with srcs, only the explicit exports are re-exported.
    check_for_reexport = deps + exports if not srcs else exports
    _PackageSpecificationInfo = bazel_features.globals.PackageSpecificationInfo
    for proto in check_for_reexport:
        if getattr(proto, "allow_exports", None):
            if not _PackageSpecificationInfo:
                fail("Allowlist checks not supported before Bazel 6.4.0")
            if not proto.allow_exports[_PackageSpecificationInfo].contains(ctx.label):
                fail("proto_library '%s' can't be reexported in package '//%s'" % (proto.direct_descriptor_set.owner, ctx.label.package))

    proto_path, virtual_srcs = _process_srcs(ctx, srcs, import_prefix, strip_import_prefix)
    descriptor_set = ctx.actions.declare_file(ctx.label.name + "-descriptor-set.proto.bin")
    proto_info = ProtoInfo(
        srcs = virtual_srcs,
        deps = deps,
        descriptor_set = descriptor_set,
        proto_path = proto_path,
        workspace_root = ctx.label.workspace_root,
        bin_dir = ctx.bin_dir.path,
        allow_exports = ctx.attr.allow_exports,
    )

    _write_descriptor_set(ctx, proto_info, deps, exports, descriptor_set)

    # We assume that the proto sources will not have conflicting artifacts
    # with the same root relative path
    data_runfiles = ctx.runfiles(
        files = [proto_info.direct_descriptor_set],
        transitive_files = depset(transitive = [proto_info.transitive_sources]),
    )
    return [
        proto_info,
        DefaultInfo(
            files = depset([proto_info.direct_descriptor_set]),
            default_runfiles = ctx.runfiles(),  # empty
            data_runfiles = data_runfiles,
        ),
    ]
def _process_srcs(ctx, srcs, import_prefix, strip_import_prefix):
    """Returns proto_path and sources, optionally symlinking them to _virtual_imports.

    Returns:
      (str, [File]) A pair of proto_path and virtual_sources.
    """
    if import_prefix == "" and strip_import_prefix == "":
        # No virtual source roots: sources are used in place.
        return "", srcs

    # Either prefix attribute forces a virtual source root.
    return _symlink_to_virtual_imports(ctx, srcs, import_prefix, strip_import_prefix)
def _join(*path):
    """Joins the given path segments with '/', skipping empty segments."""
    nonempty = []
    for segment in path:
        if segment != "":
            nonempty.append(segment)
    return "/".join(nonempty)
def _symlink_to_virtual_imports(ctx, srcs, import_prefix, strip_import_prefix):
    """Symlinks srcs to _virtual_imports.

    Each source is re-rooted as:
      <package>/_virtual_imports/<rule name>/<import_prefix>/<path minus strip_import_prefix>

    Returns:
      A pair proto_path, directs_sources.
    """
    virtual_imports = _join("_virtual_imports", ctx.label.name)
    proto_path = _join(ctx.label.package, virtual_imports)

    if ctx.label.workspace_name == "":
        full_strip_import_prefix = strip_import_prefix
    else:
        # External-repo short_paths start with '../<repo>/', so the prefix to
        # strip must carry the same repo segment.
        full_strip_import_prefix = _join("..", ctx.label.workspace_name, strip_import_prefix)
    if full_strip_import_prefix:
        full_strip_import_prefix += "/"

    virtual_srcs = []
    for src in srcs:
        # Remove strip_import_prefix
        if not src.short_path.startswith(full_strip_import_prefix):
            fail(".proto file '%s' is not under the specified strip prefix '%s'" %
                 (src.short_path, full_strip_import_prefix))
        import_path = src.short_path[len(full_strip_import_prefix):]

        # Add import_prefix
        virtual_src = ctx.actions.declare_file(_join(virtual_imports, import_prefix, import_path))
        ctx.actions.symlink(
            output = virtual_src,
            target_file = src,
            progress_message = "Symlinking virtual .proto sources for %{label}",
        )
        virtual_srcs.append(virtual_src)
    return proto_path, virtual_srcs
def _write_descriptor_set(ctx, proto_info, deps, exports, descriptor_set):
    """Registers the action that writes this library's descriptor set.

    Assembles protoc flags for source info, strict deps and strict public
    imports, resolves the protoc toolchain (or the legacy `_proto_compiler`
    attribute) and delegates to proto_common.compile.
    """
    # An alias library (no direct sources) gets an empty descriptor set.
    if proto_info.direct_sources == []:
        ctx.actions.write(descriptor_set, "")
        return

    dependencies_descriptor_sets = depset(transitive = [dep.transitive_descriptor_sets for dep in deps])

    args = ctx.actions.args()

    if ctx.attr._experimental_proto_descriptor_sets_include_source_info[BuildSettingInfo].value:
        args.add("--include_source_info")
    # _retain_options is optional on the rule; guard with hasattr.
    if hasattr(ctx.attr, "_retain_options") and ctx.attr._retain_options:
        args.add("--retain_options")

    strict_deps = ctx.attr._strict_proto_deps[BuildSettingInfo].value
    if strict_deps:
        if proto_info.direct_sources:
            strict_importable_sources = depset(
                direct = proto_info._direct_proto_sources,
                transitive = [dep._exported_sources for dep in deps],
            )
        else:
            strict_importable_sources = None
        if strict_importable_sources:
            args.add_joined(
                "--direct_dependencies",
                strict_importable_sources,
                map_each = proto_common.get_import_path,
                join_with = ":",
            )
            # Example: `--direct_dependencies a.proto:b.proto`
        else:
            # The proto compiler requires an empty list to turn on strict deps checking
            args.add("--direct_dependencies=")

        # Set `-direct_dependencies_violation_msg=`
        args.add(ctx.label, format = STRICT_DEPS_FLAG_TEMPLATE)

    strict_imports = ctx.attr._strict_public_imports[BuildSettingInfo].value
    if strict_imports:
        public_import_protos = depset(transitive = [export._exported_sources for export in exports])
        if not public_import_protos:
            # This line is necessary to trigger the check.
            args.add("--allowed_public_imports=")
        else:
            args.add_joined(
                "--allowed_public_imports",
                public_import_protos,
                map_each = proto_common.get_import_path,
                join_with = ":",
            )

    if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        toolchain = ctx.toolchains[toolchains.PROTO_TOOLCHAIN]
        if not toolchain:
            fail("Protocol compiler toolchain could not be resolved.")
        proto_lang_toolchain_info = toolchain.proto
    else:
        # Legacy mode: build an ad-hoc toolchain info around the protoc binary
        # supplied via the `_proto_compiler` attribute.
        proto_lang_toolchain_info = proto_common.ProtoLangToolchainInfo(
            out_replacement_format_flag = "--descriptor_set_out=%s",
            output_files = "single",
            mnemonic = "GenProtoDescriptorSet",
            progress_message = "Generating Descriptor Set proto_library %{label}",
            proto_compiler = ctx.executable._proto_compiler,
            protoc_opts = ctx.fragments.proto.experimental_protoc_opts,
            plugin = None,
        )

    proto_common.compile(
        ctx.actions,
        proto_info,
        proto_lang_toolchain_info,
        generated_files = [descriptor_set],
        additional_inputs = dependencies_descriptor_sets,
        additional_args = args,
    )
# Declarative definition of proto_library; the implementation is
# _proto_library_impl above. The legacy `_proto_compiler` attribute is only
# present when toolchain resolution is off (toolchains.if_legacy_toolchain).
proto_library = rule(
    _proto_library_impl,
    # TODO: proto_common docs are missing
    # TODO: ProtoInfo link doesn't work and docs are missing
    doc = """
<p>If using Bazel, please load the rule from <a href="https://github.com/bazelbuild/rules_proto">
https://github.com/bazelbuild/rules_proto</a>.
<p>Use <code>proto_library</code> to define libraries of protocol buffers which
may be used from multiple languages. A <code>proto_library</code> may be listed
in the <code>deps</code> clause of supported rules, such as
<code>java_proto_library</code>.
<p>When compiled on the command-line, a <code>proto_library</code> creates a file
named <code>foo-descriptor-set.proto.bin</code>, which is the descriptor set for
the messages the rule srcs. The file is a serialized
<code>FileDescriptorSet</code>, which is described in
<a href="https://developers.google.com/protocol-buffers/docs/techniques#self-description">
https://developers.google.com/protocol-buffers/docs/techniques#self-description</a>.
<p>It only contains information about the <code>.proto</code> files directly
mentioned by a <code>proto_library</code> rule; the collection of transitive
descriptor sets is available through the
<code>[ProtoInfo].transitive_descriptor_sets</code> Starlark provider.
See documentation in <code>proto_info.bzl</code>.
<p>Recommended code organization:
<ul>
<li>One <code>proto_library</code> rule per <code>.proto</code> file.
<li>A file named <code>foo.proto</code> will be in a rule named <code>foo_proto</code>,
which is located in the same package.
<li>A <code>[language]_proto_library</code> that wraps a <code>proto_library</code>
named <code>foo_proto</code> should be called <code>foo_[language]_proto</code>,
and be located in the same package.
</ul>""",
    attrs = {
        "srcs": attr.label_list(
            allow_files = [".proto", ".protodevel"],
            flags = ["DIRECT_COMPILE_TIME_INPUT"],
            # TODO: Should .protodevel be advertised or deprecated?
            doc = """
The list of <code>.proto</code> and <code>.protodevel</code> files that are
processed to create the target. This is usually a non empty list. One usecase
where <code>srcs</code> can be empty is an <i>alias-library</i>. This is a
proto_library rule having one or more other proto_library in <code>deps</code>.
This pattern can be used to e.g. export a public api under a persistent name.""",
        ),
        "deps": attr.label_list(
            providers = [ProtoInfo],
            doc = """
The list of other <code>proto_library</code> rules that the target depends upon.
A <code>proto_library</code> may only depend on other <code>proto_library</code>
targets. It may not depend on language-specific libraries.""",
        ),
        "exports": attr.label_list(
            providers = [ProtoInfo],
            doc = """
List of proto_library targets that can be referenced via "import public" in the
proto source.
It's an error if you use "import public" but do not list the corresponding library
in the exports attribute.
Note that you have list the library both in deps and exports since not all
lang_proto_library implementations have been changed yet.""",
        ),
        "strip_import_prefix": attr.string(
            default = "/",
            doc = """
The prefix to strip from the paths of the .proto files in this rule.
<p>When set, .proto source files in the <code>srcs</code> attribute of this rule are
accessible at their path with this prefix cut off.
<p>If it's a relative path (not starting with a slash), it's taken as a package-relative
one. If it's an absolute one, it's understood as a repository-relative path.
<p>The prefix in the <code>import_prefix</code> attribute is added after this prefix is
stripped.""",
        ),
        "import_prefix": attr.string(
            doc = """
The prefix to add to the paths of the .proto files in this rule.
<p>When set, the .proto source files in the <code>srcs</code> attribute of this rule are
accessible at is the value of this attribute prepended to their repository-relative path.
<p>The prefix in the <code>strip_import_prefix</code> attribute is removed before this
prefix is added.""",
        ),
        "allow_exports": attr.label(
            cfg = "exec",
            # PackageSpecificationInfo only exists on Bazel >= 6.4.0.
            providers = [bazel_features.globals.PackageSpecificationInfo] if bazel_features.globals.PackageSpecificationInfo else [],
            doc = """
An optional allowlist that prevents proto library to be reexported or used in
lang_proto_library that is not in one of the listed packages.""",
        ),
        "data": attr.label_list(
            allow_files = True,
            flags = ["SKIP_CONSTRAINTS_OVERRIDE"],
        ),
        # buildifier: disable=attr-license (calling attr.license())
        "licenses": attr.license() if hasattr(attr, "license") else attr.string_list(),
        # Hidden labels wiring in the native_bool_flag build settings from
        # //bazel/private.
        "_experimental_proto_descriptor_sets_include_source_info": attr.label(
            default = "//bazel/private:experimental_proto_descriptor_sets_include_source_info",
        ),
        "_strict_proto_deps": attr.label(
            default =
                "//bazel/private:strict_proto_deps",
        ),
        "_strict_public_imports": attr.label(
            default = "//bazel/private:strict_public_imports",
        ),
    } | toolchains.if_legacy_toolchain({
        "_proto_compiler": attr.label(
            cfg = "exec",
            executable = True,
            allow_files = True,
            default = configuration_field("proto", "proto_compiler"),
        ),
    }),  # buildifier: disable=attr-licenses (attribute called licenses)
    fragments = ["proto"],
    provides = [ProtoInfo],
    toolchains = toolchains.use_toolchain(toolchains.PROTO_TOOLCHAIN),
)

@ -0,0 +1,35 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""
A helper rule that reads a native boolean flag.
"""
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
def _impl(ctx):
    # Surface the select()-resolved boolean `value` attribute as a
    # BuildSettingInfo, mimicking a bazel_skylib bool_flag so consumers can
    # read native and Starlark flags uniformly.
    return [BuildSettingInfo(value = ctx.attr.value)]
# Internal rule that simply forwards its `value` attribute (already resolved
# through a select() by the macro below) as a BuildSettingInfo provider.
_native_bool_flag_rule = rule(
    implementation = _impl,
    attrs = {"value": attr.bool()},
)
def native_bool_flag(*, name, flag, match_value = "true", result = True, **kwargs):
    """Exposes the state of a native command-line flag as a BuildSettingInfo.

    A private config_setting matches `flag` against `match_value`; the wrapped
    rule's value resolves to `result` when it matches and to `not result`
    otherwise.
    """
    setting_name = name + "_setting"

    native.config_setting(
        name = setting_name,
        values = {flag: match_value},
        visibility = ["//visibility:private"],
    )

    _native_bool_flag_rule(
        name = name,
        value = select({
            setting_name: result,
            "//conditions:default": not result,
        }),
        **kwargs
    )

@ -0,0 +1,50 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Vendored version of bazel_features for protobuf, to keep a one-step setup"""
# Template for the generated features.bzl; the two placeholders are filled in
# by the repository rule below based on the version of the running Bazel.
_PROTO_BAZEL_FEATURES = """bazel_features = struct(
    proto = struct(
        starlark_proto_info = {starlark_proto_info},
    ),
    globals = struct(
        PackageSpecificationInfo = {PackageSpecificationInfo},
    ),
)
"""
def _proto_bazel_features_impl(rctx):
    """Writes a features.bzl describing the capabilities of the running Bazel."""

    # An empty string is treated as a "dev version", which is greater than anything.
    version = native.bazel_version or "999999.999999.999999"
    segments = version.split("-")[0].split(".")
    if len(segments) != 3:
        fail("invalid Bazel version '{}': got {} dot-separated segments, want 3".format(version, len(segments)))
    major = int(segments[0])
    minor = int(segments[1])

    # starlark_proto_info: Bazel >= 7; PackageSpecificationInfo: Bazel >= 6.4.
    has_starlark_proto_info = major >= 7
    has_package_specification_info = major > 6 or (major == 6 and minor >= 4)

    rctx.file("BUILD.bazel", """
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
bzl_library(
    name = "features",
    srcs = ["features.bzl"],
    visibility = ["//visibility:public"],
)
exports_files(["features.bzl"])
""")
    rctx.file("features.bzl", _PROTO_BAZEL_FEATURES.format(
        starlark_proto_info = repr(has_starlark_proto_info),
        PackageSpecificationInfo = "PackageSpecificationInfo" if has_package_specification_info else "None",
    ))
# Repository rule that vendors a minimal bazel_features-style repository, so
# protobuf consumers get a one-step setup without the real bazel_features dep.
proto_bazel_features = repository_rule(
    implementation = _proto_bazel_features_impl,
    # Force reruns on server restarts to keep native.bazel_version up-to-date.
    local = True,
)

@ -0,0 +1,155 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Implementation of the proto_lang_toolchain rule."""
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
def _rule_impl(ctx):
    """Builds a ProtoLangToolchainInfo and returns it both wrapped in
    ToolchainInfo (for toolchain resolution) and directly (legacy access)."""
    command_line = ctx.attr.command_line
    if "$(PLUGIN_OUT)" in command_line:
        fail("in attribute 'command_line': Placeholder '$(PLUGIN_OUT)' is not supported.")

    # proto_common.compile substitutes the output location through %s.
    out_format = command_line.replace("$(OUT)", "%s")

    plugin = ctx.attr.plugin[DefaultInfo].files_to_run if ctx.attr.plugin != None else None

    # Sources already linked into the language runtime; no code is generated
    # for these by LANG_proto_library rules.
    provided_sources = depset(transitive = [
        dep[ProtoInfo]._transitive_proto_sources
        for dep in ctx.attr.blacklisted_protos
    ]).to_list()

    if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        resolved = ctx.toolchains[toolchains.PROTO_TOOLCHAIN].proto
        compiler = resolved.proto_compiler
        protoc_opts = resolved.protoc_opts
    else:
        compiler = ctx.attr._proto_compiler.files_to_run
        protoc_opts = ctx.fragments.proto.experimental_protoc_opts

    # Incremental-rollout override of the compiler; see the attribute doc.
    if ctx.attr.protoc_minimal_do_not_use:
        compiler = ctx.attr.protoc_minimal_do_not_use.files_to_run

    toolchain_info = ProtoLangToolchainInfo(
        out_replacement_format_flag = out_format,
        output_files = ctx.attr.output_files,
        plugin_format_flag = ctx.attr.plugin_format_flag,
        plugin = plugin,
        runtime = ctx.attr.runtime,
        provided_proto_sources = provided_sources,
        proto_compiler = compiler,
        protoc_opts = protoc_opts,
        progress_message = ctx.attr.progress_message,
        mnemonic = ctx.attr.mnemonic,
        allowlist_different_package = ctx.attr.allowlist_different_package,
        toolchain_type = ctx.attr.toolchain_type.label if ctx.attr.toolchain_type else None,
    )
    return [
        DefaultInfo(files = depset(), runfiles = ctx.runfiles()),
        platform_common.ToolchainInfo(proto = toolchain_info),
        # TODO: remove when --incompatible_enable_proto_toolchains is flipped and removed
        toolchain_info,
    ]
proto_lang_toolchain = rule(
    _rule_impl,
    doc = """
<p>If using Bazel, please load the rule from <a href="https://github.com/bazelbuild/rules_proto">
https://github.com/bazelbuild/rules_proto</a>.
<p>Specifies how a LANG_proto_library rule (e.g., <code>java_proto_library</code>) should invoke the
proto-compiler.
Some LANG_proto_library rules allow specifying which toolchain to use using command-line flags;
consult their documentation.
<p>Normally you should not write those kind of rules unless you want to
tune your Java compiler.
<p>There's no compiler. The proto-compiler is taken from the proto_library rule we attach to. It is
passed as a command-line flag to Blaze.
Several features require a proto-compiler to be invoked on the proto_library rule itself.
It's beneficial to enforce the compiler that LANG_proto_library uses is the same as the one
<code>proto_library</code> does.
<h4>Examples</h4>
<p>A simple example would be:
<pre><code class="lang-starlark">
proto_lang_toolchain(
    name = "javalite_toolchain",
    command_line = "--javalite_out=shared,immutable:$(OUT)",
    plugin = ":javalite_plugin",
    runtime = ":protobuf_lite",
)
</code></pre>
""",
    attrs = {
        "progress_message": attr.string(default = "Generating proto_library %{label}", doc = """
This value will be set as the progress message on protoc action."""),
        "mnemonic": attr.string(default = "GenProto", doc = """
This value will be set as the mnemonic on protoc action."""),
        "command_line": attr.string(mandatory = True, doc = """
This value will be passed to proto-compiler to generate the code. Only include the parts
specific to this code-generator/plugin (e.g., do not include -I parameters)
<ul>
<li><code>$(OUT)</code> is LANG_proto_library-specific. The rules are expected to define
how they interpret this variable. For Java, for example, $(OUT) will be replaced with
the src-jar filename to create.</li>
</ul>"""),
        # Doc fix: "legacy" is an accepted value (and the default), so list it.
        "output_files": attr.string(values = ["single", "multiple", "legacy"], default = "legacy", doc = """
Controls how <code>$(OUT)</code> in <code>command_line</code> is formatted, either by
a path to a single file or output directory in case of multiple files.
Possible values are: "single", "multiple", "legacy" (the default)."""),
        "plugin_format_flag": attr.string(doc = """
If provided, this value will be passed to proto-compiler to use the plugin.
The value must contain a single %s which is replaced with plugin executable.
<code>--plugin=protoc-gen-PLUGIN=&lt;executable&gt;.</code>"""),
        "plugin": attr.label(
            executable = True,
            cfg = "exec",
            doc = """
If provided, will be made available to the action that calls the proto-compiler, and will be
passed to the proto-compiler:
<code>--plugin=protoc-gen-PLUGIN=&lt;executable&gt;.</code>""",
        ),
        "runtime": attr.label(doc = """
A language-specific library that the generated code is compiled against.
The exact behavior is LANG_proto_library-specific.
Java, for example, should compile against the runtime."""),
        "blacklisted_protos": attr.label_list(
            providers = [ProtoInfo],
            doc = """
No code will be generated for files in the <code>srcs</code> attribute of
<code>blacklisted_protos</code>.
This is used for .proto files that are already linked into proto runtimes, such as
<code>any.proto</code>.""",
        ),
        # TODO: add doc
        "allowlist_different_package": attr.label(
            cfg = "exec",
            providers = [bazel_features.globals.PackageSpecificationInfo] if bazel_features.globals.PackageSpecificationInfo else [],
        ),
        # TODO: add doc
        "toolchain_type": attr.label(),
        # DO NOT USE. For Protobuf incremental changes only: b/305068148.
        "protoc_minimal_do_not_use": attr.label(
            cfg = "exec",
            executable = True,
        ),
        # Legacy mode only (toolchain resolution disabled): protoc is wired in
        # through an implicit attribute backed by the proto fragment.
    } | ({} if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION else {
        "_proto_compiler": attr.label(
            cfg = "exec",
            executable = True,
            allow_files = True,
            default = configuration_field("proto", "proto_compiler"),
        ),
    }),
    provides = [ProtoLangToolchainInfo],
    fragments = ["proto"],
    toolchains = toolchains.use_toolchain(toolchains.PROTO_TOOLCHAIN), # Used to obtain protoc
)

@ -1,13 +1,16 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""A Starlark implementation of the proto_toolchain rule."""
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
def _impl(ctx):
kwargs = {}
if getattr(proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False):
kwargs["toolchain_type"] = "@rules_proto//proto:toolchain_type"
return [
DefaultInfo(
files = depset(),
@ -23,7 +26,7 @@ def _impl(ctx):
protoc_opts = ctx.fragments.proto.experimental_protoc_opts,
progress_message = ctx.attr.progress_message,
mnemonic = ctx.attr.mnemonic,
**kwargs
toolchain_type = toolchains.PROTO_TOOLCHAIN,
),
),
]

@ -0,0 +1,49 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""
Toolchain helpers.
The helpers here should be used for a migration to toolchain in proto rules.
Anybody that needs them in another repository should copy them, because after
the migration is finished, the helpers can be removed.
"""
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo")
load("//bazel/private:native.bzl", "native_proto_common")
_incompatible_toolchain_resolution = getattr(native_proto_common, "INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION", False)
def _find_toolchain(ctx, legacy_attr, toolchain_type):
    """Returns a ProtoLangToolchainInfo, via resolution or a legacy attribute."""
    if not _incompatible_toolchain_resolution:
        # Legacy mode: the toolchain target is wired in via an implicit attribute.
        return getattr(ctx.attr, legacy_attr)[ProtoLangToolchainInfo]
    resolved = ctx.toolchains[toolchain_type]
    if not resolved:
        fail("No toolchains registered for '%s'." % toolchain_type)
    return resolved.proto
def _use_toolchain(toolchain_type):
    """Returns an optional toolchain request, or [] in legacy mode."""
    if not _incompatible_toolchain_resolution:
        return []
    return [config_common.toolchain_type(toolchain_type, mandatory = False)]
def _if_legacy_toolchain(legacy_attr_dict):
    """Returns the given attrs only while legacy (non-resolution) mode is active."""
    return {} if _incompatible_toolchain_resolution else legacy_attr_dict
# Public helper bundle; see the module docstring. PROTO_TOOLCHAIN is protobuf's
# own toolchain type, used once proto toolchain resolution is enabled.
toolchains = struct(
    use_toolchain = _use_toolchain,
    find_toolchain = _find_toolchain,
    if_legacy_toolchain = _if_legacy_toolchain,
    INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = _incompatible_toolchain_resolution,
    PROTO_TOOLCHAIN = "//bazel/private:proto_toolchain_type",
)

@ -1,3 +1,20 @@
"""proto_library rule"""
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
"""
Macro of proto_library rule.
"""
proto_library = native.proto_library
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/private:bazel_proto_library_rule.bzl", _proto_library = "proto_library")
def proto_library(**kwattrs):
    """Declares a proto_library, selecting the Starlark or native implementation.

    Bazel >= 7.0.0 exposes ProtoInfo to Starlark, so the Starlark rule is used
    there; older Bazels keep the native rule to avoid a ProtoInfo mismatch.
    """
    if not bazel_features.proto.starlark_proto_info:
        native.proto_library(**kwattrs)
    else:
        _proto_library(**kwattrs)

@ -3,8 +3,9 @@
load("@rules_python//python:py_info.bzl", "PyInfo")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/common:proto_info.bzl", "ProtoInfo")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
PY_PROTO_TOOLCHAIN = "@rules_python//python/proto:toolchain_type"
_PY_PROTO_TOOLCHAIN = Label("//bazel/private:python_toolchain_type")
_PyProtoInfo = provider(
doc = "Encapsulates information needed by the Python proto rules.",
@ -22,9 +23,6 @@ _PyProtoInfo = provider(
def _filter_provider(provider, *attrs):
    """Collects `provider` from every dep, across all attrs, that carries it."""
    found = []
    for attr in attrs:
        for dep in attr:
            if provider in dep:
                found.append(dep[provider])
    return found
def _incompatible_toolchains_enabled():
return getattr(proto_common, "INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION", False)
def _py_proto_aspect_impl(target, ctx):
"""Generates and compiles Python code for a proto_library.
@ -51,10 +49,10 @@ def _py_proto_aspect_impl(target, ctx):
proto.path,
))
if _incompatible_toolchains_enabled():
toolchain = ctx.toolchains[PY_PROTO_TOOLCHAIN]
if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
toolchain = ctx.toolchains[_PY_PROTO_TOOLCHAIN]
if not toolchain:
fail("No toolchains registered for '%s'." % PY_PROTO_TOOLCHAIN)
fail("No toolchains registered for '%s'." % _PY_PROTO_TOOLCHAIN)
proto_lang_toolchain_info = toolchain.proto
else:
proto_lang_toolchain_info = getattr(ctx.attr, "_aspect_proto_toolchain")[proto_common.ProtoLangToolchainInfo]
@ -120,15 +118,15 @@ def _py_proto_aspect_impl(target, ctx):
_py_proto_aspect = aspect(
implementation = _py_proto_aspect_impl,
attrs = {} if _incompatible_toolchains_enabled() else {
attrs = toolchains.if_legacy_toolchain({
"_aspect_proto_toolchain": attr.label(
default = "//python:python_toolchain",
),
},
}),
attr_aspects = ["deps"],
required_providers = [ProtoInfo],
provides = [_PyProtoInfo],
toolchains = [PY_PROTO_TOOLCHAIN] if _incompatible_toolchains_enabled() else [],
toolchains = toolchains.use_toolchain(_PY_PROTO_TOOLCHAIN),
)
def _py_proto_library_rule(ctx):

@ -0,0 +1,3 @@
load(":proto_common_compile_tests.bzl", "proto_common_compile_test_suite")
proto_common_compile_test_suite(name = "proto_common_compile_test_suite")

@ -0,0 +1,368 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Tests for `proto_common.compile` function."""
load("@rules_testing//lib:analysis_test.bzl", "analysis_test", "test_suite")
load("@rules_testing//lib:truth.bzl", "matching")
load("@rules_testing//lib:util.bzl", "util")
load("//bazel:proto_library.bzl", "proto_library")
load("//bazel/tests/testdata:compile_rule.bzl", "compile_rule")
protocol_compiler = "/protoc"
def proto_common_compile_test_suite(name):
    """Declares the shared proto_library fixture and wires up all compile tests."""
    util.helper_target(
        proto_library,
        name = "simple_proto",
        srcs = ["A.proto"],
    )
    test_suite(
        name = name,
        tests = [
            _test_compile_basic,
            _test_compile_noplugin,
            _test_compile_with_plugin_output,
            _test_compile_with_directory_plugin_output,
            _test_compile_additional_args,
            _test_compile_additional_tools,
            _test_compile_additional_tools_no_plugin,
            _test_compile_additional_inputs,
            _test_compile_resource_set,
            _test_compile_protoc_opts,
            _test_compile_direct_generated_protos,
            _test_compile_indirect_generated_protos,
        ],
    )
# Verifies basic usage of `proto_common.compile`.
def _test_compile_basic(name):
    """Compiles the fixture proto with all toolchain defaults."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_basic_impl,
    )
def _test_compile_basic_impl(env, target):
    # Expect exactly: the compiler, the plugin flag, the import root, the source.
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
    action.mnemonic().equals("MyMnemonic")
# Verifies usage of proto_common.generate_code with no plugin specified by toolchain.
def _test_compile_noplugin(name):
    """Compiles with a toolchain that declares no plugin."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        toolchain = "//bazel/tests/testdata:toolchain_noplugin",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_noplugin_impl,
    )
def _test_compile_noplugin_impl(env, target):
    # No --plugin flag should appear in the command line.
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
# Verifies usage of `proto_common.compile` with `plugin_output` parameter set to file.
def _test_compile_with_plugin_output(name):
    """Compiles with `plugin_output` pointing at a single file."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        plugin_output = "single",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_with_plugin_output_impl,
    )
def _test_compile_with_plugin_output_impl(env, target):
    # --java_out should end with the declared single-file output path.
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--java_out=param1,param2:b*-out/*/test_compile_with_plugin_output_compile"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
# Verifies usage of `proto_common.compile` with `plugin_output` parameter set to file.
def _test_compile_with_directory_plugin_output(name):
    """Compiles with `plugin_output` pointing at an output directory."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        plugin_output = "multiple",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_with_directory_plugin_output_impl,
    )
def _test_compile_with_directory_plugin_output_impl(env, target):
    # --java_out should end with the bin directory rather than a file path.
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--java_out=param1,param2:b*-out/*/bin"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
# Verifies usage of `proto_common.compile` with `additional_args` parameter
def _test_compile_additional_args(name):
    """Compiles with extra command-line arguments."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_args = ["--a", "--b"],
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_args_impl,
    )
def _test_compile_additional_args_impl(env, target):
    # Both extra flags must be forwarded verbatim.
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.equals_wrapper("--a"),
            matching.equals_wrapper("--b"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
# Verifies usage of `proto_common.compile` with `additional_tools` parameter
def _test_compile_additional_tools(name):
    """Compiles with extra tool dependencies on the action."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_tools = [
            "//bazel/tests/testdata:_tool1",
            "//bazel/tests/testdata:_tool2",
        ],
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_tools_impl,
    )
def _test_compile_additional_tools_impl(env, target):
    # Both tools and the toolchain plugin must be action inputs.
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.inputs().contains_at_least_predicates(
        [
            matching.file_basename_equals("_tool1"),
            matching.file_basename_equals("_tool2"),
            matching.file_basename_equals("plugin"),
        ],
    )
# Verifies usage of `proto_common.compile` with `additional_tools` parameter and no plugin on the toolchain.
def _test_compile_additional_tools_no_plugin(name):
    """Compiles with extra tools on a toolchain that has no plugin."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_tools = [
            "//bazel/tests/testdata:_tool1",
            "//bazel/tests/testdata:_tool2",
        ],
        toolchain = "//bazel/tests/testdata:toolchain_noplugin",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_tools_no_plugin_impl,
    )
def _test_compile_additional_tools_no_plugin_impl(env, target):
    # Tools are present, but no plugin input should exist.
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.inputs().contains_at_least_predicates(
        [
            matching.file_basename_equals("_tool1"),
            matching.file_basename_equals("_tool2"),
        ],
    )
    action.inputs().not_contains_predicate(matching.file_basename_equals("plugin"))
# Verifies usage of `proto_common.compile` with `additional_inputs` parameter.
def _test_compile_additional_inputs(name):
    """Compiles with extra (non-tool) input files."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_inputs = ["input1.txt", "input2.txt"],
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_inputs_impl,
    )
def _test_compile_additional_inputs_impl(env, target):
    # Both extra files must be action inputs.
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.inputs().contains_at_least_predicates(
        [
            matching.file_basename_equals("input1.txt"),
            matching.file_basename_equals("input2.txt"),
        ],
    )
# Verifies usage of `proto_common.compile` with `additional_tools` parameter and no plugin on the toolchain.
def _test_compile_resource_set(name):
    """Compiles with a resource_set callback attached to the action."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        use_resource_set = True,
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_resource_set_impl,
    )
def _test_compile_resource_set_impl(env, target):
    action = env.expect.that_target(target).action_named("MyMnemonic") # @unused
    # We can't check the specification of the resource set, but we at least verify analysis passes
# Verifies `--protocopts` are passed to command line.
def _test_compile_protoc_opts(name):
    """Compiles under --protocopt command-line options."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        config_settings = {"//command_line_option:protocopt": ["--foo", "--bar"]},
        impl = _test_compile_protoc_opts_impl,
    )
def _test_compile_protoc_opts_impl(env, target):
    # The --protocopt values must be forwarded to the protoc command line.
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.equals_wrapper("--foo"),
            matching.equals_wrapper("--bar"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
# Verifies `proto_common.compile`> correctly handles direct generated `.proto` files.
def _test_compile_direct_generated_protos(name):
    """Compiles a proto_library that directly lists a generated .proto source."""
    util.helper_target(native.genrule, name = name + "_generate_G", cmd = "", outs = ["G.proto"])
    util.helper_target(
        proto_library,
        name = name + "_directly_generated_proto",
        srcs = ["A.proto", "G.proto"],
    )
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = name + "_directly_generated_proto",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_direct_generated_protos_impl,
    )
def _test_compile_direct_generated_protos_impl(env, target):
    # A bin-dir import root (-Ib*-out/...) must be added for the generated source.
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.str_matches("-Ib*-out/*/*"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
            matching.str_matches("*-out/*/*/*/G.proto"),
        ],
    )
# Verifies usage of `proto_common.compile` with `plugin_output` parameter
def _test_compile_indirect_generated_protos(name):
    """Compiles a proto_library whose dependency contains a generated .proto."""
    util.helper_target(native.genrule, name = "_generate_h", srcs = ["A.txt"], cmd = "", outs = ["H.proto"])
    util.helper_target(proto_library, name = "_generated_proto", srcs = ["H.proto"])
    util.helper_target(
        proto_library,
        name = name + "_indirectly_generated_proto",
        srcs = ["A.proto"],
        deps = [":_generated_proto"],
    )
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = name + "_indirectly_generated_proto",
    )
    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_indirect_generated_protos_impl,
    )
def _test_compile_indirect_generated_protos_impl(env, target):
    # The bin-dir import root appears, but the generated file itself is not a direct source.
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.str_matches("-Ib*-out/*/*"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )

@ -0,0 +1,130 @@
package(default_visibility = ["//visibility:public"])
proto_lang_toolchain(
name = "toolchain",
blacklisted_protos = [":denied"],
command_line = "--java_out=param1,param2:$(OUT)",
mnemonic = "MyMnemonic",
plugin = ":plugin",
plugin_format_flag = "--plugin=%s",
progress_message = "Progress Message %{label}",
runtime = ":runtime",
tags = [
"manual",
"nobuilder",
"notap",
],
)
proto_lang_toolchain(
name = "toolchain_noplugin",
blacklisted_protos = [":denied"],
command_line = "--java_out=param1,param2:$(OUT)",
mnemonic = "MyMnemonic",
progress_message = "Progress Message %{label}",
runtime = ":runtime",
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_binary(
name = "plugin",
srcs = ["plugin.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_library(
name = "runtime",
srcs = ["runtime.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
filegroup(
name = "descriptors",
srcs = [
"descriptor.proto",
"metadata.proto",
],
tags = [
"manual",
"nobuilder",
"notap",
],
)
filegroup(
name = "any",
srcs = ["any.proto"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
filegroup(
name = "something",
srcs = ["something.proto"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
proto_library(
name = "mixed",
srcs = [
":descriptors",
":something",
],
tags = [
"manual",
"nobuilder",
"notap",
],
)
proto_library(
name = "denied",
srcs = [
":any",
":descriptors",
],
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_binary(
name = "_tool1",
srcs = ["tool1.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)
cc_binary(
name = "_tool2",
srcs = ["tool2.cc"],
tags = [
"manual",
"nobuilder",
"notap",
],
)

@ -0,0 +1,57 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Testing function for proto_common module"""
load("//bazel/common:proto_common.bzl", "proto_common")
def _resource_set_callback(_os, inputs_size):
    """Example resource estimator: memory grows with input count, one CPU."""
    memory_mb = 25 + 0.15 * inputs_size
    return {"memory": memory_mb, "cpu": 1}
def _impl(ctx):
    """Invokes proto_common.compile, forwarding only the options set on the rule."""
    output = ctx.actions.declare_file(ctx.attr.name)

    extra = {}

    # Map the symbolic plugin_output mode onto a concrete path.
    mode = ctx.attr.plugin_output
    if mode == "single":
        extra["plugin_output"] = output.path
    elif mode == "multiple":
        extra["plugin_output"] = ctx.bin_dir.path
    elif mode == "wrong":
        extra["plugin_output"] = ctx.bin_dir.path + "///"

    if ctx.attr.additional_args:
        args = ctx.actions.args()
        args.add_all(ctx.attr.additional_args)
        extra["additional_args"] = args
    if ctx.files.additional_tools:
        extra["additional_tools"] = ctx.files.additional_tools
    if ctx.files.additional_inputs:
        extra["additional_inputs"] = depset(ctx.files.additional_inputs)
    if ctx.attr.use_resource_set:
        extra["resource_set"] = _resource_set_callback
    if ctx.attr.progress_message:
        extra["experimental_progress_message"] = ctx.attr.progress_message

    proto_common.compile(
        ctx.actions,
        ctx.attr.proto_dep[ProtoInfo],
        ctx.attr.toolchain[proto_common.ProtoLangToolchainInfo],
        [output],
        **extra
    )
    return [DefaultInfo(files = depset([output]))]
# Test-only rule exposing proto_common.compile's parameters as attributes.
compile_rule = rule(
    _impl,
    attrs = {
        "proto_dep": attr.label(),
        # "single", "multiple", or "wrong"; any other value omits plugin_output.
        "plugin_output": attr.string(),
        "toolchain": attr.label(default = ":toolchain"),
        "additional_args": attr.string_list(),
        "additional_tools": attr.label_list(cfg = "exec"),
        "additional_inputs": attr.label_list(allow_files = True),
        "use_resource_set": attr.bool(),
        "progress_message": attr.string(),
    },
)

@ -8,6 +8,7 @@ bzl_library(
visibility = ["//visibility:public"],
deps = [
"//bazel/private:proto_toolchain_rule_bzl",
"//bazel/private:toolchain_helpers_bzl",
],
)
@ -19,5 +20,15 @@ bzl_library(
visibility = ["//visibility:public"],
deps = [
"//bazel/common:proto_common_bzl",
"//bazel/private:proto_lang_toolchain_rule_bzl",
"@proto_bazel_features//:features",
],
)
filegroup(
name = "bazel_osx_p4deps",
srcs = glob(["**"]),
visibility = [
"//bazel:__pkg__",
],
)

@ -1,6 +1,15 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""proto_lang_toolchain rule"""
load("@proto_bazel_features//:features.bzl", "bazel_features")
load("//bazel/common:proto_common.bzl", "proto_common")
load("//bazel/private:proto_lang_toolchain_rule.bzl", _proto_lang_toolchain_rule = "proto_lang_toolchain")
def proto_lang_toolchain(*, name, toolchain_type = None, exec_compatible_with = [], target_compatible_with = [], **attrs):
"""Creates a proto_lang_toolchain and corresponding toolchain target.
@ -21,8 +30,12 @@ def proto_lang_toolchain(*, name, toolchain_type = None, exec_compatible_with =
if getattr(proto_common, "INCOMPATIBLE_PASS_TOOLCHAIN_TYPE", False):
attrs["toolchain_type"] = toolchain_type
# buildifier: disable=native-proto
native.proto_lang_toolchain(name = name, **attrs)
# This condition causes Starlark rules to be used only on Bazel >=7.0.0
if bazel_features.proto.starlark_proto_info:
_proto_lang_toolchain_rule(name = name, **attrs)
else:
# On older Bazel versions keep using native rules, so that mismatch in ProtoInfo doesn't happen
native.proto_lang_toolchain(name = name, **attrs)
if toolchain_type:
native.toolchain(

@ -1,9 +1,17 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2024 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
#
"""Macro wrapping the proto_toolchain implementation.
The macro additionally creates toolchain target when toolchain_type is given.
"""
load("//bazel/private:proto_toolchain_rule.bzl", _proto_toolchain_rule = "proto_toolchain")
load("//bazel/private:toolchain_helpers.bzl", "toolchains")
def proto_toolchain(*, name, proto_compiler, exec_compatible_with = []):
"""Creates a proto_toolchain and toolchain target for proto_library.
@ -19,7 +27,7 @@ def proto_toolchain(*, name, proto_compiler, exec_compatible_with = []):
native.toolchain(
name = name + "_toolchain",
toolchain_type = "@rules_proto//proto:toolchain_type",
toolchain_type = toolchains.PROTO_TOOLCHAIN,
exec_compatible_with = exec_compatible_with,
target_compatible_with = [],
toolchain = name,

@ -1,6 +1,24 @@
"""Load dependencies needed to compile the protobuf library as a 3rd-party consumer."""
"""Load dependencies needed to compile the protobuf library as a 3rd-party consumer.
The consumers should use the following WORKSPACE snippet, which loads dependencies
and sets up the repositories protobuf needs:
```
http_archive(
name = "protobuf",
strip_prefix = "protobuf-VERSION",
sha256 = ...,
url = ...,
)
load("@protobuf//:protobuf_deps.bzl", "protobuf_deps")
protobuf_deps()
```
"""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("//bazel/private:proto_bazel_features.bzl", "proto_bazel_features") # buildifier: disable=bzl-visibility
load("//python/dist:python_downloads.bzl", "python_nuget_package", "python_source_archive")
load("//python/dist:system_python.bzl", "system_python")
@ -33,11 +51,11 @@ def protobuf_deps():
if not native.existing_rule("bazel_skylib"):
http_archive(
name = "bazel_skylib",
sha256 = "d00f1389ee20b60018e92644e0948e16e350a7707219e7a390fb0a99b6ec9262",
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.7.0/bazel-skylib-1.7.0.tar.gz",
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.7.0/bazel-skylib-1.7.0.tar.gz",
],
sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
)
if not native.existing_rule("com_google_absl"):
@ -86,16 +104,8 @@ def protobuf_deps():
sha256 = "469b7f3b580b4fcf8112f4d6d0d5a4ce8e1ad5e21fee67d8e8335d5f8b3debab",
)
# TODO: remove after toolchain types are moved to protobuf
if not native.existing_rule("rules_proto"):
http_archive(
name = "rules_proto",
sha256 = "dc3fb206a2cb3441b485eb1e423165b231235a1ea9b031b4433cf7bc1fa460dd",
strip_prefix = "rules_proto-5.3.0-21.7",
urls = [
"https://github.com/bazelbuild/rules_proto/archive/refs/tags/5.3.0-21.7.tar.gz",
],
)
if not native.existing_rule("proto_bazel_features"):
proto_bazel_features(name = "proto_bazel_features")
if not native.existing_rule("rules_python"):
http_archive(

Loading…
Cancel
Save