parent
8aa4407664
commit
373af8ac98
389 changed files with 18540 additions and 9532 deletions
@ -0,0 +1,40 @@ |
||||
# This is a WORKSPACE file used by bzlmod in combination with MODULE.bazel.
# It's used for a gradual migration and it should be empty.
# Don't remove this file. If the file doesn't exist, bzlmod falls back to WORKSPACE file.

load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

# TODO: either replace rules_ruby with a maintained version on BCR
# or use bzlmod extensions to depend on this specific repo
http_archive(
    name = "rules_ruby",
    urls = [
        "https://github.com/protocolbuffers/rules_ruby/archive/b7f3e9756f3c45527be27bc38840d5a1ba690436.zip"
    ],
    strip_prefix = "rules_ruby-b7f3e9756f3c45527be27bc38840d5a1ba690436",
    sha256 = "347927fd8de6132099fcdc58e8f7eab7bde4eb2fd424546b9cd4f1c6f8f8bad8",
)

load("@rules_ruby//ruby:defs.bzl", "ruby_runtime")

# Registers the host's Ruby installation as the toolchain for rules_ruby.
ruby_runtime("system_ruby")

register_toolchains("@system_ruby//:toolchain")

# Following are just needed to run conformance tests, not really needed to support them via MODULE.bazel

# For testing runtime against old gencode from a previous major version.
http_archive(
    name = "com_google_protobuf_v25.0",
    strip_prefix = "protobuf-25.0",
    url = "https://github.com/protocolbuffers/protobuf/releases/download/v25.0/protobuf-25.0.tar.gz",
)

# Needed as a dependency of @com_google_protobuf_v25.x, which was before
# utf8_range was merged in.
http_archive(
    name = "utf8_range",
    strip_prefix = "utf8_range-d863bc33e15cba6d873c878dcca9e6fe52b2f8cb",
    url = "https://github.com/protocolbuffers/utf8_range/archive/d863bc33e15cba6d873c878dcca9e6fe52b2f8cb.zip",
)
|
@ -1,5 +1,350 @@ |
||||
"""proto_common""" |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
"""Definition of proto_common module, together with bazel providers for proto rules.""" |
||||
|
||||
load("//bazel/private:native.bzl", "native_proto_common") |
||||
load("@proto_bazel_features//:features.bzl", "bazel_features") |
||||
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo") |
||||
load("//bazel/private:toolchain_helpers.bzl", "toolchains") |
||||
|
||||
proto_common = native_proto_common |
||||
def _import_virtual_proto_path(path):
    """Returns a protoc `-I` flag for a virtual-imports root, or None otherwise.

    Virtual import roots are of the form:
    'bazel-out/k8-fastbuild/bin/external/foo/e/_virtual_imports/e' or
    'bazel-out/foo/k8-fastbuild/bin/e/_virtual_imports/e'
    i.e. they are more than 4 directory separators deep.
    """
    depth = path.count("/")
    if depth <= 4:
        return None
    return "-I%s" % path
||||
|
||||
def _import_repo_proto_path(path):
    """Returns a protoc `-I` flag for generated files in external repositories.

    Matching paths are of the form:
    'bazel-out/k8-fastbuild/bin/external/foo' or
    'bazel-out/foo/k8-fastbuild/bin'
    i.e. between 3 and 5 path segments (2 < separators <= 4).
    """
    depth = path.count("/")
    return "-I%s" % path if (depth > 2 and depth <= 4) else None
||||
|
||||
def _import_main_output_proto_path(path):
    """Returns a protoc `-I` flag for shallow generated/external roots, or None.

    Matching paths are of the form:
    'bazel-out/k8-fastbuild/bin'
    'external/foo'
    '../foo'
    The workspace root '.' is excluded; the caller always appends it last.
    """
    if path == "." or path.count("/") > 2:
        return None
    return "-I%s" % path
||||
|
||||
def _remove_repo(file):
    """Removes the `../repo/` prefix from a path, e.g. `../repo/package/path -> package/path`."""
    path = file.short_path
    root = file.owner.workspace_root
    if not root:
        return path
    if root.startswith("external/"):
        # short_path spells external repos as `../repo`, while workspace_root
        # may spell them as `external/repo`; normalize before stripping.
        root = "../" + root.removeprefix("external/")
    return path.removeprefix(root + "/")
||||
|
||||
def _get_import_path(proto_file):
    """Returns the import path of a .proto file.

    This is the path as used for the file that can be used in an `import`
    statement in another .proto file.

    Args:
        proto_file: (File) The .proto file

    Returns:
        (str) import path
    """
    path = _remove_repo(proto_file)
    marker = "_virtual_imports/"
    start = path.find(marker)
    if start < 0:
        return path

    # Strip everything up to and including the virtual-imports root directory.
    root_end = path.find("/", start + len(marker))
    return path[root_end + 1:]
||||
|
||||
def _output_directory(proto_info, root):
    """Computes the protoc output directory for `proto_info` under output root `root`."""
    source_root = proto_info.proto_source_root
    if source_root.startswith(root.path):
        # TODO: remove this branch when bin_dir is removed from proto_source_root
        source_root = source_root.removeprefix(root.path).removeprefix("/")

    if source_root in ("", "."):
        return root.path

    return "%s/%s" % (root.path, source_root)
||||
|
||||
def _check_collocated(label, proto_info, proto_lang_toolchain_info):
    """Checks if lang_proto_library is collocated with proto_library.

    Exceptions are allowed by an allowlist defined on `proto_lang_toolchain` and
    on an allowlist defined on `proto_library`'s `allow_exports` attribute.

    If checks are not successful the function fails.

    Args:
        label: (Label) The label of lang_proto_library
        proto_info: (ProtoInfo) The ProtoInfo from the proto_library dependency.
        proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
            Obtained from a `proto_lang_toolchain` target.
    """
    _PackageSpecificationInfo = bazel_features.globals.PackageSpecificationInfo
    if not _PackageSpecificationInfo:
        # PackageSpecificationInfo only exists from Bazel 6.4.0 on; without it
        # allowlists cannot be evaluated, so fail only if an allowlist is set.
        if proto_lang_toolchain_info.allowlist_different_package or getattr(proto_info, "allow_exports", None):
            fail("Allowlist checks not supported before Bazel 6.4.0")
        return

    # Toolchain-level allowlist: a non-collocated target must be listed in it.
    if (proto_info.direct_descriptor_set.owner.package != label.package and
        proto_lang_toolchain_info.allowlist_different_package):
        if not proto_lang_toolchain_info.allowlist_different_package[_PackageSpecificationInfo].contains(label):
            fail(("lang_proto_library '%s' may only be created in the same package " +
                  "as proto_library '%s'") % (label, proto_info.direct_descriptor_set.owner))

    # proto_library-level allowlist (`allow_exports`): the same check applies.
    if (proto_info.direct_descriptor_set.owner.package != label.package and
        hasattr(proto_info, "allow_exports")):
        if not proto_info.allow_exports[_PackageSpecificationInfo].contains(label):
            fail(("lang_proto_library '%s' may only be created in the same package " +
                  "as proto_library '%s'") % (label, proto_info.direct_descriptor_set.owner))
||||
|
||||
def _compile(
        actions,
        proto_info,
        proto_lang_toolchain_info,
        generated_files,
        plugin_output = None,
        additional_args = None,
        additional_tools = [],
        additional_inputs = depset(),
        resource_set = None,
        experimental_exec_group = None,
        experimental_progress_message = None,
        experimental_output_files = "legacy"):
    """Creates proto compile action for compiling *.proto files to language specific sources.

    Args:
        actions: (ActionFactory) Obtained by ctx.actions, used to register the actions.
        proto_info: (ProtoInfo) The ProtoInfo from proto_library to generate the sources for.
        proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
            Obtained from a `proto_lang_toolchain` target or constructed ad-hoc.
        generated_files: (list[File]) The output files generated by the proto compiler.
            Callee needs to declare files using `ctx.actions.declare_file`.
            See also: `proto_common.declare_generated_files`.
        plugin_output: (File|str) Deprecated: Set `proto_lang_toolchain.output_files`
            and remove the parameter.
            For backwards compatibility, when the proto_lang_toolchain isn't updated
            the value is used.
        additional_args: (Args) Additional arguments to add to the action.
            Accepts a ctx.actions.args() object that is added at the beginning
            of the command line.
        additional_tools: (list[File]) Additional tools to add to the action.
        additional_inputs: (Depset[File]) Additional input files to add to the action.
        resource_set: (func) A callback function that is passed to the created action.
            See `ctx.actions.run`, `resource_set` parameter for full definition of
            the callback.
        experimental_exec_group: (str) Sets `exec_group` on proto compile action.
            Avoid using this parameter.
        experimental_progress_message: (str) Overrides progress_message from the toolchain.
            Don't use this parameter. It's only intended for the transition.
        experimental_output_files: (str) Overwrites output_files from the toolchain.
            Don't use this parameter. It's only intended for the transition.
    """
    if type(generated_files) != type([]):
        fail("generated_files is expected to be a list of Files")
    if not generated_files:
        return  # nothing to do
    if experimental_output_files not in ["single", "multiple", "legacy"]:
        fail('experimental_output_files expected to be one of ["single", "multiple", "legacy"]')

    args = actions.args()
    # Spill long command lines to a @params file, one argument per line.
    args.use_param_file(param_file_arg = "@%s")
    args.set_param_file_format("multiline")
    tools = list(additional_tools)

    # The parameter overrides the toolchain's output_files setting during the
    # transition period; "legacy" means "take whatever the toolchain says".
    if experimental_output_files != "legacy":
        output_files = experimental_output_files
    else:
        output_files = getattr(proto_lang_toolchain_info, "output_files", "legacy")
    if output_files != "legacy":
        if proto_lang_toolchain_info.out_replacement_format_flag:
            if output_files == "single":
                if len(generated_files) > 1:
                    fail("generated_files only expected a single file")
                plugin_output = generated_files[0]
            else:
                # "multiple": pass the output directory instead of a file.
                plugin_output = _output_directory(proto_info, generated_files[0].root)

    if plugin_output:
        args.add(plugin_output, format = proto_lang_toolchain_info.out_replacement_format_flag)
    if proto_lang_toolchain_info.plugin:
        tools.append(proto_lang_toolchain_info.plugin)
        args.add(proto_lang_toolchain_info.plugin.executable, format = proto_lang_toolchain_info.plugin_format_flag)

    # Protoc searches for .protos -I paths in order they are given and then
    # uses the path within the directory as the package.
    # This requires ordering the paths from most specific (longest) to least
    # specific ones, so that no path in the list is a prefix of any of the
    # following paths in the list.
    # For example: 'bazel-out/k8-fastbuild/bin/external/foo' needs to be listed
    # before 'bazel-out/k8-fastbuild/bin'. If not, protoc will discover file under
    # the shorter path and use 'external/foo/...' as its package path.
    args.add_all(proto_info.transitive_proto_path, map_each = _import_virtual_proto_path)
    args.add_all(proto_info.transitive_proto_path, map_each = _import_repo_proto_path)
    args.add_all(proto_info.transitive_proto_path, map_each = _import_main_output_proto_path)
    args.add("-I.")  # Needs to come last

    args.add_all(proto_lang_toolchain_info.protoc_opts)

    args.add_all(proto_info.direct_sources)

    if additional_args:
        # Match the param-file settings of the main Args object so both can be
        # spilled consistently.
        additional_args.use_param_file(param_file_arg = "@%s")
        additional_args.set_param_file_format("multiline")

    actions.run(
        mnemonic = proto_lang_toolchain_info.mnemonic,
        progress_message = experimental_progress_message if experimental_progress_message else proto_lang_toolchain_info.progress_message,
        executable = proto_lang_toolchain_info.proto_compiler,
        # additional_args comes first so callers can prepend to the command line.
        arguments = [additional_args, args] if additional_args else [args],
        inputs = depset(transitive = [proto_info.transitive_sources, additional_inputs]),
        outputs = generated_files,
        tools = tools,
        use_default_shell_env = True,
        resource_set = resource_set,
        exec_group = experimental_exec_group,
        toolchain = _toolchain_type(proto_lang_toolchain_info),
    )
||||
|
||||
_BAZEL_TOOLS_PREFIX = "external/bazel_tools/" |
||||
|
||||
def _experimental_filter_sources(proto_info, proto_lang_toolchain_info): |
||||
if not proto_info.direct_sources: |
||||
return [], [] |
||||
|
||||
# Collect a set of provided protos |
||||
provided_proto_sources = proto_lang_toolchain_info.provided_proto_sources |
||||
provided_paths = {} |
||||
for src in provided_proto_sources: |
||||
path = src.path |
||||
|
||||
# For listed protos bundled with the Bazel tools repository, their exec paths start |
||||
# with external/bazel_tools/. This prefix needs to be removed first, because the protos in |
||||
# user repositories will not have that prefix. |
||||
if path.startswith(_BAZEL_TOOLS_PREFIX): |
||||
provided_paths[path[len(_BAZEL_TOOLS_PREFIX):]] = None |
||||
else: |
||||
provided_paths[path] = None |
||||
|
||||
# Filter proto files |
||||
proto_files = proto_info._direct_proto_sources |
||||
excluded = [] |
||||
included = [] |
||||
for proto_file in proto_files: |
||||
if proto_file.path in provided_paths: |
||||
excluded.append(proto_file) |
||||
else: |
||||
included.append(proto_file) |
||||
return included, excluded |
||||
|
||||
def _experimental_should_generate_code(
        proto_info,
        proto_lang_toolchain_info,
        rule_name,
        target_label):
    """Checks if the code should be generated for the given proto_library.

    Generation is skipped only when the toolchain already provides the protos
    to the language through its runtime dependency.

    It fails when the proto_library contains mixed proto files, that should and
    shouldn't generate code.

    Args:
        proto_info: (ProtoInfo) The ProtoInfo from proto_library to check the generation for.
        proto_lang_toolchain_info: (ProtoLangToolchainInfo) The proto lang toolchain info.
            Obtained from a `proto_lang_toolchain` target or constructed ad-hoc.
        rule_name: (str) Name of the rule used in the failure message.
        target_label: (Label) The label of the target used in the failure message.

    Returns:
        (bool) True when the code should be generated.
    """
    included, excluded = _experimental_filter_sources(proto_info, proto_lang_toolchain_info)

    # A mixture of provided and unprovided sources is ambiguous — fail loudly.
    if excluded and included:
        excluded_paths = ", ".join([f.short_path for f in excluded])
        included_paths = ", ".join([f.short_path for f in included])
        fail(("The 'srcs' attribute of '%s' contains protos for which '%s' " +
              "shouldn't generate code (%s), in addition to protos for which it should (%s).\n" +
              "Separate '%s' into 2 proto_library rules.") % (
            target_label,
            rule_name,
            excluded_paths,
            included_paths,
            target_label,
        ))

    return bool(included)
||||
|
||||
def _declare_generated_files(
        actions,
        proto_info,
        extension,
        name_mapper = None):
    """Declares generated files with a specific extension.

    Use this in lang_proto_library-es when protocol compiler generates files
    that correspond to .proto file names.

    The function removes ".proto" extension with given one (e.g. ".pb.cc") and
    declares new output files.

    Args:
        actions: (ActionFactory) Obtained by ctx.actions, used to declare the files.
        proto_info: (ProtoInfo) The ProtoInfo to declare the files for.
        extension: (str) The extension to use for generated files.
        name_mapper: (str->str) A function mapped over the base filename without
            the extension. Used it to replace characters in the name that
            cause problems in a specific programming language.

    Returns:
        (list[File]) The list of declared files.
    """
    outputs = []

    for src in proto_info.direct_sources:
        # Drop the source's extension plus its dot (File.extension has no dot).
        stem = src.basename[:-(len(src.extension) + 1)]
        if name_mapper:
            stem = name_mapper(stem)

        # Note that two proto_library rules can have the same source file, so this is actually a
        # shared action. NB: This can probably result in action conflicts if the proto_library rules
        # are not the same.
        outputs.append(actions.declare_file(stem + extension, sibling = src))

    return outputs
||||
|
||||
def _toolchain_type(proto_lang_toolchain_info):
    """Returns the toolchain_type for the compile action, or None when toolchain resolution is off."""
    if not toolchains.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        return None
    return getattr(proto_lang_toolchain_info, "toolchain_type", None)
||||
|
||||
# Public interface of this module: one struct so call sites read
# `proto_common.compile(...)`, `proto_common.declare_generated_files(...)`, etc.
proto_common = struct(
    compile = _compile,
    declare_generated_files = _declare_generated_files,
    check_collocated = _check_collocated,
    experimental_should_generate_code = _experimental_should_generate_code,
    experimental_filter_sources = _experimental_filter_sources,
    get_import_path = _get_import_path,
    ProtoLangToolchainInfo = ProtoLangToolchainInfo,
    INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = toolchains.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION,
    INCOMPATIBLE_PASS_TOOLCHAIN_TYPE = True,
)
||||
|
@ -1,5 +1,5 @@ |
||||
"""ProtoLangToolchainInfo""" |
||||
|
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load("//bazel/private:native.bzl", "native_proto_common") |
||||
|
||||
ProtoLangToolchainInfo = proto_common.ProtoLangToolchainInfo |
||||
ProtoLangToolchainInfo = native_proto_common.ProtoLangToolchainInfo |
||||
|
@ -0,0 +1,35 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2008 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
""" |
||||
A helper rule that reads a native boolean flag. |
||||
""" |
||||
|
||||
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo") |
||||
|
||||
def _impl(ctx):
    """Republishes the rule's constant `value` attribute as a BuildSettingInfo."""
    flag_value = ctx.attr.value
    return [BuildSettingInfo(value = flag_value)]
||||
|
||||
# Private helper rule whose only job is to expose a precomputed boolean
# (selected in the native_bool_flag macro below) as a BuildSettingInfo.
_native_bool_flag_rule = rule(
    implementation = _impl,
    attrs = {"value": attr.bool()},
)
||||
|
||||
def native_bool_flag(*, name, flag, match_value = "true", result = True, **kwargs):
    """Exposes the native flag `flag` as a readable BuildSettingInfo target.

    Declares a config_setting that matches when `flag` equals `match_value`,
    and a `name` target whose BuildSettingInfo value is `result` when the
    setting matches and `not result` otherwise.

    Args:
        name: (str) Name of the resulting flag-reading target.
        flag: (str) The native flag to read.
        match_value: (str) Flag value that selects `result`.
        result: (bool) Value reported when the flag matches `match_value`.
        **kwargs: Forwarded to the underlying rule (e.g. visibility).
    """
    native.config_setting(
        name = name + "_setting",
        values = {flag: match_value},
        visibility = ["//visibility:private"],
    )

    _native_bool_flag_rule(
        name = name,
        value = select({
            name + "_setting": result,
            "//conditions:default": not result,
        }),
        **kwargs
    )
@ -0,0 +1,50 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
"""Vendored version of bazel_features for protobuf, to keep a one-step setup""" |
||||
|
||||
_PROTO_BAZEL_FEATURES = """bazel_features = struct( |
||||
proto = struct( |
||||
starlark_proto_info = {starlark_proto_info}, |
||||
), |
||||
globals = struct( |
||||
PackageSpecificationInfo = {PackageSpecificationInfo}, |
||||
), |
||||
) |
||||
""" |
||||
|
||||
def _proto_bazel_features_impl(rctx):
    """Generates a features.bzl describing capabilities of the running Bazel."""

    # An empty string is treated as a "dev version", which is greater than anything.
    bazel_version = native.bazel_version or "999999.999999.999999"
    segments = bazel_version.split("-")[0].split(".")
    if len(segments) != 3:
        fail("invalid Bazel version '{}': got {} dot-separated segments, want 3".format(bazel_version, len(segments)))
    major = int(segments[0])
    minor = int(segments[1])

    # Feature availability thresholds: Starlark ProtoInfo in Bazel 7,
    # PackageSpecificationInfo in Bazel 6.4.
    has_starlark_proto_info = major >= 7
    has_package_specification_info = major > 6 or (major == 6 and minor >= 4)

    rctx.file("BUILD.bazel", """
load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
bzl_library(
    name = "features",
    srcs = ["features.bzl"],
    visibility = ["//visibility:public"],
)
exports_files(["features.bzl"])
""")
    rctx.file("features.bzl", _PROTO_BAZEL_FEATURES.format(
        starlark_proto_info = repr(has_starlark_proto_info),
        PackageSpecificationInfo = "PackageSpecificationInfo" if has_package_specification_info else "None",
    ))
||||
|
||||
# Repository rule producing the vendored @proto_bazel_features repo
# (a one-step replacement for depending on bazel_features).
proto_bazel_features = repository_rule(
    implementation = _proto_bazel_features_impl,
    # Force reruns on server restarts to keep native.bazel_version up-to-date.
    local = True,
)
@ -0,0 +1,154 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2008 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
"""Implementation of the proto_lang_toolchain rule.""" |
||||
|
||||
load("@proto_bazel_features//:features.bzl", "bazel_features") |
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load("//bazel/common:proto_info.bzl", "ProtoInfo") |
||||
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo") |
||||
load("//bazel/private:toolchain_helpers.bzl", "toolchains") |
||||
|
||||
def _rule_impl(ctx):
    """Implementation of proto_lang_toolchain: bundles protoc configuration into providers."""

    # Sources the language runtime already ships; no code is generated for them
    # (see the `blacklisted_protos` attribute documentation).
    provided_proto_sources = depset(transitive = [bp[ProtoInfo]._transitive_proto_sources for bp in ctx.attr.blacklisted_protos]).to_list()

    flag = ctx.attr.command_line
    if flag.find("$(PLUGIN_OUT)") > -1:
        fail("in attribute 'command_line': Placeholder '$(PLUGIN_OUT)' is not supported.")

    # $(OUT) becomes the %s slot that proto_common.compile later fills with the
    # output file or directory (out_replacement_format_flag).
    flag = flag.replace("$(OUT)", "%s")

    plugin = None
    if ctx.attr.plugin != None:
        plugin = ctx.attr.plugin[DefaultInfo].files_to_run

    if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        # Toolchain resolution enabled: protoc and its options come from the
        # resolved proto toolchain.
        proto_compiler = ctx.toolchains[toolchains.PROTO_TOOLCHAIN].proto.proto_compiler
        protoc_opts = ctx.toolchains[toolchains.PROTO_TOOLCHAIN].proto.protoc_opts
    else:
        # Legacy path: implicit `_proto_compiler` attribute plus proto fragment.
        proto_compiler = ctx.attr._proto_compiler.files_to_run
        protoc_opts = ctx.fragments.proto.experimental_protoc_opts

    if ctx.attr.protoc_minimal_do_not_use:
        # DO NOT USE: Protobuf-internal override for incremental changes (b/305068148).
        proto_compiler = ctx.attr.protoc_minimal_do_not_use.files_to_run

    proto_lang_toolchain_info = ProtoLangToolchainInfo(
        out_replacement_format_flag = flag,
        output_files = ctx.attr.output_files,
        plugin_format_flag = ctx.attr.plugin_format_flag,
        plugin = plugin,
        runtime = ctx.attr.runtime,
        provided_proto_sources = provided_proto_sources,
        proto_compiler = proto_compiler,
        protoc_opts = protoc_opts,
        progress_message = ctx.attr.progress_message,
        mnemonic = ctx.attr.mnemonic,
        allowlist_different_package = ctx.attr.allowlist_different_package,
        toolchain_type = ctx.attr.toolchain_type.label if ctx.attr.toolchain_type else None,
    )
    return [
        DefaultInfo(files = depset(), runfiles = ctx.runfiles()),
        platform_common.ToolchainInfo(proto = proto_lang_toolchain_info),
        # TODO: remove when --incompatible_enable_proto_toolchains is flipped and removed
        proto_lang_toolchain_info,
    ]
||||
|
||||
# Rule declaration for proto_lang_toolchain. The attr `doc` strings below are
# user-visible documentation rendered by Stardoc — they are part of the rule's
# contract, not comments.
proto_lang_toolchain = rule(
    _rule_impl,
    doc = """
<p>If using Bazel, please load the rule from <a href="https://github.com/bazelbuild/rules_proto">
https://github.com/bazelbuild/rules_proto</a>.

<p>Specifies how a LANG_proto_library rule (e.g., <code>java_proto_library</code>) should invoke the
proto-compiler.
Some LANG_proto_library rules allow specifying which toolchain to use using command-line flags;
consult their documentation.

<p>Normally you should not write those kind of rules unless you want to
tune your Java compiler.

<p>There's no compiler. The proto-compiler is taken from the proto_library rule we attach to. It is
passed as a command-line flag to Blaze.
Several features require a proto-compiler to be invoked on the proto_library rule itself.
It's beneficial to enforce the compiler that LANG_proto_library uses is the same as the one
<code>proto_library</code> does.

<h4>Examples</h4>

<p>A simple example would be:
<pre><code class="lang-starlark">
proto_lang_toolchain(
    name = "javalite_toolchain",
    command_line = "--javalite_out=shared,immutable:$(OUT)",
    plugin = ":javalite_plugin",
    runtime = ":protobuf_lite",
)
</code></pre>
""",
    attrs = {
        "progress_message": attr.string(default = "Generating proto_library %{label}", doc = """
This value will be set as the progress message on protoc action."""),
        "mnemonic": attr.string(default = "GenProto", doc = """
This value will be set as the mnemonic on protoc action."""),
        "command_line": attr.string(mandatory = True, doc = """
This value will be passed to proto-compiler to generate the code. Only include the parts
specific to this code-generator/plugin (e.g., do not include -I parameters)
<ul>
<li><code>$(OUT)</code> is LANG_proto_library-specific. The rules are expected to define
how they interpret this variable. For Java, for example, $(OUT) will be replaced with
the src-jar filename to create.</li>
</ul>"""),
        "output_files": attr.string(values = ["single", "multiple", "legacy"], default = "legacy", doc = """
Controls how <code>$(OUT)</code> in <code>command_line</code> is formatted, either by
a path to a single file or output directory in case of multiple files.
Possible values are: "single", "multiple"."""),
        "plugin_format_flag": attr.string(doc = """
If provided, this value will be passed to proto-compiler to use the plugin.
The value must contain a single %s which is replaced with plugin executable.
<code>--plugin=protoc-gen-PLUGIN=&lt;executable&gt;.</code>"""),
        "plugin": attr.label(
            executable = True,
            cfg = "exec",
            doc = """
If provided, will be made available to the action that calls the proto-compiler, and will be
passed to the proto-compiler:
<code>--plugin=protoc-gen-PLUGIN=&lt;executable&gt;.</code>""",
        ),
        "runtime": attr.label(doc = """
A language-specific library that the generated code is compiled against.
The exact behavior is LANG_proto_library-specific.
Java, for example, should compile against the runtime."""),
        "blacklisted_protos": attr.label_list(
            providers = [ProtoInfo],
            doc = """
No code will be generated for files in the <code>srcs</code> attribute of
<code>blacklisted_protos</code>.
This is used for .proto files that are already linked into proto runtimes, such as
<code>any.proto</code>.""",
        ),
        # TODO: add doc
        "allowlist_different_package": attr.label(
            cfg = "exec",
            # The provider constraint only exists on Bazel >= 6.4.
            providers = [bazel_features.globals.PackageSpecificationInfo] if bazel_features.globals.PackageSpecificationInfo else [],
        ),
        # TODO: add doc
        "toolchain_type": attr.label(),
        # DO NOT USE. For Protobuf incremental changes only: b/305068148.
        "protoc_minimal_do_not_use": attr.label(
            cfg = "exec",
            executable = True,
        ),
        # The implicit protoc attribute is only needed on the legacy
        # (non-toolchain-resolution) path.
    } | ({} if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION else {
        "_proto_compiler": attr.label(
            cfg = "exec",
            executable = True,
            allow_files = True,
            default = configuration_field("proto", "proto_compiler"),
        ),
    }),
    provides = [ProtoLangToolchainInfo],
    fragments = ["proto"],
    toolchains = toolchains.use_toolchain(toolchains.PROTO_TOOLCHAIN),  # Used to obtain protoc
)
@ -0,0 +1,357 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2008 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
""" |
||||
Implementation of proto_library rule. |
||||
""" |
||||
|
||||
load("@bazel_skylib//lib:paths.bzl", "paths") |
||||
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo") |
||||
load("@proto_bazel_features//:features.bzl", "bazel_features") |
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
load("//bazel/common:proto_info.bzl", "ProtoInfo") |
||||
load("//bazel/private:toolchain_helpers.bzl", "toolchains") |
||||
|
||||
# Template for protoc's strict-deps violation message. The outer %s is filled
# with the consuming target's label here; the escaped %%s survives formatting
# and is presumably substituted later with the offending .proto file — verify
# against the flag's consumer.
STRICT_DEPS_FLAG_TEMPLATE = (
    #
    "--direct_dependencies_violation_msg=" +
    "%%s is imported, but %s doesn't directly depend on a proto_library that 'srcs' it."
)
||||
|
||||
def _check_srcs_package(target_package, srcs):
    """Check that .proto files in sources are from the same package.

    This is done to avoid clashes with the generated sources.
    """

    # TODO: this does not work with filegroups that contain files
    # that are not in the package.
    for src in srcs:
        if src.label.package != target_package:
            fail("Proto source with label '%s' must be in same package as consuming rule." % src.label)
||||
|
||||
def _get_import_prefix(ctx):
    """Gets and verifies the `import_prefix` attribute if it is declared."""
    prefix = ctx.attr.import_prefix

    # Validation order matters for which error a doubly-invalid prefix reports.
    if not paths.is_normalized(prefix):
        fail("should be normalized (without uplevel references or '.' path segments)", attr = "import_prefix")
    if paths.is_absolute(prefix):
        fail("should be a relative path", attr = "import_prefix")

    return prefix
||||
|
||||
def _get_strip_import_prefix(ctx):
    """Gets and verifies the `strip_import_prefix` attribute."""
    prefix = ctx.attr.strip_import_prefix

    if not paths.is_normalized(prefix):
        fail("should be normalized (without uplevel references or '.' path segments)", attr = "strip_import_prefix")

    if paths.is_absolute(prefix):
        prefix = prefix[1:]
    else:
        # A relative prefix is interpreted relative to the current package.
        prefix = _join(ctx.label.package, prefix)

    return prefix.removesuffix("/")
||||
|
||||
def _proto_library_impl(ctx):
    """Implementation of proto_library: validates attrs, builds ProtoInfo and a descriptor set."""

    # Verifies attributes.
    _check_srcs_package(ctx.label.package, ctx.attr.srcs)
    srcs = ctx.files.srcs
    deps = [dep[ProtoInfo] for dep in ctx.attr.deps]
    exports = [dep[ProtoInfo] for dep in ctx.attr.exports]
    import_prefix = _get_import_prefix(ctx)
    strip_import_prefix = _get_strip_import_prefix(ctx)

    # An alias library (no srcs) re-exports everything in deps; otherwise only
    # `exports` entries are subject to the allowlist check.
    check_for_reexport = deps + exports if not srcs else exports
    _PackageSpecificationInfo = bazel_features.globals.PackageSpecificationInfo
    for proto in check_for_reexport:
        if getattr(proto, "allow_exports", None):
            # PackageSpecificationInfo only exists on Bazel >= 6.4.0.
            if not _PackageSpecificationInfo:
                fail("Allowlist checks not supported before Bazel 6.4.0")
            if not proto.allow_exports[_PackageSpecificationInfo].contains(ctx.label):
                fail("proto_library '%s' can't be reexported in package '//%s'" % (proto.direct_descriptor_set.owner, ctx.label.package))

    proto_path, virtual_srcs = _process_srcs(ctx, srcs, import_prefix, strip_import_prefix)
    descriptor_set = ctx.actions.declare_file(ctx.label.name + "-descriptor-set.proto.bin")

    proto_info = ProtoInfo(
        srcs = virtual_srcs,
        deps = deps,
        descriptor_set = descriptor_set,
        proto_path = proto_path,
        workspace_root = ctx.label.workspace_root,
        bin_dir = ctx.bin_dir.path,
        allow_exports = ctx.attr.allow_exports,
    )

    _write_descriptor_set(ctx, proto_info, deps, exports, descriptor_set)

    # We assume that the proto sources will not have conflicting artifacts
    # with the same root relative path
    data_runfiles = ctx.runfiles(
        files = [proto_info.direct_descriptor_set],
        transitive_files = depset(transitive = [proto_info.transitive_sources]),
    )
    return [
        proto_info,
        DefaultInfo(
            files = depset([proto_info.direct_descriptor_set]),
            default_runfiles = ctx.runfiles(),  # empty
            data_runfiles = data_runfiles,
        ),
    ]
||||
|
||||
def _process_srcs(ctx, srcs, import_prefix, strip_import_prefix):
    """Returns proto_path and sources, optionally symlinking them to _virtual_imports.

    Returns:
      (str, [File]) A pair of proto_path and virtual_sources.
    """
    uses_virtual_roots = import_prefix != "" or strip_import_prefix != ""
    if not uses_virtual_roots:
        # No prefixing requested: sources are used in place and the proto path
        # is the repository root.
        return "", srcs

    # Prefix manipulation requires symlinking the sources under _virtual_imports.
    return _symlink_to_virtual_imports(ctx, srcs, import_prefix, strip_import_prefix)
||||
|
||||
def _join(*path):
    """Joins path segments with '/', skipping empty segments."""
    nonempty = []
    for segment in path:
        if segment != "":
            nonempty.append(segment)
    return "/".join(nonempty)
||||
|
||||
def _symlink_to_virtual_imports(ctx, srcs, import_prefix, strip_import_prefix):
    """Symlinks srcs to _virtual_imports.

    Each source has `strip_import_prefix` removed and `import_prefix` prepended,
    and is symlinked under `<package>/_virtual_imports/<rule name>/`.

    Returns:
      A pair proto_path, directs_sources.
    """
    virtual_imports = _join("_virtual_imports", ctx.label.name)
    proto_path = _join(ctx.label.package, virtual_imports)

    # In external repositories, short_path starts with "../<repo>/", so the
    # strip prefix must account for that.
    if ctx.label.workspace_name == "":
        full_strip_import_prefix = strip_import_prefix
    else:
        full_strip_import_prefix = _join("..", ctx.label.workspace_name, strip_import_prefix)
    if full_strip_import_prefix:
        full_strip_import_prefix += "/"

    virtual_srcs = []
    for src in srcs:
        # Remove strip_import_prefix
        if not src.short_path.startswith(full_strip_import_prefix):
            fail(".proto file '%s' is not under the specified strip prefix '%s'" %
                 (src.short_path, full_strip_import_prefix))
        import_path = src.short_path[len(full_strip_import_prefix):]

        # Add import_prefix
        virtual_src = ctx.actions.declare_file(_join(virtual_imports, import_prefix, import_path))
        ctx.actions.symlink(
            output = virtual_src,
            target_file = src,
            progress_message = "Symlinking virtual .proto sources for %{label}",
        )
        virtual_srcs.append(virtual_src)
    return proto_path, virtual_srcs
||||
|
||||
def _write_descriptor_set(ctx, proto_info, deps, exports, descriptor_set):
    """Writes descriptor set.

    Registers a protoc action producing `descriptor_set` for this rule's direct
    sources, optionally enforcing strict deps and strict public imports.
    """
    # Alias libraries (no direct sources) get an empty descriptor set file.
    if proto_info.direct_sources == []:
        ctx.actions.write(descriptor_set, "")
        return

    dependencies_descriptor_sets = depset(transitive = [dep.transitive_descriptor_sets for dep in deps])

    args = ctx.actions.args()

    # NOTE(review): BuildSettingInfo's load is not visible in this chunk —
    # presumably loaded from bazel-skylib at the top of the file; confirm.
    if ctx.attr._experimental_proto_descriptor_sets_include_source_info[BuildSettingInfo].value:
        args.add("--include_source_info")
    if hasattr(ctx.attr, "_retain_options") and ctx.attr._retain_options:
        args.add("--retain_options")

    strict_deps = ctx.attr._strict_proto_deps[BuildSettingInfo].value
    if strict_deps:
        if proto_info.direct_sources:
            # Sources importable without a strict-deps violation: own sources
            # plus everything directly exported by direct deps.
            strict_importable_sources = depset(
                direct = proto_info._direct_proto_sources,
                transitive = [dep._exported_sources for dep in deps],
            )
        else:
            strict_importable_sources = None
        if strict_importable_sources:
            args.add_joined(
                "--direct_dependencies",
                strict_importable_sources,
                map_each = proto_common.get_import_path,
                join_with = ":",
            )
            # Example: `--direct_dependencies a.proto:b.proto`

        else:
            # The proto compiler requires an empty list to turn on strict deps checking
            args.add("--direct_dependencies=")

        # Set `-direct_dependencies_violation_msg=`
        args.add(ctx.label, format = STRICT_DEPS_FLAG_TEMPLATE)

    strict_imports = ctx.attr._strict_public_imports[BuildSettingInfo].value
    if strict_imports:
        public_import_protos = depset(transitive = [export._exported_sources for export in exports])
        if not public_import_protos:
            # This line is necessary to trigger the check.
            args.add("--allowed_public_imports=")
        else:
            args.add_joined(
                "--allowed_public_imports",
                public_import_protos,
                map_each = proto_common.get_import_path,
                join_with = ":",
            )

    # Resolve the compiler either via toolchain resolution or via a synthetic
    # legacy toolchain built around the `_proto_compiler` attribute.
    if proto_common.INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION:
        toolchain = ctx.toolchains[toolchains.PROTO_TOOLCHAIN]
        if not toolchain:
            fail("Protocol compiler toolchain could not be resolved.")
        proto_lang_toolchain_info = toolchain.proto
    else:
        proto_lang_toolchain_info = proto_common.ProtoLangToolchainInfo(
            out_replacement_format_flag = "--descriptor_set_out=%s",
            output_files = "single",
            mnemonic = "GenProtoDescriptorSet",
            progress_message = "Generating Descriptor Set proto_library %{label}",
            proto_compiler = ctx.executable._proto_compiler,
            protoc_opts = ctx.fragments.proto.experimental_protoc_opts,
            plugin = None,
        )

    proto_common.compile(
        ctx.actions,
        proto_info,
        proto_lang_toolchain_info,
        generated_files = [descriptor_set],
        additional_inputs = dependencies_descriptor_sets,
        additional_args = args,
    )
||||
|
||||
# proto_library rule definition. Doc strings below are user-facing (rendered by
# Stardoc); fixed grammar/typos in them ("have to list", "accessible at the
# value", "non-empty", "use case").
proto_library = rule(
    _proto_library_impl,
    # TODO: proto_common docs are missing
    # TODO: ProtoInfo link doesn't work and docs are missing
    doc = """
<p>If using Bazel, please load the rule from <a href="https://github.com/bazelbuild/rules_proto">
https://github.com/bazelbuild/rules_proto</a>.

<p>Use <code>proto_library</code> to define libraries of protocol buffers which
may be used from multiple languages. A <code>proto_library</code> may be listed
in the <code>deps</code> clause of supported rules, such as
<code>java_proto_library</code>.

<p>When compiled on the command-line, a <code>proto_library</code> creates a file
named <code>foo-descriptor-set.proto.bin</code>, which is the descriptor set for
the messages the rule srcs. The file is a serialized
<code>FileDescriptorSet</code>, which is described in
<a href="https://developers.google.com/protocol-buffers/docs/techniques#self-description">
https://developers.google.com/protocol-buffers/docs/techniques#self-description</a>.

<p>It only contains information about the <code>.proto</code> files directly
mentioned by a <code>proto_library</code> rule; the collection of transitive
descriptor sets is available through the
<code>[ProtoInfo].transitive_descriptor_sets</code> Starlark provider.
See documentation in <code>proto_info.bzl</code>.

<p>Recommended code organization:
<ul>
<li>One <code>proto_library</code> rule per <code>.proto</code> file.
<li>A file named <code>foo.proto</code> will be in a rule named <code>foo_proto</code>,
which is located in the same package.
<li>A <code>[language]_proto_library</code> that wraps a <code>proto_library</code>
named <code>foo_proto</code> should be called <code>foo_[language]_proto</code>,
and be located in the same package.
</ul>""",
    attrs = {
        "srcs": attr.label_list(
            allow_files = [".proto", ".protodevel"],
            flags = ["DIRECT_COMPILE_TIME_INPUT"],
            # TODO: Should .protodevel be advertised or deprecated?
            doc = """
The list of <code>.proto</code> and <code>.protodevel</code> files that are
processed to create the target. This is usually a non-empty list. One use case
where <code>srcs</code> can be empty is an <i>alias-library</i>. This is a
proto_library rule having one or more other proto_library in <code>deps</code>.
This pattern can be used to e.g. export a public api under a persistent name.""",
        ),
        "deps": attr.label_list(
            providers = [ProtoInfo],
            doc = """
The list of other <code>proto_library</code> rules that the target depends upon.
A <code>proto_library</code> may only depend on other <code>proto_library</code>
targets. It may not depend on language-specific libraries.""",
        ),
        "exports": attr.label_list(
            providers = [ProtoInfo],
            doc = """
List of proto_library targets that can be referenced via "import public" in the
proto source.
It's an error if you use "import public" but do not list the corresponding library
in the exports attribute.
Note that you have to list the library both in deps and exports since not all
lang_proto_library implementations have been changed yet.""",
        ),
        "strip_import_prefix": attr.string(
            default = "/",
            doc = """
The prefix to strip from the paths of the .proto files in this rule.

<p>When set, .proto source files in the <code>srcs</code> attribute of this rule are
accessible at their path with this prefix cut off.

<p>If it's a relative path (not starting with a slash), it's taken as a package-relative
one. If it's an absolute one, it's understood as a repository-relative path.

<p>The prefix in the <code>import_prefix</code> attribute is added after this prefix is
stripped.""",
        ),
        "import_prefix": attr.string(
            doc = """
The prefix to add to the paths of the .proto files in this rule.

<p>When set, the .proto source files in the <code>srcs</code> attribute of this rule are
accessible at the value of this attribute prepended to their repository-relative path.

<p>The prefix in the <code>strip_import_prefix</code> attribute is removed before this
prefix is added.""",
        ),
        "allow_exports": attr.label(
            cfg = "exec",
            providers = [bazel_features.globals.PackageSpecificationInfo] if bazel_features.globals.PackageSpecificationInfo else [],
            doc = """
An optional allowlist that prevents proto library to be reexported or used in
lang_proto_library that is not in one of the listed packages.""",
        ),
        "data": attr.label_list(
            allow_files = True,
            flags = ["SKIP_CONSTRAINTS_OVERRIDE"],
        ),
        # buildifier: disable=attr-license (calling attr.license())
        "licenses": attr.license() if hasattr(attr, "license") else attr.string_list(),
        # Private build settings controlling descriptor-set generation.
        "_experimental_proto_descriptor_sets_include_source_info": attr.label(
            default = "//bazel/private:experimental_proto_descriptor_sets_include_source_info",
        ),
        "_strict_proto_deps": attr.label(
            default = "//bazel/private:strict_proto_deps",
        ),
        "_strict_public_imports": attr.label(
            default = "//bazel/private:strict_public_imports",
        ),
    } | toolchains.if_legacy_toolchain({
        # Only present when legacy (attribute-based) toolchains are in use.
        "_proto_compiler": attr.label(
            cfg = "exec",
            executable = True,
            allow_files = True,
            default = configuration_field("proto", "proto_compiler"),
        ),
    }),  # buildifier: disable=attr-licenses (attribute called licenses)
    fragments = ["proto"],
    provides = [ProtoInfo],
    toolchains = toolchains.use_toolchain(toolchains.PROTO_TOOLCHAIN),
)
@ -0,0 +1,49 @@ |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2024 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
# |
||||
""" |
||||
Toolchain helpers. |
||||
|
||||
The helpers here should be used for a migration to toolchain in proto rules. |
||||
|
||||
Anybody that needs them in another repository should copy them, because after |
||||
the migration is finished, the helpers can be removed. |
||||
""" |
||||
|
||||
load("//bazel/common:proto_lang_toolchain_info.bzl", "ProtoLangToolchainInfo") |
||||
load("//bazel/private:native.bzl", "native_proto_common") |
||||
|
||||
# True when Bazel's proto toolchain resolution is enabled; getattr defaults to
# False on Bazel versions where the flag doesn't exist.
_incompatible_toolchain_resolution = getattr(native_proto_common, "INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION", False)
||||
|
||||
def _find_toolchain(ctx, legacy_attr, toolchain_type):
    """Returns ProtoLangToolchainInfo, resolved via toolchains or a legacy attribute.

    Args:
      ctx: the rule context.
      legacy_attr: (str) name of the attribute holding the toolchain target in
        legacy mode.
      toolchain_type: (str) toolchain type label used under toolchain resolution.
    """
    if not _incompatible_toolchain_resolution:
        # Legacy mode: the toolchain is wired in through a rule attribute.
        return getattr(ctx.attr, legacy_attr)[ProtoLangToolchainInfo]

    toolchain = ctx.toolchains[toolchain_type]
    if not toolchain:
        fail("No toolchains registered for '%s'." % toolchain_type)
    return toolchain.proto
||||
|
||||
def _use_toolchain(toolchain_type):
    """Returns the `toolchains` list for a rule; empty in legacy (attribute) mode."""
    if not _incompatible_toolchain_resolution:
        return []
    # Optional so that analysis can report a clear error when unregistered.
    return [config_common.toolchain_type(toolchain_type, mandatory = False)]
||||
|
||||
def _if_legacy_toolchain(legacy_attr_dict):
    """Returns the given attrs only when legacy (attribute-based) toolchains are in use."""
    return {} if _incompatible_toolchain_resolution else legacy_attr_dict
||||
|
||||
# Public facade for the migration helpers; see module docstring above.
toolchains = struct(
    use_toolchain = _use_toolchain,
    find_toolchain = _find_toolchain,
    if_legacy_toolchain = _if_legacy_toolchain,
    INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION = _incompatible_toolchain_resolution,
    # Toolchain type used for protoc itself.
    PROTO_TOOLCHAIN = "@rules_proto//proto:toolchain_type",
)
@ -1,3 +1,20 @@ |
||||
"""proto_library rule""" |
||||
# Protocol Buffers - Google's data interchange format |
||||
# Copyright 2008 Google Inc. All rights reserved. |
||||
# |
||||
# Use of this source code is governed by a BSD-style |
||||
# license that can be found in the LICENSE file or at |
||||
# https://developers.google.com/open-source/licenses/bsd |
||||
""" |
||||
Macro of proto_library rule. |
||||
""" |
||||
|
||||
proto_library = native.proto_library |
||||
load("@proto_bazel_features//:features.bzl", "bazel_features") |
||||
load("//bazel/private:proto_library_rule.bzl", _proto_library = "proto_library") |
||||
|
||||
def proto_library(**kwattrs):
    """Macro dispatching between the Starlark and native proto_library implementations.

    Starlark ProtoInfo exists only on Bazel >= 7.0.0; older Bazels keep using the
    native rule so the ProtoInfo provider seen by consumers always matches.
    """
    use_starlark_rule = bazel_features.proto.starlark_proto_info
    if use_starlark_rule:
        _proto_library(**kwattrs)
    else:
        # On older Bazel versions keep using native rules, so that mismatch in
        # ProtoInfo doesn't happen.
        native.proto_library(**kwattrs)
||||
|
@ -0,0 +1,3 @@ |
||||
load(":proto_common_compile_tests.bzl", "proto_common_compile_test_suite")

# Analysis-time test suite for `proto_common.compile`.
proto_common_compile_test_suite(name = "proto_common_compile_test_suite")
@ -0,0 +1,361 @@ |
||||
"""Tests for `proto_common.compile` function.""" |
||||
|
||||
load("@rules_testing//lib:analysis_test.bzl", "analysis_test", "test_suite") |
||||
load("@rules_testing//lib:truth.bzl", "matching") |
||||
load("@rules_testing//lib:util.bzl", "util") |
||||
load("//bazel:proto_library.bzl", "proto_library") |
||||
load("//bazel/tests/testdata:compile_rule.bzl", "compile_rule") |
||||
|
||||
# Expected suffix of the proto compiler's executable path in action argv.
protocol_compiler = "/protoc"
||||
|
||||
def proto_common_compile_test_suite(name):
    """Declares the shared helper proto and assembles all `proto_common.compile` tests."""
    # Shared fixture used by most tests below.
    util.helper_target(
        proto_library,
        name = "simple_proto",
        srcs = ["A.proto"],
    )
    test_suite(
        name = name,
        tests = [
            _test_compile_basic,
            _test_compile_noplugin,
            _test_compile_with_plugin_output,
            _test_compile_with_directory_plugin_output,
            _test_compile_additional_args,
            _test_compile_additional_tools,
            _test_compile_additional_tools_no_plugin,
            _test_compile_additional_inputs,
            _test_compile_resource_set,
            _test_compile_protoc_opts,
            _test_compile_direct_generated_protos,
            _test_compile_indirect_generated_protos,
        ],
    )
||||
|
||||
# Verifies basic usage of `proto_common.compile`.
def _test_compile_basic(name):
    """Declares the target under test and the analysis test for the basic case."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_basic_impl,
    )

def _test_compile_basic_impl(env, target):
    """Asserts protoc argv (compiler, plugin, -I, source) and the action mnemonic."""
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
    action.mnemonic().equals("MyMnemonic")
||||
|
||||
# Verifies usage of proto_common.generate_code with no plugin specified by toolchain.
def _test_compile_noplugin(name):
    """Declares the target under test using the plugin-less toolchain."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        toolchain = "//bazel/tests/testdata:toolchain_noplugin",
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_noplugin_impl,
    )

def _test_compile_noplugin_impl(env, target):
    """Asserts argv contains no --plugin flag when the toolchain defines none."""
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
||||
|
||||
# Verifies usage of `proto_common.compile` with `plugin_output` parameter set to file.
def _test_compile_with_plugin_output(name):
    """Declares the target under test with a single-file plugin output."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        plugin_output = "single",
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_with_plugin_output_impl,
    )

def _test_compile_with_plugin_output_impl(env, target):
    """Asserts --java_out points at the declared single output file."""
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--java_out=param1,param2:b*-out/*/test_compile_with_plugin_output_compile"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
||||
|
||||
# Verifies usage of `proto_common.compile` with `plugin_output` parameter set to a directory.
def _test_compile_with_directory_plugin_output(name):
    """Declares the target under test with a directory plugin output."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        plugin_output = "multiple",
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_with_directory_plugin_output_impl,
    )

def _test_compile_with_directory_plugin_output_impl(env, target):
    """Asserts --java_out points at the bin directory rather than a file."""
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--java_out=param1,param2:b*-out/*/bin"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
||||
|
||||
# Verifies usage of `proto_common.compile` with `additional_args` parameter
def _test_compile_additional_args(name):
    """Declares the target under test with extra protoc arguments."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_args = ["--a", "--b"],
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_args_impl,
    )

def _test_compile_additional_args_impl(env, target):
    """Asserts the extra arguments appear in the protoc command line."""
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.equals_wrapper("--a"),
            matching.equals_wrapper("--b"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
||||
|
||||
# Verifies usage of `proto_common.compile` with `additional_tools` parameter
def _test_compile_additional_tools(name):
    """Declares the target under test with two extra tool dependencies."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_tools = [
            "//bazel/tests/testdata:_tool1",
            "//bazel/tests/testdata:_tool2",
        ],
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_tools_impl,
    )

def _test_compile_additional_tools_impl(env, target):
    """Asserts both tools and the toolchain plugin are among the action inputs."""
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.inputs().contains_at_least_predicates(
        [
            matching.file_basename_equals("_tool1"),
            matching.file_basename_equals("_tool2"),
            matching.file_basename_equals("plugin"),
        ],
    )
||||
|
||||
# Verifies usage of `proto_common.compile` with `additional_tools` parameter and no plugin on the toolchain.
def _test_compile_additional_tools_no_plugin(name):
    """Declares the target under test with extra tools and the plugin-less toolchain."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_tools = [
            "//bazel/tests/testdata:_tool1",
            "//bazel/tests/testdata:_tool2",
        ],
        toolchain = "//bazel/tests/testdata:toolchain_noplugin",
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_tools_no_plugin_impl,
    )

def _test_compile_additional_tools_no_plugin_impl(env, target):
    """Asserts the tools are inputs while no plugin binary is."""
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.inputs().contains_at_least_predicates(
        [
            matching.file_basename_equals("_tool1"),
            matching.file_basename_equals("_tool2"),
        ],
    )
    action.inputs().not_contains_predicate(matching.file_basename_equals("plugin"))
||||
|
||||
# Verifies usage of `proto_common.compile` with `additional_inputs` parameter.
def _test_compile_additional_inputs(name):
    """Declares the target under test with two extra input files."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        additional_inputs = ["input1.txt", "input2.txt"],
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_additional_inputs_impl,
    )

def _test_compile_additional_inputs_impl(env, target):
    """Asserts the extra files are among the action inputs."""
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.inputs().contains_at_least_predicates(
        [
            matching.file_basename_equals("input1.txt"),
            matching.file_basename_equals("input2.txt"),
        ],
    )
||||
|
||||
# Verifies usage of `proto_common.compile` with the `resource_set` parameter.
def _test_compile_resource_set(name):
    """Declares the target under test with a custom resource_set callback."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
        use_resource_set = True,
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_resource_set_impl,
    )

def _test_compile_resource_set_impl(env, target):
    """Smoke test: analysis must succeed with a resource_set callback set."""
    action = env.expect.that_target(target).action_named("MyMnemonic")  # @unused

    # We can't check the specification of the resource set, but we at least verify analysis passes
||||
|
||||
# Verifies `--protocopts` are passed to command line.
def _test_compile_protoc_opts(name):
    """Declares the target under test; --protocopt flags are injected via config_settings."""
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = ":simple_proto",
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        config_settings = {"//command_line_option:protocopt": ["--foo", "--bar"]},
        impl = _test_compile_protoc_opts_impl,
    )

def _test_compile_protoc_opts_impl(env, target):
    """Asserts the --protocopt values land on the protoc command line."""
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.equals_wrapper("--foo"),
            matching.equals_wrapper("--bar"),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
||||
|
||||
# Verifies `proto_common.compile` correctly handles direct generated `.proto` files.
def _test_compile_direct_generated_protos(name):
    """Declares a proto_library mixing a source proto with a genrule-generated one."""
    util.helper_target(native.genrule, name = name + "_generate_G", cmd = "", outs = ["G.proto"])
    util.helper_target(
        proto_library,
        name = name + "_directly_generated_proto",
        srcs = ["A.proto", "G.proto"],
    )
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = name + "_directly_generated_proto",
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_direct_generated_protos_impl,
    )

def _test_compile_direct_generated_protos_impl(env, target):
    """Asserts a bin-dir -I root is added and the generated G.proto is passed."""
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.str_matches("-Ib*-out/*/*"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
            matching.str_matches("*-out/*/*/*/G.proto"),
        ],
    )
||||
|
||||
# Verifies `proto_common.compile` correctly handles generated `.proto` files in deps.
def _test_compile_indirect_generated_protos(name):
    """Declares a proto_library whose dep contains a genrule-generated proto."""
    util.helper_target(native.genrule, name = "_generate_h", srcs = ["A.txt"], cmd = "", outs = ["H.proto"])
    util.helper_target(proto_library, name = "_generated_proto", srcs = ["H.proto"])
    util.helper_target(
        proto_library,
        name = name + "_indirectly_generated_proto",
        srcs = ["A.proto"],
        deps = [":_generated_proto"],
    )
    util.helper_target(
        compile_rule,
        name = name + "_compile",
        proto_dep = name + "_indirectly_generated_proto",
    )

    analysis_test(
        name = name,
        target = name + "_compile",
        impl = _test_compile_indirect_generated_protos_impl,
    )

def _test_compile_indirect_generated_protos_impl(env, target):
    """Asserts a bin-dir -I root is added for the generated dependency."""
    action = env.expect.that_target(target).action_named("MyMnemonic")
    action.argv().contains_exactly_predicates(
        [
            matching.str_endswith(protocol_compiler),
            matching.str_matches("--plugin=b*-out/*-exec-*/bin/*/testdata/plugin"),
            matching.str_matches("-Ib*-out/*/*"),
            matching.equals_wrapper("-I."),
            matching.str_endswith("/A.proto"),
        ],
    )
@ -0,0 +1,130 @@ |
||||
package(default_visibility = ["//visibility:public"]) |
||||
|
||||
# Toolchain with a plugin; the default used by compile_rule.
proto_lang_toolchain(
    name = "toolchain",
    blacklisted_protos = [":denied"],
    command_line = "--java_out=param1,param2:$(OUT)",
    mnemonic = "MyMnemonic",
    plugin = ":plugin",
    plugin_format_flag = "--plugin=%s",
    progress_message = "Progress Message %{label}",
    runtime = ":runtime",
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

# Same toolchain without a plugin, for the no-plugin test cases.
proto_lang_toolchain(
    name = "toolchain_noplugin",
    blacklisted_protos = [":denied"],
    command_line = "--java_out=param1,param2:$(OUT)",
    mnemonic = "MyMnemonic",
    progress_message = "Progress Message %{label}",
    runtime = ":runtime",
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

# Dummy plugin binary referenced by :toolchain.
cc_binary(
    name = "plugin",
    srcs = ["plugin.cc"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

# Dummy language runtime referenced by the toolchains.
cc_library(
    name = "runtime",
    srcs = ["runtime.cc"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

# Filegroups feeding the proto_library targets below.
filegroup(
    name = "descriptors",
    srcs = [
        "descriptor.proto",
        "metadata.proto",
    ],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

filegroup(
    name = "any",
    srcs = ["any.proto"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

filegroup(
    name = "something",
    srcs = ["something.proto"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

proto_library(
    name = "mixed",
    srcs = [
        ":descriptors",
        ":something",
    ],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

# Used as blacklisted_protos by both toolchains above.
proto_library(
    name = "denied",
    srcs = [
        ":any",
        ":descriptors",
    ],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

# Extra tools passed through `additional_tools` in the tests.
cc_binary(
    name = "_tool1",
    srcs = ["tool1.cc"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)

cc_binary(
    name = "_tool2",
    srcs = ["tool2.cc"],
    tags = [
        "manual",
        "nobuilder",
        "notap",
    ],
)
@ -0,0 +1,50 @@ |
||||
"""Testing function for proto_common module""" |
||||
|
||||
load("//bazel/common:proto_common.bzl", "proto_common") |
||||
|
||||
def _resource_set_callback(_os, inputs_size):
    """Estimates action resources: a fixed memory base plus a per-input increment.

    Args:
      _os: (unused) execution platform OS.
      inputs_size: (int) number of action inputs.

    Returns:
      dict with "memory" (MB estimate) and "cpu" keys.
    """
    estimated_memory = 25 + 0.15 * inputs_size
    return {"cpu": 1, "memory": estimated_memory}
||||
|
||||
def _impl(ctx):
    """Drives proto_common.compile with options derived from the rule's attributes.

    Declares one output file named after the target and returns it via
    DefaultInfo so the surrounding test can inspect the generated action.
    """
    output = ctx.actions.declare_file(ctx.attr.name)

    extra = {}

    # Translate the plugin_output mode into the path passed to the plugin;
    # "wrong" deliberately yields a malformed path for error-path testing.
    mode = ctx.attr.plugin_output
    if mode == "single":
        extra["plugin_output"] = output.path
    elif mode == "multiple":
        extra["plugin_output"] = ctx.bin_dir.path
    elif mode == "wrong":
        extra["plugin_output"] = ctx.bin_dir.path + "///"

    if ctx.attr.additional_args:
        args = ctx.actions.args()
        args.add_all(ctx.attr.additional_args)
        extra["additional_args"] = args

    if ctx.files.additional_tools:
        extra["additional_tools"] = ctx.files.additional_tools

    if ctx.files.additional_inputs:
        extra["additional_inputs"] = depset(ctx.files.additional_inputs)

    if ctx.attr.use_resource_set:
        extra["resource_set"] = _resource_set_callback

    if ctx.attr.progress_message:
        extra["experimental_progress_message"] = ctx.attr.progress_message

    proto_common.compile(
        ctx.actions,
        ctx.attr.proto_dep[ProtoInfo],
        ctx.attr.toolchain[proto_common.ProtoLangToolchainInfo],
        [output],
        **extra
    )
    return [DefaultInfo(files = depset([output]))]
||||
|
||||
# Test rule that exercises proto_common.compile with various optional kwargs;
# each attribute toggles one code path in _impl above.
compile_rule = rule(
    _impl,
    attrs = {
        # proto_library target whose ProtoInfo is compiled.
        "proto_dep": attr.label(),
        # One of "single", "multiple", or "wrong" — selects the plugin_output
        # value (empty string selects none).
        "plugin_output": attr.string(),
        # Language toolchain providing ProtoLangToolchainInfo.
        "toolchain": attr.label(default = ":toolchain"),
        # Extra command-line args forwarded via ctx.actions.args().
        "additional_args": attr.string_list(),
        # Extra tool binaries, built for the exec configuration.
        "additional_tools": attr.label_list(cfg = "exec"),
        # Extra input files passed to the compile action as a depset.
        "additional_inputs": attr.label_list(allow_files = True),
        # When True, installs _resource_set_callback on the action.
        "use_resource_set": attr.bool(),
        # When set, overrides the action's progress message.
        "progress_message": attr.string(),
    },
)
@ -0,0 +1,10 @@ |
||||
# pkg-config metadata template for the upb library (-lupb below); the @VAR@
# placeholders are substituted by CMake's configure_file() at build time.
prefix=@CMAKE_INSTALL_PREFIX@
exec_prefix=@CMAKE_INSTALL_PREFIX@
libdir=@CMAKE_INSTALL_FULL_LIBDIR@
includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@

Name: Protocol Buffers
Description: Google's Data Interchange Format
Version: @protobuf_VERSION@
Libs: -L${libdir} -lupb @CMAKE_THREAD_LIBS_INIT@
Cflags: -I${includedir}
@ -1,27 +0,0 @@ |
||||
proto3_implicit_proto |
||||
1 |
||||
third_party/protobuf/editions/codegen_tests/proto3_implicit.proto |
||||
proto2_optional_proto |
||||
1 |
||||
third_party/protobuf/editions/codegen_tests/proto2_optional.proto |
||||
proto3_enum_proto |
||||
1 |
||||
third_party/protobuf/editions/codegen_tests/proto3_enum.proto |
||||
struct |
||||
1 |
||||
google/protobuf/struct.proto |
||||
wrappers |
||||
1 |
||||
google/protobuf/wrappers.proto |
||||
duration |
||||
1 |
||||
google/protobuf/duration.proto |
||||
timestamp |
||||
1 |
||||
google/protobuf/timestamp.proto |
||||
field_mask |
||||
1 |
||||
google/protobuf/field_mask.proto |
||||
any |
||||
1 |
||||
google/protobuf/any.proto |
@ -1,8 +1,8 @@ |
||||
`protos` Generator |
||||
hpb Generator |
||||
================== |
||||
|
||||
This directory contains the generator for the [`protos` |
||||
API](https://github.com/protocolbuffers/protobuf/tree/main/protos), an |
||||
This directory contains the generator for the [`hpb` |
||||
API](https://github.com/protocolbuffers/protobuf/tree/main/hpb), an |
||||
experimental C++ protobuf implementation. Most users should use the standard |
||||
C++ implementation |
||||
[here](https://github.com/protocolbuffers/protobuf/tree/main/src). |
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue