Bumping protobuf dependency to newer commit

PiperOrigin-RevId: 460811319
pull/13171/head
Mike Kruskal authored 2 years ago, committed by Copybara-Service
parent eb66ab601f
commit 17b6451684
  1. BUILD (3 lines changed)
  2. bazel/build_defs.bzl (12 lines changed)
  3. bazel/protobuf.patch (46 lines changed)
  4. bazel/py_proto_library.bzl (28 lines changed)
  5. bazel/upb_proto_library.bzl (40 lines changed)
  6. bazel/workspace_deps.bzl (2 lines changed)
  7. python/dist/BUILD.bazel (10 lines changed)
  8. python/pb_unit_tests/pyproto_test_wrapper.bzl (2 lines changed)
  9. upb/bindings/lua/BUILD.bazel (2 lines changed)
  10. upb/bindings/lua/lua_proto_library.bzl (28 lines changed)
  11. upb/bindings/lua/test_upb.lua (2 lines changed)
  12. upbc/BUILD (6 lines changed)
  13. upbc/code_generator_request.proto (2 lines changed)

BUILD

@@ -848,6 +848,7 @@ upb_amalgamation(
":reflection",
":upb",
],
strip_import_prefix = ["src"],
)
cc_library(
@@ -881,6 +882,7 @@ upb_amalgamation(
":upb",
],
prefix = "php-",
strip_import_prefix = ["src"],
)
cc_library(
@@ -913,6 +915,7 @@ upb_amalgamation(
":upb",
],
prefix = "ruby-",
strip_import_prefix = ["src"],
)
cc_library(

bazel/build_defs.bzl

@@ -25,6 +25,7 @@
"""Internal rules for building upb."""
load("@bazel_skylib//lib:paths.bzl", "paths")
load(":upb_proto_library.bzl", "GeneratedSrcsInfo")
_DEFAULT_CPPOPTS = []
@@ -35,6 +36,7 @@ _DEFAULT_CPPOPTS.extend([
"-Wextra",
# "-Wshorten-64-to-32", # not in GCC (and my Kokoro images doesn't have Clang)
"-Werror",
"-Wno-unused-parameter",
"-Wno-long-long",
])
_DEFAULT_COPTS.extend([
@@ -95,6 +97,13 @@ def _get_real_roots(files):
roots[real_root] = True
return roots.keys()
def _get_includes(files, strip_import_prefix):
roots = _get_real_roots(files)
includes = ["-I" + root for root in roots]
for include in strip_import_prefix:
includes += ["-I" + paths.join(root, include) for root in roots]
return includes
def make_shell_script(name, contents, out):
contents = contents.replace("$", "$$")
native.genrule(
@@ -137,7 +146,7 @@ def _upb_amalgamation(ctx):
ctx.actions.run(
inputs = inputs,
outputs = ctx.outputs.outs,
arguments = [ctx.bin_dir.path + "/", ctx.attr.prefix] + [f.path for f in srcs] + ["-I" + root for root in _get_real_roots(inputs)],
arguments = [ctx.bin_dir.path + "/", ctx.attr.prefix] + [f.path for f in srcs] + _get_includes(inputs, ctx.attr.strip_import_prefix),
progress_message = "Making amalgamation",
executable = ctx.executable._amalgamator,
)
@@ -155,6 +164,7 @@ upb_amalgamation = rule(
),
"libs": attr.label_list(aspects = [_file_list_aspect]),
"outs": attr.output_list(),
"strip_import_prefix": attr.string_list(),
},
implementation = _upb_amalgamation,
)
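
The new _get_includes helper above builds the amalgamator's include flags: one "-I<root>" per real source root, plus one "-I<root>/<prefix>" for each entry in the new strip_import_prefix attribute (the BUILD file passes ["src"], matching the bumped protobuf checkout where sources moved under src/). A minimal standalone sketch of that flag construction, using an assumed root list rather than real Bazel File objects:

    import posixpath

    def get_includes(roots, strip_import_prefix):
        # One -I per root, then one per (root, prefix) combination,
        # mirroring _get_includes in bazel/build_defs.bzl.
        includes = ["-I" + root for root in roots]
        for prefix in strip_import_prefix:
            includes += ["-I" + posixpath.join(root, prefix) for root in roots]
        return includes

    # Hypothetical root for a cross-repo protobuf source tree:
    print(get_includes(["external/com_google_protobuf"], ["src"]))
    # ['-Iexternal/com_google_protobuf', '-Iexternal/com_google_protobuf/src']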

bazel/protobuf.patch

@@ -1,17 +1,3 @@
--- BUILD.bazel
+++ BUILD.bazel
@@ -896,6 +896,10 @@ py_library(
[
"python/google/protobuf/**/*.py",
],
+ exclude = [
+ "python/google/protobuf/internal/*_test.py",
+ "python/google/protobuf/internal/test_util.py",
+ ]
),
imports = ["python"],
srcs_version = "PY2AND3",
--- python/google/protobuf/internal/test_util.py
+++ python/google/protobuf/internal/test_util.py
@@ -39,6 +39,7 @@ __author__ = 'robinson@google.com (Will Robinson)'
@@ -29,7 +15,7 @@
+def _SearchUp(path, filename):
+ path = pathlib.Path(path).resolve()
+ for parent in [path] + list(path.parents):
+ file_path = parent / ('src/google/protobuf/testdata/' + filename)
+ file_path = parent / ('google/protobuf/testdata/' + filename)
+ if file_path.exists():
+ # Found it. Load the golden file from the testdata directory.
+ return file_path.open('rb')
@@ -52,3 +38,33 @@
# Search internally.
path = '.'
--- python/internal.bzl
+++ python/internal.bzl
@@ -1,5 +1,11 @@
# Internal helpers for building the Python protobuf runtime.
+def _remove_cross_repo_path(path):
+ components = path.split("/")
+ if components[0] == "..":
+ return "/".join(components[2:])
+ return path
+
def _internal_copy_files_impl(ctx):
strip_prefix = ctx.attr.strip_prefix
if strip_prefix[-1] != "/":
@@ -7,10 +13,11 @@ def _internal_copy_files_impl(ctx):
src_dests = []
for src in ctx.files.srcs:
- if src.short_path[:len(strip_prefix)] != strip_prefix:
+ short_path = _remove_cross_repo_path(src.short_path)
+ if short_path[:len(strip_prefix)] != strip_prefix:
fail("Source does not start with %s: %s" %
- (strip_prefix, src.short_path))
- dest = ctx.actions.declare_file(src.short_path[len(strip_prefix):])
+ (strip_prefix, short_path))
+ dest = ctx.actions.declare_file(short_path[len(strip_prefix):])
src_dests.append([src, dest])
if ctx.attr.is_windows:
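
The patch now also teaches protobuf's python/internal.bzl about cross-repository files, whose Bazel short paths look like ../<repo>/<path>; stripping the leading two components lets the existing strip_prefix check work for both in-repo and cross-repo sources. A small runnable sketch of that normalization (example paths are illustrative):

    def remove_cross_repo_path(path):
        # Files from another Bazel repository have short_paths like
        # "../com_google_protobuf/python/google/protobuf/any_pb2.py"; drop the
        # "../<repo>/" part so prefix checks see a repo-relative path.
        components = path.split("/")
        if components[0] == "..":
            return "/".join(components[2:])
        return path

    print(remove_cross_repo_path("../com_google_protobuf/python/google/protobuf/any_pb2.py"))
    # python/google/protobuf/any_pb2.py
    print(remove_cross_repo_path("python/google/protobuf/any_pb2.py"))  # unchanged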

bazel/py_proto_library.bzl

@@ -41,6 +41,14 @@ load("@rules_proto//proto:defs.bzl", "ProtoInfo")
# Generic support code #########################################################
# begin:github_only
_is_google3 = False
# end:github_only
# begin:google_only
# _is_google3 = True
# end:google_only
def _get_real_short_path(file):
# For some reason, files from other archives have short paths that look like:
# ../com_google_protobuf/google/protobuf/descriptor.proto
@@ -58,13 +66,25 @@ def _get_real_short_path(file):
short_path = short_path.split(virtual_imports)[1].split("/", 1)[1]
return short_path
def _get_real_root(file):
def _get_real_root(ctx, file):
real_short_path = _get_real_short_path(file)
return file.path[:-len(real_short_path) - 1]
root = file.path[:-len(real_short_path) - 1]
if not _is_google3 and ctx.rule.attr.strip_import_prefix:
root = paths.join(root, ctx.rule.attr.strip_import_prefix[1:])
return root
def _generate_output_file(ctx, src, extension):
package = ctx.label.package
if not _is_google3:
strip_import_prefix = ctx.rule.attr.strip_import_prefix
if strip_import_prefix:
if not package.startswith(strip_import_prefix[1:]):
fail("%s does not begin with prefix %s" % (package, strip_import_prefix))
package = package[len(strip_import_prefix):]
real_short_path = _get_real_short_path(src)
real_short_path = paths.relativize(real_short_path, ctx.label.package)
real_short_path = paths.relativize(real_short_path, package)
output_filename = paths.replace_extension(real_short_path, extension)
ret = ctx.actions.declare_file(output_filename)
return ret
@@ -98,7 +118,7 @@ def _py_proto_library_aspect_impl(target, ctx):
outputs = srcs,
executable = ctx.executable._protoc,
arguments = [
"--python_out=" + _get_real_root(srcs[0]),
"--python_out=" + _get_real_root(ctx, srcs[0]),
"--descriptor_set_in=" + ctx.configuration.host_path_separator.join([f.path for f in transitive_sets]),
] +
[_get_real_short_path(file) for file in proto_sources],
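
In py_proto_library.bzl, _get_real_root now takes ctx and, outside google3, appends the proto_library's strip_import_prefix (such as "/src") to the computed root, while _generate_output_file strips the same prefix from the package before relativizing; together this keeps --python_out and the declared output files aligned for protos that moved under src/. A minimal sketch of the root computation, with hypothetical paths rather than values from a real build:

    import posixpath

    def get_real_root(real_short_path, file_path, strip_import_prefix):
        # Mirror of the updated _get_real_root: remove the repo-relative short
        # path from the end of the file path, then descend into the
        # strip_import_prefix directory (its leading "/" dropped) if one is set.
        root = file_path[:-len(real_short_path) - 1]
        if strip_import_prefix:
            root = posixpath.join(root, strip_import_prefix[1:])
        return root

    # Hypothetical generated file for a proto_library under protobuf's new
    # src/ layout with strip_import_prefix = "/src":
    print(get_real_root(
        "src/google/protobuf/any_pb2.py",
        "bazel-out/bin/external/com_google_protobuf/src/google/protobuf/any_pb2.py",
        "/src",
    ))
    # bazel-out/bin/external/com_google_protobuf/src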

bazel/upb_proto_library.bzl

@@ -58,17 +58,37 @@ def _get_real_short_path(file):
short_path = short_path.split(virtual_imports)[1].split("/", 1)[1]
return short_path
def _get_real_root(file):
def _get_real_root(ctx, file):
real_short_path = _get_real_short_path(file)
return file.path[:-len(real_short_path) - 1]
root = file.path[:-len(real_short_path) - 1]
if not _is_google3 and ctx.rule.attr.strip_import_prefix:
root = paths.join(root, ctx.rule.attr.strip_import_prefix[1:])
return root
def _generate_output_file(ctx, src, extension):
package = ctx.label.package
if not _is_google3:
strip_import_prefix = ctx.rule.attr.strip_import_prefix
if strip_import_prefix:
if not package.startswith(strip_import_prefix[1:]):
fail("%s does not begin with prefix %s" % (package, strip_import_prefix))
package = package[len(strip_import_prefix):]
real_short_path = _get_real_short_path(src)
real_short_path = paths.relativize(real_short_path, ctx.label.package)
real_short_path = paths.relativize(real_short_path, package)
output_filename = paths.replace_extension(real_short_path, extension)
ret = ctx.actions.declare_file(output_filename)
return ret
def _generate_include_path(src, out, extension):
short_path = _get_real_short_path(src)
short_path = paths.replace_extension(short_path, extension)
if not out.path.endswith(short_path):
fail("%s does not end with %s" % (out.path, short_path))
return out.path[:-len(short_path)]
def _filter_none(elems):
out = []
for elem in elems:
@@ -76,7 +96,7 @@ def _filter_none(elems):
out.append(elem)
return out
def _cc_library_func(ctx, name, hdrs, srcs, copts, dep_ccinfos):
def _cc_library_func(ctx, name, hdrs, srcs, copts, includes, dep_ccinfos):
"""Like cc_library(), but callable from rules.
Args:
@@ -85,6 +105,7 @@ def _cc_library_func(ctx, name, hdrs, srcs, copts, dep_ccinfos):
hdrs: Public headers that can be #included from other rules.
srcs: C/C++ source files.
copts: Additional options for cc compilation.
includes: Additional include paths.
dep_ccinfos: CcInfo providers of dependencies we should build/link against.
Returns:
@@ -112,6 +133,7 @@ def _cc_library_func(ctx, name, hdrs, srcs, copts, dep_ccinfos):
cc_toolchain = toolchain,
name = name,
srcs = srcs,
includes = includes,
public_hdrs = hdrs,
user_compile_flags = copts,
compilation_contexts = compilation_contexts,
@@ -181,6 +203,7 @@ GeneratedSrcsInfo = provider(
fields = {
"srcs": "list of srcs",
"hdrs": "list of hdrs",
"includes": "list of extra includes",
},
)
@@ -213,7 +236,7 @@ def _compile_upb_protos(ctx, generator, proto_info, proto_sources):
outputs = srcs + hdrs,
executable = ctx.executable._protoc,
arguments = [
"--" + generator + "_out=" + codegen_params + _get_real_root(srcs[0]),
"--" + generator + "_out=" + codegen_params + _get_real_root(ctx, srcs[0]),
"--plugin=protoc-gen-" + generator + "=" + tool.path,
"--descriptor_set_in=" + ctx.configuration.host_path_separator.join([f.path for f in transitive_sets]),
] +
@@ -221,7 +244,11 @@ def _compile_upb_protos(ctx, generator, proto_info, proto_sources):
progress_message = "Generating upb protos for :" + ctx.label.name,
mnemonic = "GenUpbProtos",
)
return GeneratedSrcsInfo(srcs = srcs, hdrs = hdrs)
return GeneratedSrcsInfo(
srcs = srcs,
hdrs = hdrs,
includes = [_generate_include_path(proto_sources[0], hdrs[0], ext + ".h")],
)
def _upb_proto_rule_impl(ctx):
if len(ctx.attr.deps) != 1:
@@ -272,6 +299,7 @@ def _upb_proto_aspect_impl(target, ctx, generator, cc_provider, file_provider):
name = ctx.rule.attr.name + "." + generator,
hdrs = files.hdrs,
srcs = files.srcs,
includes = files.includes,
copts = ctx.attr._copts[UpbProtoLibraryCoptsInfo].copts,
dep_ccinfos = dep_ccinfos,
)
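
upb_proto_library.bzl gets the same _get_real_root/_generate_output_file treatment, and additionally records an include directory per generated header via the new _generate_include_path; GeneratedSrcsInfo.includes then feeds that directory through _cc_library_func into cc_common.compile(), so dependents can keep writing #include "google/protobuf/any.upb.h" even though the header sits under src/. A small runnable sketch of the include-path derivation, with an assumed output path:

    import posixpath

    def generate_include_path(src_short_path, out_path, extension):
        # Mirror of _generate_include_path: swap the .proto extension for the
        # generated one, then return whatever prefix of the output header's
        # path precedes that import-style short path.
        short_path = posixpath.splitext(src_short_path)[0] + extension
        if not out_path.endswith(short_path):
            raise ValueError("%s does not end with %s" % (out_path, short_path))
        return out_path[:-len(short_path)]

    # Hypothetical generated header for google/protobuf/any.proto under src/:
    print(generate_include_path(
        "google/protobuf/any.proto",
        "bazel-out/bin/external/com_google_protobuf/src/google/protobuf/any.upb.h",
        ".upb.h",
    ))
    # bazel-out/bin/external/com_google_protobuf/src/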

bazel/workspace_deps.bzl

@@ -24,7 +24,7 @@ def upb_deps():
_github_archive,
name = "com_google_protobuf",
repo = "https://github.com/protocolbuffers/protobuf",
commit = "14803e6f63d4785ecd95adeeae3ac42a728b3857",
commit = "3380463ff87873b1ed07b730c79d78477289d0cd",
patches = ["//bazel:protobuf.patch"],
)

python/dist/BUILD.bazel

@@ -131,6 +131,7 @@ py_wheel(
strip_path_prefixes = [
"python/dist/",
"python/",
"src/",
],
version = PROTOBUF_PYTHON_VERSION,
deps = [
@@ -161,6 +162,7 @@ py_wheel(
python_tag = "py3",
strip_path_prefixes = [
"python/",
"src/",
],
version = PROTOBUF_PYTHON_VERSION,
deps = [
@@ -171,18 +173,22 @@ py_wheel(
py_wheel(
name = "test_wheel",
testonly = True,
abi = "none",
distribution = "protobuftests",
platform = "any",
python_tag = "py3",
strip_path_prefixes = ["python/"],
strip_path_prefixes = [
"python/",
"src/",
],
version = PROTOBUF_PYTHON_VERSION,
deps = [
"//python/pb_unit_tests:test_files",
"@com_google_protobuf//:python_common_test_protos",
"@com_google_protobuf//:python_specific_test_protos",
"@com_google_protobuf//:python_test_srcs",
"@com_google_protobuf//:testdata",
"@com_google_protobuf//src/google/protobuf:testdata",
],
)
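
Adding "src/" to strip_path_prefixes keeps wheel-internal paths stable now that protobuf's test sources and testdata targets live under src/ in the bumped commit. A rough, hypothetical model of how such a prefix strip behaves (this is not rules_python's actual py_wheel implementation, only an illustration of the intent):

    def wheel_path(path, strip_path_prefixes):
        # Drop the first matching prefix so the file lands at its import path
        # inside the wheel; unmatched paths are left alone.
        for prefix in strip_path_prefixes:
            if path.startswith(prefix):
                return path[len(prefix):]
        return path

    print(wheel_path("src/google/protobuf/testdata/golden_message",
                     ["python/", "src/"]))
    # google/protobuf/testdata/golden_message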

python/pb_unit_tests/pyproto_test_wrapper.bzl

@@ -7,7 +7,7 @@ def pyproto_test_wrapper(name):
srcs = [src],
legacy_create_init = False,
main = src,
data = ["@com_google_protobuf//:testdata"],
data = ["@com_google_protobuf//src/google/protobuf:testdata"],
deps = [
"//python:_message",
"@com_google_protobuf//:python_common_test_protos",

upb/bindings/lua/BUILD.bazel

@@ -63,7 +63,7 @@ cc_binary(
visibility = ["//visibility:public"],
deps = [
"@com_google_absl//absl/strings",
"@com_google_protobuf//:protoc_lib",
"@com_google_protobuf//src/google/protobuf/compiler:code_generator",
],
)

upb/bindings/lua/lua_proto_library.bzl

@@ -29,6 +29,14 @@ load("@bazel_skylib//lib:paths.bzl", "paths")
# Generic support code #########################################################
# begin:github_only
_is_google3 = False
# end:github_only
# begin:google_only
# _is_google3 = True
# end:google_only
def _get_real_short_path(file):
# For some reason, files from other archives have short paths that look like:
# ../com_google_protobuf/google/protobuf/descriptor.proto
@@ -39,18 +47,26 @@ def _get_real_short_path(file):
# Sometimes it has another few prefixes like:
# _virtual_imports/any_proto/google/protobuf/any.proto
# benchmarks/_virtual_imports/100_msgs_proto/benchmarks/100_msgs.proto
# We want just google/protobuf/any.proto.
if short_path.startswith("_virtual_imports"):
short_path = short_path.split("/", 2)[-1]
virtual_imports = "_virtual_imports/"
if virtual_imports in short_path:
short_path = short_path.split(virtual_imports)[1].split("/", 1)[1]
return short_path
def _get_real_root(file):
def _get_real_root(ctx, file):
real_short_path = _get_real_short_path(file)
return file.path[:-len(real_short_path) - 1]
root = file.path[:-len(real_short_path) - 1]
if not _is_google3 and ctx.rule.attr.strip_import_prefix:
root = paths.join(root, ctx.rule.attr.strip_import_prefix[1:])
return root
def _generate_output_file(ctx, src, extension):
package = ctx.label.package
if not _is_google3 and ctx.rule.attr.strip_import_prefix:
package = package[len(ctx.rule.attr.strip_import_prefix):]
real_short_path = _get_real_short_path(src)
real_short_path = paths.relativize(real_short_path, ctx.label.package)
real_short_path = paths.relativize(real_short_path, package)
output_filename = paths.replace_extension(real_short_path, extension)
ret = ctx.actions.declare_file(output_filename)
return ret
@@ -74,7 +90,7 @@ def _compile_upb_protos(ctx, proto_info, proto_sources):
outputs = files,
executable = ctx.executable._protoc,
arguments = [
"--lua_out=" + _get_real_root(files[0]),
"--lua_out=" + _get_real_root(ctx, files[0]),
"--plugin=protoc-gen-lua=" + ctx.executable._upbc.path,
"--descriptor_set_in=" + ctx.configuration.host_path_separator.join([f.path for f in transitive_sets]),
] +
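
The Lua rules mirror the py_proto_library.bzl changes; in particular, the _virtual_imports handling switches from a startswith check to a substring search because the _virtual_imports/ segment can itself be preceded by other path components (the updated comment cites benchmarks/_virtual_imports/...). A short sketch of the combined short-path normalization, assuming the "../<repo>/" stripping the surrounding function performs:

    def get_real_short_path(short_path):
        # Drop Bazel's cross-repo "../<repo>/" prefix, then anything up to and
        # including a "_virtual_imports/<target>/" segment, to recover the
        # proto import path.
        if short_path.startswith("../"):
            short_path = short_path.split("/", 2)[-1]
        virtual_imports = "_virtual_imports/"
        if virtual_imports in short_path:
            short_path = short_path.split(virtual_imports)[1].split("/", 1)[1]
        return short_path

    print(get_real_short_path(
        "benchmarks/_virtual_imports/100_msgs_proto/benchmarks/100_msgs.proto"
    ))
    # benchmarks/100_msgs.proto
    print(get_real_short_path(
        "../com_google_protobuf/google/protobuf/descriptor.proto"
    ))
    # google/protobuf/descriptor.proto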

upb/bindings/lua/test_upb.lua

@@ -667,7 +667,7 @@ end
function test_foo()
local defpool = upb.DefPool()
local filename = "external/com_google_protobuf/descriptor_proto-descriptor-set.proto.bin"
local filename = "external/com_google_protobuf/src/google/protobuf/descriptor_proto-descriptor-set.proto.bin"
local file = io.open(filename, "rb") or io.open("bazel-bin/" .. filename, "rb")
assert_not_nil(file)
local descriptor = file:read("*a")

upbc/BUILD

@@ -98,7 +98,7 @@ cc_library(
"//:upb",
"@com_google_absl//absl/container:flat_hash_map",
"@com_google_absl//absl/strings",
"@com_google_protobuf//:protoc_lib",
"@com_google_protobuf//src/google/protobuf/compiler:code_generator",
],
)
@@ -130,7 +130,7 @@ cc_binary(
"@com_google_absl//absl/container:flat_hash_set",
"@com_google_absl//absl/strings",
"@com_google_protobuf//:protobuf",
"@com_google_protobuf//:protoc_lib",
"@com_google_protobuf//src/google/protobuf/compiler:code_generator",
],
)
@@ -147,7 +147,7 @@ cc_binary(
"@com_google_absl//absl/container:flat_hash_map",
"@com_google_absl//absl/strings",
"@com_google_protobuf//:protobuf",
"@com_google_protobuf//:protoc_lib",
"@com_google_protobuf//src/google/protobuf/compiler:code_generator",
],
)

upbc/code_generator_request.proto

@@ -2,7 +2,7 @@ syntax = "proto2";
package upbc;
import "google/protobuf/compiler/plugin.proto";
import "src/google/protobuf/compiler/plugin.proto";
message CodeGeneratorRequest {
// The pb sent by protoc to its plugins.
