Move more special cases in bazel build to the .bzl files

pull/13829/head
Alexander Polcyn 7 years ago
parent e8cdc2dbbf
commit 0678db7e24
Changed files:
  1. BUILD (13 lines changed)
  2. bazel/grpc_build_system.bzl (37 lines changed)
  3. bazel/grpc_deps.bzl (129 lines changed)
  4. tools/run_tests/sanity/check_bazel_workspace.py (86 lines changed)

BUILD

@@ -417,14 +417,13 @@ grpc_cc_library(
         "src/core/ext/census/grpc_context.cc",
     ],
     external_deps = [
-        "libssl",
+        "nanopb",
     ],
     language = "c++",
     public_hdrs = [
         "include/grpc/census.h",
     ],
     deps = [
-        "//third_party/nanopb",
         "grpc_base",
     ],
 )
@@ -1010,9 +1009,11 @@ grpc_cc_library(
         "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h",
         "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h",
     ],
+    external_deps = [
+        "nanopb",
+    ],
     language = "c++",
     deps = [
-        "//third_party/nanopb",
         "grpc_base",
         "grpc_client_channel",
         "grpc_resolver_fake",
@@ -1037,9 +1038,11 @@ grpc_cc_library(
         "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h",
         "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h",
     ],
+    external_deps = [
+        "nanopb",
+    ],
     language = "c++",
     deps = [
-        "//third_party/nanopb",
         "grpc_base",
         "grpc_client_channel",
         "grpc_resolver_fake",
@@ -1388,7 +1391,7 @@ grpc_cc_library(
     ],
     hdrs = [
         "src/core/ext/transport/cronet/transport/cronet_transport.h",
-        "//third_party:objective_c/Cronet/bidirectional_stream_c.h",
+        "third_party/objective_c/Cronet/bidirectional_stream_c.h",
     ],
     language = "c++",
     public_hdrs = [
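Note: the BUILD edits above lean on helpers added to bazel/grpc_build_system.bzl (shown below). An entry in external_deps such as "nanopb" is expanded to a dependency label by the macro, so targets no longer list "//third_party/nanopb" directly. A minimal, runnable Python sketch of that expansion rule; the sample inputs are illustrative, not taken from this commit:

    # Mirrors the mapping _get_external_deps applies (see the .bzl diff below):
    # "nanopb" is special-cased to an in-repo label, everything else still
    # resolves through the //external bind aliases.
    def expand_external_deps(external_deps):
        expanded = []
        for dep in external_deps:
            if dep == "nanopb":
                expanded.append("//third_party/nanopb")
            else:
                expanded.append("//external:" + dep)
        return expanded

    print(expand_external_deps(["nanopb"]))  # ['//third_party/nanopb']
    print(expand_external_deps(["libssl"]))  # ['//external:libssl']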

bazel/grpc_build_system.bzl

@@ -23,6 +23,27 @@
 # each change must be ported from one to the other.
 #
+def _get_external_deps(external_deps):
+  ret = []
+  for dep in external_deps:
+    if dep == "nanopb":
+      ret.append("//third_party/nanopb")
+    else:
+      ret.append("//external:" + dep)
+  return ret
+
+def _maybe_update_cc_library_hdrs(hdrs):
+  ret = []
+  hdrs_to_update = {
+      "third_party/objective_c/Cronet/bidirectional_stream_c.h": "//third_party:objective_c/Cronet/bidirectional_stream_c.h",
+  }
+  for h in hdrs:
+    if h in hdrs_to_update.keys():
+      ret.append(hdrs_to_update[h])
+    else:
+      ret.append(h)
+  return ret
+
 def grpc_cc_library(name, srcs = [], public_hdrs = [], hdrs = [],
                     external_deps = [], deps = [], standalone = False,
                     language = "C++", testonly = False, visibility = None,
@@ -33,12 +54,12 @@ def grpc_cc_library(name, srcs = [], public_hdrs = [], hdrs = [],
   native.cc_library(
     name = name,
     srcs = srcs,
-    defines = select({
-        "//:grpc_no_ares": ["GRPC_ARES=0"],
-        "//conditions:default": [],
-    }),
-    hdrs = hdrs + public_hdrs,
-    deps = deps + ["//external:" + dep for dep in external_deps],
+    defines = select({"//:grpc_no_ares": ["GRPC_ARES=0"],
+                      "//conditions:default": [],}) +
+              select({"//:remote_execution": ["GRPC_PORT_ISOLATED_RUNTIME=1"],
+                      "//conditions:default": [],}),
+    hdrs = _maybe_update_cc_library_hdrs(hdrs + public_hdrs),
+    deps = deps + _get_external_deps(external_deps),
     copts = copts,
     visibility = visibility,
     testonly = testonly,
@@ -79,7 +100,7 @@ def grpc_cc_test(name, srcs = [], deps = [], external_deps = [], args = [], data
     srcs = srcs,
     args = args,
     data = data,
-    deps = deps + ["//external:" + dep for dep in external_deps],
+    deps = deps + _get_external_deps(external_deps),
     copts = copts,
     linkopts = ["-pthread"],
   )
@@ -95,7 +116,7 @@ def grpc_cc_binary(name, srcs = [], deps = [], external_deps = [], args = [], da
     data = data,
     testonly = testonly,
     linkshared = linkshared,
-    deps = deps + ["//external:" + dep for dep in external_deps],
+    deps = deps + _get_external_deps(external_deps),
     copts = copts,
     linkopts = ["-pthread"] + linkopts,
   )
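Note: a similar special case applies to headers. A short, runnable Python sketch of the rewrite _maybe_update_cc_library_hdrs performs, which lets BUILD targets list the plain Cronet header path while the macro substitutes the //third_party label; the sample hdrs list is illustrative:

    # Plain paths with a known //third_party label are swapped for that label;
    # everything else passes through unchanged.
    HDRS_TO_UPDATE = {
        "third_party/objective_c/Cronet/bidirectional_stream_c.h":
            "//third_party:objective_c/Cronet/bidirectional_stream_c.h",
    }

    def maybe_update_hdrs(hdrs):
        return [HDRS_TO_UPDATE.get(h, h) for h in hdrs]

    print(maybe_update_hdrs([
        "src/core/ext/transport/cronet/transport/cronet_transport.h",
        "third_party/objective_c/Cronet/bidirectional_stream_c.h",
    ]))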

bazel/grpc_deps.bzl

@@ -1,9 +1,7 @@
-def grpc_deps():
-    native.bind(
-        name = "nanopb",
-        actual = "@com_github_grpc_grpc//third_party/nanopb",
-    )
-
+"""Load dependencies needed to compile and test the grpc library as a 3rd-party consumer."""
+
+def grpc_deps():
+    """Loads dependencies need to compile and test the grpc library."""
     native.bind(
         name = "libssl",
         actual = "@boringssl//:ssl",
@@ -59,54 +57,73 @@ def grpc_deps():
         actual = "@com_github_gflags_gflags//:gflags",
     )
 
-    native.http_archive(
-        name = "boringssl",
-        # on the master-with-bazel branch
-        url = "https://boringssl.googlesource.com/boringssl/+archive/886e7d75368e3f4fab3f4d0d3584e4abfc557755.tar.gz",
-    )
-
-    native.new_http_archive(
-        name = "com_github_madler_zlib",
-        build_file = "@com_github_grpc_grpc//third_party:zlib.BUILD",
-        strip_prefix = "zlib-cacf7f1d4e3d44d871b605da3b647f07d718623f",
-        url = "https://github.com/madler/zlib/archive/cacf7f1d4e3d44d871b605da3b647f07d718623f.tar.gz",
-    )
-
-    native.http_archive(
-        name = "com_google_protobuf",
-        strip_prefix = "protobuf-80a37e0782d2d702d52234b62dd4b9ec74fd2c95",
-        url = "https://github.com/google/protobuf/archive/80a37e0782d2d702d52234b62dd4b9ec74fd2c95.tar.gz",
-    )
-
-    native.new_http_archive(
-        name = "com_github_google_googletest",
-        build_file = "@com_github_grpc_grpc//third_party:gtest.BUILD",
-        strip_prefix = "googletest-ec44c6c1675c25b9827aacd08c02433cccde7780",
-        url = "https://github.com/google/googletest/archive/ec44c6c1675c25b9827aacd08c02433cccde7780.tar.gz",
-    )
-
-    native.http_archive(
-        name = "com_github_gflags_gflags",
-        strip_prefix = "gflags-30dbc81fb5ffdc98ea9b14b1918bfe4e8779b26e",
-        url = "https://github.com/gflags/gflags/archive/30dbc81fb5ffdc98ea9b14b1918bfe4e8779b26e.tar.gz",
-    )
-
-    native.new_http_archive(
-        name = "com_github_google_benchmark",
-        build_file = "@com_github_grpc_grpc//third_party:benchmark.BUILD",
-        strip_prefix = "benchmark-5b7683f49e1e9223cf9927b24f6fd3d6bd82e3f8",
-        url = "https://github.com/google/benchmark/archive/5b7683f49e1e9223cf9927b24f6fd3d6bd82e3f8.tar.gz",
-    )
-
-    native.new_http_archive(
-        name = "com_github_cares_cares",
-        build_file = "@com_github_grpc_grpc//third_party:cares/cares.BUILD",
-        strip_prefix = "c-ares-3be1924221e1326df520f8498d704a5c4c8d0cce",
-        url = "https://github.com/c-ares/c-ares/archive/3be1924221e1326df520f8498d704a5c4c8d0cce.tar.gz",
-    )
-
-    native.http_archive(
-        name = "com_google_absl",
-        strip_prefix = "abseil-cpp-cc4bed2d74f7c8717e31f9579214ab52a9c9c610",
-        url = "https://github.com/abseil/abseil-cpp/archive/cc4bed2d74f7c8717e31f9579214ab52a9c9c610.tar.gz",
-    )
+    if "boringssl" not in native.existing_rules():
+        native.http_archive(
+            name = "boringssl",
+            # on the master-with-bazel branch
+            url = "https://boringssl.googlesource.com/boringssl/+archive/886e7d75368e3f4fab3f4d0d3584e4abfc557755.tar.gz",
+        )
+
+    if "com_github_madler_zlib" not in native.existing_rules():
+        native.new_http_archive(
+            name = "com_github_madler_zlib",
+            build_file = "@com_github_grpc_grpc//third_party:zlib.BUILD",
+            strip_prefix = "zlib-cacf7f1d4e3d44d871b605da3b647f07d718623f",
+            url = "https://github.com/madler/zlib/archive/cacf7f1d4e3d44d871b605da3b647f07d718623f.tar.gz",
+        )
+
+    if "com_google_protobuf" not in native.existing_rules():
+        native.http_archive(
+            name = "com_google_protobuf",
+            strip_prefix = "protobuf-2761122b810fe8861004ae785cc3ab39f384d342",
+            url = "https://github.com/google/protobuf/archive/2761122b810fe8861004ae785cc3ab39f384d342.tar.gz",
+        )
+
+    if "com_github_google_googletest" not in native.existing_rules():
+        native.new_http_archive(
+            name = "com_github_google_googletest",
+            build_file = "@com_github_grpc_grpc//third_party:gtest.BUILD",
+            strip_prefix = "googletest-ec44c6c1675c25b9827aacd08c02433cccde7780",
+            url = "https://github.com/google/googletest/archive/ec44c6c1675c25b9827aacd08c02433cccde7780.tar.gz",
+        )
+
+    if "com_github_gflags_gflags" not in native.existing_rules():
+        native.http_archive(
+            name = "com_github_gflags_gflags",
+            strip_prefix = "gflags-30dbc81fb5ffdc98ea9b14b1918bfe4e8779b26e",
+            url = "https://github.com/gflags/gflags/archive/30dbc81fb5ffdc98ea9b14b1918bfe4e8779b26e.tar.gz",
+        )
+
+    if "com_github_google_benchmark" not in native.existing_rules():
+        native.new_http_archive(
+            name = "com_github_google_benchmark",
+            build_file = "@com_github_grpc_grpc//third_party:benchmark.BUILD",
+            strip_prefix = "benchmark-5b7683f49e1e9223cf9927b24f6fd3d6bd82e3f8",
+            url = "https://github.com/google/benchmark/archive/5b7683f49e1e9223cf9927b24f6fd3d6bd82e3f8.tar.gz",
+        )
+
+    if "com_github_cares_cares" not in native.existing_rules():
+        native.new_http_archive(
+            name = "com_github_cares_cares",
+            build_file = "@com_github_grpc_grpc//third_party:cares/cares.BUILD",
+            strip_prefix = "c-ares-3be1924221e1326df520f8498d704a5c4c8d0cce",
+            url = "https://github.com/c-ares/c-ares/archive/3be1924221e1326df520f8498d704a5c4c8d0cce.tar.gz",
+        )
+
+    if "com_google_absl" not in native.existing_rules():
+        native.http_archive(
+            name = "com_google_absl",
+            strip_prefix = "abseil-cpp-cc4bed2d74f7c8717e31f9579214ab52a9c9c610",
+            url = "https://github.com/abseil/abseil-cpp/archive/cc4bed2d74f7c8717e31f9579214ab52a9c9c610.tar.gz",
+        )
+
+    if "com_github_bazelbuild_bazeltoolchains" not in native.existing_rules():
+        native.http_archive(
+            name = "com_github_bazelbuild_bazeltoolchains",
+            strip_prefix = "bazel-toolchains-af4681c3d19f063f090222ec3d04108c4e0ca255",
+            urls = [
+                "https://mirror.bazel.build/github.com/bazelbuild/bazel-toolchains/archive/af4681c3d19f063f090222ec3d04108c4e0ca255.tar.gz",
+                "https://github.com/bazelbuild/bazel-toolchains/archive/af4681c3d19f063f090222ec3d04108c4e0ca255.tar.gz",
+            ],
+            sha256 = "d58bb2d6c8603f600d522b6104d6192a65339aa26cbba9f11ff5c4b36dedb928",
+        )
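Note: the new `if "<name>" not in native.existing_rules():` guards are what make each dependency overridable: a consumer workspace can declare its own version of, say, com_google_protobuf before calling grpc_deps(), and grpc_deps() then skips its default definition. A minimal, runnable Python sketch of that skip-if-already-defined behaviour, using a toy stand-in for Bazel's native module (the FakeNative class and the example URL are illustrative, not part of the commit):

    # Toy stand-in for Bazel's `native` module, just enough to show why the
    # existing_rules() guard makes each dependency overridable.
    class FakeNative(object):
        def __init__(self):
            self.rules = {}

        def existing_rules(self):
            return list(self.rules)

        def http_archive(self, name, **kwargs):
            self.rules[name] = kwargs

    def toy_grpc_deps(native):
        # Mirrors the guarded pattern used in bazel/grpc_deps.bzl above.
        if "com_google_protobuf" not in native.existing_rules():
            native.http_archive(
                name = "com_google_protobuf",
                url = "https://github.com/google/protobuf/archive/2761122b810fe8861004ae785cc3ab39f384d342.tar.gz",
            )

    native = FakeNative()
    # A consumer pins its own protobuf first...
    native.http_archive(name = "com_google_protobuf", url = "https://example.invalid/my-protobuf.tar.gz")
    toy_grpc_deps(native)
    # ...and grpc_deps() leaves that pin alone.
    print(native.rules["com_google_protobuf"]["url"])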

tools/run_tests/sanity/check_bazel_workspace.py

@@ -30,13 +30,76 @@ git_hash_pattern = re.compile('[0-9a-f]{40}')
 git_submodules = subprocess.check_output('git submodule', shell=True).strip().split('\n')
 git_submodule_hashes = {re.search(git_hash_pattern, s).group() for s in git_submodules}
 
-# Parse git hashes from Bazel WORKSPACE {new_}http_archive rules
-with open('WORKSPACE', 'r') as f:
-  workspace_rules = [expr.value for expr in ast.parse(f.read()).body]
-
-http_archive_rules = [rule for rule in workspace_rules if rule.func.id.endswith('http_archive')]
-archive_urls = [kw.value.s for rule in http_archive_rules for kw in rule.keywords if kw.arg == 'url']
-workspace_git_hashes = {re.search(git_hash_pattern, url).group() for url in archive_urls}
+_BAZEL_TOOLCHAINS_DEP_NAME = 'com_github_bazelbuild_bazeltoolchains'
+
+_GRPC_DEP_NAMES = [
+    'boringssl',
+    'com_github_madler_zlib',
+    'com_google_protobuf',
+    'com_github_google_googletest',
+    'com_github_gflags_gflags',
+    'com_github_google_benchmark',
+    'com_github_cares_cares',
+    'com_google_absl',
+    _BAZEL_TOOLCHAINS_DEP_NAME,
+]
+
+
+class BazelEvalState(object):
+
+  def __init__(self, names_and_urls, overridden_name=None):
+    self.names_and_urls = names_and_urls
+    self.overridden_name = overridden_name
+
+  def http_archive(self, **args):
+    self.archive(**args)
+
+  def new_http_archive(self, **args):
+    self.archive(**args)
+
+  def bind(self, **args):
+    pass
+
+  def existing_rules(self):
+    if self.overridden_name:
+      return [self.overridden_name]
+    return []
+
+  def archive(self, **args):
+    if args['name'] == _BAZEL_TOOLCHAINS_DEP_NAME:
+      self.names_and_urls[args['name']] = 'dont care'
+      return
+    self.names_and_urls[args['name']] = args['url']
+
+
+# Parse git hashes from bazel/grpc_deps.bzl {new_}http_archive rules
+with open(os.path.join('bazel', 'grpc_deps.bzl'), 'r') as f:
+  names_and_urls = {}
+  eval_state = BazelEvalState(names_and_urls)
+  bazel_file = f.read()
+
+# grpc_deps.bzl only defines 'grpc_deps', add this to call it
+bazel_file += '\ngrpc_deps()\n'
+build_rules = {
+    'native': eval_state,
+}
+exec bazel_file in build_rules
+
+for name in _GRPC_DEP_NAMES:
+  assert name in names_and_urls.keys()
+assert len(_GRPC_DEP_NAMES) == len(names_and_urls.keys())
+
+# bazeltoolschains is an exception to this sanity check,
+# we don't require that there is a corresponding git module.
+names_without_bazeltoolchains = names_and_urls.keys()
+names_without_bazeltoolchains.remove(_BAZEL_TOOLCHAINS_DEP_NAME)
+
+archive_urls = [names_and_urls[name] for name in names_without_bazeltoolchains]
+workspace_git_hashes = {
+    re.search(git_hash_pattern, url).group()
+    for url in archive_urls
+}
+if len(workspace_git_hashes) == 0:
+  print("(Likely) parse error, did not find any bazel git dependencies.")
+  sys.exit(1)
 
 # Validate the equivalence of the git submodules and Bazel git dependencies. The
 # condition we impose is that there is a git submodule for every dependency in
@@ -46,4 +109,15 @@ if len(workspace_git_hashes - git_submodule_hashes) > 0:
   print("Found discrepancies between git submodules and Bazel WORKSPACE dependencies")
   sys.exit(1)
 
+# Also check that we can override each dependency
+for name in _GRPC_DEP_NAMES:
+  names_and_urls_with_overridden_name = {}
+  state = BazelEvalState(
+      names_and_urls_with_overridden_name, overridden_name=name)
+  rules = {
+      'native': state,
+  }
+  exec bazel_file in rules
+  assert name not in names_and_urls_with_overridden_name.keys()
+
 sys.exit(0)
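Note: the sanity script above is Python 2 (`exec bazel_file in rules`). A roughly equivalent Python 3 sketch of the evaluation step, assuming the same BazelEvalState class from the diff and that the script is run from the repository root, would be:

    # Python 3 form of the exec-based evaluation used above; BazelEvalState is
    # assumed to be defined exactly as in the diff.
    names_and_urls = {}
    state = BazelEvalState(names_and_urls)

    with open('bazel/grpc_deps.bzl', 'r') as f:
        bazel_file = f.read()

    # grpc_deps.bzl only defines grpc_deps(); append a call so exec() runs it.
    bazel_file += '\ngrpc_deps()\n'
    exec(bazel_file, {'native': state})

    print(sorted(names_and_urls))  # dependency names collected from grpc_deps.bzl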
