bazel: switch to native proto_library/cc_proto_library. (#201)

This PR follows through on https://github.com/envoyproxy/envoy/issues/1873
on the data-plane-api side.

Also, updated the Python validation script and added a test to ensure
this is covered in CI, avoiding future bit rot.

Signed-off-by: Harvey Tuch <htuch@google.com>
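
To make the switch concrete, here is a minimal sketch of what a consuming BUILD file looks like under the reworked macro; the load path and target names are illustrative, not part of this diff:

    load("//bazel:api_build_system.bzl", "api_proto_library")

    api_proto_library(
        name = "example",
        srcs = ["example.proto"],
        deps = [":base"],  # another api_proto_library target, referenced by its bare name
    )

C++ consumers then depend on the generated ":example_cc" target and Python consumers on ":example_py" (the "_py" suffix matches the existing dep on //api/filter:http_connection_manager_py in tools/BUILD).
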
htuch, committed via GitHub
parent 05d16fe66d
commit d498884402
Changed files (lines changed):
  WORKSPACE (44)
  bazel/api_build_system.bzl (45)
  bazel/repositories.bzl (23)
  ci/build_setup.sh (10)
  ci/ci_steps.sh (2)
  ci/do_ci.sh (4)
  examples/service_envoy/BUILD (4)
  examples/service_envoy/http_connection_manager.pb (3)
  examples/service_envoy/listeners.pb (17)
  tools/BUILD (11)
  tools/generate_listeners.py (32)
  tools/generate_listeners_test.py (8)

@ -4,41 +4,35 @@ load("//bazel:repositories.bzl", "api_dependencies")
api_dependencies()
git_repository(
name = "protobuf_bzl",
# v3.4.0
commit = "80a37e0782d2d702d52234b62dd4b9ec74fd2c95",
remote = "https://github.com/google/protobuf.git",
# TODO(htuch): This can switch back to a point release http_archive at the next
# release (> 3.4.1), we need HEAD proto_library support and
# https://github.com/google/protobuf/pull/3761.
http_archive(
name = "com_google_protobuf",
strip_prefix = "protobuf-c4f59dcc5c13debc572154c8f636b8a9361aacde",
sha256 = "5d4551193416861cb81c3bc0a428f22a6878148c57c31fb6f8f2aa4cf27ff635",
url = "https://github.com/google/protobuf/archive/c4f59dcc5c13debc572154c8f636b8a9361aacde.tar.gz",
)
bind(
name = "protobuf",
actual = "@protobuf_bzl//:protobuf",
)
bind(
name = "protobuf_python",
actual = "@protobuf_bzl//:protobuf_python",
)
bind(
name = "protobuf_python_genproto",
actual = "@protobuf_bzl//:protobuf_python_genproto",
# Needed for cc_proto_library, Bazel doesn't support aliases today for repos,
# see https://groups.google.com/forum/#!topic/bazel-discuss/859ybHQZnuI and
# https://github.com/bazelbuild/bazel/issues/3219.
http_archive(
name = "com_google_protobuf_cc",
strip_prefix = "protobuf-c4f59dcc5c13debc572154c8f636b8a9361aacde",
sha256 = "5d4551193416861cb81c3bc0a428f22a6878148c57c31fb6f8f2aa4cf27ff635",
url = "https://github.com/google/protobuf/archive/c4f59dcc5c13debc572154c8f636b8a9361aacde.tar.gz",
)
bind(
name = "protoc",
actual = "@protobuf_bzl//:protoc",
name = "six",
actual = "@six_archive//:six",
)
new_http_archive(
name = "six_archive",
build_file = "@protobuf_bzl//:six.BUILD",
build_file = "@com_google_protobuf//:six.BUILD",
sha256 = "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a",
url = "https://pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz#md5=34eed507548117b2ab523ab14b2f8b55",
)
bind(
name = "six",
actual = "@six_archive//:six",
)
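
The remaining bind() keeps the //external:six alias working, so rules that already depend on it don't need to name the underlying repository. A hypothetical consumer, for illustration only:

    py_library(
        name = "needs_six",          # hypothetical target
        srcs = ["needs_six.py"],
        deps = ["//external:six"],   # resolved to @six_archive//:six by the bind() above
    )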

@ -1,4 +1,4 @@
load("@protobuf_bzl//:protobuf.bzl", "cc_proto_library", "py_proto_library")
load("@com_google_protobuf//:protobuf.bzl", "py_proto_library")
def _CcSuffix(d):
return d + "_cc"
@ -8,34 +8,41 @@ def _PySuffix(d):
# TODO(htuch): has_services is currently ignored but will in future support
# gRPC stub generation.
def api_cc_proto_library(name, srcs = [], deps = [], has_services = 0):
cc_proto_library(
name = name,
# TODO(htuch): Convert this to native py_proto_library once
# https://github.com/bazelbuild/bazel/issues/3935 and/or
# https://github.com/bazelbuild/bazel/issues/2626 are resolved.
def api_py_proto_library(name, srcs = [], deps = [], has_services = 0):
py_proto_library(
name = _PySuffix(name),
srcs = srcs,
default_runtime = "//external:protobuf",
protoc = "//external:protoc",
deps = [_CcSuffix(d) for d in deps] + [
"@googleapis//:http_api_protos",
"@protobuf_bzl//:cc_wkt_protos",
],
default_runtime = "@com_google_protobuf//:protobuf_python",
protoc = "@com_google_protobuf//:protoc",
deps = [_PySuffix(d) for d in deps] + ["@googleapis//:http_api_protos_py"],
visibility = ["//visibility:public"],
)
# TODO(htuch): has_services is currently ignored but will in future support
# gRPC stub generation.
def api_py_proto_library(name, srcs = [], deps = [], has_services = 0):
py_proto_library(
def api_proto_library(name, srcs = [], deps = [], has_services = 0):
native.proto_library(
name = name,
srcs = srcs,
default_runtime = "//external:protobuf_python",
protoc = "//external:protoc",
deps = [_PySuffix(d) for d in deps] + ["@googleapis//:http_api_protos_py"],
deps = deps + [
"@com_google_protobuf//:any_proto",
"@com_google_protobuf//:descriptor_proto",
"@com_google_protobuf//:duration_proto",
"@com_google_protobuf//:struct_proto",
"@com_google_protobuf//:wrappers_proto",
"@googleapis//:http_api_protos_lib",
],
visibility = ["//visibility:public"],
)
def api_proto_library(name, srcs = [], deps = [], has_services = 0):
api_cc_proto_library(_CcSuffix(name), srcs, deps, has_services)
api_py_proto_library(_PySuffix(name), srcs, deps, has_services)
native.cc_proto_library(
name = _CcSuffix(name),
deps = [name],
visibility = ["//visibility:public"],
)
api_py_proto_library(name, srcs, deps, has_services)
def api_cc_test(name, srcs, proto_deps):
native.cc_test(

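Net effect of the macro rework above, sketched for a hypothetical proto (names are illustrative and the well-known-type deps are abbreviated):

    api_proto_library(name = "address", srcs = ["address.proto"])

    # expands, approximately, to:
    proto_library(
        name = "address",
        srcs = ["address.proto"],
        deps = ["@com_google_protobuf//:any_proto", ..., "@googleapis//:http_api_protos_lib"],
    )
    cc_proto_library(name = "address_cc", deps = ["address"])  # native rule
    py_proto_library(name = "address_py", srcs = ["address.proto"], ...)  # still via protobuf.bzl

The C++ path is now fully native proto_library/cc_proto_library; only the Python path still goes through @com_google_protobuf//:protobuf.bzl, pending the Bazel issues referenced in the TODO above.
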
@ -5,7 +5,7 @@ def api_dependencies():
commit = "5c6df0cd18c6a429eab739fb711c27f6e1393366",
remote = "https://github.com/googleapis/googleapis.git",
build_file_content = """
load("@protobuf_bzl//:protobuf.bzl", "cc_proto_library", "py_proto_library")
load("@com_google_protobuf//:protobuf.bzl", "py_proto_library")
filegroup(
name = "http_api_protos_src",
@ -14,18 +14,19 @@ filegroup(
"google/api/http.proto",
],
visibility = ["//visibility:public"],
)
proto_library(
name = "http_api_protos_lib",
srcs = [":http_api_protos_src"],
deps = ["@com_google_protobuf//:descriptor_proto"],
visibility = ["//visibility:public"],
)
cc_proto_library(
name = "http_api_protos",
srcs = [
"google/api/annotations.proto",
"google/api/http.proto",
],
default_runtime = "//external:protobuf",
protoc = "//external:protoc",
deps = [":http_api_protos_lib"],
visibility = ["//visibility:public"],
deps = ["@protobuf_bzl//:cc_wkt_protos"],
)
py_proto_library(
@ -35,10 +36,10 @@ py_proto_library(
"google/api/http.proto",
],
include = ".",
default_runtime = "//external:protobuf_python",
protoc = "//external:protoc",
default_runtime = "@com_google_protobuf//:protobuf_python",
protoc = "@com_google_protobuf//:protoc",
visibility = ["//visibility:public"],
deps = ["//external:protobuf_python"],
deps = ["@com_google_protobuf//:protobuf_python"],
)
""",
)

@ -15,6 +15,14 @@ then
exit 1
fi
# Create a fake home. Python site libs tries to do getpwuid(3) if we don't and
# the CI Docker image gets confused as it has no passwd entry when running
# non-root unless we do this.
FAKE_HOME=/tmp/fake_home
mkdir -p "${FAKE_HOME}"
export HOME="${FAKE_HOME}"
export PYTHONUSERBASE="${FAKE_HOME}"
# Environment setup.
export USER=bazel
export TEST_TMPDIR=/build/tmp
@ -25,7 +33,7 @@ BAZEL_OPTIONS="--package_path %workspace%:/source"
export BAZEL_QUERY_OPTIONS="${BAZEL_OPTIONS}"
export BAZEL_BUILD_OPTIONS="--strategy=Genrule=standalone --spawn_strategy=standalone \
--verbose_failures ${BAZEL_OPTIONS} --jobs=${NUM_CPUS}"
export BAZEL_TEST_OPTIONS="${BAZEL_BUILD_OPTIONS} --cache_test_results=no --test_output=all"
export BAZEL_TEST_OPTIONS="${BAZEL_BUILD_OPTIONS} --cache_test_results=no --test_output=all --test_env=HOME --test_env=PYTHONUSERBASE"
[[ "${BAZEL_EXPUNGE}" == "1" ]] && "${BAZEL}" clean --expunge
function cleanup() {

@ -4,7 +4,7 @@
set -e
# We reuse the https://github.com/lyft/envoy/ CI image here to get Bazel.
ENVOY_BUILD_SHA=22c55f8ec756c5ddeb26c3424e128a91aec23116
ENVOY_BUILD_SHA=44d539cb572d04c81b62425373440c54934cf267
# Lint travis file.
travis lint .travis.yml --skip-completion-check

@ -10,8 +10,8 @@ echo "building using ${NUM_CPUS} CPUs"
if [[ "$1" == "bazel.test" ]]; then
echo "bazel building and testing..."
bazel --batch build ${BAZEL_BUILD_OPTIONS} //...
bazel --batch test ${BAZEL_TEST_OPTIONS} //...
bazel --batch build ${BAZEL_BUILD_OPTIONS} //api/...
bazel --batch test ${BAZEL_TEST_OPTIONS} //test/... //tools/...
exit 0
else
echo "Invalid do_ci.sh target. The only valid target is bazel.build."

@ -0,0 +1,4 @@
exports_files([
"http_connection_manager.pb",
"listeners.pb",
])

@ -10,7 +10,7 @@ route_config {
match {
prefix: "/service"
}
forward {
route {
cluster: "local_service"
timeout {
seconds: 0
@ -21,6 +21,5 @@ route_config {
}
http_filters {
type: DECODER
name: "router"
}

@ -1,16 +1,11 @@
listeners {
address {
named_address {
address {
socket_address {
protocol: TCP
port {
value: 80
port_value: 80
}
}
}
filter_chains {
filter_chain {
type: READ
}
filter_chains {
filters {
name: "http_connection_manager"
}
}
}

@ -8,3 +8,14 @@ py_binary(
"//api/filter:http_connection_manager_py",
],
)
py_test(
name = "generate_listeners_test",
srcs = ["generate_listeners_test.py"],
data = [
"//examples/service_envoy:http_connection_manager.pb",
"//examples/service_envoy:listeners.pb",
],
visibility = ["//visibility:public"],
deps = [":generate_listeners"],
)

@ -41,25 +41,14 @@ def ParseProto(path, filter_name):
return filter_config
if __name__ == '__main__':
if len(sys.argv) < 4:
print(
'Usage: %s <path to listeners.pb> <output listeners.pb> <output '
'listeners.json> <filter config fragment paths>') % sys.argv[0]
sys.exit(1)
def GenerateListeners(listeners_pb_path, output_pb_path, output_json_path,
fragments):
listener = lds_pb2.Listener()
with open(listeners_pb_path, 'r') as f:
text_format.Merge(f.read(), listener)
listeners_path = sys.argv[1]
output_pb_path = sys.argv[2]
output_json_path = sys.argv[3]
fragments = iter(sys.argv[4:])
listener_discover_response = lds_pb2.ListenerDiscoveryResponse()
with open(listeners_path, 'r') as f:
text_format.Merge(f.read(), listener_discover_response)
for listener in listener_discover_response.listeners:
for filter_chain in listener.filter_chains:
for f in filter_chain.filter_chain:
for f in filter_chain.filters:
f.config.CopyFrom(ProtoToStruct(ParseProto(fragments.next(), f.name)))
with open(output_pb_path, 'w') as f:
@ -67,3 +56,12 @@ if __name__ == '__main__':
with open(output_json_path, 'w') as f:
f.write(json_format.MessageToJson(listener))
if __name__ == '__main__':
if len(sys.argv) < 4:
print('Usage: %s <path to listeners.pb> <output listeners.pb> <output '
'listeners.json> <filter config fragment paths>') % sys.argv[0]
sys.exit(1)
GenerateListeners(sys.argv[1], sys.argv[2], sys.argv[3], iter(sys.argv[4:]))

@ -0,0 +1,8 @@
"""Tests for generate_listeners."""
import generate_listeners
if __name__ == "__main__":
generate_listeners.GenerateListeners(
"examples/service_envoy/listeners.pb", "/dev/stdout", "/dev/stdout",
iter(["examples/service_envoy/http_connection_manager.pb"]))