docs: skeleton for RST documentation generation plugin. (#205)

Mostly Bazel hacking to get a protoc plugin running against the
proto_library graph. The Python plugin doesn't actually do any RST
generation yet, it just runs against each file and dumps the input
proto.

The tool can be run with:

bazel build //api --aspects \
  tools/protodoc/protodoc.bzl%proto_doc_aspect --output_groups=rst

There's a snafu with unsandboxed runs in CI, where I can only get it to
work on direct leaf invocations; this will be fixed in a follow-up PR.

Signed-off-by: Harvey Tuch <htuch@google.com>
pull/207/head
htuch 7 years ago committed by GitHub
parent 5b29f002b4
commit 0d30f54a20
  1. 1
      .travis.yml
  2. 12
      api/BUILD
  3. 3
      ci/build_setup.sh
  4. 2
      ci/ci_steps.sh
  5. 4
      ci/do_ci.sh
  6. 6
      tools/protodoc/BUILD
  7. 73
      tools/protodoc/protodoc.bzl
  8. 18
      tools/protodoc/protodoc.py

@ -8,6 +8,7 @@ matrix:
fast_finish: true
env:
- TEST_TYPE=bazel.test
- TEST_TYPE=bazel.docs
script: ./ci/ci_steps.sh
branches:

@ -116,3 +116,15 @@ api_proto_library(
":discovery",
],
)
# TODO(htuch): Grow this to cover everything we want to generate docs for, so we can just invoke
# bazel build //api --aspects tools/protodoc/protodoc.bzl%proto_doc_aspect --output_groups=rst
# Umbrella target with no sources of its own; it glues the per-service proto
# libraries together so the protodoc aspect can walk the whole API graph from
# a single label.
proto_library(
    name = "api",
    deps = [
        # Keep sorted (buildifier convention).
        ":cds",
        ":eds",
        ":lds",
        ":rds",
    ],
)

@ -32,7 +32,8 @@ export BAZEL="bazel"
BAZEL_OPTIONS="--package_path %workspace%:/source"
export BAZEL_QUERY_OPTIONS="${BAZEL_OPTIONS}"
export BAZEL_BUILD_OPTIONS="--strategy=Genrule=standalone --spawn_strategy=standalone \
--verbose_failures ${BAZEL_OPTIONS} --jobs=${NUM_CPUS}"
--verbose_failures ${BAZEL_OPTIONS} --jobs=${NUM_CPUS} \
--action_env=HOME --action_env=PYTHONUSERBASE"
export BAZEL_TEST_OPTIONS="${BAZEL_BUILD_OPTIONS} --cache_test_results=no --test_output=all --test_env=HOME --test_env=PYTHONUSERBASE"
[[ "${BAZEL_EXPUNGE}" == "1" ]] && "${BAZEL}" clean --expunge

@ -4,7 +4,7 @@
set -e
# We reuse the https://github.com/lyft/envoy/ CI image here to get Bazel.
ENVOY_BUILD_SHA=44d539cb572d04c81b62425373440c54934cf267
ENVOY_BUILD_SHA=114e24c6fd05fc026492e9d2ca5608694e5ea59d
# Lint travis file.
travis lint .travis.yml --skip-completion-check

@ -13,6 +13,10 @@ if [[ "$1" == "bazel.test" ]]; then
bazel --batch build ${BAZEL_BUILD_OPTIONS} //api/...
bazel --batch test ${BAZEL_TEST_OPTIONS} //test/... //tools/...
exit 0
elif [[ "$1" == "bazel.docs" ]]; then
echo "generating docs..."
bazel --batch build ${BAZEL_BUILD_OPTIONS} --aspects tools/protodoc/protodoc.bzl%proto_doc_aspect \
--output_groups=rst //api
else
echo "Invalid do_ci.sh target. The only valid target is bazel.build."
exit 1

@ -0,0 +1,6 @@
# The protoc-gen-protodoc plugin binary. The proto_doc_aspect in
# tools/protodoc/protodoc.bzl passes this to protoc via
# --plugin=protoc-gen-protodoc, so it must be publicly visible.
py_binary(
name = "protodoc",
srcs = ["protodoc.py"],
# Protobuf Python runtime, needed for the CodeGeneratorRequest/Response
# messages in google.protobuf.compiler.plugin_pb2.
deps = ["@com_google_protobuf//:protobuf_python"],
visibility = ["//visibility:public"],
)

@ -0,0 +1,73 @@
# Borrowed from
# https://github.com/bazelbuild/rules_go/blob/master/proto/toolchain.bzl. This
# does some magic munging to remove workspace prefixes from output paths to
# convert path as understood by Bazel into paths as understood by protoc.
def _proto_path(proto):
    """Map a Bazel File for a .proto source to the path protoc understands.

    The returned value is not a real file path; it is the import-style path
    that was recorded when the descriptor set was generated. We obtain it by
    stripping the generated-output root and the workspace root (each followed
    by an optional '/') from the Bazel path. Borrowed from rules_go's proto
    toolchain.
    """
    stripped = proto.path
    for prefix in [proto.root.path, proto.owner.workspace_root]:
        if stripped.startswith(prefix):
            stripped = stripped[len(prefix):]
        if stripped.startswith("/"):
            stripped = stripped[1:]
    return stripped
# Bazel aspect (https://docs.bazel.build/versions/master/skylark/aspects.html)
# that can be invoked from the CLI to produce docs via //tools/protodoc for
# proto_library targets. Example use:
#
# bazel build //api --aspects tools/protodoc/protodoc.bzl%proto_doc_aspect \
# --output_groups=rst
#
# The aspect builds the transitive docs, so every .proto file in the
# dependency graph gets docs generated.
def _proto_doc_aspect_impl(target, ctx):
    """Aspect implementation: produce one .rst output per direct .proto source.

    Runs protoc with the //tools/protodoc plugin over this proto_library
    node's direct sources and merges in the "rst" output groups already
    built for its deps, so the transitive docs are reachable from any root.

    Returns a list with a single OutputGroupInfo carrying the "rst" group.
    """
    # Merge the RST outputs already generated for this node's dependencies.
    transitive_outputs = depset()
    for dep in ctx.rule.attr.deps:
        transitive_outputs = transitive_outputs | dep.output_groups["rst"]
    proto_sources = target.proto.direct_sources
    # If this proto_library doesn't actually name any sources, e.g. //api:api,
    # but just glues together other libs, we just need to follow the graph.
    if not proto_sources:
        return [OutputGroupInfo(rst=transitive_outputs)]
    # The outputs live in the ctx.label's package root. We add some additional
    # path information to match with protoc's notion of path relative locations.
    outputs = [ctx.actions.declare_file(ctx.label.name + "/" + _proto_path(f) +
                                        ".rst") for f in proto_sources]
    # Create the protoc command-line args.
    ctx_path = ctx.label.package + "/" + ctx.label.name
    output_path = outputs[0].root.path + "/" + outputs[0].owner.workspace_root + "/" + ctx_path
    # proto_library will be generating the descriptor sets for all the .proto deps of the
    # current node, we can feed them into protoc instead of setting up elaborate -I path
    # expressions.
    descriptor_set_in = ":".join([s.path for s in target.proto.transitive_descriptor_sets])
    args = ["--descriptor_set_in", descriptor_set_in]
    args += ["--plugin=protoc-gen-protodoc=" + ctx.executable._protodoc.path,
             "--protodoc_out=" + output_path]
    args += [_proto_path(src) for src in proto_sources]
    # Use ctx.actions.run rather than the deprecated ctx.action, consistent
    # with the modern ctx.actions.declare_file API already used above.
    ctx.actions.run(executable=ctx.executable._protoc,
                    arguments=args,
                    inputs=[ctx.executable._protodoc] +
                           target.proto.transitive_descriptor_sets.to_list() +
                           proto_sources,
                    outputs=outputs,
                    mnemonic="ProtoDoc",
                    use_default_shell_env=True)
    transitive_outputs = depset(outputs) | transitive_outputs
    return [OutputGroupInfo(rst=transitive_outputs)]
# Aspect that walks proto_library "deps" edges and invokes
# _proto_doc_aspect_impl on each node. Invoke from the CLI with:
#   bazel build //api --aspects tools/protodoc/protodoc.bzl%proto_doc_aspect \
#     --output_groups=rst
proto_doc_aspect = aspect(implementation = _proto_doc_aspect_impl,
# Propagate along deps so the whole transitive proto graph gets docs.
attr_aspects = ["deps"],
attrs = {
# The protoc compiler, built in the host configuration so it can run as a
# build action.
"_protoc": attr.label(default=Label("@com_google_protobuf//:protoc"),
executable=True,
cfg="host"),
# The protoc-gen-protodoc plugin (//tools/protodoc py_binary), also a host
# tool executed by protoc.
"_protodoc": attr.label(default=Label("//tools/protodoc"),
executable=True,
cfg="host"),
}
)

@ -0,0 +1,18 @@
"""protoc-gen-protodoc: protoc plugin skeleton for RST doc generation.

For now it emits one <proto name>.rst file per input .proto, whose content is
just the string dump of the input FileDescriptorProto — no real RST yet.
"""
import sys

from google.protobuf.compiler import plugin_pb2


def _generate_response(request):
    """Build a CodeGeneratorResponse with one <name>.rst entry per input proto.

    See
    http://www.expobrain.net/2015/09/13/create-a-plugin-for-google-protocol-buffer/
    for an overview of the plugin protocol.
    """
    response = plugin_pb2.CodeGeneratorResponse()
    for proto_file in request.proto_file:
        f = response.file.add()
        f.name = proto_file.name + '.rst'
        # We don't actually generate any RST right now, we just string dump the
        # input proto file descriptor into the output file.
        f.content = str(proto_file)
    return response


if __name__ == '__main__':
    # The plugin protocol is binary protobuf over stdio. Use the underlying
    # byte streams when available (Python 3) so we read/write bytes rather
    # than text and avoid newline translation; fall back to the plain streams
    # on Python 2, where they are already byte-oriented.
    stdin = getattr(sys.stdin, 'buffer', sys.stdin)
    stdout = getattr(sys.stdout, 'buffer', sys.stdout)
    request = plugin_pb2.CodeGeneratorRequest()
    request.ParseFromString(stdin.read())
    stdout.write(_generate_response(request).SerializeToString())
Loading…
Cancel
Save