docs: sphinx and initial protodoc support for RST generation. (#212)

This takes us to the point where address.proto is formatted in a style fairly
similar to the existing docs. There are some missing bits, e.g. oneof/enum
support, nested messages, optional/required; these will come in later
PRs.

Signed-off-by: Harvey Tuch <htuch@google.com>
pull/214/head
htuch 7 years ago committed by GitHub
parent b085af2c84
commit 384dc0c096
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 2
      .gitignore
  2. 3
      ci/do_ci.sh
  3. 11
      ci/run_envoy_docker.sh
  4. 40
      docs/build.sh
  5. 293
      docs/conf.py
  6. 8
      docs/index.rst
  7. 19
      docs/requirements.txt
  8. 23
      tools/protodoc/protodoc.bzl
  9. 275
      tools/protodoc/protodoc.py

2
.gitignore vendored

@ -1 +1,3 @@
bazel-*
build_docs/
generated/

@ -15,8 +15,7 @@ if [[ "$1" == "bazel.test" ]]; then
exit 0
elif [[ "$1" == "bazel.docs" ]]; then
echo "generating docs..."
bazel --batch build ${BAZEL_BUILD_OPTIONS} --aspects tools/protodoc/protodoc.bzl%proto_doc_aspect \
--output_groups=rst //api
./docs/build.sh
else
echo "Invalid do_ci.sh target. The only valid target is bazel.build."
exit 1

@ -0,0 +1,11 @@
#!/bin/bash

# Run the given command inside the Envoy build Docker image, with the current
# checkout mounted at /source and a host-side scratch dir mounted at /build.
set -e

# Image name, tag and host build dir are all overridable via the environment.
[[ -z "${IMAGE_NAME}" ]] && IMAGE_NAME="lyft/envoy-build-ubuntu"
[[ -z "${IMAGE_ID}" ]] && IMAGE_ID="latest"
[[ -z "${ENVOY_API_DOCKER_BUILD_DIR}" ]] && ENVOY_API_DOCKER_BUILD_DIR=/tmp/envoy-api-docker-build
mkdir -p "${ENVOY_API_DOCKER_BUILD_DIR}"
# Run as the invoking user/group so build outputs are not root-owned on the
# host. All script arguments are forwarded as a single shell command ($*).
docker run -t -i -u $(id -u):$(id -g) -v "${ENVOY_API_DOCKER_BUILD_DIR}":/build \
  -v "$PWD":/source "${IMAGE_NAME}":"${IMAGE_ID}" /bin/bash -c "cd source && $*"

@ -0,0 +1,40 @@
#!/bin/bash

# Build the Envoy API docs: generate .proto.rst files via the protodoc Bazel
# aspect, stage them with the Sphinx config, then render HTML.
set -e

SCRIPT_DIR=$(dirname "$0")
BUILD_DIR=build_docs
# Output locations are overridable via the environment.
[[ -z "${DOCS_OUTPUT_DIR}" ]] && DOCS_OUTPUT_DIR=generated/docs
[[ -z "${GENERATED_RST_DIR}" ]] && GENERATED_RST_DIR=generated/rst

# Start from clean output/staging directories on every run.
rm -rf "${DOCS_OUTPUT_DIR}"
mkdir -p "${DOCS_OUTPUT_DIR}"
rm -rf "${GENERATED_RST_DIR}"
mkdir -p "${GENERATED_RST_DIR}"

# Sphinx config and toctree root live alongside this script.
cp -f "${SCRIPT_DIR}"/{conf.py,index.rst} "${GENERATED_RST_DIR}"

# One-time bootstrap of a virtualenv holding the pinned Sphinx toolchain.
if [ ! -d "${BUILD_DIR}"/venv ]; then
  virtualenv "${BUILD_DIR}"/venv --no-site-packages
  "${BUILD_DIR}"/venv/bin/pip install -r "${SCRIPT_DIR}"/requirements.txt
fi
source "${BUILD_DIR}"/venv/bin/activate

# Run the protodoc aspect over //api to emit RST into bazel-bin.
bazel --batch build -s ${BAZEL_BUILD_OPTIONS} //api --aspects \
  tools/protodoc/protodoc.bzl%proto_doc_aspect --output_groups=rst

# These are the protos we want to put in docs, this list will grow.
# TODO(htuch): Factor this out of this script.
PROTO_RST="
  /api/address/api/address.proto.rst
"

# Only copy in the protos we care about and know how to deal with in protodoc.
for p in $PROTO_RST
do
  mkdir -p "$(dirname "${GENERATED_RST_DIR}/$p")"
  cp -f bazel-bin/"${p}" "${GENERATED_RST_DIR}/$p"
done

# -W turns Sphinx warnings into hard errors.
sphinx-build -W -b html "${GENERATED_RST_DIR}" "${DOCS_OUTPUT_DIR}"

@ -0,0 +1,293 @@
# -*- coding: utf-8 -*-
#
# envoy documentation build configuration file, created by
# sphinx-quickstart on Sat May 28 10:51:27 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sphinx_rtd_theme
import sys
import os
import git
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinxcontrib.httpdomain', 'sphinx.ext.extlinks']
extlinks = {'repo': ('https://github.com/envoyproxy/envoy/blob/master/%s', '')}
# Add any paths that contain templates here, relative to this directory.
#templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'envoy'
copyright = u'2016-2017, Lyft'
author = u'Lyft'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
repo = git.Repo('../../')
last_commit = str(repo.head.commit)[:6]
# The short X.Y version.
version = u'1.5.0-' + last_commit
# The full version, including alpha/beta/rc tags.
release = u'1.5.0-' + last_commit
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', '_venv', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#html_title = u'envoy v1.0.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = 'favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'envoydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'envoy.tex', u'envoy Documentation',
u'Lyft', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'envoy', u'envoy Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'envoy', u'envoy Documentation',
author, 'envoy', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False

@ -0,0 +1,8 @@
Envoy v2 API documentation
==========================
.. toctree::
:glob:
:maxdepth: 2
**

@ -0,0 +1,19 @@
GitPython==2.0.8
Jinja2==2.9.6
MarkupSafe==1.0
Pygments==2.2.0
alabaster==0.7.10
babel==2.4.0
docutils==0.12
gitdb==0.6.4
imagesize==0.7.1
pytz==2017.2
requests==2.13.0
six==1.10.0
smmap==0.9.0
snowballstemmer==1.2.1
sphinx==1.6.5
sphinxcontrib-httpdomain==1.5.0
# Fix for https://github.com/rtfd/sphinx_rtd_theme/issues/397
git+https://github.com/rtfd/sphinx_rtd_theme@9d704f287ac197dfb1c9b27f0acfb91267dce4f1

@ -35,6 +35,18 @@ def _proto_doc_aspect_impl(target, ctx):
# but just glues together other libs, we just need to follow the graph.
if not proto_sources:
return [OutputGroupInfo(rst=transitive_outputs)]
# Figure out the set of import paths. Ideally we would use descriptor sets
# built by proto_library, which avoid having to do nasty path mangling, but
# these don't include source_code_info, which we need for comment
# extractions. See https://github.com/bazelbuild/bazel/issues/3971.
import_paths = []
for f in target.proto.transitive_sources:
if f.root.path:
import_path = f.root.path + "/" + f.owner.workspace_root
else:
import_path = f.owner.workspace_root
if import_path:
import_paths += [import_path]
# The outputs live in the ctx.label's package root. We add some additional
# path information to match with protoc's notion of path relative locations.
outputs = [ctx.actions.declare_file(ctx.label.name + "/" + _proto_path(f) +
@ -42,18 +54,13 @@ def _proto_doc_aspect_impl(target, ctx):
# Create the protoc command-line args.
ctx_path = ctx.label.package + "/" + ctx.label.name
output_path = outputs[0].root.path + "/" + outputs[0].owner.workspace_root + "/" + ctx_path
# proto_library will be generating the descriptor sets for all the .proto deps of the
# current node, we can feed them into protoc instead of setting up elaborate -I path
# expressions.
descriptor_set_in = ":".join([s.path for s in target.proto.transitive_descriptor_sets])
args = ["--descriptor_set_in", descriptor_set_in]
args = ["-I./" + ctx.label.workspace_root]
args += ["-I" + import_path for import_path in import_paths]
args += ["--plugin=protoc-gen-protodoc=" + ctx.executable._protodoc.path, "--protodoc_out=" + output_path]
args += [_proto_path(src) for src in target.proto.direct_sources]
ctx.action(executable=ctx.executable._protoc,
arguments=args,
inputs=[ctx.executable._protodoc] +
target.proto.transitive_descriptor_sets.to_list() +
proto_sources,
inputs=[ctx.executable._protodoc] + target.proto.transitive_sources.to_list(),
outputs=outputs,
mnemonic="ProtoDoc",
use_default_shell_env=True)

@ -1,7 +1,280 @@
# protoc plugin to map from FileDescriptorProtos to Envoy doc style RST.
# See https://github.com/google/protobuf/blob/master/src/google/protobuf/descriptor.proto
# for the underlying protos mentioned in this file.
import functools
import sys
from google.protobuf.compiler import plugin_pb2
# Namespace prefix for Envoy APIs.
ENVOY_API_NAMESPACE_PREFIX = '.envoy.api.v2.'
class ProtodocError(Exception):
  """Base error class for the protodoc module.

  Raised on unrecoverable input, e.g. an unknown field type in
  FormatFieldType.
  """
class SourceCodeInfo(object):
  """Wrapper for SourceCodeInfo proto, providing comment lookup helpers."""

  def __init__(self, source_code_info):
    # source_code_info: a descriptor.proto SourceCodeInfo message with
    # per-location comment spans.
    self._proto = source_code_info

  @property
  def file_level_comment(self):
    """Obtain inferred file level comment."""
    comment = ''
    # Sentinel: the largest span value across all locations, so the first
    # location with detached comments in the loop below always beats it.
    earliest_detached_comment = max(
        max(location.span) for location in self._proto.location)
    # Keep the detached comment block of whichever location starts earliest
    # in the file; that block is attributed to the file as a whole.
    for location in self._proto.location:
      if location.leading_detached_comments and location.span[0] < earliest_detached_comment:
        comment = StripLeadingSpace(''.join(
            location.leading_detached_comments)) + '\n'
        earliest_detached_comment = location.span[0]
    return comment

  def LeadingCommentPathLookup(self, path):
    """Lookup leading comment by path in SourceCodeInfo.

    Args:
      path: a list of path indexes as per
        https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717.
    Returns:
      Attached leading comment if it exists, otherwise empty space.
    """
    # Linear scan; locations are keyed by their full path list.
    for location in self._proto.location:
      if location.path == path:
        return StripLeadingSpace(location.leading_comments) + '\n'
    return ''
def MapLines(f, s):
  """Apply a function across each line in a flat string.

  Args:
    f: A string transform function for a line.
    s: A string consisting of potentially multiple lines.
  Returns:
    A flat string with f applied to each line.
  """
  transformed = [f(line) for line in s.split('\n')]
  return '\n'.join(transformed)
def Indent(spaces, line):
  """Prefix a single line with the given number of spaces."""
  padding = ' ' * spaces
  return padding + line
def IndentLines(spaces, lines):
  """Indent every string in a list of lines; inlines the Indent helper."""
  return [' ' * spaces + line for line in lines]
def FormatHeader(style, text):
  """Format RST header.

  Args:
    style: underline style, e.g. '=', '-'.
    text: header text
  Returns:
    RST formatted header.
  """
  # RST headers are the text underlined by a style character of equal length.
  underline = style * len(text)
  return text + '\n' + underline + '\n\n'
def FormatFieldTypeAsJson(field):
  """Format FieldDescriptorProto.Type as a pseudo-JSON string.

  Args:
    field: FieldDescriptor proto.
  Return:
    RST formatted pseudo-JSON string representation of field type.
  """
  # Repeated fields render as a list placeholder, regardless of element type.
  if field.label == field.LABEL_REPEATED:
    return '[]'
  # Message-typed fields render as an object placeholder.
  elif field.type == field.TYPE_MESSAGE:
    return '"{...}"'
  else:
    return '"..."'
def FormatMessageAsJson(msg):
  """Format a message definition DescriptorProto as a pseudo-JSON block.

  Args:
    msg: message definition DescriptorProto.
  Return:
    RST formatted pseudo-JSON string representation of message definition.
  """
  # One '"name": <placeholder>' entry per field, joined with commas and
  # indented four extra spaces inside the braces of the code-block.
  lines = ['"%s": %s' % (f.name, FormatFieldTypeAsJson(f)) for f in msg.field]
  return '.. code-block:: json\n\n {\n' + ',\n'.join(IndentLines(
      4, lines)) + '\n }\n\n'
def NormalizeFQN(fqn):
  """Normalize a fully qualified field type name.

  Strips leading ENVOY_API_NAMESPACE_PREFIX and makes pretty wrapped type names.

  Args:
    fqn: a fully qualified type name from FieldDescriptorProto.type_name.
  Return:
    Normalized type name.
  """
  # Envoy API types are shown relative to the API namespace.
  if fqn.startswith(ENVOY_API_NAMESPACE_PREFIX):
    return fqn[len(ENVOY_API_NAMESPACE_PREFIX):]
  # Well-known wrapper types display as their wrapped scalar in braces.
  remap_fqn = {
      '.google.protobuf.UInt32Value': '{uint32}',
      '.google.protobuf.UInt64Value': '{uint64}',
      '.google.protobuf.BoolValue': '{bool}',
  }
  return remap_fqn.get(fqn, fqn)
def FormatEmph(s):
  """RST format a string for emphasis."""
  return '*' + s + '*'
def FormatFieldType(field):
  """Format a FieldDescriptorProto type description.

  Adds cross-refs for message types.
  TODO(htuch): Add cross-refs for enums as well.

  Args:
    field: FieldDescriptor proto.
  Return:
    RST formatted field type.
  """
  # Message types inside the Envoy API namespace get an RST cross-reference
  # to their generated message section.
  if field.type == field.TYPE_MESSAGE and field.type_name.startswith(
      ENVOY_API_NAMESPACE_PREFIX):
    type_name = NormalizeFQN(field.type_name)
    return ':ref:`%s <%s>`' % (type_name, MessageCrossRefLabel(type_name))
  # TODO(htuch): Replace with enum handling.
  # Any other named type (enums, external messages) is merely emphasized.
  if field.type_name:
    return FormatEmph(NormalizeFQN(field.type_name))
  # Scalar types map onto their plain proto type names.
  pretty_type_names = {
      field.TYPE_DOUBLE: 'double',
      field.TYPE_FLOAT: 'float',
      field.TYPE_INT32: 'int32',
      field.TYPE_UINT32: 'uint32',
      field.TYPE_INT64: 'int64',
      field.TYPE_UINT64: 'uint64',
      field.TYPE_BOOL: 'bool',
      field.TYPE_STRING: 'string',
      field.TYPE_BYTES: 'bytes',
  }
  if field.type in pretty_type_names:
    return FormatEmph(pretty_type_names[field.type])
  raise ProtodocError('Unknown field type ' + str(field.type))
def StripLeadingSpace(s):
  """Remove leading space in flat comment strings.

  Drops the first character of every line (the space protoc leaves after
  '//' in extracted comments). Inlines the MapLines helper.
  """
  return '\n'.join(line[1:] for line in s.split('\n'))
def MessageCrossRefLabel(msg_name):
  """Message cross reference label."""
  return 'envoy_api_' + msg_name
def FieldCrossRefLabel(msg_name, field_name):
  """Field cross reference label."""
  return 'envoy_api_{0}_{1}'.format(msg_name, field_name)
def FormatAnchor(label):
  """Format a label as an Envoy API RST anchor."""
  return '.. _' + label + ':\n\n'
def FormatFieldAsDefinitionListItem(source_code_info, msg, path, field):
  """Format a FieldDescriptorProto as RST definition list item.

  Args:
    source_code_info: SourceCodeInfo object.
    msg: MessageDescriptorProto.
    path: a list of path indexes as per
      https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717.
    field: FieldDescriptorProto.
  Returns:
    RST formatted definition list item.
  """
  anchor = FormatAnchor(FieldCrossRefLabel(msg.name, field.name))
  # Definition body: "(<type>) " followed by the field's leading comment.
  comment = '(%s) ' % FormatFieldType(
      field) + source_code_info.LeadingCommentPathLookup(path)
  # Indent the body two spaces so RST parses it as the term's definition.
  return anchor + field.name + '\n' + MapLines(
      functools.partial(Indent, 2), comment)
def FormatMessageAsDefinitionList(source_code_info, path, msg):
  """Format a MessageDescriptorProto as RST definition list.

  Args:
    source_code_info: SourceCodeInfo object.
    path: a list of path indexes as per
      https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717.
    msg: MessageDescriptorProto.
  Returns:
    RST formatted definition list item.
  """
  # Extend the message's path with [2, index]: field number 2 addresses
  # DescriptorProto.field, locating each field's source comments.
  return '\n\n'.join(
      FormatFieldAsDefinitionListItem(source_code_info, msg, path + [2, index],
                                      field)
      for index, field in enumerate(msg.field)) + '\n'
def FormatMessage(source_code_info, path, msg):
  """Format a MessageDescriptorProto as RST section.

  Args:
    source_code_info: SourceCodeInfo object.
    path: a list of path indexes as per
      https://github.com/google/protobuf/blob/a08b03d4c00a5793b88b494f672513f6ad46a681/src/google/protobuf/descriptor.proto#L717.
    msg: MessageDescriptorProto.
  Returns:
    RST formatted section.
  """
  anchor = FormatAnchor(MessageCrossRefLabel(msg.name))
  header = FormatHeader('-', msg.name)
  comment = source_code_info.LeadingCommentPathLookup(path)
  # Section layout: anchor, header, leading comment, pseudo-JSON skeleton,
  # then the per-field definition list.
  return anchor + header + comment + FormatMessageAsJson(
      msg) + FormatMessageAsDefinitionList(source_code_info, path, msg)
def FormatProtoAsBlockComment(proto):
  """Format as RST a proto as a block comment.

  Useful in debugging, not usually referenced.
  """
  # 'proto::' literal block with the proto text dump indented two spaces.
  return '\n\nproto::\n\n' + MapLines(functools.partial(Indent, 2),
                                      str(proto)) + '\n'
def GenerateRst(proto_file):
  """Generate a RST representation from a FileDescriptor proto.

  Args:
    proto_file: a FileDescriptorProto including source_code_info.
  Returns:
    Full RST document: file header, file-level comment, then one section
    per top-level message.
  """
  header = FormatHeader('=', proto_file.name)
  source_code_info = SourceCodeInfo(proto_file.source_code_info)
  # Find the earliest detached comment, attribute it to file level.
  comment = source_code_info.file_level_comment
  # Path [4, index] addresses FileDescriptorProto.message_type(index).
  msgs = '\n'.join(
      FormatMessage(source_code_info, [4, index], msg)
      for index, msg in enumerate(proto_file.message_type))
  # Uncomment to append a raw source-info dump for debugging.
  #debug_proto = FormatProtoAsBlockComment(proto_file.source_code_info)
  return header + comment + msgs #+ debug_proto
if __name__ == '__main__':
# http://www.expobrain.net/2015/09/13/create-a-plugin-for-google-protocol-buffer/
request = plugin_pb2.CodeGeneratorRequest()
@ -13,6 +286,6 @@ if __name__ == '__main__':
f.name = proto_file.name + '.rst'
# We don't actually generate any RST right now, we just string dump the
# input proto file descriptor into the output file.
f.content = str(proto_file)
f.content = GenerateRst(proto_file)
sys.stdout.write(response.SerializeToString())

Loading…
Cancel
Save