commit 07ac6f0e8e
40 changed files with 1228 additions and 1810 deletions
@@ -0,0 +1,10 @@
---
tasks:
  ubuntu:
    platform: ubuntu1604
    test_targets:
    - //...
  macos:
    platform: macos
    test_targets:
    - //...
@@ -0,0 +1,221 @@
"""Internal rules for building upb."""

load(":upb_proto_library.bzl", "GeneratedSrcs")

def _librule(name):
    return name + "_lib"

def _get_real_short_path(file):
    # For some reason, files from other archives have short paths that look like:
    # ../com_google_protobuf/google/protobuf/descriptor.proto
    short_path = file.short_path
    if short_path.startswith("../"):
        second_slash = short_path.index("/", 3)
        short_path = short_path[second_slash + 1:]
    return short_path
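
# Illustrative example: a file whose short_path is
# "../com_google_protobuf/google/protobuf/descriptor.proto" comes back from
# this function as "google/protobuf/descriptor.proto".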

def _get_real_root(file):
    real_short_path = _get_real_short_path(file)
    return file.path[:-len(real_short_path) - 1]

def _get_real_roots(files):
    roots = {}
    for file in files:
        real_root = _get_real_root(file)
        if real_root:
            roots[real_root] = True
    return roots.keys()

def lua_cclibrary(name, srcs, hdrs = [], deps = [], luadeps = []):
    lib_rule = name + "_lib"
    so_rule = "lib" + name + ".so"
    so_file = _remove_prefix(name, "lua/") + ".so"

    native.cc_library(
        name = _librule(name),
        hdrs = hdrs,
        srcs = srcs,
        deps = deps + [_librule(dep) for dep in luadeps] + ["@lua//:liblua_headers"],
    )

    native.cc_binary(
        name = so_rule,
        linkshared = True,
        deps = [_librule(name)],
        linkopts = select({
            ":darwin": [
                "-undefined dynamic_lookup",
            ],
            "//conditions:default": [],
        }),
    )

    native.genrule(
        name = name + "_copy",
        srcs = [":" + so_rule],
        outs = [so_file],
        cmd = "cp $< $@",
    )

    native.filegroup(
        name = name,
        data = [so_file],
    )

def _remove_prefix(str, prefix):
    if not str.startswith(prefix):
        fail("%s doesn't start with %s" % (str, prefix))
    return str[len(prefix):]

def _remove_suffix(str, suffix):
    if not str.endswith(suffix):
        fail("%s doesn't end with %s" % (str, suffix))
    return str[:-len(suffix)]

def lua_library(name, srcs, strip_prefix, luadeps = []):
    outs = [_remove_prefix(src, strip_prefix + "/") for src in srcs]
    native.genrule(
        name = name + "_copy",
        srcs = srcs,
        outs = outs,
        cmd = "cp $(SRCS) $(@D)",
    )

    native.filegroup(
        name = name,
        data = outs + luadeps,
    )

def make_shell_script(name, contents, out):
    contents = contents.replace("$", "$$")
    native.genrule(
        name = "gen_" + name,
        outs = [out],
        cmd = "(cat <<'HEREDOC'\n%s\nHEREDOC\n) > $@" % contents,
    )

def _lua_binary_or_test(name, luamain, luadeps, rule):
    script = name + ".sh"

    make_shell_script(
        name = "gen_" + name,
        out = script,
        contents = """
BASE=$(dirname $(rlocation upb/upb_c.so))
export LUA_CPATH="$BASE/?.so"
export LUA_PATH="$BASE/?.lua"
$(rlocation lua/lua) $(rlocation upb/tools/upbc.lua) "$@"
""",
    )

    rule(
        name = name,
        srcs = [script],
        data = ["@lua//:lua", luamain] + luadeps,
    )

def lua_binary(name, luamain, luadeps = []):
    _lua_binary_or_test(name, luamain, luadeps, native.sh_binary)

def lua_test(name, luamain, luadeps = []):
    _lua_binary_or_test(name, luamain, luadeps, native.sh_test)

def generated_file_staleness_test(name, outs, generated_pattern):
    """Tests that checked-in file(s) match the contents of generated file(s).

    The resulting test will verify that all output files exist and have the
    correct contents. If the test fails, it can be invoked with --fix to
    bring the checked-in files up to date.

    Args:
      name: Name of the rule.
      outs: the checked-in files that are copied from generated files.
      generated_pattern: the pattern for transforming each "out" file into a
        generated file. For example, if generated_pattern="generated/%s" then
        a file foo.txt will look for generated file generated/foo.txt.
    """

    script_name = name + ".py"
    script_src = "//:tools/staleness_test.py"

    # Filter out non-existing rules so Blaze doesn't error out before we even
    # run the test.
    existing_outs = native.glob(include = outs)

    # The file list contains a few extra bits of information at the end.
    # These get unpacked by the Config class in staleness_test_lib.py.
    file_list = outs + [generated_pattern, native.package_name() or ".", name]

    native.genrule(
        name = name + "_makescript",
        outs = [script_name],
        srcs = [script_src],
        testonly = 1,
        cmd = "cat $(location " + script_src + ") > $@; " +
              "sed -i.bak -e 's|INSERT_FILE_LIST_HERE|" + "\\\n  ".join(file_list) + "|' $@",
    )

    native.py_test(
        name = name,
        srcs = [script_name],
        data = existing_outs + [generated_pattern % file for file in outs],
        deps = [
            "//:staleness_test_lib",
        ],
    )
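
# A minimal usage sketch (hypothetical target and file names):
#
#   generated_file_staleness_test(
#       name = "test_generated_files",
#       outs = ["foo_generated.h"],
#       generated_pattern = "generated/%s",
#   )
#
# This asserts that the checked-in foo_generated.h matches
# generated/foo_generated.h; per the docstring above, running the test with
# --fix copies the generated files over the checked-in ones.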

# upb_amalgamation() rule, with file_list aspect.

SrcList = provider(
    fields = {
        "srcs": "list of srcs",
    },
)

def _file_list_aspect_impl(target, ctx):
    if GeneratedSrcs in target:
        srcs = target[GeneratedSrcs]
        return [SrcList(srcs = srcs.srcs + srcs.hdrs)]

    srcs = []
    for src in ctx.rule.attr.srcs:
        srcs += src.files.to_list()
    for hdr in ctx.rule.attr.hdrs:
        srcs += hdr.files.to_list()
    for hdr in ctx.rule.attr.textual_hdrs:
        srcs += hdr.files.to_list()
    return [SrcList(srcs = srcs)]

_file_list_aspect = aspect(
    implementation = _file_list_aspect_impl,
)

def _upb_amalgamation(ctx):
    inputs = []
    for lib in ctx.attr.libs:
        inputs += lib[SrcList].srcs
    srcs = [src for src in inputs if src.path.endswith("c")]
    ctx.actions.run(
        inputs = inputs,
        outputs = ctx.outputs.outs,
        arguments = [ctx.bin_dir.path + "/"] + [f.path for f in srcs] + ["-I" + root for root in _get_real_roots(inputs)],
        progress_message = "Making amalgamation",
        executable = ctx.executable.amalgamator,
    )
    return []

upb_amalgamation = rule(
    attrs = {
        "amalgamator": attr.label(
            executable = True,
            cfg = "host",
        ),
        "libs": attr.label_list(aspects = [_file_list_aspect]),
        "outs": attr.output_list(),
    },
    implementation = _upb_amalgamation,
)

def licenses(*args):
    # No-op (for Google-internal usage).
    pass
@@ -0,0 +1,15 @@
# A hacky way to work around the fact that native.bazel_version is only
# available from WORKSPACE macros, not BUILD macros or rules.
#
# Hopefully we can remove this if/when this is fixed:
# https://github.com/bazelbuild/bazel/issues/8305

def _impl(repository_ctx):
    s = "bazel_version = \"" + native.bazel_version + "\""
    repository_ctx.file("bazel_version.bzl", s)
    repository_ctx.file("BUILD", "")

bazel_version_repository = repository_rule(
    implementation = _impl,
    local = True,
)
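
# Usage (from this commit): the upb_deps() macro in workspace_deps.bzl calls
#
#   bazel_version_repository(
#       name = "bazel_version",
#   )
#
# so that upb_proto_library.bzl can later do
# load("@bazel_version//:bazel_version.bzl", "bazel_version").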
@@ -0,0 +1,294 @@
"""Public rules for using upb protos:
  - upb_proto_library()
  - upb_proto_reflection_library()
"""

load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")

# copybara:strip_for_google3_begin
load("@bazel_skylib//lib:versions.bzl", "versions")
load("@bazel_version//:bazel_version.bzl", "bazel_version")
# copybara:strip_end

# Generic support code #########################################################

_is_bazel = not hasattr(native, "genmpm")

def _get_real_short_path(file):
    # For some reason, files from other archives have short paths that look like:
    # ../com_google_protobuf/google/protobuf/descriptor.proto
    short_path = file.short_path
    if short_path.startswith("../"):
        second_slash = short_path.index("/", 3)
        short_path = short_path[second_slash + 1:]
    return short_path

def _get_real_root(file):
    real_short_path = _get_real_short_path(file)
    return file.path[:-len(real_short_path) - 1]

def _get_real_roots(files):
    roots = {}
    for file in files:
        real_root = _get_real_root(file)
        if real_root:
            roots[real_root] = True
    return roots.keys()

def _generate_output_file(ctx, src, extension):
    if _is_bazel:
        real_short_path = _get_real_short_path(src)
    else:
        real_short_path = paths.relativize(src.short_path, ctx.label.package)
    output_filename = paths.replace_extension(real_short_path, extension)
    ret = ctx.new_file(ctx.genfiles_dir, output_filename)
    return ret

def _filter_none(elems):
    out = []
    for elem in elems:
        if elem:
            out.append(elem)
    return out

def _cc_library_func(ctx, name, hdrs, srcs, dep_ccinfos):
    """Like cc_library(), but callable from rules.

    Args:
      ctx: Rule context.
      name: Unique name used to generate output files.
      hdrs: Public headers that can be #included from other rules.
      srcs: C/C++ source files.
      dep_ccinfos: CcInfo providers of dependencies we should build/link against.

    Returns:
      CcInfo provider for this compilation.
    """

    compilation_contexts = [info.compilation_context for info in dep_ccinfos]
    linking_contexts = [info.linking_context for info in dep_ccinfos]
    toolchain = find_cpp_toolchain(ctx)
    feature_configuration = cc_common.configure_features(
        ctx = ctx,
        cc_toolchain = toolchain,
        requested_features = ctx.features,
        unsupported_features = ctx.disabled_features,
    )

    # copybara:strip_for_google3_begin
    if bazel_version == "0.24.1":
        # Compatibility code until gRPC is on 0.25.2 or later.
        compilation_info = cc_common.compile(
            ctx = ctx,
            feature_configuration = feature_configuration,
            cc_toolchain = toolchain,
            srcs = srcs,
            hdrs = hdrs,
            compilation_contexts = compilation_contexts,
        )
        linking_info = cc_common.link(
            ctx = ctx,
            feature_configuration = feature_configuration,
            cc_toolchain = toolchain,
            cc_compilation_outputs = compilation_info.cc_compilation_outputs,
            linking_contexts = linking_contexts,
        )
        return CcInfo(
            compilation_context = compilation_info.compilation_context,
            linking_context = linking_info.linking_context,
        )

    if not versions.is_at_least("0.25.2", bazel_version):
        fail("upb requires Bazel >=0.25.2 or 0.24.1")

    # copybara:strip_end

    blaze_only_args = {}

    if not _is_bazel:
        blaze_only_args["grep_includes"] = ctx.file._grep_includes

    (compilation_context, compilation_outputs) = cc_common.compile(
        actions = ctx.actions,
        feature_configuration = feature_configuration,
        cc_toolchain = toolchain,
        name = name,
        srcs = srcs,
        public_hdrs = hdrs,
        compilation_contexts = compilation_contexts,
        **blaze_only_args
    )
    (linking_context, linking_outputs) = cc_common.create_linking_context_from_compilation_outputs(
        actions = ctx.actions,
        name = name,
        feature_configuration = feature_configuration,
        cc_toolchain = toolchain,
        compilation_outputs = compilation_outputs,
        linking_contexts = linking_contexts,
        **blaze_only_args
    )

    return CcInfo(
        compilation_context = compilation_context,
        linking_context = linking_context,
    )

# upb_proto_library / upb_proto_reflection_library shared code #################

GeneratedSrcs = provider(
    fields = {
        "srcs": "list of srcs",
        "hdrs": "list of hdrs",
    },
)

_WrappedCcInfo = provider(fields = ["cc_info"])
_WrappedGeneratedSrcs = provider(fields = ["srcs"])

def _compile_upb_protos(ctx, proto_info, proto_sources, ext):
    srcs = [_generate_output_file(ctx, name, ext + ".c") for name in proto_sources]
    hdrs = [_generate_output_file(ctx, name, ext + ".h") for name in proto_sources]
    transitive_sets = list(proto_info.transitive_descriptor_sets)
    ctx.actions.run(
        inputs = depset(
            direct = [ctx.executable._upbc, proto_info.direct_descriptor_set],
            transitive = [proto_info.transitive_descriptor_sets],
        ),
        outputs = srcs + hdrs,
        executable = ctx.executable._protoc,
        arguments = [
            "--upb_out=" + _get_real_root(srcs[0]),
            "--plugin=protoc-gen-upb=" + ctx.executable._upbc.path,
            "--descriptor_set_in=" + ":".join([f.path for f in transitive_sets]),
        ] + [_get_real_short_path(file) for file in proto_sources],
        progress_message = "Generating upb protos for :" + ctx.label.name,
    )
    return GeneratedSrcs(srcs = srcs, hdrs = hdrs)
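
# Illustrative example: for a proto_library with srcs = ["foo.proto"] and the
# default ext of ".upb", this action runs protoc with the protoc-gen-upb
# plugin and produces foo.upb.c and foo.upb.h (the header the example program
# later in this commit #includes).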

def _upb_proto_rule_impl(ctx):
    if len(ctx.attr.deps) != 1:
        fail("only one deps dependency allowed.")
    dep = ctx.attr.deps[0]
    if _WrappedCcInfo not in dep or _WrappedGeneratedSrcs not in dep:
        fail("proto_library rule must generate _WrappedCcInfo and " +
             "_WrappedGeneratedSrcs (aspect should have handled this).")
    cc_info = dep[_WrappedCcInfo].cc_info
    srcs = dep[_WrappedGeneratedSrcs].srcs
    lib = cc_info.linking_context.libraries_to_link[0]
    files = _filter_none([
        lib.static_library,
        lib.pic_static_library,
        lib.dynamic_library,
    ])
    return [
        DefaultInfo(files = depset(files + srcs.hdrs + srcs.srcs)),
        srcs,
        cc_info,
    ]

def _upb_proto_aspect_impl(target, ctx):
    proto_info = target[ProtoInfo]
    files = _compile_upb_protos(ctx, proto_info, proto_info.direct_sources, ctx.attr._ext)
    deps = ctx.rule.attr.deps + ctx.attr._upb
    dep_ccinfos = [dep[CcInfo] for dep in deps if CcInfo in dep]
    dep_ccinfos += [dep[_WrappedCcInfo].cc_info for dep in deps if _WrappedCcInfo in dep]
    cc_info = _cc_library_func(
        ctx = ctx,
        name = ctx.rule.attr.name + ctx.attr._ext,
        hdrs = files.hdrs,
        srcs = files.srcs,
        dep_ccinfos = dep_ccinfos,
    )
    return [_WrappedCcInfo(cc_info = cc_info), _WrappedGeneratedSrcs(srcs = files)]

def _maybe_add(d):
    if not _is_bazel:
        d["_grep_includes"] = attr.label(
            allow_single_file = True,
            cfg = "host",
            default = "//tools/cpp:grep-includes",
        )
    return d

# upb_proto_library() ##########################################################

_upb_proto_library_aspect = aspect(
    attrs = _maybe_add({
        "_upbc": attr.label(
            executable = True,
            cfg = "host",
            default = "//:protoc-gen-upb",
        ),
        "_protoc": attr.label(
            executable = True,
            cfg = "host",
            default = "@com_google_protobuf//:protoc",
        ),
        "_cc_toolchain": attr.label(
            default = "@bazel_tools//tools/cpp:current_cc_toolchain",
        ),
        "_upb": attr.label_list(default = ["//:upb"]),
        "_ext": attr.string(default = ".upb"),
    }),
    implementation = _upb_proto_aspect_impl,
    attr_aspects = ["deps"],
    fragments = ["cpp"],
    toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
)

upb_proto_library = rule(
    output_to_genfiles = True,
    implementation = _upb_proto_rule_impl,
    attrs = {
        "deps": attr.label_list(
            aspects = [_upb_proto_library_aspect],
            allow_rules = ["proto_library"],
            providers = [ProtoInfo],
        ),
    },
)
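
# Example usage (mirrored from the example BUILD file later in this commit):
#
#   load("@upb//bazel:upb_proto_library.bzl", "upb_proto_library")
#
#   proto_library(
#       name = "foo_proto",
#       srcs = ["foo.proto"],
#   )
#
#   upb_proto_library(
#       name = "foo_upbproto",
#       deps = [":foo_proto"],
#   )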

# upb_proto_reflection_library() ###############################################

_upb_proto_reflection_library_aspect = aspect(
    attrs = _maybe_add({
        "_upbc": attr.label(
            executable = True,
            cfg = "host",
            default = "//:protoc-gen-upb",
        ),
        "_protoc": attr.label(
            executable = True,
            cfg = "host",
            default = "@com_google_protobuf//:protoc",
        ),
        "_cc_toolchain": attr.label(
            default = "@bazel_tools//tools/cpp:current_cc_toolchain",
        ),
        "_upb": attr.label_list(
            default = [
                "//:upb",
                "//:reflection",
            ],
        ),
        "_ext": attr.string(default = ".upbdefs"),
    }),
    implementation = _upb_proto_aspect_impl,
    attr_aspects = ["deps"],
    fragments = ["cpp"],
    toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
)

upb_proto_reflection_library = rule(
    output_to_genfiles = True,
    implementation = _upb_proto_rule_impl,
    attrs = {
        "deps": attr.label_list(
            aspects = [_upb_proto_reflection_library_aspect],
            allow_rules = ["proto_library"],
            providers = [ProtoInfo],
        ),
    },
)
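
# By analogy with upb_proto_library() above (hypothetical sketch; the target
# names are illustrative):
#
#   upb_proto_reflection_library(
#       name = "foo_upbreflection",
#       deps = [":foo_proto"],
#   )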
@@ -0,0 +1,36 @@

load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
load("//bazel:repository_defs.bzl", "bazel_version_repository")

def upb_deps():
    bazel_version_repository(
        name = "bazel_version",
    )

    git_repository(
        name = "absl",
        commit = "070f6e47b33a2909d039e620c873204f78809492",
        remote = "https://github.com/abseil/abseil-cpp.git",
        shallow_since = "1541627663 -0500",
    )

    git_repository(
        name = "com_google_protobuf",
        remote = "https://github.com/protocolbuffers/protobuf.git",
        commit = "d41002663fd04325ead28439dfd5ce2822b0d6fb",
    )

    http_archive(
        name = "bazel_skylib",
        strip_prefix = "bazel-skylib-master",
        urls = ["https://github.com/bazelbuild/bazel-skylib/archive/master.tar.gz"],
    )

    http_archive(
        name = "zlib",
        build_file = "@com_google_protobuf//:third_party/zlib.BUILD",
        sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
        strip_prefix = "zlib-1.2.11",
        urls = ["https://zlib.net/zlib-1.2.11.tar.gz"],
    )
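
# Consumers call upb_deps() from their WORKSPACE after declaring the upb
# repository itself; see the example WORKSPACE later in this commit.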
@@ -1,338 +0,0 @@
_shell_find_runfiles = """
# --- begin runfiles.bash initialization ---
# Copy-pasted from Bazel's Bash runfiles library (tools/bash/runfiles/runfiles.bash).
set -euo pipefail
if [[ ! -d "${RUNFILES_DIR:-/dev/null}" && ! -f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
  if [[ -f "$0.runfiles_manifest" ]]; then
    export RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
  elif [[ -f "$0.runfiles/MANIFEST" ]]; then
    export RUNFILES_MANIFEST_FILE="$0.runfiles/MANIFEST"
  elif [[ -f "$0.runfiles/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
    export RUNFILES_DIR="$0.runfiles"
  fi
fi
if [[ -f "${RUNFILES_DIR:-/dev/null}/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
  source "${RUNFILES_DIR}/bazel_tools/tools/bash/runfiles/runfiles.bash"
elif [[ -f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
  source "$(grep -m1 "^bazel_tools/tools/bash/runfiles/runfiles.bash " \
            "$RUNFILES_MANIFEST_FILE" | cut -d ' ' -f 2-)"
else
  echo >&2 "ERROR: cannot find @bazel_tools//tools/bash/runfiles:runfiles.bash"
  exit 1
fi
# --- end runfiles.bash initialization ---
"""

def _librule(name):
    return name + "_lib"

def lua_cclibrary(name, srcs, hdrs = [], deps = [], luadeps = []):
    lib_rule = name + "_lib"
    so_rule = "lib" + name + ".so"
    so_file = _remove_prefix(name, "lua/") + ".so"

    native.cc_library(
        name = _librule(name),
        hdrs = hdrs,
        srcs = srcs,
        deps = deps + [_librule(dep) for dep in luadeps] + ["@lua//:liblua_headers"],
    )

    native.cc_binary(
        name = so_rule,
        linkshared = True,
        deps = [_librule(name)],
        linkopts = select({
            ":darwin": [
                "-undefined dynamic_lookup",
            ],
            "//conditions:default": [],
        })
    )

    native.genrule(
        name = name + "_copy",
        srcs = [":" + so_rule],
        outs = [so_file],
        cmd = "cp $< $@",
    )

    native.filegroup(
        name = name,
        data = [so_file],
    )

def _remove_prefix(str, prefix):
    if not str.startswith(prefix):
        fail("%s doesn't start with %s" % (str, prefix))
    return str[len(prefix):]

def _remove_suffix(str, suffix):
    if not str.endswith(suffix):
        fail("%s doesn't end with %s" % (str, suffix))
    return str[:-len(suffix)]

def lua_library(name, srcs, strip_prefix, luadeps = []):
    outs = [_remove_prefix(src, strip_prefix + "/") for src in srcs]
    native.genrule(
        name = name + "_copy",
        srcs = srcs,
        outs = outs,
        cmd = "cp $(SRCS) $(@D)",
    )

    native.filegroup(
        name = name,
        data = outs + luadeps,
    )

def make_shell_script(name, contents, out):
    script_contents = (_shell_find_runfiles + contents).replace("$", "$$")
    native.genrule(
        name = "gen_" + name,
        outs = [out],
        cmd = "(cat <<'HEREDOC'\n%s\nHEREDOC\n) > $@" % script_contents,
    )

def _lua_binary_or_test(name, luamain, luadeps, rule):
    script = name + ".sh"

    make_shell_script(
        name = "gen_" + name,
        out = script,
        contents = """
BASE=$(dirname $(rlocation upb/upb_c.so))
export LUA_CPATH="$BASE/?.so"
export LUA_PATH="$BASE/?.lua"
$(rlocation lua/lua) $(rlocation upb/tools/upbc.lua) "$@"
""",
    )

    rule(
        name = name,
        srcs = [script],
        data = ["@lua//:lua", "@bazel_tools//tools/bash/runfiles", luamain] + luadeps,
    )

def lua_binary(name, luamain, luadeps = []):
    _lua_binary_or_test(name, luamain, luadeps, native.sh_binary)

def lua_test(name, luamain, luadeps = []):
    _lua_binary_or_test(name, luamain, luadeps, native.sh_test)

def generated_file_staleness_test(name, outs, generated_pattern):
    """Tests that checked-in file(s) match the contents of generated file(s).

    The resulting test will verify that all output files exist and have the
    correct contents. If the test fails, it can be invoked with --fix to
    bring the checked-in files up to date.

    Args:
      name: Name of the rule.
      outs: the checked-in files that are copied from generated files.
      generated_pattern: the pattern for transforming each "out" file into a
        generated file. For example, if generated_pattern="generated/%s" then
        a file foo.txt will look for generated file generated/foo.txt.
    """

    script_name = name + ".py"
    script_src = "//:tools/staleness_test.py"

    # Filter out non-existing rules so Blaze doesn't error out before we even
    # run the test.
    existing_outs = native.glob(include = outs)

    # The file list contains a few extra bits of information at the end.
    # These get unpacked by the Config class in staleness_test_lib.py.
    file_list = outs + [generated_pattern, native.package_name() or ".", name]

    native.genrule(
        name = name + "_makescript",
        outs = [script_name],
        srcs = [script_src],
        testonly = 1,
        cmd = "cat $(location " + script_src + ") > $@; " +
              "sed -i.bak -e 's|INSERT_FILE_LIST_HERE|" + "\\\n  ".join(file_list) + "|' $@",
    )

    native.py_test(
        name = name,
        srcs = [script_name],
        data = existing_outs + [generated_pattern % file for file in outs],
        deps = [
            "//:staleness_test_lib",
        ],
    )

# upb_amalgamation() rule, with file_list aspect.

SrcList = provider(
    fields = {
        "srcs": "list of srcs",
        "hdrs": "list of hdrs",
    },
)

def _file_list_aspect_impl(target, ctx):
    srcs = []
    hdrs = []
    for src in ctx.rule.attr.srcs:
        srcs += src.files.to_list()
    for hdr in ctx.rule.attr.hdrs:
        hdrs += hdr.files.to_list()
    return [SrcList(srcs = srcs, hdrs = hdrs)]

_file_list_aspect = aspect(
    implementation = _file_list_aspect_impl,
)

def _upb_amalgamation(ctx):
    inputs = []
    srcs = []
    for lib in ctx.attr.libs:
        inputs += lib[SrcList].srcs
        inputs += lib[SrcList].hdrs
        srcs += [src for src in lib[SrcList].srcs if src.path.endswith("c")]
    ctx.actions.run(
        inputs = inputs,
        outputs = ctx.outputs.outs,
        arguments = ["", ctx.bin_dir.path + "/"] + [f.path for f in srcs],
        progress_message = "Making amalgamation",
        executable = ctx.executable.amalgamator,
    )

upb_amalgamation = rule(
    attrs = {
        "amalgamator": attr.label(
            executable = True,
            cfg = "host",
        ),
        "libs": attr.label_list(aspects = [_file_list_aspect]),
        "outs": attr.output_list(),
    },
    implementation = _upb_amalgamation,
)

is_bazel = not hasattr(native, "genmpm")

google3_dep_map = {
    "@absl//absl/base:core_headers": "//third_party/absl/base:core_headers",
    "@absl//absl/strings": "//third_party/absl/strings",
    "@com_google_protobuf//:protoc": "//third_party/protobuf:protoc",
    "@com_google_protobuf//:protobuf": "//third_party/protobuf:protobuf",
    "@com_google_protobuf//:protoc_lib": "//third_party/protobuf:libprotoc",
}

def map_dep(dep):
    if is_bazel:
        return dep
    else:
        return google3_dep_map[dep]

# upb_proto_library() rule

def _remove_up(string):
    if string.startswith("../"):
        string = string[3:]
        pos = string.find("/")
        string = string[pos + 1:]

    return _remove_suffix(string, ".proto")

def _upb_proto_srcs_impl(ctx, suffix):
    sources = []
    outs = []
    include_dirs = {}
    for dep in ctx.attr.deps:
        if hasattr(dep, "proto"):
            for src in dep.proto.transitive_sources:
                sources.append(src)
                include_dir = _remove_suffix(src.path, _remove_up(src.short_path) + "." + src.extension)
                if include_dir:
                    include_dirs[include_dir] = True
                outs.append(ctx.actions.declare_file(_remove_up(src.short_path) + suffix + ".h"))
                outs.append(ctx.actions.declare_file(_remove_up(src.short_path) + suffix + ".c"))
                outdir = _remove_suffix(outs[-1].path, _remove_up(src.short_path) + suffix + ".c")

    source_paths = [d.path for d in sources]
    include_args = ["-I" + root for root in include_dirs.keys()]

    ctx.actions.run(
        inputs = [ctx.executable.upbc] + sources,
        outputs = outs,
        executable = ctx.executable.protoc,
        arguments = ["--upb_out", outdir, "--plugin=protoc-gen-upb=" + ctx.executable.upbc.path] + include_args + source_paths,
        progress_message = "Generating upb protos",
    )

    return [DefaultInfo(files = depset(outs))]

def _upb_proto_library_srcs_impl(ctx):
    return _upb_proto_srcs_impl(ctx, ".upb")

def _upb_proto_reflection_library_srcs_impl(ctx):
    return _upb_proto_srcs_impl(ctx, ".upbdefs")

_upb_proto_library_srcs = rule(
    attrs = {
        "upbc": attr.label(
            executable = True,
            cfg = "host",
        ),
        "protoc": attr.label(
            executable = True,
            cfg = "host",
            default = map_dep("@com_google_protobuf//:protoc"),
        ),
        "deps": attr.label_list(),
    },
    implementation = _upb_proto_library_srcs_impl,
)

def upb_proto_library(name, deps, upbc):
    srcs_rule = name + "_srcs.cc"
    _upb_proto_library_srcs(
        name = srcs_rule,
        upbc = upbc,
        deps = deps,
    )
    native.cc_library(
        name = name,
        srcs = [":" + srcs_rule],
        deps = [":upb"],
        copts = ["-Ibazel-out/k8-fastbuild/bin"],
    )

_upb_proto_reflection_library_srcs = rule(
    attrs = {
        "upbc": attr.label(
            executable = True,
            cfg = "host",
        ),
        "protoc": attr.label(
            executable = True,
            cfg = "host",
            default = map_dep("@com_google_protobuf//:protoc"),
        ),
        "deps": attr.label_list(),
    },
    implementation = _upb_proto_reflection_library_srcs_impl,
)

def upb_proto_reflection_library(name, deps, upbc):
    srcs_rule = name + "_defsrcs.cc"
    _upb_proto_reflection_library_srcs(
        name = srcs_rule,
        upbc = upbc,
        deps = deps,
    )
    native.cc_library(
        name = name,
        srcs = [":" + srcs_rule],
        deps = [":upb", ":reflection"],
        copts = ["-Ibazel-out/k8-fastbuild/bin"],
    )

def licenses(*args):
    # No-op (for Google-internal usage).
    pass
@@ -0,0 +1,18 @@

load("@upb//bazel:upb_proto_library.bzl", "upb_proto_library")

proto_library(
    name = "foo_proto",
    srcs = ["foo.proto"],
)

upb_proto_library(
    name = "foo_upbproto",
    deps = [":foo_proto"],
)

cc_binary(
    name = "test_binary",
    srcs = ["test_binary.c"],
    deps = [":foo_upbproto"],
)
@@ -0,0 +1,14 @@

workspace(name = "upb_example")

load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")

git_repository(
    name = "upb",
    remote = "https://github.com/protocolbuffers/upb.git",
    commit = "d16bf99ac4658793748cda3251226059892b3b7b",
)

load("@upb//bazel:workspace_deps.bzl", "upb_deps")

upb_deps()
@@ -0,0 +1,7 @@

syntax = "proto2";

message Foo {
  optional int64 time = 1;
  optional string greeting = 2;
}
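
// Building :foo_upbproto from the BUILD file above generates foo.upb.h and
// foo.upb.c from this message; test_binary.c below includes the header.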
@@ -0,0 +1,17 @@

#include <time.h>

#include "foo.upb.h"

int main() {
  upb_arena *arena = upb_arena_new();
  Foo* foo = Foo_new(arena);
  const char greeting[] = "Hello, World!\n";

  Foo_set_time(foo, time(NULL));
  /* Warning: the proto will not copy this, the string data must outlive
   * the proto. */
  Foo_set_greeting(foo, upb_strview_makez(greeting));

  upb_arena_free(arena);
}
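
/* Assumed invocation (the target name comes from the BUILD file above):
 *   bazel run :test_binary
 */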
Binary file not shown.
@@ -1,870 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Author: kenton@google.com (Kenton Varda)
//  Based on original Protocol Buffers design by
//  Sanjay Ghemawat, Jeff Dean, and others.
//
// The messages in this file describe the definitions found in .proto files.
// A valid .proto file can be translated directly to a FileDescriptorProto
// without any other information (e.g. without reading its imports).


syntax = "proto2";

package google.protobuf;
option go_package = "github.com/golang/protobuf/protoc-gen-go/descriptor;descriptor";
option java_package = "com.google.protobuf";
option java_outer_classname = "DescriptorProtos";
option csharp_namespace = "Google.Protobuf.Reflection";
option objc_class_prefix = "GPB";
option cc_enable_arenas = true;

// descriptor.proto must be optimized for speed because reflection-based
// algorithms don't work during bootstrapping.
option optimize_for = SPEED;

// The protocol compiler can output a FileDescriptorSet containing the .proto
// files it parses.
message FileDescriptorSet {
  repeated FileDescriptorProto file = 1;
}

// Describes a complete .proto file.
message FileDescriptorProto {
  optional string name = 1;     // file name, relative to root of source tree
  optional string package = 2;  // e.g. "foo", "foo.bar", etc.

  // Names of files imported by this file.
  repeated string dependency = 3;
  // Indexes of the public imported files in the dependency list above.
  repeated int32 public_dependency = 10;
  // Indexes of the weak imported files in the dependency list.
  // For Google-internal migration only. Do not use.
  repeated int32 weak_dependency = 11;

  // All top-level definitions in this file.
  repeated DescriptorProto message_type = 4;
  repeated EnumDescriptorProto enum_type = 5;
  repeated ServiceDescriptorProto service = 6;
  repeated FieldDescriptorProto extension = 7;

  optional FileOptions options = 8;

  // This field contains optional information about the original source code.
  // You may safely remove this entire field without harming runtime
  // functionality of the descriptors -- the information is needed only by
  // development tools.
  optional SourceCodeInfo source_code_info = 9;

  // The syntax of the proto file.
  // The supported values are "proto2" and "proto3".
  optional string syntax = 12;
}

// Describes a message type.
message DescriptorProto {
  optional string name = 1;

  repeated FieldDescriptorProto field = 2;
  repeated FieldDescriptorProto extension = 6;

  repeated DescriptorProto nested_type = 3;
  repeated EnumDescriptorProto enum_type = 4;

  message ExtensionRange {
    optional int32 start = 1;
    optional int32 end = 2;

    optional ExtensionRangeOptions options = 3;
  }
  repeated ExtensionRange extension_range = 5;

  repeated OneofDescriptorProto oneof_decl = 8;

  optional MessageOptions options = 7;

  // Range of reserved tag numbers. Reserved tag numbers may not be used by
  // fields or extension ranges in the same message. Reserved ranges may
  // not overlap.
  message ReservedRange {
    optional int32 start = 1;  // Inclusive.
    optional int32 end = 2;    // Exclusive.
  }
  repeated ReservedRange reserved_range = 9;
  // Reserved field names, which may not be used by fields in the same message.
  // A given name may only be reserved once.
  repeated string reserved_name = 10;
}

message ExtensionRangeOptions {
  // The parser stores options it doesn't recognize here. See above.
  repeated UninterpretedOption uninterpreted_option = 999;

  // Clients can define custom options in extensions of this message. See above.
  extensions 1000 to max;
}

// Describes a field within a message.
message FieldDescriptorProto {
  enum Type {
    // 0 is reserved for errors.
    // Order is weird for historical reasons.
    TYPE_DOUBLE = 1;
    TYPE_FLOAT = 2;
    // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
    // negative values are likely.
    TYPE_INT64 = 3;
    TYPE_UINT64 = 4;
    // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
    // negative values are likely.
    TYPE_INT32 = 5;
    TYPE_FIXED64 = 6;
    TYPE_FIXED32 = 7;
    TYPE_BOOL = 8;
    TYPE_STRING = 9;
    // Tag-delimited aggregate.
    // Group type is deprecated and not supported in proto3. However, Proto3
    // implementations should still be able to parse the group wire format and
    // treat group fields as unknown fields.
    TYPE_GROUP = 10;
    TYPE_MESSAGE = 11;  // Length-delimited aggregate.

    // New in version 2.
    TYPE_BYTES = 12;
    TYPE_UINT32 = 13;
    TYPE_ENUM = 14;
    TYPE_SFIXED32 = 15;
    TYPE_SFIXED64 = 16;
    TYPE_SINT32 = 17;  // Uses ZigZag encoding.
    TYPE_SINT64 = 18;  // Uses ZigZag encoding.
  };

  enum Label {
    // 0 is reserved for errors
    LABEL_OPTIONAL = 1;
    LABEL_REQUIRED = 2;
    LABEL_REPEATED = 3;
  };

  optional string name = 1;
  optional int32 number = 3;
  optional Label label = 4;

  // If type_name is set, this need not be set. If both this and type_name
  // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
  optional Type type = 5;

  // For message and enum types, this is the name of the type. If the name
  // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
  // rules are used to find the type (i.e. first the nested types within this
  // message are searched, then within the parent, on up to the root
  // namespace).
  optional string type_name = 6;

  // For extensions, this is the name of the type being extended. It is
  // resolved in the same manner as type_name.
  optional string extendee = 2;

  // For numeric types, contains the original text representation of the value.
  // For booleans, "true" or "false".
  // For strings, contains the default text contents (not escaped in any way).
  // For bytes, contains the C escaped value. All bytes >= 128 are escaped.
  // TODO(kenton): Base-64 encode?
  optional string default_value = 7;

  // If set, gives the index of a oneof in the containing type's oneof_decl
  // list. This field is a member of that oneof.
  optional int32 oneof_index = 9;

  // JSON name of this field. The value is set by protocol compiler. If the
  // user has set a "json_name" option on this field, that option's value
  // will be used. Otherwise, it's deduced from the field's name by converting
  // it to camelCase.
  optional string json_name = 10;

  optional FieldOptions options = 8;
}

// Describes a oneof.
message OneofDescriptorProto {
  optional string name = 1;
  optional OneofOptions options = 2;
}

// Describes an enum type.
message EnumDescriptorProto {
  optional string name = 1;

  repeated EnumValueDescriptorProto value = 2;

  optional EnumOptions options = 3;

  // Range of reserved numeric values. Reserved values may not be used by
  // entries in the same enum. Reserved ranges may not overlap.
  //
  // Note that this is distinct from DescriptorProto.ReservedRange in that it
  // is inclusive such that it can appropriately represent the entire int32
  // domain.
  message EnumReservedRange {
    optional int32 start = 1;  // Inclusive.
    optional int32 end = 2;    // Inclusive.
  }

  // Range of reserved numeric values. Reserved numeric values may not be used
  // by enum values in the same enum declaration. Reserved ranges may not
  // overlap.
  repeated EnumReservedRange reserved_range = 4;

  // Reserved enum value names, which may not be reused. A given name may only
  // be reserved once.
  repeated string reserved_name = 5;
}

// Describes a value within an enum.
message EnumValueDescriptorProto {
  optional string name = 1;
  optional int32 number = 2;

  optional EnumValueOptions options = 3;
}

// Describes a service.
message ServiceDescriptorProto {
  optional string name = 1;
  repeated MethodDescriptorProto method = 2;

  optional ServiceOptions options = 3;
}

// Describes a method of a service.
message MethodDescriptorProto {
  optional string name = 1;

  // Input and output type names. These are resolved in the same way as
  // FieldDescriptorProto.type_name, but must refer to a message type.
  optional string input_type = 2;
  optional string output_type = 3;

  optional MethodOptions options = 4;

  // Identifies if client streams multiple client messages
  optional bool client_streaming = 5 [default=false];
  // Identifies if server streams multiple server messages
  optional bool server_streaming = 6 [default=false];
}


// ===================================================================
// Options

// Each of the definitions above may have "options" attached. These are
// just annotations which may cause code to be generated slightly differently
// or may contain hints for code that manipulates protocol messages.
//
// Clients may define custom options as extensions of the *Options messages.
// These extensions may not yet be known at parsing time, so the parser cannot
// store the values in them. Instead it stores them in a field in the *Options
// message called uninterpreted_option. This field must have the same name
// across all *Options messages. We then use this field to populate the
// extensions when we build a descriptor, at which point all protos have been
// parsed and so all extensions are known.
//
// Extension numbers for custom options may be chosen as follows:
// * For options which will only be used within a single application or
//   organization, or for experimental options, use field numbers 50000
//   through 99999. It is up to you to ensure that you do not use the
//   same number for multiple options.
// * For options which will be published and used publicly by multiple
//   independent entities, e-mail protobuf-global-extension-registry@google.com
//   to reserve extension numbers. Simply provide your project name (e.g.
//   Objective-C plugin) and your project website (if available) -- there's no
//   need to explain how you intend to use them. Usually you only need one
//   extension number. You can declare multiple options with only one extension
//   number by putting them in a sub-message. See the Custom Options section of
//   the docs for examples:
//   https://developers.google.com/protocol-buffers/docs/proto#options
//   If this turns out to be popular, a web service will be set up
//   to automatically assign option numbers.


message FileOptions {

  // Sets the Java package where classes generated from this .proto will be
  // placed. By default, the proto package is used, but this is often
  // inappropriate because proto packages do not normally start with backwards
  // domain names.
  optional string java_package = 1;


  // If set, all the classes from the .proto file are wrapped in a single
  // outer class with the given name. This applies to both Proto1
  // (equivalent to the old "--one_java_file" option) and Proto2 (where
  // a .proto always translates to a single class, but you may want to
  // explicitly choose the class name).
  optional string java_outer_classname = 8;

  // If set true, then the Java code generator will generate a separate .java
  // file for each top-level message, enum, and service defined in the .proto
  // file. Thus, these types will *not* be nested inside the outer class
  // named by java_outer_classname. However, the outer class will still be
  // generated to contain the file's getDescriptor() method as well as any
  // top-level extensions defined in the file.
  optional bool java_multiple_files = 10 [default=false];

  // This option does nothing.
  optional bool java_generate_equals_and_hash = 20 [deprecated=true];

  // If set true, then the Java2 code generator will generate code that
  // throws an exception whenever an attempt is made to assign a non-UTF-8
  // byte sequence to a string field.
  // Message reflection will do the same.
  // However, an extension field still accepts non-UTF-8 byte sequences.
  // This option has no effect when used with the lite runtime.
  optional bool java_string_check_utf8 = 27 [default=false];


  // Generated classes can be optimized for speed or code size.
  enum OptimizeMode {
    SPEED = 1;         // Generate complete code for parsing, serialization,
                       // etc.
    CODE_SIZE = 2;     // Use ReflectionOps to implement these methods.
    LITE_RUNTIME = 3;  // Generate code using MessageLite and the lite runtime.
  }
  optional OptimizeMode optimize_for = 9 [default=SPEED];

  // Sets the Go package where structs generated from this .proto will be
  // placed. If omitted, the Go package will be derived from the following:
  //   - The basename of the package import path, if provided.
  //   - Otherwise, the package statement in the .proto file, if present.
  //   - Otherwise, the basename of the .proto file, without extension.
  optional string go_package = 11;




  // Should generic services be generated in each language? "Generic" services
  // are not specific to any particular RPC system. They are generated by the
  // main code generators in each language (without additional plugins).
  // Generic services were the only kind of service generation supported by
  // early versions of google.protobuf.
  //
  // Generic services are now considered deprecated in favor of using plugins
  // that generate code specific to your particular RPC system. Therefore,
  // these default to false. Old code which depends on generic services should
  // explicitly set them to true.
  optional bool cc_generic_services = 16 [default=false];
  optional bool java_generic_services = 17 [default=false];
  optional bool py_generic_services = 18 [default=false];
  optional bool php_generic_services = 42 [default=false];

  // Is this file deprecated?
  // Depending on the target platform, this can emit Deprecated annotations
  // for everything in the file, or it will be completely ignored; in the very
  // least, this is a formalization for deprecating files.
  optional bool deprecated = 23 [default=false];

  // Enables the use of arenas for the proto messages in this file. This applies
  // only to generated classes for C++.
  optional bool cc_enable_arenas = 31 [default=false];


  // Sets the objective c class prefix which is prepended to all objective c
  // generated classes from this .proto. There is no default.
  optional string objc_class_prefix = 36;

  // Namespace for generated classes; defaults to the package.
  optional string csharp_namespace = 37;

  // By default Swift generators will take the proto package and CamelCase it
  // replacing '.' with underscore and use that to prefix the types/symbols
  // defined. When this option is provided, they will use this value instead
  // to prefix the types/symbols defined.
  optional string swift_prefix = 39;

  // Sets the php class prefix which is prepended to all php generated classes
  // from this .proto. Default is empty.
  optional string php_class_prefix = 40;

  // Use this option to change the namespace of php generated classes. Default
  // is empty. When this option is empty, the package name will be used for
  // determining the namespace.
  optional string php_namespace = 41;

  // The parser stores options it doesn't recognize here. See above.
  repeated UninterpretedOption uninterpreted_option = 999;

  // Clients can define custom options in extensions of this message. See above.
  extensions 1000 to max;

  reserved 38;
}

message MessageOptions {
  // Set true to use the old proto1 MessageSet wire format for extensions.
  // This is provided for backwards-compatibility with the MessageSet wire
  // format. You should not use this for any other reason: It's less
  // efficient, has fewer features, and is more complicated.
  //
  // The message must be defined exactly as follows:
  //   message Foo {
  //     option message_set_wire_format = true;
  //     extensions 4 to max;
  //   }
  // Note that the message cannot have any defined fields; MessageSets only
  // have extensions.
  //
  // All extensions of your type must be singular messages; e.g. they cannot
  // be int32s, enums, or repeated messages.
  //
  // Because this is an option, the above two restrictions are not enforced by
  // the protocol compiler.
  optional bool message_set_wire_format = 1 [default=false];

  // Disables the generation of the standard "descriptor()" accessor, which can
  // conflict with a field of the same name. This is meant to make migration
  // from proto1 easier; new code should avoid fields named "descriptor".
  optional bool no_standard_descriptor_accessor = 2 [default=false];

  // Is this message deprecated?
  // Depending on the target platform, this can emit Deprecated annotations
  // for the message, or it will be completely ignored; in the very least,
  // this is a formalization for deprecating messages.
  optional bool deprecated = 3 [default=false];

  // Whether the message is an automatically generated map entry type for the
  // maps field.
  //
  // For maps fields:
  //     map<KeyType, ValueType> map_field = 1;
  // The parsed descriptor looks like:
  //     message MapFieldEntry {
  //         option map_entry = true;
  //         optional KeyType key = 1;
  //         optional ValueType value = 2;
  //     }
  //     repeated MapFieldEntry map_field = 1;
  //
  // Implementations may choose not to generate the map_entry=true message, but
  // use a native map in the target language to hold the keys and values.
  // The reflection APIs in such implementations still need to work as
  // if the field is a repeated message field.
  //
  // NOTE: Do not set the option in .proto files. Always use the maps syntax
  // instead. The option should only be implicitly set by the proto compiler
  // parser.
  optional bool map_entry = 7;

  reserved 8;  // javalite_serializable
  reserved 9;  // javanano_as_lite

  // The parser stores options it doesn't recognize here. See above.
  repeated UninterpretedOption uninterpreted_option = 999;

  // Clients can define custom options in extensions of this message. See above.
  extensions 1000 to max;
}
||||
|
||||
message FieldOptions { |
||||
// The ctype option instructs the C++ code generator to use a different |
||||
// representation of the field than it normally would. See the specific |
||||
// options below. This option is not yet implemented in the open source |
||||
// release -- sorry, we'll try to include it in a future version! |
||||
optional CType ctype = 1 [default = STRING]; |
||||
enum CType { |
||||
// Default mode. |
||||
STRING = 0; |
||||
|
||||
CORD = 1; |
||||
|
||||
STRING_PIECE = 2; |
||||
} |
||||
// The packed option can be enabled for repeated primitive fields to enable |
||||
// a more efficient representation on the wire. Rather than repeatedly |
||||
// writing the tag and type for each element, the entire array is encoded as |
||||
// a single length-delimited blob. In proto3, only explicit setting it to |
||||
// false will avoid using packed encoding. |
||||
optional bool packed = 2; |
||||
|
||||
// The jstype option determines the JavaScript type used for values of the |
||||
// field. The option is permitted only for 64 bit integral and fixed types |
||||
// (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING |
||||
// is represented as JavaScript string, which avoids loss of precision that |
||||
// can happen when a large value is converted to a floating point JavaScript. |
||||
// Specifying JS_NUMBER for the jstype causes the generated JavaScript code to |
||||
// use the JavaScript "number" type. The behavior of the default option |
||||
// JS_NORMAL is implementation dependent. |
||||
// |
||||
// This option is an enum to permit additional types to be added, e.g. |
||||
// goog.math.Integer. |
||||
optional JSType jstype = 6 [default = JS_NORMAL]; |
||||
enum JSType { |
||||
// Use the default type. |
||||
JS_NORMAL = 0; |
||||
|
||||
// Use JavaScript strings. |
||||
JS_STRING = 1; |
||||
|
||||
// Use JavaScript numbers. |
||||
JS_NUMBER = 2; |
||||
} |
||||
|
  // Should this field be parsed lazily? Lazy applies only to message-type
  // fields. It means that when the outer message is initially parsed, the
  // inner message's contents will not be parsed but instead stored in encoded
  // form. The inner message will actually be parsed when it is first accessed.
  //
  // This is only a hint. Implementations are free to choose whether to use
  // eager or lazy parsing regardless of the value of this option. However,
  // setting this option to true suggests that the protocol author believes
  // that using lazy parsing on this field is worth the additional bookkeeping
  // overhead typically needed to implement it.
  //
  // This option does not affect the public interface of any generated code;
  // all method signatures remain the same. Furthermore, thread-safety of the
  // interface is not affected by this option; const methods remain safe to
  // call from multiple threads concurrently, while non-const methods continue
  // to require exclusive access.
  //
  // Note that implementations may choose not to check required fields within
  // a lazy sub-message. That is, calling IsInitialized() on the outer message
  // may return true even if the inner message has missing required fields.
  // This is necessary because otherwise the inner message would have to be
  // parsed in order to perform the check, defeating the purpose of lazy
  // parsing. An implementation which chooses not to check required fields
  // must be consistent about it. That is, for any particular sub-message, the
  // implementation must either *always* check its required fields, or *never*
  // check its required fields, regardless of whether or not the message has
  // been parsed.
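  //
  // For example (hypothetical field and type):
  //     optional LargeSubMessage details = 9 [lazy = true];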
  optional bool lazy = 5 [default = false];

  // Is this field deprecated?
  // Depending on the target platform, this can emit Deprecated annotations
  // for accessors, or it will be completely ignored; at the very least, this
  // is a formalization for deprecating fields.
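  //
  // For example (hypothetical field):
  //     optional int32 old_field = 6 [deprecated = true];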
  optional bool deprecated = 3 [default = false];

  // For Google-internal migration only. Do not use.
  optional bool weak = 10 [default = false];

  // The parser stores options it doesn't recognize here. See above.
  repeated UninterpretedOption uninterpreted_option = 999;

  // Clients can define custom options in extensions of this message. See above.
  extensions 1000 to max;

  reserved 4;  // removed jtype
}

message OneofOptions {
  // The parser stores options it doesn't recognize here. See above.
  repeated UninterpretedOption uninterpreted_option = 999;

  // Clients can define custom options in extensions of this message. See above.
  extensions 1000 to max;
}

message EnumOptions {

  // Set this option to true to allow mapping different tag names to the same
  // value.
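  //
  // For example, a sketch of an aliased enum (names are hypothetical):
  //     enum Status {
  //       option allow_alias = true;
  //       STATUS_UNKNOWN = 0;
  //       UNKNOWN = 0;  // alias for STATUS_UNKNOWN
  //     }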
  optional bool allow_alias = 2;

  // Is this enum deprecated?
  // Depending on the target platform, this can emit Deprecated annotations
  // for the enum, or it will be completely ignored; at the very least, this
  // is a formalization for deprecating enums.
  optional bool deprecated = 3 [default = false];

  reserved 5;  // javanano_as_lite

  // The parser stores options it doesn't recognize here. See above.
  repeated UninterpretedOption uninterpreted_option = 999;

  // Clients can define custom options in extensions of this message. See above.
  extensions 1000 to max;
}

message EnumValueOptions {
  // Is this enum value deprecated?
  // Depending on the target platform, this can emit Deprecated annotations
  // for the enum value, or it will be completely ignored; at the very least,
  // this is a formalization for deprecating enum values.
  optional bool deprecated = 1 [default = false];

  // The parser stores options it doesn't recognize here. See above.
  repeated UninterpretedOption uninterpreted_option = 999;

  // Clients can define custom options in extensions of this message. See above.
  extensions 1000 to max;
}

message ServiceOptions {

  // Note: Field numbers 1 through 32 are reserved for Google's internal RPC
  // framework. We apologize for hoarding these numbers to ourselves, but
  // we were already using them long before we decided to release Protocol
  // Buffers.

  // Is this service deprecated?
  // Depending on the target platform, this can emit Deprecated annotations
  // for the service, or it will be completely ignored; at the very least,
  // this is a formalization for deprecating services.
  optional bool deprecated = 33 [default = false];

  // The parser stores options it doesn't recognize here. See above.
  repeated UninterpretedOption uninterpreted_option = 999;

  // Clients can define custom options in extensions of this message. See above.
  extensions 1000 to max;
}

message MethodOptions {

  // Note: Field numbers 1 through 32 are reserved for Google's internal RPC
  // framework. We apologize for hoarding these numbers to ourselves, but
  // we were already using them long before we decided to release Protocol
  // Buffers.

  // Is this method deprecated?
  // Depending on the target platform, this can emit Deprecated annotations
  // for the method, or it will be completely ignored; at the very least,
  // this is a formalization for deprecating methods.
  optional bool deprecated = 33 [default = false];

  // Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
  // or neither? HTTP-based RPC implementations may choose the GET verb for
  // safe methods, and the PUT verb for idempotent methods, instead of the
  // default POST.
  enum IdempotencyLevel {
    IDEMPOTENCY_UNKNOWN = 0;
    NO_SIDE_EFFECTS = 1;  // implies idempotent
    IDEMPOTENT = 2;       // idempotent, but may have side effects
  }
  optional IdempotencyLevel idempotency_level = 34
      [default = IDEMPOTENCY_UNKNOWN];
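  //
  // For example, a sketch of use in a hypothetical service definition:
  //     service Library {
  //       rpc GetBook(GetBookRequest) returns (Book) {
  //         option idempotency_level = NO_SIDE_EFFECTS;
  //       }
  //     }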

  // The parser stores options it doesn't recognize here. See above.
  repeated UninterpretedOption uninterpreted_option = 999;

  // Clients can define custom options in extensions of this message. See above.
  extensions 1000 to max;
}

// A message representing an option the parser does not recognize. This only
// appears in options protos created by the compiler::Parser class.
// DescriptorPool resolves these when building Descriptor objects. Therefore,
// options protos in descriptor objects (e.g. returned by Descriptor::options(),
// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
// in them.
message UninterpretedOption {
  // The name of the uninterpreted option. Each string represents a segment in
  // a dot-separated name. is_extension is true iff a segment represents an
  // extension (denoted with parentheses in options specs in .proto files).
  // E.g., { ["foo", false], ["bar.baz", true], ["qux", false] } represents
  // "foo.(bar.baz).qux".
  message NamePart {
    required string name_part = 1;
    required bool is_extension = 2;
  }
  repeated NamePart name = 2;

  // The value of the uninterpreted option, in whatever type the tokenizer
  // identified it as during parsing. Exactly one of these should be set.
  optional string identifier_value = 3;
  optional uint64 positive_int_value = 4;
  optional int64 negative_int_value = 5;
  optional double double_value = 6;
  optional bytes string_value = 7;
  optional string aggregate_value = 8;
}

// ===================================================================
// Optional source code info

// Encapsulates information about the original source file from which a
// FileDescriptorProto was generated.
message SourceCodeInfo {
  // A Location identifies a piece of source code in a .proto file which
  // corresponds to a particular definition. This information is intended
  // to be useful to IDEs, code indexers, documentation generators, and similar
  // tools.
  //
  // For example, say we have a file like:
  //   message Foo {
  //     optional string foo = 1;
  //   }
  // Let's look at just the field definition:
  //   optional string foo = 1;
  //   ^       ^^     ^^  ^  ^^^
  //   a       bc     de  f  ghi
  // We have the following locations:
  //   span    path               represents
  //   [a,i)   [ 4, 0, 2, 0 ]     The whole field definition.
  //   [a,b)   [ 4, 0, 2, 0, 4 ]  The label (optional).
  //   [c,d)   [ 4, 0, 2, 0, 5 ]  The type (string).
  //   [e,f)   [ 4, 0, 2, 0, 1 ]  The name (foo).
  //   [g,h)   [ 4, 0, 2, 0, 3 ]  The number (1).
  //
  // Notes:
  // - A location may refer to a repeated field itself (i.e. not to any
  //   particular index within it). This is used whenever a set of elements is
  //   logically enclosed in a single code segment. For example, an entire
  //   extend block (possibly containing multiple extension definitions) will
  //   have an outer location whose path refers to the "extensions" repeated
  //   field without an index.
  // - Multiple locations may have the same path. This happens when a single
  //   logical declaration is spread out across multiple places. The most
  //   obvious example is the "extend" block again -- there may be multiple
  //   extend blocks in the same scope, each of which will have the same path.
  // - A location's span is not always a subset of its parent's span. For
  //   example, the "extendee" of an extension declaration appears at the
  //   beginning of the "extend" block and is shared by all extensions within
  //   the block.
  // - Just because a location's span is a subset of some other location's span
  //   does not mean that it is a descendant. For example, a "group" defines
  //   both a type and a field in a single declaration. Thus, the locations
  //   corresponding to the type and field and their components will overlap.
  // - Code which tries to interpret locations should probably be designed to
  //   ignore those that it doesn't understand, as more types of locations
  //   could be recorded in the future.
  repeated Location location = 1;
  message Location {
    // Identifies which part of the FileDescriptorProto was defined at this
    // location.
    //
    // Each element is a field number or an index. They form a path from
    // the root FileDescriptorProto to the place where the definition appears.
    // For example, this path:
    //   [ 4, 3, 2, 7, 1 ]
    // refers to:
    //   file.message_type(3)  // 4, 3
    //       .field(7)         // 2, 7
    //       .name()           // 1
    // This is because FileDescriptorProto.message_type has field number 4:
    //   repeated DescriptorProto message_type = 4;
    // and DescriptorProto.field has field number 2:
    //   repeated FieldDescriptorProto field = 2;
    // and FieldDescriptorProto.name has field number 1:
    //   optional string name = 1;
    //
    // Thus, the above path gives the location of a field name. If we removed
    // the last element:
    //   [ 4, 3, 2, 7 ]
    // this path refers to the whole field declaration (from the beginning
    // of the label to the terminating semicolon).
    repeated int32 path = 1 [packed = true];

    // Always has exactly three or four elements: start line, start column,
    // end line (optional, otherwise assumed same as start line), end column.
    // These are packed into a single field for efficiency. Note that line
    // and column numbers are zero-based -- typically you will want to add
    // 1 to each before displaying to a user.
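    //
    // For example (hypothetical numbers): a span of [ 4, 2, 25 ] denotes a
    // declaration on line 4 running from column 2 to column 25, while
    // [ 7, 0, 9, 1 ] runs from line 7, column 0 through line 9, column 1.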
    repeated int32 span = 2 [packed = true];

    // If this SourceCodeInfo represents a complete declaration, these are any
    // comments appearing before and after the declaration which appear to be
    // attached to the declaration.
    //
    // A series of line comments appearing on consecutive lines, with no other
    // tokens appearing on those lines, will be treated as a single comment.
    //
    // leading_detached_comments will keep paragraphs of comments that appear
    // before (but not connected to) the current element. Each paragraph,
    // separated by empty lines, will be one comment element in the repeated
    // field.
    //
    // Only the comment content is provided; comment markers (e.g. //) are
    // stripped out. For block comments, leading whitespace and an asterisk
    // will be stripped from the beginning of each line other than the first.
    // Newlines are included in the output.
    //
    // Examples:
    //
    //   optional int32 foo = 1;  // Comment attached to foo.
    //   // Comment attached to bar.
    //   optional int32 bar = 2;
    //
    //   optional string baz = 3;
    //   // Comment attached to baz.
    //   // Another line attached to baz.
    //
    //   // Comment attached to qux.
    //   //
    //   // Another line attached to qux.
    //   optional double qux = 4;
    //
    //   // Detached comment for corge. This is not leading or trailing comments
    //   // to qux or corge because there are blank lines separating it from
    //   // both.
    //
    //   // Detached comment for corge paragraph 2.
    //
    //   optional string corge = 5;
    //   /* Block comment attached
    //    * to corge. Leading asterisks
    //    * will be removed. */
    //   /* Block comment attached to
    //    * grault. */
    //   optional int32 grault = 6;
    //
    //   // ignored detached comments.
    optional string leading_comments = 3;
    optional string trailing_comments = 4;
    repeated string leading_detached_comments = 6;
  }
}

// Describes the relationship between generated code and its original source
// file. A GeneratedCodeInfo message is associated with only one generated
// source file, but may contain references to different source .proto files.
message GeneratedCodeInfo {
  // An Annotation connects some span of text in generated code to an element
  // of its generating .proto file.
  repeated Annotation annotation = 1;
  message Annotation {
    // Identifies the element in the original source .proto file. This field
    // is formatted the same as SourceCodeInfo.Location.path.
    repeated int32 path = 1 [packed = true];

    // Identifies the filesystem path to the original source .proto.
    optional string source_file = 2;

    // Identifies the starting offset in bytes in the generated code
    // that relates to the identified object.
    optional int32 begin = 3;

    // Identifies the ending offset in bytes in the generated code that
    // relates to the identified object. The end offset should be one past
    // the last relevant byte (so the length of the text = end - begin).
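    //
    // For example (hypothetical numbers): begin = 10 and end = 15 would mark
    // a five-byte span covering bytes 10 through 14 of the generated file.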
    optional int32 end = 4;
  }
}
@ -0,0 +1,9 @@ |
syntax = "proto2";

import "tests/json/test.proto";

package upb.test.json;

message ImportEnum {
  optional MyEnum e = 1;
}
Binary file not shown.
@ -1,22 +0,0 @@ |
#!/usr/bin/env bash

set -e

BINDIR=`dirname "$0"`/..
SRCDIR=${CMAKE_CURRENT_SOURCE_DIR}
EXIT=0

function try_copy() {
  if [ ! -f "$1" ]; then
    echo "Can't find $1, skipping..."
    EXIT=1
  else
    cp "$1" "$2"
    echo "$1"
  fi
}

try_copy $BINDIR/google/protobuf/descriptor.upb.c $SRCDIR/google/protobuf
try_copy $BINDIR/google/protobuf/descriptor.upb.h $SRCDIR/google/protobuf
try_copy $BINDIR/upb/json/parser.c $SRCDIR/upb/json
try_copy $BINDIR/upb/pb/compile_decoder_x64.h $SRCDIR/upb/pb
@ -1,196 +0,0 @@ |
#!/bin/bash

install_protoc() {
  sudo apt-get install protobuf-compiler
  protoc --version || true
}

# Bare build: no dependencies installed, no JIT enabled.
bare_install() {
  :
}
bare_script() {
  make -j12 tests
  make test
}

# Bare JIT build: no dependencies installed, but JIT enabled.
barejit_install() {
  :
}
barejit_script() {
  make -j12 tests WITH_JIT=yes
  make test
}

# Build with strict warnings.
warnings_install() {
  :
}
warnings_script() {
  make -j12 default WITH_MAX_WARNINGS=yes
  make -j12 tests WITH_MAX_WARNINGS=yes
  make test
}

# A 32-bit build. Can only test the core because any dependencies
# need to be available as 32-bit libs also, which gets hairy fast.
# Can't enable the JIT because it only supports x64.
core32_install() {
  sudo apt-get update -qq
  sudo apt-get install libc6-dev-i386 g++-multilib
}
core32_script() {
  make -j12 tests USER_CPPFLAGS="$USER_CPPFLAGS -m32"
  make test
}

# A build of Lua and running of Lua tests.
lua_install() {
  sudo apt-get update -qq
  sudo apt-get install lua5.2 liblua5.2-dev
}
lua_script() {
  make -j12 testlua USER_CPPFLAGS="$USER_CPPFLAGS `pkg-config lua5.2 --cflags`"
}

# Test that generated files don't need to be regenerated.
#
# We would include the Ragel output here too, but we can't really guarantee
# that its output will be stable for multiple versions of the tool, and we
# don't want the test to be brittle.
genfiles_install() {
  sudo apt-get update -qq
  sudo apt-get install lua5.2 liblua5.2-dev

  # Need a recent version of protoc to compile proto3 files.
  # .travis.yml will add this to our path.
  mkdir protoc
  cd protoc
  wget https://github.com/google/protobuf/releases/download/v3.0.0-beta-2/protoc-3.0.0-beta-2-linux-x86_64.zip
  unzip protoc-3.0.0-beta-2-linux-x86_64.zip
  cd ..
}
genfiles_script() {
  protoc --version || true

  # Avoid regenerating descriptor.pb, since its output can vary based on the
  # version of protoc.
  touch upb/descriptor/descriptor.pb

  make -j12 genfiles USER_CPPFLAGS="$USER_CPPFLAGS `pkg-config lua5.2 --cflags`"
  # Will fail if any differences were observed.
  git diff --exit-code
}

# Tests the ndebug build.
ndebug_install() {
  sudo apt-get update -qq
  sudo apt-get install lua5.2 liblua5.2-dev libprotobuf-dev
  install_protoc
}
ndebug_script() {
  # Override of USER_CPPFLAGS removes -UNDEBUG.
  export USER_CPPFLAGS="`pkg-config lua5.2 --cflags` -g -fomit-frame-pointer"
  make -j12 tests testlua WITH_JIT=yes
  make test
}

# Tests the amalgamated build (this ensures that the different .c files
# don't have symbols or macros that conflict with each other).
amalgamated_install() {
  :
}
amalgamated_script() {
  # Override of USER_CPPFLAGS removes -UNDEBUG.
  export USER_CPPFLAGS="-UNDEBUG"
  make amalgamated
}

# A run that executes with coverage support and uploads to coveralls.io.
coverage_install() {
  sudo apt-get update -qq
  sudo apt-get install libprotobuf-dev lua5.2 liblua5.2-dev
  install_protoc
  sudo pip install cpp-coveralls
}
coverage_script() {
  export USER_CPPFLAGS="--coverage -O0 `pkg-config lua5.2 --cflags`"
  make -j12 tests testlua WITH_JIT=yes
  make test
}
coverage_after_success() {
  coveralls --exclude dynasm --exclude tests --exclude upb/bindings/linux --gcov-options '\-lp'
}

set -e
set -x

if [ "$1" == "local" ]; then
  run_config() {
    make clean
    echo
    echo "travis.sh: TESTING CONFIGURATION $1 ==============================="
    echo
    UPB_TRAVIS_BUILD=$1 ./travis.sh script
  }
  # Run all configurations serially locally to test before pushing a pull
  # request.
  export CC=gcc
  export CXX=g++
  run_config "bare"
  run_config "barejit"
  run_config "core32"
  run_config "lua"
  run_config "ndebug"
  run_config "genfiles"
  run_config "amalgamated"
  exit
fi

$CC --version
$CXX --version

# Uncomment to enable uploading failure logs to S3.
# UPLOAD_TO_S3=true

if [ "$1" == "after_failure" ] && [ "$UPLOAD_TO_S3" == "true" ]; then
  # Upload failing tree to S3.
  curl -sL https://raw.githubusercontent.com/travis-ci/artifacts/master/install | bash
  PATH="$PATH:$HOME/bin"
  export ARTIFACTS_BUCKET=haberman-upb-travis-artifacts2
  ARCHIVE=failing-artifacts.tar.gz
  tar zcvf $ARCHIVE $(git ls-files -o)
  artifacts upload $ARCHIVE
  exit
fi

if [ "$1" == "after_success" ] && [ "$UPB_TRAVIS_BUILD" != "coverage" ]; then
  # after_success is only used for coverage.
  exit
fi

if [ "$CC" != "gcc" ] && [ "$UPB_TRAVIS_BUILD" == "coverage" ]; then
  # The coverage build only works for GCC.
  exit
fi

# Enable asserts and ref debugging (though some configurations override this).
export USER_CPPFLAGS="-UNDEBUG -DUPB_DEBUG_REFS -DUPB_THREAD_UNSAFE -DUPB_DEBUG_TABLE -g"

if [ "$CC" == "gcc" ]; then
  # For the GCC build, test loading JIT code via a shared object. For the
  # Clang build, test loading it in the normal way.
  export USER_CPPFLAGS="$USER_CPPFLAGS -DUPB_JIT_LOAD_SO"
fi

# TODO(haberman): Test UPB_DUMP_BYTECODE? We don't right now because it is so
# noisy.

# Enable verbose build.
export Q=

# Make any compiler warning fail the build.
export UPB_FAIL_WARNINGS=true
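# Dispatch to the function for this build configuration and phase: e.g. with
# UPB_TRAVIS_BUILD=coverage and $1=script, this runs coverage_script above.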
eval ${UPB_TRAVIS_BUILD}_${1} |