Fixed amalgamation and CMake build.

pull/13171/head
Joshua Haberman 6 years ago
parent d6e466e943
commit 985145ca16
  1. 58
      BUILD
  2. 2
      CMakeLists.txt
  3. 7
      build_defs.bzl
  4. 8
      generated_for_cmake/google/protobuf/descriptor.upb.c
  5. 20
      generated_for_cmake/google/protobuf/descriptor.upb.h
  6. 0
      generated_for_cmake/upb/json/parser.c
  7. 43
      tools/amalgamate.py
  8. 15
      tools/make_cmakelists.py
  9. 196
      travis.sh

58
BUILD

@ -11,6 +11,7 @@ load(
"upb_amalgamation",
"upb_proto_library",
"upb_proto_reflection_library",
"upb_proto_srcs",
)
licenses(["notice"]) # BSD (Google-authored w/ possible external contributions)
@ -395,6 +396,11 @@ py_binary(
srcs = ["tools/amalgamate.py"],
)
upb_proto_srcs(
name = "descriptor_upbproto_srcs",
deps = ["@com_google_protobuf//:descriptor_proto"],
)
upb_amalgamation(
name = "gen_amalgamation",
outs = [
@ -404,6 +410,7 @@ upb_amalgamation(
amalgamator = ":amalgamate",
libs = [
":upb",
":descriptor_upbproto_srcs",
":reflection",
":handlers",
":upb_pb",
@ -477,22 +484,29 @@ lua_test(
# Test the CMake build #########################################################
filegroup(
name = "cmake_files",
srcs = glob([
"CMakeLists.txt",
"generated_for_cmake/**/*",
"google/**/*",
"upbc/**/*",
"upb/**/*",
"tests/**/*",
])
)
make_shell_script(
name = "gen_run_cmake_build",
out = "run_cmake_build.sh",
contents = "mkdir build && cd build && cmake .. && make -j8 && make test",
contents = "find . && mkdir build && cd build && cmake .. && make -j8 && make test",
)
sh_test(
name = "cmake_build",
srcs = ["run_cmake_build.sh"],
data = glob([
"CMakeLists.txt",
"google/**/*",
"upbc/**/*",
"upb/**/*",
"tests/**/*",
]) + [
data = [
":cmake_files",
"@bazel_tools//tools/bash/runfiles",
],
)
@ -517,8 +531,9 @@ genrule(
srcs = [
"BUILD",
"WORKSPACE",
":cmake_files",
],
outs = ["generated/CMakeLists.txt"],
outs = ["generated-in/CMakeLists.txt"],
cmd = "$(location :make_cmakelists) $@",
tools = [":make_cmakelists"],
)
@ -526,18 +541,37 @@ genrule(
genrule(
name = "generate_json_ragel",
srcs = ["upb/json/parser.rl"],
outs = ["generated/upb/json/parser.c"],
outs = ["upb/json/parser.c"],
cmd = "$(location @ragel//:ragel) -C -o upb/json/parser.c $< && mv upb/json/parser.c $@",
tools = ["@ragel"],
)
genrule(
name = "copy_json_ragel",
srcs = ["upb/json/parser.c"],
outs = ["generated-in/generated_for_cmake/upb/json/parser.c"],
cmd = "cp $< $@",
)
genrule(
name = "copy_protos",
srcs = [":descriptor_upbproto_srcs"],
outs = [
"generated-in/generated_for_cmake/google/protobuf/descriptor.upb.c",
"generated-in/generated_for_cmake/google/protobuf/descriptor.upb.h",
],
cmd = "cp $(SRCS) $(@D)/generated-in/generated_for_cmake/google/protobuf",
)
generated_file_staleness_test(
name = "test_generated_files",
outs = [
"CMakeLists.txt",
"upb/json/parser.c",
"generated_for_cmake/upb/json/parser.c",
"generated_for_cmake/google/protobuf/descriptor.upb.c",
"generated_for_cmake/google/protobuf/descriptor.upb.h",
],
generated_pattern = "generated/%s",
generated_pattern = "generated-in/%s",
)
# copybara:strip_end

@ -49,6 +49,7 @@ if(UPB_ENABLE_UBSAN)
endif()
include_directories(.)
include_directories(generated)
include_directories(${CMAKE_CURRENT_BINARY_DIR})
if(APPLE)
@ -114,7 +115,6 @@ target_link_libraries(upb_pb
table
upb)
add_library(upb_json
upb/json/parser.c
upb/json/printer.c
upb/json/parser.h
upb/json/printer.h)

@ -178,6 +178,9 @@ SrcList = provider(
)
def _file_list_aspect_impl(target, ctx):
if SrcList in target:
return []
srcs = []
hdrs = []
for src in ctx.rule.attr.srcs:
@ -200,7 +203,7 @@ def _upb_amalgamation(ctx):
ctx.actions.run(
inputs = inputs,
outputs = ctx.outputs.outs,
arguments = ["", ctx.bin_dir.path + "/"] + [f.path for f in srcs],
arguments = [ctx.bin_dir.path + "/"] + [f.path for f in srcs] + ["-I" + root for root in _get_real_roots(inputs)],
progress_message = "Making amalgamation",
executable = ctx.executable.amalgamator,
)
@ -244,7 +247,7 @@ def _get_real_short_path(file):
def _get_real_root(file):
real_short_path = _get_real_short_path(file)
return file.path[:-len(real_short_path)]
return file.path[:-len(real_short_path) - 1]
def _get_real_roots(files):
roots = {}

@ -251,7 +251,7 @@ static const upb_msglayout *const google_protobuf_FileOptions_submsgs[1] = {
&google_protobuf_UninterpretedOption_msginit,
};
static const upb_msglayout_field google_protobuf_FileOptions__fields[19] = {
static const upb_msglayout_field google_protobuf_FileOptions__fields[21] = {
{1, UPB_SIZE(28, 32), 11, 0, 9, 1},
{8, UPB_SIZE(36, 48), 12, 0, 9, 1},
{9, UPB_SIZE(8, 8), 1, 0, 14, 1},
@ -270,13 +270,15 @@ static const upb_msglayout_field google_protobuf_FileOptions__fields[19] = {
{40, UPB_SIZE(76, 128), 17, 0, 9, 1},
{41, UPB_SIZE(84, 144), 18, 0, 9, 1},
{42, UPB_SIZE(24, 24), 10, 0, 8, 1},
{999, UPB_SIZE(92, 160), 0, 0, 11, 3},
{44, UPB_SIZE(92, 160), 19, 0, 9, 1},
{45, UPB_SIZE(100, 176), 20, 0, 9, 1},
{999, UPB_SIZE(108, 192), 0, 0, 11, 3},
};
const upb_msglayout google_protobuf_FileOptions_msginit = {
&google_protobuf_FileOptions_submsgs[0],
&google_protobuf_FileOptions__fields[0],
UPB_SIZE(96, 176), 19, false,
UPB_SIZE(112, 208), 21, false,
};
static const upb_msglayout *const google_protobuf_MessageOptions_submsgs[1] = {

@ -1030,7 +1030,11 @@ UPB_INLINE bool google_protobuf_FileOptions_has_php_namespace(const google_proto
UPB_INLINE upb_strview google_protobuf_FileOptions_php_namespace(const google_protobuf_FileOptions *msg) { return UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(84, 144)); }
UPB_INLINE bool google_protobuf_FileOptions_has_php_generic_services(const google_protobuf_FileOptions *msg) { return _upb_has_field(msg, 10); }
UPB_INLINE bool google_protobuf_FileOptions_php_generic_services(const google_protobuf_FileOptions *msg) { return UPB_FIELD_AT(msg, bool, UPB_SIZE(24, 24)); }
UPB_INLINE const google_protobuf_UninterpretedOption* const* google_protobuf_FileOptions_uninterpreted_option(const google_protobuf_FileOptions *msg, size_t *len) { return (const google_protobuf_UninterpretedOption* const*)_upb_array_accessor(msg, UPB_SIZE(92, 160), len); }
UPB_INLINE bool google_protobuf_FileOptions_has_php_metadata_namespace(const google_protobuf_FileOptions *msg) { return _upb_has_field(msg, 19); }
UPB_INLINE upb_strview google_protobuf_FileOptions_php_metadata_namespace(const google_protobuf_FileOptions *msg) { return UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(92, 160)); }
UPB_INLINE bool google_protobuf_FileOptions_has_ruby_package(const google_protobuf_FileOptions *msg) { return _upb_has_field(msg, 20); }
UPB_INLINE upb_strview google_protobuf_FileOptions_ruby_package(const google_protobuf_FileOptions *msg) { return UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(100, 176)); }
UPB_INLINE const google_protobuf_UninterpretedOption* const* google_protobuf_FileOptions_uninterpreted_option(const google_protobuf_FileOptions *msg, size_t *len) { return (const google_protobuf_UninterpretedOption* const*)_upb_array_accessor(msg, UPB_SIZE(108, 192), len); }
UPB_INLINE void google_protobuf_FileOptions_set_java_package(google_protobuf_FileOptions *msg, upb_strview value) {
_upb_sethas(msg, 11);
@ -1104,16 +1108,24 @@ UPB_INLINE void google_protobuf_FileOptions_set_php_generic_services(google_prot
_upb_sethas(msg, 10);
UPB_FIELD_AT(msg, bool, UPB_SIZE(24, 24)) = value;
}
UPB_INLINE void google_protobuf_FileOptions_set_php_metadata_namespace(google_protobuf_FileOptions *msg, upb_strview value) {
_upb_sethas(msg, 19);
UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(92, 160)) = value;
}
UPB_INLINE void google_protobuf_FileOptions_set_ruby_package(google_protobuf_FileOptions *msg, upb_strview value) {
_upb_sethas(msg, 20);
UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(100, 176)) = value;
}
UPB_INLINE google_protobuf_UninterpretedOption** google_protobuf_FileOptions_mutable_uninterpreted_option(google_protobuf_FileOptions *msg, size_t *len) {
return (google_protobuf_UninterpretedOption**)_upb_array_mutable_accessor(msg, UPB_SIZE(92, 160), len);
return (google_protobuf_UninterpretedOption**)_upb_array_mutable_accessor(msg, UPB_SIZE(108, 192), len);
}
UPB_INLINE google_protobuf_UninterpretedOption** google_protobuf_FileOptions_resize_uninterpreted_option(google_protobuf_FileOptions *msg, size_t len, upb_arena *arena) {
return (google_protobuf_UninterpretedOption**)_upb_array_resize_accessor(msg, UPB_SIZE(92, 160), len, UPB_SIZE(4, 8), UPB_TYPE_MESSAGE, arena);
return (google_protobuf_UninterpretedOption**)_upb_array_resize_accessor(msg, UPB_SIZE(108, 192), len, UPB_SIZE(4, 8), UPB_TYPE_MESSAGE, arena);
}
UPB_INLINE struct google_protobuf_UninterpretedOption* google_protobuf_FileOptions_add_uninterpreted_option(google_protobuf_FileOptions *msg, upb_arena *arena) {
struct google_protobuf_UninterpretedOption* sub = (struct google_protobuf_UninterpretedOption*)upb_msg_new(&google_protobuf_UninterpretedOption_msginit, arena);
bool ok = _upb_array_append_accessor(
msg, UPB_SIZE(92, 160), UPB_SIZE(4, 8), UPB_TYPE_MESSAGE, &sub, arena);
msg, UPB_SIZE(108, 192), UPB_SIZE(4, 8), UPB_TYPE_MESSAGE, &sub, arena);
if (!ok) return NULL;
return sub;
}

@ -2,6 +2,7 @@
import sys
import re
import os
INCLUDE_RE = re.compile('^#include "([^"]*)"$')
@ -10,8 +11,8 @@ def parse_include(line):
return match.groups()[0] if match else None
class Amalgamator:
def __init__(self, include_path, output_path):
self.include_path = include_path
def __init__(self, output_path):
self.include_paths = ["."]
self.included = set(["upb/port_def.inc", "upb/port_undef.inc"])
self.output_h = open(output_path + "upb.h", "w")
self.output_c = open(output_path + "upb.c", "w")
@ -24,18 +25,32 @@ class Amalgamator:
self.output_h.write('#include <stdint.h>')
self.output_h.write(open("upb/port_def.inc").read())
def add_include_path(self, path):
# Register an extra directory to search when resolving amalgamated
# #include paths (populated from -I<dir> command-line arguments;
# "." is always searched first — see __init__).
self.include_paths.append(path)
def finish(self):
# Append the macro-undefining footer to both amalgamated outputs so
# upb's port macros do not leak into including translation units.
self.output_c.write(open("upb/port_undef.inc").read())
self.output_h.write(open("upb/port_undef.inc").read())
def _process_file(self, infile_name, outfile):
# Copy one source file into the amalgamation, inlining project-local
# headers exactly once and passing every other line through verbatim.
# NOTE(review): the next line is a leftover "removed" line from the diff
# this text was scraped from; the real loop is re-issued over `file`
# below, after the include-path search.
for line in open(infile_name):
file = None
# Resolve infile_name against each registered include path; first
# successful open() wins.
for path in self.include_paths:
try:
full_path = os.path.join(path, infile_name)
file = open(full_path)
break
except IOError:
pass
if not file:
raise RuntimeError("Couldn't open file " + infile_name)
for line in file:
include = parse_include(line)
if include is not None and (include.startswith("upb") or
include.startswith("google")):
# Project-local header: inline it once; subsequent includes of
# the same header are dropped (self.included tracks them).
if include not in self.included:
self.included.add(include)
# NOTE(review): leftover "removed" diff line follows; the added
# replacement (bare `include`) is the line after it.
self._add_header(self.include_path + include)
self._add_header(include)
else:
outfile.write(line)
@ -47,12 +62,20 @@ class Amalgamator:
# ---- main ----
include_path = sys.argv[1]
output_path = sys.argv[2]
amalgamator = Amalgamator(include_path, output_path)
output_path = sys.argv[1]
amalgamator = Amalgamator(output_path)
files = []
for arg in sys.argv[3:]:
arg = arg.strip()
if arg.startswith("-I"):
amalgamator.add_include_path(arg[2:])
elif arg.endswith(".h") or arg.endswith(".inc"):
pass
else:
files.append(arg)
for filename in sys.argv[3:]:
if filename.endswith(".h") or filename.endswith(".inc"):
amalgamator.add_src(filename.strip())
for filename in files:
amalgamator.add_src(filename)
amalgamator.finish()

@ -11,6 +11,7 @@ from __future__ import print_function
import sys
import textwrap
import os
def StripColons(deps):
  """Strip the leading ':' from each Bazel label in deps.

  Returns a list rather than the result of map(): under Python 3, map()
  yields a one-shot lazy iterator, so a caller that iterates the result
  twice (or indexes it) would silently see nothing the second time. A
  list comprehension matches the Python 2 behavior on both versions.
  """
  return [dep[1:] for dep in deps]
@ -38,12 +39,20 @@ class BuildFileFunctions(object):
if kwargs["name"] == "amalgamation" or kwargs["name"] == "upbc_generator":
return
files = kwargs.get("srcs", []) + kwargs.get("hdrs", [])
found_files = []
for file in files:
if os.path.isfile(file):
found_files.append(file)
elif os.path.isfile("generated/" + file):
found_files.append("generated/" + file)
else:
print("Warning: no such file: " + file)
if filter(IsSourceFile, files):
# Has sources, make this a normal library.
self.converter.toplevel += "add_library(%s\n %s)\n" % (
kwargs["name"],
"\n ".join(files)
"\n ".join(found_files)
)
self._add_deps(kwargs)
else:
@ -143,6 +152,9 @@ class BuildFileFunctions(object):
def licenses(self, *args):
# Bazel licenses() declarations have no CMake equivalent; ignore.
pass
def filegroup(self, **kwargs):
# filegroup() targets only group files for other Bazel rules; nothing
# needs to be emitted into the generated CMakeLists.txt.
pass
def map_dep(self, arg):
# Returns the dependency label unchanged. Appears to be an override
# hook for label rewriting — TODO(review): confirm against callers.
return arg
@ -227,6 +239,7 @@ class Converter(object):
endif()
include_directories(.)
include_directories(generated)
include_directories(${CMAKE_CURRENT_BINARY_DIR})
if(APPLE)

@ -1,196 +0,0 @@
#!/bin/bash
# Travis CI build driver for upb. Each named configuration is a pair of
# <config>_install / <config>_script functions, dispatched at the bottom
# of this script via `eval ${UPB_TRAVIS_BUILD}_${1}`.

# Install the system protobuf compiler. The `|| true` keeps the version
# probe from aborting the script once `set -e` is enabled below.
install_protoc() {
sudo apt-get install protobuf-compiler
protoc --version || true
}
# Bare build: no dependencies installed, no JIT enabled.
bare_install() {
:
}
bare_script() {
make -j12 tests
make test
}
# Bare JIT build: no dependencies installed, but JIT enabled.
barejit_install() {
:
}
barejit_script() {
make -j12 tests WITH_JIT=yes
make test
}
# Build with strict warnings.
warnings_install() {
:
}
warnings_script() {
# Build both the default targets and the tests with maximum warnings;
# UPB_FAIL_WARNINGS (exported below) turns any warning into an error.
make -j12 default WITH_MAX_WARNINGS=yes
make -j12 tests WITH_MAX_WARNINGS=yes
make test
}
# A 32-bit build. Can only test the core because any dependencies
# need to be available as 32-bit libs also, which gets hairy fast.
# Can't enable the JIT because it only supports x64.
core32_install() {
sudo apt-get update -qq
sudo apt-get install libc6-dev-i386 g++-multilib
}
core32_script() {
# -m32 forces a 32-bit compile; appended so earlier USER_CPPFLAGS
# (e.g. the -UNDEBUG defaults set below) are preserved.
make -j12 tests USER_CPPFLAGS="$USER_CPPFLAGS -m32"
make test
}
# A build of Lua and running of Lua tests.
lua_install() {
sudo apt-get update -qq
sudo apt-get install lua5.2 liblua5.2-dev
}
lua_script() {
make -j12 testlua USER_CPPFLAGS="$USER_CPPFLAGS `pkg-config lua5.2 --cflags`"
}
# Test that generated files don't need to be regenerated.
#
# We would include the Ragel output here too, but we can't really guarantee
# that its output will be stable for multiple versions of the tool, and we
# don't want the test to be brittle.
genfiles_install() {
sudo apt-get update -qq
sudo apt-get install lua5.2 liblua5.2-dev
# Need a recent version of protoc to compile proto3 files.
# .travis.yml will add this to our path
mkdir protoc
cd protoc
wget https://github.com/google/protobuf/releases/download/v3.0.0-beta-2/protoc-3.0.0-beta-2-linux-x86_64.zip
unzip protoc-3.0.0-beta-2-linux-x86_64.zip
cd ..
}
genfiles_script() {
protoc --version || true
# Avoid regenerating descriptor.pb, since its output can vary based on the
# version of protoc.
touch upb/descriptor/descriptor.pb
make -j12 genfiles USER_CPPFLAGS="$USER_CPPFLAGS `pkg-config lua5.2 --cflags`"
# Will fail if any differences were observed.
git diff --exit-code
}
# Tests the ndebug build.
ndebug_install() {
sudo apt-get update -qq
sudo apt-get install lua5.2 liblua5.2-dev libprotobuf-dev
install_protoc
}
ndebug_script() {
# Override of USER_CPPFLAGS removes -UNDEBUG.
export USER_CPPFLAGS="`pkg-config lua5.2 --cflags` -g -fomit-frame-pointer"
make -j12 tests testlua WITH_JIT=yes
make test
}
# Tests the amalgamated build (this ensures that the different .c files
# don't have symbols or macros that conflict with each other).
amalgamated_install() {
:
}
amalgamated_script() {
# Override of USER_CPPFLAGS removes -UNDEBUG.
export USER_CPPFLAGS="-UNDEBUG"
make amalgamated
}
# A run that executes with coverage support and uploads to coveralls.io
coverage_install() {
sudo apt-get update -qq
sudo apt-get install libprotobuf-dev lua5.2 liblua5.2-dev
install_protoc
sudo pip install cpp-coveralls
}
coverage_script() {
# -O0 plus --coverage (gcov instrumentation); results are uploaded by
# coverage_after_success below.
export USER_CPPFLAGS="--coverage -O0 `pkg-config lua5.2 --cflags`"
make -j12 tests testlua WITH_JIT=yes
make test
}
coverage_after_success() {
coveralls --exclude dynasm --exclude tests --exclude upb/bindings/linux --gcov-options '\-lp'
}
# ---- main dispatch ----
# Abort on any failing command, and echo commands as they run.
set -e
set -x
if [ "$1" == "local" ]; then
run_config() {
make clean
echo
echo "travis.sh: TESTING CONFIGURATION $1 ==============================="
echo
# Re-invoke this script as Travis would, one configuration at a time.
UPB_TRAVIS_BUILD=$1 ./travis.sh script
}
# Run all configurations serially locally to test before pushing a pull
# request.
export CC=gcc
export CXX=g++
run_config "bare"
run_config "barejit"
run_config "core32"
run_config "lua"
run_config "ndebug"
run_config "genfiles"
run_config "amalgamated"
exit
fi
$CC --version
$CXX --version
# Uncomment to enable uploading failure logs to S3.
# UPLOAD_TO_S3=true
if [ "$1" == "after_failure" ] && [ "$UPLOAD_TO_S3" == "true" ]; then
# Upload failing tree to S3.
curl -sL https://raw.githubusercontent.com/travis-ci/artifacts/master/install | bash
PATH="$PATH:$HOME/bin"
export ARTIFACTS_BUCKET=haberman-upb-travis-artifacts2
ARCHIVE=failing-artifacts.tar.gz
tar zcvf $ARCHIVE $(git ls-files -o)
artifacts upload $ARCHIVE
exit
fi
if [ "$1" == "after_success" ] && [ "$UPB_TRAVIS_BUILD" != "coverage" ]; then
# after_success is only used for coverage.
exit
fi
if [ "$CC" != "gcc" ] && [ "$UPB_TRAVIS_BUILD" == "coverage" ]; then
# coverage build only works for GCC.
exit
fi
# Enable asserts and ref debugging (though some configurations override this).
export USER_CPPFLAGS="-UNDEBUG -DUPB_DEBUG_REFS -DUPB_THREAD_UNSAFE -DUPB_DEBUG_TABLE -g"
if [ "$CC" == "gcc" ]; then
# For the GCC build test loading JIT code via SO. For the Clang build test
# loading it in the normal way.
export USER_CPPFLAGS="$USER_CPPFLAGS -DUPB_JIT_LOAD_SO"
fi
# TODO(haberman): Test UPB_DUMP_BYTECODE? We don't right now because it is so
# noisy.
# Enable verbose build.
export Q=
# Make any compiler warning fail the build.
export UPB_FAIL_WARNINGS=true
# Dispatch to <config>_<phase>, e.g. `coverage_script` or `lua_install`.
eval ${UPB_TRAVIS_BUILD}_${1}
Loading…
Cancel
Save