Merge branch 'master' into maps

pull/13171/head
Joshua Haberman
commit 07ac6f0e8e
40 changed files:

   10  .bazelci/presubmit.yml
  262  BUILD
   18  CMakeLists.txt
   33  DESIGN.md
  135  README.md
   31  WORKSPACE
    0  bazel/BUILD
  221  bazel/build_defs.bzl
    0  bazel/lua.BUILD
    2  bazel/ragel.BUILD
   15  bazel/repository_defs.bzl
  294  bazel/upb_proto_library.bzl
   36  bazel/workspace_deps.bzl
  338  build_defs.bzl
   18  examples/bazel/BUILD
   14  examples/bazel/WORKSPACE
    7  examples/bazel/foo.proto
   17  examples/bazel/test_binary.c
    8  generated_for_cmake/google/protobuf/descriptor.upb.c
   53  generated_for_cmake/google/protobuf/descriptor.upb.h
  182  generated_for_cmake/upb/json/parser.c
  BIN  google/protobuf/descriptor.pb
  870  google/protobuf/descriptor.proto
   28  tests/conformance_upb.c
    9  tests/json/enum_from_separate_file.proto
    5  tests/json/test.proto
  BIN  tests/json/test.proto.pb
    1  tests/json/test_json.cc
   34  tests/pb/test_encoder.cc
    5  tests/test_cpp.cc
   43  tools/amalgamate.py
   22  tools/copy_genfiles.sh.in
   24  tools/make_cmakelists.py
  196  travis.sh
    4  upb/decode.c
    4  upb/handlers-inl.h
    4  upb/handlers.h
   44  upb/json/parser.rl
   41  upbc/generator.cc
   10  upbc/message_layout.h

10  .bazelci/presubmit.yml

@ -0,0 +1,10 @@
---
tasks:
  ubuntu:
    platform: ubuntu1604
    test_targets:
    - //...
  macos:
    platform: macos
    test_targets:
    - //...

262  BUILD

@ -1,5 +1,5 @@
load(
":build_defs.bzl",
"//bazel:build_defs.bzl",
"generated_file_staleness_test",
"licenses", # copybara:strip_for_google3
"lua_binary",
@ -7,8 +7,11 @@ load(
"lua_library",
"lua_test",
"make_shell_script",
"map_dep",
"upb_amalgamation",
)
load(
"//bazel:upb_proto_library.bzl",
"upb_proto_library",
"upb_proto_reflection_library",
)
@ -45,18 +48,14 @@ config_setting(
cc_library(
name = "upb",
srcs = [
"google/protobuf/descriptor.upb.c",
"upb/decode.c",
"upb/encode.c",
"upb/msg.c",
"upb/port_def.inc",
"upb/port_undef.inc",
"upb/table.c",
"upb/table.int.h",
"upb/upb.c",
],
hdrs = [
"google/protobuf/descriptor.upb.h",
"upb/decode.h",
"upb/encode.h",
"upb/generated_util.h",
@ -64,6 +63,17 @@ cc_library(
"upb/upb.h",
],
copts = COPTS,
# Internal-only, but we have to make them public for generated code.
textual_hdrs = [
"upb/port_def.inc",
"upb/port_undef.inc",
],
visibility = ["//visibility:public"],
)
upb_proto_library(
name = "descriptor_upbproto",
deps = ["@com_google_protobuf//:descriptor_proto"],
visibility = ["//visibility:public"],
)
@ -79,7 +89,11 @@ cc_library(
],
copts = COPTS,
visibility = ["//visibility:public"],
deps = [":upb"],
deps = [
":descriptor_upbproto",
":table",
":upb",
],
)
# Internal C/C++ libraries #####################################################
@ -98,8 +112,11 @@ cc_library(
"upb/legacy_msg_reflection.c",
],
hdrs = ["upb/legacy_msg_reflection.h"],
deps = [":upb"],
copts = COPTS,
deps = [
":table",
":upb",
],
)
cc_library(
@ -116,6 +133,7 @@ cc_library(
copts = COPTS,
deps = [
":reflection",
":table",
":upb",
],
)
@ -138,12 +156,15 @@ cc_library(
],
copts = COPTS,
deps = [
":descriptor_upbproto",
":handlers",
":reflection",
":table",
":upb",
],
)
# copybara:strip_for_google3_begin
cc_library(
name = "upb_json",
srcs = [
@ -160,13 +181,18 @@ cc_library(
":upb_pb",
],
)
# copybara:strip_end
cc_library(
name = "upb_cc_bindings",
hdrs = [
"upb/bindings/stdc++/string.h",
],
deps = [":upb"],
deps = [
":descriptor_upbproto",
":handlers",
":upb",
],
)
# upb compiler #################################################################
@ -179,30 +205,30 @@ cc_library(
"upbc/message_layout.h",
],
hdrs = ["upbc/generator.h"],
copts = CPPOPTS,
deps = [
map_dep("@absl//absl/base:core_headers"),
map_dep("@absl//absl/strings"),
map_dep("@com_google_protobuf//:protobuf"),
map_dep("@com_google_protobuf//:protoc_lib"),
"@absl//absl/base:core_headers",
"@absl//absl/container:flat_hash_map",
"@absl//absl/strings",
"@com_google_protobuf//:protobuf",
"@com_google_protobuf//:protoc_lib",
],
copts = CPPOPTS,
)
cc_binary(
name = "protoc-gen-upb",
srcs = ["upbc/main.cc"],
copts = CPPOPTS,
visibility = ["//visibility:public"],
deps = [
":upbc_generator",
map_dep("@com_google_protobuf//:protoc_lib"),
"@com_google_protobuf//:protoc_lib",
],
copts = CPPOPTS,
)
# We strip the tests and remaining rules from google3 until the upb_proto_library()
# and upb_proto_reflection_library() rules are fixed.
# copybara:strip_for_google3_begin
# C/C++ tests ##################################################################
cc_library(
@ -216,16 +242,24 @@ cc_library(
"tests/upb_test.h",
],
copts = CPPOPTS,
deps = [
":handlers",
":upb",
],
)
cc_test(
name = "test_varint",
srcs = ["tests/pb/test_varint.c"],
srcs = [
"tests/pb/test_varint.c",
"upb/pb/varint.int.h",
],
copts = COPTS,
deps = [
":upb",
":upb_pb",
":upb_test",
],
copts = COPTS,
)
proto_library(
@ -237,31 +271,23 @@ proto_library(
upb_proto_reflection_library(
name = "test_decoder_upbproto",
upbc = ":protoc-gen-upb",
deps = ["test_decoder_proto"],
deps = [":test_decoder_proto"],
)
cc_test(
name = "test_decoder",
srcs = ["tests/pb/test_decoder.cc"],
deps = [
":test_decoder_upbproto",
":upb_pb",
":upb_test",
srcs = [
"tests/pb/test_decoder.cc",
"upb/pb/varint.int.h",
],
copts = CPPOPTS,
)
cc_test(
name = "test_encoder",
srcs = ["tests/pb/test_encoder.cc"],
data = ["google/protobuf/descriptor.pb"],
deps = [
":upb_cc_bindings",
":handlers",
":test_decoder_upbproto",
":upb",
":upb_pb",
":upb_test",
],
copts = CPPOPTS,
)
proto_library(
@ -273,50 +299,78 @@ proto_library(
upb_proto_reflection_library(
name = "test_cpp_upbproto",
upbc = ":protoc-gen-upb",
deps = ["test_cpp_proto"],
)
cc_test(
name = "test_cpp",
srcs = ["tests/test_cpp.cc"],
copts = CPPOPTS,
deps = [
":handlers",
":reflection",
":test_cpp_upbproto",
":upb",
":upb_pb",
":upb_test",
],
copts = CPPOPTS,
)
cc_test(
name = "test_table",
srcs = ["tests/test_table.cc"],
copts = CPPOPTS,
deps = [
":table",
":upb",
":upb_test",
],
)
# copybara:strip_for_google3_begin
upb_proto_reflection_library(
name = "descriptor_upbreflection",
deps = ["@com_google_protobuf//:descriptor_proto"],
)
cc_test(
name = "test_encoder",
srcs = ["tests/pb/test_encoder.cc"],
copts = CPPOPTS,
deps = [
":descriptor_upbproto",
":descriptor_upbreflection",
":upb",
":upb_cc_bindings",
":upb_pb",
":upb_test",
],
)
proto_library(
name = "test_json_enum_from_separate",
srcs = ["tests/json/enum_from_separate_file.proto"],
deps = [":test_json_proto"],
)
proto_library(
name = "test_json_proto",
srcs = [
"tests/json/test.proto",
# "tests/json/enum_from_separate_file.proto",
],
srcs = ["tests/json/test.proto"],
)
upb_proto_reflection_library(
name = "test_json_upbprotoreflection",
upbc = ":protoc-gen-upb",
deps = ["test_json_proto"],
)
upb_proto_library(
name = "test_json_enum_from_separate_upbproto",
deps = [":test_json_enum_from_separate"],
)
upb_proto_library(
name = "test_json_upbproto",
upbc = ":protoc-gen-upb",
deps = ["test_json_proto"],
deps = [":test_json_proto"],
)
cc_test(
@ -324,31 +378,38 @@ cc_test(
srcs = [
"tests/json/test_json.cc",
],
copts = CPPOPTS,
deps = [
":test_json_upbproto",
":test_json_upbprotoreflection",
":upb_json",
":upb_test",
],
copts = CPPOPTS,
)
# copybara:strip_end
upb_proto_library(
name = "conformance_proto_upb",
upbc = ":protoc-gen-upb",
deps = [
"@com_google_protobuf//:conformance_proto",
"@com_google_protobuf//:test_messages_proto3_proto",
],
testonly = 1,
deps = ["@com_google_protobuf//:conformance_proto"],
)
upb_proto_library(
name = "test_messages_proto3_proto_upb",
testonly = 1,
deps = ["@com_google_protobuf//:test_messages_proto3_proto"],
)
cc_binary(
name = "conformance_upb",
testonly = 1,
srcs = [
"tests/conformance_upb.c",
],
copts = COPTS + ["-Ibazel-out/k8-fastbuild/bin"],
deps = [
":conformance_proto_upb",
":test_messages_proto3_proto_upb",
":upb",
],
)
@ -356,7 +417,7 @@ cc_binary(
make_shell_script(
name = "gen_test_conformance_upb",
out = "test_conformance_upb.sh",
contents = "$(rlocation com_google_protobuf/conformance_test_runner) $(rlocation upb/conformance_upb)",
contents = "external/com_google_protobuf/conformance_test_runner ./conformance_upb",
)
sh_test(
@ -365,11 +426,12 @@ sh_test(
data = [
"tests/conformance_upb_failures.txt",
":conformance_upb",
"@bazel_tools//tools/bash/runfiles",
"@com_google_protobuf//:conformance_test_runner",
],
)
# copybara:strip_for_google3_begin
# Amalgamation #################################################################
py_binary(
@ -386,6 +448,7 @@ upb_amalgamation(
amalgamator = ":amalgamate",
libs = [
":upb",
":descriptor_upbproto",
":reflection",
":handlers",
":upb_pb",
@ -459,24 +522,28 @@ lua_test(
# Test the CMake build #########################################################
filegroup(
name = "cmake_files",
srcs = glob([
"CMakeLists.txt",
"generated_for_cmake/**/*",
"google/**/*",
"upbc/**/*",
"upb/**/*",
"tests/**/*",
]),
)
make_shell_script(
name = "gen_run_cmake_build",
out = "run_cmake_build.sh",
contents = "mkdir build && cd build && cmake .. && make -j8 && make test",
contents = "find . && mkdir build && cd build && cmake .. && make -j8 && make test",
)
sh_test(
name = "cmake_build",
srcs = ["run_cmake_build.sh"],
data = glob([
"CMakeLists.txt",
"google/**/*",
"upbc/**/*",
"upb/**/*",
"tests/**/*",
]) + [
"@bazel_tools//tools/bash/runfiles",
],
data = [":cmake_files"],
)
# Generated files ##############################################################
@ -499,78 +566,47 @@ genrule(
srcs = [
"BUILD",
"WORKSPACE",
":cmake_files",
],
outs = ["generated/CMakeLists.txt"],
outs = ["generated-in/CMakeLists.txt"],
cmd = "$(location :make_cmakelists) $@",
tools = [":make_cmakelists"],
)
proto_library(
name = "descriptor_proto",
srcs = [
"google/protobuf/descriptor.proto",
],
genrule(
name = "generate_json_ragel",
srcs = ["upb/json/parser.rl"],
outs = ["upb/json/parser.c"],
cmd = "$(location @ragel//:ragelc) -C -o upb/json/parser.c $< && mv upb/json/parser.c $@",
tools = ["@ragel//:ragelc"],
)
genrule(
name = "copy_upb_descriptor_pb",
srcs = [":descriptor_proto"],
outs = ["generated/google/protobuf/descriptor.pb"],
name = "copy_json_ragel",
srcs = ["upb/json/parser.c"],
outs = ["generated-in/generated_for_cmake/upb/json/parser.c"],
cmd = "cp $< $@",
)
proto_library(
name = "google_descriptor_proto",
srcs = [
"google/protobuf/descriptor.proto",
],
)
genrule(
name = "generate_descriptor_c",
srcs = ["google/protobuf/descriptor.proto"],
name = "copy_protos",
srcs = [":descriptor_upbproto"],
outs = [
"generated/google/protobuf/descriptor.upb.h",
"generated/google/protobuf/descriptor.upb.c",
"generated-in/generated_for_cmake/google/protobuf/descriptor.upb.c",
"generated-in/generated_for_cmake/google/protobuf/descriptor.upb.h",
],
cmd = "$(location @com_google_protobuf//:protoc) $< --upb_out=$(GENDIR)/generated --plugin=protoc-gen-upb=$(location :protoc-gen-upb)",
tools = [
":protoc-gen-upb",
"@com_google_protobuf//:protoc",
],
)
proto_library(
name = "json_test_proto",
srcs = ["tests/json/test.proto"],
)
genrule(
name = "copy_json_test_proto",
srcs = [":json_test_proto"],
outs = ["generated/tests/json/test.proto.pb"],
cmd = "cp $< $@",
)
genrule(
name = "generate_json_ragel",
srcs = ["upb/json/parser.rl"],
outs = ["generated/upb/json/parser.c"],
cmd = "$(location @ragel//:ragel) -C -o upb/json/parser.c $< && mv upb/json/parser.c $@",
tools = ["@ragel"],
cmd = "cp $(SRCS) $(@D)/generated-in/generated_for_cmake/google/protobuf",
)
generated_file_staleness_test(
name = "test_generated_files",
outs = [
"CMakeLists.txt",
"google/protobuf/descriptor.pb",
"google/protobuf/descriptor.upb.c",
"google/protobuf/descriptor.upb.h",
"tests/json/test.proto.pb",
"upb/json/parser.c",
"generated_for_cmake/google/protobuf/descriptor.upb.c",
"generated_for_cmake/google/protobuf/descriptor.upb.h",
"generated_for_cmake/upb/json/parser.c",
],
generated_pattern = "generated/%s",
generated_pattern = "generated-in/%s",
)
# copybara:strip_end

18  CMakeLists.txt

@ -49,6 +49,7 @@ if(UPB_ENABLE_UBSAN)
endif()
include_directories(.)
include_directories(generated_for_cmake)
include_directories(${CMAKE_CURRENT_BINARY_DIR})
if(APPLE)
@ -60,16 +61,12 @@ endif()
enable_testing()
add_library(upb
google/protobuf/descriptor.upb.c
upb/decode.c
upb/encode.c
upb/msg.c
upb/port_def.inc
upb/port_undef.inc
upb/table.c
upb/table.int.h
upb/upb.c
google/protobuf/descriptor.upb.h
upb/decode.h
upb/encode.h
upb/generated_util.h
@ -81,6 +78,8 @@ add_library(reflection
upb/def.h
upb/msgfactory.h)
target_link_libraries(reflection
descriptor_upbproto
table
upb)
add_library(table INTERFACE)
target_link_libraries(table INTERFACE
@ -89,6 +88,7 @@ add_library(legacy_msg_reflection
upb/legacy_msg_reflection.c
upb/legacy_msg_reflection.h)
target_link_libraries(legacy_msg_reflection
table
upb)
add_library(handlers
upb/handlers.c
@ -98,6 +98,7 @@ add_library(handlers
upb/sink.h)
target_link_libraries(handlers
reflection
table
upb)
add_library(upb_pb
upb/pb/compile_decoder.c
@ -111,11 +112,13 @@ add_library(upb_pb
upb/pb/encoder.h
upb/pb/textprinter.h)
target_link_libraries(upb_pb
descriptor_upbproto
handlers
reflection
table
upb)
add_library(upb_json
upb/json/parser.c
generated_for_cmake/upb/json/parser.c
upb/json/printer.c
upb/json/parser.h
upb/json/printer.h)
@ -124,10 +127,15 @@ target_link_libraries(upb_json
upb_pb)
add_library(upb_cc_bindings INTERFACE)
target_link_libraries(upb_cc_bindings INTERFACE
descriptor_upbproto
handlers
upb)
add_library(upb_test
tests/testmain.cc
tests/test_util.h
tests/upb_test.h)
target_link_libraries(upb_test
handlers
upb)

33  DESIGN.md

@ -2,9 +2,6 @@
μpb Design
----------
**NOTE:** the design described here is being implemented currently, but is not
yet complete. The repo is in heavy transition right now.
μpb has the following design goals:
- C89 compatible.
@ -62,22 +59,14 @@ link μpb will never need to worry about this.
TODO
----
The current state of the repo is quite different than what is described above.
Here are the major items that need to be implemented.
1. implement the core generic protobuf binary encoder/decoder that uses a
`upb_msglayout*`.
2. remove all mention of handlers, sink, etc. from core into their own module.
All of the handlers stuff needs substantial revision, but moving it out of
core is the first priority.
3. move all of the def/refcounted stuff out of core. The defs also need
substantial revision, but moving them out of core is the first priority.
4. revise our generated code until it is in a state where we feel comfortable
committing to API/ABI stability for it. This may involve moving different
parts of the generated code into separate files, like keeping the serialized
descriptor in a separate file from the compact msglayout.
5. revise all of the existing encoders/decoders and handlers. We probably
will want to keep handlers, since they let us decouple encoders/decoders
from `upb_msg`, but we need to simplify all of that a LOT. Likely we will
want to make handlers only per-message instead of per-field, except for
variable-length fields.
1. revise our generated code until it is in a state where we feel comfortable
committing to API/ABI stability for it. In particular there is an open
question of whether non-ABI-compatible field accesses should have a
fastpath different from the ABI-compatible field access.
1. Add missing features (maps, extensions, unknown fields).
1. Flesh out C++ wrappers.
1. *(lower-priority)*: revise all of the existing encoders/decoders and
handlers. We probably will want to keep handlers, since they let us decouple
encoders/decoders from `upb_msg`, but we need to simplify all of that a LOT.
Likely we will want to make handlers only per-message instead of per-field,
except for variable-length fields.

135  README.md

@ -1,53 +1,132 @@
# μpb - a small protobuf implementation in C
[![Build Status](https://travis-ci.org/google/upb.svg?branch=master)](https://travis-ci.org/google/upb)
[![Coverage Status](https://img.shields.io/coveralls/google/upb.svg)](https://coveralls.io/r/google/upb?branch=master)
μpb is a small protobuf implementation written in C.
|Platform|Build Status|
|--------|------------|
|macOS|[![Build Status](https://storage.googleapis.com/upb-kokoro-results/status-badge/macos.png)](https://fusion.corp.google.com/projectanalysis/summary/KOKORO/prod%3Aupb%2Fmacos%2Fcontinuous)|
|ubuntu|[![Build Status](https://storage.googleapis.com/upb-kokoro-results/status-badge/ubuntu.png)](https://fusion.corp.google.com/projectanalysis/summary/KOKORO/prod%3Aupb%2Fubuntu%2Fcontinuous)|
μpb (often written 'upb') is a small protobuf implementation written in C.
upb generates a C API for creating, parsing, and serializing messages
as declared in `.proto` files. upb is heavily arena-based: all
messages always live in an arena (note: the arena can live in stack or
static memory if desired). Here is a simple example:
```c
#include "conformance/conformance.upb.h"

void foo(const char* data, size_t size) {
  upb_arena *arena;
  /* Generated message type. */
  conformance_ConformanceRequest *request;
  conformance_ConformanceResponse *response;

  arena = upb_arena_new();
  request = conformance_ConformanceRequest_parse(data, size, arena);
  response = conformance_ConformanceResponse_new(arena);

  switch (conformance_ConformanceRequest_payload_case(request)) {
    case conformance_ConformanceRequest_payload_protobuf_payload: {
      upb_strview payload = conformance_ConformanceRequest_protobuf_payload(request);
      // ...
      break;
    }

    case conformance_ConformanceRequest_payload_NOT_SET:
      fprintf(stderr, "conformance_upb: Request didn't have payload.\n");
      break;

    default: {
      static const char msg[] = "Unsupported input format.";
      conformance_ConformanceResponse_set_skipped(
          response, upb_strview_make(msg, sizeof(msg)));
      break;
    }
  }

  /* Frees all messages on the arena. */
  upb_arena_free(arena);
}
```
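The note above says the arena can also live in stack or static memory. Here is a minimal sketch of the stack-backed variant, assuming the `upb_arena_init(void *mem, size_t n, upb_alloc *alloc)` constructor and the `upb_alloc_global` allocator from `upb/upb.h` (neither appears in this diff, so treat both names as assumptions):

```c
#include "conformance/conformance.upb.h"
#include "upb/upb.h"

/* Sketch only: parse into an arena whose first block is stack memory.
 * Assumes upb_arena_init() and upb_alloc_global, which are not shown in
 * this diff. */
void parse_on_stack(const char* data, size_t size) {
  char buf[8192];
  upb_arena *arena = upb_arena_init(buf, sizeof(buf), &upb_alloc_global);
  conformance_ConformanceRequest *request =
      conformance_ConformanceRequest_parse(data, size, arena);
  (void)request;  /* ... use the message while the arena is alive ... */
  upb_arena_free(arena);  /* Frees overflow blocks; buf itself is stack-owned. */
}
```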
API and ABI are both subject to change! Please do not distribute
as a shared library for this reason (for now at least).
## Building the core libraries

The core libraries are pure C99 and have no dependencies.

    $ make

This will create a separate C library for each core library
in `lib/`. They are built separately to help keep your binaries
slim, so you don't need to link in things you neither want
nor need.

Other useful targets:

    $ make tests
    $ make test

## C and C++ API

The public C/C++ API is defined by all of the .h files in
`upb/` except `.int.h` files (which are internal-only).

## Lua bindings

Lua bindings provide μpb's functionality to Lua programs.
The bindings target Lua 5.1, Lua 5.2, LuaJIT, and (soon) Lua 5.3.

To build the Lua bindings, the Lua libraries must be installed. Once
they are installed, run:

    $ make lua

Note that if the Lua headers are not in a standard place, you may
need to pass custom flags:

    $ make lua USER_CPPFLAGS=`pkg-config lua5.2 --cflags`

To test the Lua bindings:

    $ make testlua

## Using upb in your project

Currently only Bazel is supported (CMake support is partial and incomplete
but full CMake support is an eventual goal).

To use upb in your Bazel project, first add upb to your `WORKSPACE` file,
either as a `git_repository()` or as a `new_local_repository()` with a
Git Submodule. (For an example, see `examples/bazel/` in this repo.)

```python
# Add this to your WORKSPACE file.
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")

git_repository(
    name = "upb",
    remote = "https://github.com/protocolbuffers/upb.git",
    commit = "d16bf99ac4658793748cda3251226059892b3b7b",
)

load("@upb//bazel:workspace_deps.bzl", "upb_deps")

upb_deps()
```

Then in your BUILD file you can add `upb_proto_library()` rules that
generate code for a corresponding `proto_library()` rule. For
example:

```python
# Add this to your BUILD file.
load("@upb//bazel:upb_proto_library.bzl", "upb_proto_library")

proto_library(
    name = "foo_proto",
    srcs = ["foo.proto"],
)

upb_proto_library(
    name = "foo_upbproto",
    deps = [":foo_proto"],
)

cc_binary(
    name = "test_binary",
    srcs = ["test_binary.c"],
    deps = [":foo_upbproto"],
)
```

Then in your `.c` file you can #include the generated header:

```c
#include "foo.upb.h"

/* Insert code that uses generated types. */
```

## Old "handlers" interfaces

This library contains several semi-deprecated interfaces (see BUILD
file for more info about which interfaces are deprecated). These
deprecated interfaces are still used in some significant projects,
such as the Ruby and PHP C bindings for protobuf in the [main protobuf
repo](https://github.com/protocolbuffers/protobuf). The goal is to
migrate the Ruby/PHP bindings to use the newer, simpler interfaces
instead. Please do not use the old interfaces in new code.

## Lua bindings

This repo has some Lua bindings for the core library. These are
experimental and very incomplete; they are currently included in
order to validate that the C API is suitable for wrapping. As the
project matures these Lua bindings may become publicly available.
## Contact

31  WORKSPACE

@ -1,12 +1,13 @@
workspace(name = "upb")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
load("//bazel:workspace_deps.bzl", "upb_deps")
upb_deps()
http_archive(
name = "lua",
build_file = "//:lua.BUILD",
build_file = "//bazel:lua.BUILD",
sha256 = "b9e2e4aad6789b3b63a056d442f7b39f0ecfca3ae0f1fc0ae4e9614401b69f4b",
strip_prefix = "lua-5.2.4",
urls = [
@ -15,32 +16,10 @@ http_archive(
],
)
git_repository(
name = "com_google_protobuf",
commit = "25feb59620627b673df76813dfd66e3f565765e7",
#sha256 = "d7a221b3d4fb4f05b7473795ccea9e05dab3b8721f6286a95fffbffc2d926f8b",
remote = "https://github.com/haberman/protobuf.git",
#tag = "conformance-build-tag",
)
git_repository(
name = "absl",
commit = "070f6e47b33a2909d039e620c873204f78809492",
remote = "https://github.com/abseil/abseil-cpp.git",
)
http_archive(
name = "ragel",
build_file = "//bazel:ragel.BUILD",
sha256 = "5f156edb65d20b856d638dd9ee2dfb43285914d9aa2b6ec779dac0270cd56c3f",
build_file = "//:ragel.BUILD",
strip_prefix = "ragel-6.10",
urls = ["http://www.colm.net/files/ragel/ragel-6.10.tar.gz"],
)
# Used by protobuf.
http_archive(
name = "bazel_skylib",
sha256 = "bbccf674aa441c266df9894182d80de104cabd19be98be002f6d478aaa31574d",
strip_prefix = "bazel-skylib-2169ae1c374aab4a09aa90e65efe1a3aad4e279b",
urls = ["https://github.com/bazelbuild/bazel-skylib/archive/2169ae1c374aab4a09aa90e65efe1a3aad4e279b.tar.gz"],
)

221  bazel/build_defs.bzl

@ -0,0 +1,221 @@
"""Internal rules for building upb."""
load(":upb_proto_library.bzl", "GeneratedSrcs")
def _librule(name):
return name + "_lib"
def _get_real_short_path(file):
# For some reason, files from other archives have short paths that look like:
# ../com_google_protobuf/google/protobuf/descriptor.proto
short_path = file.short_path
if short_path.startswith("../"):
second_slash = short_path.index("/", 3)
short_path = short_path[second_slash + 1:]
return short_path
def _get_real_root(file):
real_short_path = _get_real_short_path(file)
return file.path[:-len(real_short_path) - 1]
def _get_real_roots(files):
roots = {}
for file in files:
real_root = _get_real_root(file)
if real_root:
roots[real_root] = True
return roots.keys()
def lua_cclibrary(name, srcs, hdrs = [], deps = [], luadeps = []):
lib_rule = name + "_lib"
so_rule = "lib" + name + ".so"
so_file = _remove_prefix(name, "lua/") + ".so"
native.cc_library(
name = _librule(name),
hdrs = hdrs,
srcs = srcs,
deps = deps + [_librule(dep) for dep in luadeps] + ["@lua//:liblua_headers"],
)
native.cc_binary(
name = so_rule,
linkshared = True,
deps = [_librule(name)],
linkopts = select({
":darwin": [
"-undefined dynamic_lookup",
],
"//conditions:default": [],
}),
)
native.genrule(
name = name + "_copy",
srcs = [":" + so_rule],
outs = [so_file],
cmd = "cp $< $@",
)
native.filegroup(
name = name,
data = [so_file],
)
def _remove_prefix(str, prefix):
if not str.startswith(prefix):
fail("%s doesn't start with %s" % (str, prefix))
return str[len(prefix):]
def _remove_suffix(str, suffix):
if not str.endswith(suffix):
fail("%s doesn't end with %s" % (str, suffix))
return str[:-len(suffix)]
def lua_library(name, srcs, strip_prefix, luadeps = []):
outs = [_remove_prefix(src, strip_prefix + "/") for src in srcs]
native.genrule(
name = name + "_copy",
srcs = srcs,
outs = outs,
cmd = "cp $(SRCS) $(@D)",
)
native.filegroup(
name = name,
data = outs + luadeps,
)
def make_shell_script(name, contents, out):
contents = contents.replace("$", "$$")
native.genrule(
name = "gen_" + name,
outs = [out],
cmd = "(cat <<'HEREDOC'\n%s\nHEREDOC\n) > $@" % contents,
)
def _lua_binary_or_test(name, luamain, luadeps, rule):
script = name + ".sh"
make_shell_script(
name = "gen_" + name,
out = script,
contents = """
BASE=$(dirname $(rlocation upb/upb_c.so))
export LUA_CPATH="$BASE/?.so"
export LUA_PATH="$BASE/?.lua"
$(rlocation lua/lua) $(rlocation upb/tools/upbc.lua) "$@"
""",
)
rule(
name = name,
srcs = [script],
data = ["@lua//:lua", luamain] + luadeps,
)
def lua_binary(name, luamain, luadeps = []):
_lua_binary_or_test(name, luamain, luadeps, native.sh_binary)
def lua_test(name, luamain, luadeps = []):
_lua_binary_or_test(name, luamain, luadeps, native.sh_test)
def generated_file_staleness_test(name, outs, generated_pattern):
"""Tests that checked-in file(s) match the contents of generated file(s).
The resulting test will verify that all output files exist and have the
correct contents. If the test fails, it can be invoked with --fix to
bring the checked-in files up to date.
Args:
name: Name of the rule.
outs: the checked-in files that are copied from generated files.
generated_pattern: the pattern for transforming each "out" file into a
generated file. For example, if generated_pattern="generated/%s" then
a file foo.txt will look for generated file generated/foo.txt.
"""
script_name = name + ".py"
script_src = "//:tools/staleness_test.py"
# Filter out non-existing rules so Blaze doesn't error out before we even
# run the test.
existing_outs = native.glob(include = outs)
# The file list contains a few extra bits of information at the end.
# These get unpacked by the Config class in staleness_test_lib.py.
file_list = outs + [generated_pattern, native.package_name() or ".", name]
native.genrule(
name = name + "_makescript",
outs = [script_name],
srcs = [script_src],
testonly = 1,
cmd = "cat $(location " + script_src + ") > $@; " +
"sed -i.bak -e 's|INSERT_FILE_LIST_HERE|" + "\\\n ".join(file_list) + "|' $@",
)
native.py_test(
name = name,
srcs = [script_name],
data = existing_outs + [generated_pattern % file for file in outs],
deps = [
"//:staleness_test_lib",
],
)
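# Example (illustrative, not part of this macro): with the
# test_generated_files rule declared in the BUILD file earlier in this diff,
# a failing staleness test could be brought up to date with something like:
#
#     bazel run :test_generated_files -- --fix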
# upb_amalgamation() rule, with file_list aspect.
SrcList = provider(
fields = {
"srcs": "list of srcs",
},
)
def _file_list_aspect_impl(target, ctx):
if GeneratedSrcs in target:
srcs = target[GeneratedSrcs]
return [SrcList(srcs = srcs.srcs + srcs.hdrs)]
srcs = []
for src in ctx.rule.attr.srcs:
srcs += src.files.to_list()
for hdr in ctx.rule.attr.hdrs:
srcs += hdr.files.to_list()
for hdr in ctx.rule.attr.textual_hdrs:
srcs += hdr.files.to_list()
return [SrcList(srcs = srcs)]
_file_list_aspect = aspect(
implementation = _file_list_aspect_impl,
)
def _upb_amalgamation(ctx):
inputs = []
for lib in ctx.attr.libs:
inputs += lib[SrcList].srcs
srcs = [src for src in inputs if src.path.endswith("c")]
ctx.actions.run(
inputs = inputs,
outputs = ctx.outputs.outs,
arguments = [ctx.bin_dir.path + "/"] + [f.path for f in srcs] + ["-I" + root for root in _get_real_roots(inputs)],
progress_message = "Making amalgamation",
executable = ctx.executable.amalgamator,
)
return []
upb_amalgamation = rule(
attrs = {
"amalgamator": attr.label(
executable = True,
cfg = "host",
),
"libs": attr.label_list(aspects = [_file_list_aspect]),
"outs": attr.output_list(),
},
implementation = _upb_amalgamation,
)
def licenses(*args):
# No-op (for Google-internal usage).
pass

2  bazel/ragel.BUILD

@ -4,7 +4,7 @@ package(
)
cc_binary(
name = "ragel",
name = "ragelc",
srcs = [
"ragel/rubycodegen.cpp",
"ragel/goipgoto.h",

15  bazel/repository_defs.bzl

@ -0,0 +1,15 @@
# A hacky way to work around the fact that native.bazel_version is only
# available from WORKSPACE macros, not BUILD macros or rules.
#
# Hopefully we can remove this if/when this is fixed:
# https://github.com/bazelbuild/bazel/issues/8305
def _impl(repository_ctx):
    s = "bazel_version = \"" + native.bazel_version + "\""
    repository_ctx.file("bazel_version.bzl", s)
    repository_ctx.file("BUILD", "")

bazel_version_repository = repository_rule(
    implementation = _impl,
    local = True,
)

294  bazel/upb_proto_library.bzl

@ -0,0 +1,294 @@
"""Public rules for using upb protos:
- upb_proto_library()
- upb_proto_reflection_library()
"""
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
# copybara:strip_for_google3_begin
load("@bazel_skylib//lib:versions.bzl", "versions")
load("@bazel_version//:bazel_version.bzl", "bazel_version")
# copybara:strip_end
# Generic support code #########################################################
_is_bazel = not hasattr(native, "genmpm")
def _get_real_short_path(file):
# For some reason, files from other archives have short paths that look like:
# ../com_google_protobuf/google/protobuf/descriptor.proto
short_path = file.short_path
if short_path.startswith("../"):
second_slash = short_path.index("/", 3)
short_path = short_path[second_slash + 1:]
return short_path
def _get_real_root(file):
real_short_path = _get_real_short_path(file)
return file.path[:-len(real_short_path) - 1]
def _get_real_roots(files):
roots = {}
for file in files:
real_root = _get_real_root(file)
if real_root:
roots[real_root] = True
return roots.keys()
def _generate_output_file(ctx, src, extension):
if _is_bazel:
real_short_path = _get_real_short_path(src)
else:
real_short_path = paths.relativize(src.short_path, ctx.label.package)
output_filename = paths.replace_extension(real_short_path, extension)
ret = ctx.new_file(ctx.genfiles_dir, output_filename)
return ret
def _filter_none(elems):
out = []
for elem in elems:
if elem:
out.append(elem)
return out
def _cc_library_func(ctx, name, hdrs, srcs, dep_ccinfos):
"""Like cc_library(), but callable from rules.
Args:
ctx: Rule context.
name: Unique name used to generate output files.
hdrs: Public headers that can be #included from other rules.
srcs: C/C++ source files.
dep_ccinfos: CcInfo providers of dependencies we should build/link against.
Returns:
CcInfo provider for this compilation.
"""
compilation_contexts = [info.compilation_context for info in dep_ccinfos]
linking_contexts = [info.linking_context for info in dep_ccinfos]
toolchain = find_cpp_toolchain(ctx)
feature_configuration = cc_common.configure_features(
ctx = ctx,
cc_toolchain = toolchain,
requested_features = ctx.features,
unsupported_features = ctx.disabled_features,
)
# copybara:strip_for_google3_begin
if bazel_version == "0.24.1":
# Compatibility code until gRPC is on 0.25.2 or later.
compilation_info = cc_common.compile(
ctx = ctx,
feature_configuration = feature_configuration,
cc_toolchain = toolchain,
srcs = srcs,
hdrs = hdrs,
compilation_contexts = compilation_contexts,
)
linking_info = cc_common.link(
ctx = ctx,
feature_configuration = feature_configuration,
cc_toolchain = toolchain,
cc_compilation_outputs = compilation_info.cc_compilation_outputs,
linking_contexts = linking_contexts,
)
return CcInfo(
compilation_context = compilation_info.compilation_context,
linking_context = linking_info.linking_context,
)
if not versions.is_at_least("0.25.2", bazel_version):
fail("upb requires Bazel >=0.25.2 or 0.24.1")
# copybara:strip_end
blaze_only_args = {}
if not _is_bazel:
blaze_only_args["grep_includes"] = ctx.file._grep_includes
(compilation_context, compilation_outputs) = cc_common.compile(
actions = ctx.actions,
feature_configuration = feature_configuration,
cc_toolchain = toolchain,
name = name,
srcs = srcs,
public_hdrs = hdrs,
compilation_contexts = compilation_contexts,
**blaze_only_args
)
(linking_context, linking_outputs) = cc_common.create_linking_context_from_compilation_outputs(
actions = ctx.actions,
name = name,
feature_configuration = feature_configuration,
cc_toolchain = toolchain,
compilation_outputs = compilation_outputs,
linking_contexts = linking_contexts,
**blaze_only_args
)
return CcInfo(
compilation_context = compilation_context,
linking_context = linking_context,
)
# upb_proto_library / upb_proto_reflection_library shared code #################
GeneratedSrcs = provider(
fields = {
"srcs": "list of srcs",
"hdrs": "list of hdrs",
},
)
_WrappedCcInfo = provider(fields = ["cc_info"])
_WrappedGeneratedSrcs = provider(fields = ["srcs"])
def _compile_upb_protos(ctx, proto_info, proto_sources, ext):
srcs = [_generate_output_file(ctx, name, ext + ".c") for name in proto_sources]
hdrs = [_generate_output_file(ctx, name, ext + ".h") for name in proto_sources]
transitive_sets = list(proto_info.transitive_descriptor_sets)
ctx.actions.run(
inputs = depset(
direct = [ctx.executable._upbc, proto_info.direct_descriptor_set],
transitive = [proto_info.transitive_descriptor_sets],
),
outputs = srcs + hdrs,
executable = ctx.executable._protoc,
arguments = [
"--upb_out=" + _get_real_root(srcs[0]),
"--plugin=protoc-gen-upb=" + ctx.executable._upbc.path,
"--descriptor_set_in=" + ":".join([f.path for f in transitive_sets]),
] +
[_get_real_short_path(file) for file in proto_sources],
progress_message = "Generating upb protos for :" + ctx.label.name,
)
return GeneratedSrcs(srcs = srcs, hdrs = hdrs)
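# For intuition: the action registered above amounts to a protoc invocation
# of roughly this shape (reconstructed from the arguments above; paths are
# illustrative, not taken from this commit):
#
#     protoc --upb_out=<real root of the outputs> \
#         --plugin=protoc-gen-upb=<path to protoc-gen-upb> \
#         --descriptor_set_in=<transitive descriptor sets joined with ':'> \
#         <real short path of each direct .proto source>
#
# Because of --descriptor_set_in, protoc resolves the named sources from the
# descriptor sets, so the action's inputs are descriptor sets rather than
# .proto files.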
def _upb_proto_rule_impl(ctx):
if len(ctx.attr.deps) != 1:
fail("only one deps dependency allowed.")
dep = ctx.attr.deps[0]
if _WrappedCcInfo not in dep or _WrappedGeneratedSrcs not in dep:
fail("proto_library rule must generate _WrappedCcInfo and " +
"_WrappedGeneratedSrcs (aspect should have handled this).")
cc_info = dep[_WrappedCcInfo].cc_info
srcs = dep[_WrappedGeneratedSrcs].srcs
lib = cc_info.linking_context.libraries_to_link[0]
files = _filter_none([
lib.static_library,
lib.pic_static_library,
lib.dynamic_library,
])
return [
DefaultInfo(files = depset(files + srcs.hdrs + srcs.srcs)),
srcs,
cc_info,
]
def _upb_proto_aspect_impl(target, ctx):
proto_info = target[ProtoInfo]
files = _compile_upb_protos(ctx, proto_info, proto_info.direct_sources, ctx.attr._ext)
deps = ctx.rule.attr.deps + ctx.attr._upb
dep_ccinfos = [dep[CcInfo] for dep in deps if CcInfo in dep]
dep_ccinfos += [dep[_WrappedCcInfo].cc_info for dep in deps if _WrappedCcInfo in dep]
cc_info = _cc_library_func(
ctx = ctx,
name = ctx.rule.attr.name + ctx.attr._ext,
hdrs = files.hdrs,
srcs = files.srcs,
dep_ccinfos = dep_ccinfos,
)
return [_WrappedCcInfo(cc_info = cc_info), _WrappedGeneratedSrcs(srcs = files)]
def _maybe_add(d):
if not _is_bazel:
d["_grep_includes"] = attr.label(
allow_single_file = True,
cfg = "host",
default = "//tools/cpp:grep-includes",
)
return d
# upb_proto_library() ##########################################################
_upb_proto_library_aspect = aspect(
attrs = _maybe_add({
"_upbc": attr.label(
executable = True,
cfg = "host",
default = "//:protoc-gen-upb",
),
"_protoc": attr.label(
executable = True,
cfg = "host",
default = "@com_google_protobuf//:protoc",
),
"_cc_toolchain": attr.label(
default = "@bazel_tools//tools/cpp:current_cc_toolchain",
),
"_upb": attr.label_list(default = ["//:upb"]),
"_ext": attr.string(default = ".upb"),
}),
implementation = _upb_proto_aspect_impl,
attr_aspects = ["deps"],
fragments = ["cpp"],
toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
)
upb_proto_library = rule(
output_to_genfiles = True,
implementation = _upb_proto_rule_impl,
attrs = {
"deps": attr.label_list(
aspects = [_upb_proto_library_aspect],
allow_rules = ["proto_library"],
providers = [ProtoInfo],
),
},
)
# upb_proto_reflection_library() ###############################################
_upb_proto_reflection_library_aspect = aspect(
attrs = _maybe_add({
"_upbc": attr.label(
executable = True,
cfg = "host",
default = "//:protoc-gen-upb",
),
"_protoc": attr.label(
executable = True,
cfg = "host",
default = "@com_google_protobuf//:protoc",
),
"_cc_toolchain": attr.label(
default = "@bazel_tools//tools/cpp:current_cc_toolchain",
),
"_upb": attr.label_list(
default = [
"//:upb",
"//:reflection",
],
),
"_ext": attr.string(default = ".upbdefs"),
}),
implementation = _upb_proto_aspect_impl,
attr_aspects = ["deps"],
fragments = ["cpp"],
toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
)
upb_proto_reflection_library = rule(
output_to_genfiles = True,
implementation = _upb_proto_rule_impl,
attrs = {
"deps": attr.label_list(
aspects = [_upb_proto_reflection_library_aspect],
allow_rules = ["proto_library"],
providers = [ProtoInfo],
),
},
)

36  bazel/workspace_deps.bzl

@ -0,0 +1,36 @@
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
load("//bazel:repository_defs.bzl", "bazel_version_repository")
def upb_deps():
    bazel_version_repository(
        name = "bazel_version",
    )

    git_repository(
        name = "absl",
        commit = "070f6e47b33a2909d039e620c873204f78809492",
        remote = "https://github.com/abseil/abseil-cpp.git",
        shallow_since = "1541627663 -0500",
    )

    git_repository(
        name = "com_google_protobuf",
        remote = "https://github.com/protocolbuffers/protobuf.git",
        commit = "d41002663fd04325ead28439dfd5ce2822b0d6fb",
    )

    http_archive(
        name = "bazel_skylib",
        strip_prefix = "bazel-skylib-master",
        urls = ["https://github.com/bazelbuild/bazel-skylib/archive/master.tar.gz"],
    )

    http_archive(
        name = "zlib",
        build_file = "@com_google_protobuf//:third_party/zlib.BUILD",
        sha256 = "c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1",
        strip_prefix = "zlib-1.2.11",
        urls = ["https://zlib.net/zlib-1.2.11.tar.gz"],
    )

338  build_defs.bzl

@ -1,338 +0,0 @@
_shell_find_runfiles = """
# --- begin runfiles.bash initialization ---
# Copy-pasted from Bazel's Bash runfiles library (tools/bash/runfiles/runfiles.bash).
set -euo pipefail
if [[ ! -d "${RUNFILES_DIR:-/dev/null}" && ! -f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
if [[ -f "$0.runfiles_manifest" ]]; then
export RUNFILES_MANIFEST_FILE="$0.runfiles_manifest"
elif [[ -f "$0.runfiles/MANIFEST" ]]; then
export RUNFILES_MANIFEST_FILE="$0.runfiles/MANIFEST"
elif [[ -f "$0.runfiles/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
export RUNFILES_DIR="$0.runfiles"
fi
fi
if [[ -f "${RUNFILES_DIR:-/dev/null}/bazel_tools/tools/bash/runfiles/runfiles.bash" ]]; then
source "${RUNFILES_DIR}/bazel_tools/tools/bash/runfiles/runfiles.bash"
elif [[ -f "${RUNFILES_MANIFEST_FILE:-/dev/null}" ]]; then
source "$(grep -m1 "^bazel_tools/tools/bash/runfiles/runfiles.bash " \
"$RUNFILES_MANIFEST_FILE" | cut -d ' ' -f 2-)"
else
echo >&2 "ERROR: cannot find @bazel_tools//tools/bash/runfiles:runfiles.bash"
exit 1
fi
# --- end runfiles.bash initialization ---
"""
def _librule(name):
return name + "_lib"
def lua_cclibrary(name, srcs, hdrs = [], deps = [], luadeps = []):
lib_rule = name + "_lib"
so_rule = "lib" + name + ".so"
so_file = _remove_prefix(name, "lua/") + ".so"
native.cc_library(
name = _librule(name),
hdrs = hdrs,
srcs = srcs,
deps = deps + [_librule(dep) for dep in luadeps] + ["@lua//:liblua_headers"],
)
native.cc_binary(
name = so_rule,
linkshared = True,
deps = [_librule(name)],
linkopts = select({
":darwin": [
"-undefined dynamic_lookup",
],
"//conditions:default": [],
})
)
native.genrule(
name = name + "_copy",
srcs = [":" + so_rule],
outs = [so_file],
cmd = "cp $< $@",
)
native.filegroup(
name = name,
data = [so_file],
)
def _remove_prefix(str, prefix):
if not str.startswith(prefix):
fail("%s doesn't start with %s" % (str, prefix))
return str[len(prefix):]
def _remove_suffix(str, suffix):
if not str.endswith(suffix):
fail("%s doesn't end with %s" % (str, suffix))
return str[:-len(suffix)]
def lua_library(name, srcs, strip_prefix, luadeps = []):
outs = [_remove_prefix(src, strip_prefix + "/") for src in srcs]
native.genrule(
name = name + "_copy",
srcs = srcs,
outs = outs,
cmd = "cp $(SRCS) $(@D)",
)
native.filegroup(
name = name,
data = outs + luadeps,
)
def make_shell_script(name, contents, out):
script_contents = (_shell_find_runfiles + contents).replace("$", "$$")
native.genrule(
name = "gen_" + name,
outs = [out],
cmd = "(cat <<'HEREDOC'\n%s\nHEREDOC\n) > $@" % script_contents,
)
def _lua_binary_or_test(name, luamain, luadeps, rule):
script = name + ".sh"
make_shell_script(
name = "gen_" + name,
out = script,
contents = """
BASE=$(dirname $(rlocation upb/upb_c.so))
export LUA_CPATH="$BASE/?.so"
export LUA_PATH="$BASE/?.lua"
$(rlocation lua/lua) $(rlocation upb/tools/upbc.lua) "$@"
""",
)
rule(
name = name,
srcs = [script],
data = ["@lua//:lua", "@bazel_tools//tools/bash/runfiles", luamain] + luadeps,
)
def lua_binary(name, luamain, luadeps = []):
_lua_binary_or_test(name, luamain, luadeps, native.sh_binary)
def lua_test(name, luamain, luadeps = []):
_lua_binary_or_test(name, luamain, luadeps, native.sh_test)
def generated_file_staleness_test(name, outs, generated_pattern):
"""Tests that checked-in file(s) match the contents of generated file(s).
The resulting test will verify that all output files exist and have the
correct contents. If the test fails, it can be invoked with --fix to
bring the checked-in files up to date.
Args:
name: Name of the rule.
outs: the checked-in files that are copied from generated files.
generated_pattern: the pattern for transforming each "out" file into a
generated file. For example, if generated_pattern="generated/%s" then
a file foo.txt will look for generated file generated/foo.txt.
"""
script_name = name + ".py"
script_src = "//:tools/staleness_test.py"
# Filter out non-existing rules so Blaze doesn't error out before we even
# run the test.
existing_outs = native.glob(include = outs)
# The file list contains a few extra bits of information at the end.
# These get unpacked by the Config class in staleness_test_lib.py.
file_list = outs + [generated_pattern, native.package_name() or ".", name]
native.genrule(
name = name + "_makescript",
outs = [script_name],
srcs = [script_src],
testonly = 1,
cmd = "cat $(location " + script_src + ") > $@; " +
"sed -i.bak -e 's|INSERT_FILE_LIST_HERE|" + "\\\n ".join(file_list) + "|' $@",
)
native.py_test(
name = name,
srcs = [script_name],
data = existing_outs + [generated_pattern % file for file in outs],
deps = [
"//:staleness_test_lib",
],
)
# upb_amalgamation() rule, with file_list aspect.
SrcList = provider(
fields = {
"srcs": "list of srcs",
"hdrs": "list of hdrs",
},
)
def _file_list_aspect_impl(target, ctx):
srcs = []
hdrs = []
for src in ctx.rule.attr.srcs:
srcs += src.files.to_list()
for hdr in ctx.rule.attr.hdrs:
hdrs += hdr.files.to_list()
return [SrcList(srcs = srcs, hdrs = hdrs)]
_file_list_aspect = aspect(
implementation = _file_list_aspect_impl,
)
def _upb_amalgamation(ctx):
inputs = []
srcs = []
for lib in ctx.attr.libs:
inputs += lib[SrcList].srcs
inputs += lib[SrcList].hdrs
srcs += [src for src in lib[SrcList].srcs if src.path.endswith("c")]
ctx.actions.run(
inputs = inputs,
outputs = ctx.outputs.outs,
arguments = ["", ctx.bin_dir.path + "/"] + [f.path for f in srcs],
progress_message = "Making amalgamation",
executable = ctx.executable.amalgamator,
)
upb_amalgamation = rule(
attrs = {
"amalgamator": attr.label(
executable = True,
cfg = "host",
),
"libs": attr.label_list(aspects = [_file_list_aspect]),
"outs": attr.output_list(),
},
implementation = _upb_amalgamation,
)
is_bazel = not hasattr(native, "genmpm")
google3_dep_map = {
"@absl//absl/base:core_headers": "//third_party/absl/base:core_headers",
"@absl//absl/strings": "//third_party/absl/strings",
"@com_google_protobuf//:protoc": "//third_party/protobuf:protoc",
"@com_google_protobuf//:protobuf": "//third_party/protobuf:protobuf",
"@com_google_protobuf//:protoc_lib": "//third_party/protobuf:libprotoc",
}
def map_dep(dep):
if is_bazel:
return dep
else:
return google3_dep_map[dep]
# upb_proto_library() rule
def _remove_up(string):
if string.startswith("../"):
string = string[3:]
pos = string.find("/")
string = string[pos + 1:]
return _remove_suffix(string, ".proto")
def _upb_proto_srcs_impl(ctx, suffix):
sources = []
outs = []
include_dirs = {}
for dep in ctx.attr.deps:
if hasattr(dep, "proto"):
for src in dep.proto.transitive_sources:
sources.append(src)
include_dir = _remove_suffix(src.path, _remove_up(src.short_path) + "." + src.extension)
if include_dir:
include_dirs[include_dir] = True
outs.append(ctx.actions.declare_file(_remove_up(src.short_path) + suffix + ".h"))
outs.append(ctx.actions.declare_file(_remove_up(src.short_path) + suffix + ".c"))
outdir = _remove_suffix(outs[-1].path, _remove_up(src.short_path) + suffix + ".c")
source_paths = [d.path for d in sources]
include_args = ["-I" + root for root in include_dirs.keys()]
ctx.actions.run(
inputs = [ctx.executable.upbc] + sources,
outputs = outs,
executable = ctx.executable.protoc,
arguments = ["--upb_out", outdir, "--plugin=protoc-gen-upb=" + ctx.executable.upbc.path] + include_args + source_paths,
progress_message = "Generating upb protos",
)
return [DefaultInfo(files = depset(outs))]
def _upb_proto_library_srcs_impl(ctx):
return _upb_proto_srcs_impl(ctx, ".upb")
def _upb_proto_reflection_library_srcs_impl(ctx):
return _upb_proto_srcs_impl(ctx, ".upbdefs")
_upb_proto_library_srcs = rule(
attrs = {
"upbc": attr.label(
executable = True,
cfg = "host",
),
"protoc": attr.label(
executable = True,
cfg = "host",
default = map_dep("@com_google_protobuf//:protoc"),
),
"deps": attr.label_list(),
},
implementation = _upb_proto_library_srcs_impl,
)
def upb_proto_library(name, deps, upbc):
srcs_rule = name + "_srcs.cc"
_upb_proto_library_srcs(
name = srcs_rule,
upbc = upbc,
deps = deps,
)
native.cc_library(
name = name,
srcs = [":" + srcs_rule],
deps = [":upb"],
copts = ["-Ibazel-out/k8-fastbuild/bin"],
)
_upb_proto_reflection_library_srcs = rule(
attrs = {
"upbc": attr.label(
executable = True,
cfg = "host",
),
"protoc": attr.label(
executable = True,
cfg = "host",
default = map_dep("@com_google_protobuf//:protoc"),
),
"deps": attr.label_list(),
},
implementation = _upb_proto_reflection_library_srcs_impl,
)
def upb_proto_reflection_library(name, deps, upbc):
srcs_rule = name + "_defsrcs.cc"
_upb_proto_reflection_library_srcs(
name = srcs_rule,
upbc = upbc,
deps = deps,
)
native.cc_library(
name = name,
srcs = [":" + srcs_rule],
deps = [":upb", ":reflection"],
copts = ["-Ibazel-out/k8-fastbuild/bin"],
)
def licenses(*args):
# No-op (for Google-internal usage).
pass

18  examples/bazel/BUILD

@ -0,0 +1,18 @@
load("@upb//bazel:upb_proto_library.bzl", "upb_proto_library")
proto_library(
    name = "foo_proto",
    srcs = ["foo.proto"],
)

upb_proto_library(
    name = "foo_upbproto",
    deps = [":foo_proto"],
)

cc_binary(
    name = "test_binary",
    srcs = ["test_binary.c"],
    deps = [":foo_upbproto"],
)

14  examples/bazel/WORKSPACE

@ -0,0 +1,14 @@
workspace(name = "upb_example")
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
git_repository(
    name = "upb",
    remote = "https://github.com/protocolbuffers/upb.git",
    commit = "d16bf99ac4658793748cda3251226059892b3b7b",
)

load("@upb//bazel:workspace_deps.bzl", "upb_deps")

upb_deps()

7  examples/bazel/foo.proto

@ -0,0 +1,7 @@
syntax = "proto2";
message Foo {
  optional int64 time = 1;
  optional string greeting = 2;
}

17  examples/bazel/test_binary.c

@ -0,0 +1,17 @@
#include <time.h>
#include "foo.upb.h"
int main() {
  upb_arena *arena = upb_arena_new();
  Foo* foo = Foo_new(arena);
  const char greeting[] = "Hello, World!\n";

  Foo_set_time(foo, time(NULL));

  /* Warning: the proto will not copy this, the string data must outlive
   * the proto. */
  Foo_set_greeting(foo, upb_strview_makez(greeting));

  upb_arena_free(arena);
}
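A natural extension of this example is round-tripping the message through the wire format. The sketch below assumes `protoc-gen-upb` also emits `Foo_serialize()` and `Foo_parse()` wrappers for this message; the README example earlier in this diff shows the `_parse()` pattern, while the `_serialize()` name is an assumption not shown in this commit:

```c
#include <stdio.h>
#include <time.h>

#include "foo.upb.h"

int main() {
  upb_arena *arena = upb_arena_new();
  Foo* foo = Foo_new(arena);
  Foo_set_time(foo, time(NULL));

  /* Serialize into the same arena; buf is freed together with the arena.
   * Foo_serialize() is an assumed wrapper over the encoder. */
  size_t size;
  char *buf = Foo_serialize(foo, arena, &size);

  /* Parse the bytes back into a second message and read a field. */
  Foo *foo2 = Foo_parse(buf, size, arena);
  printf("round-tripped time: %lld\n", (long long)Foo_time(foo2));

  upb_arena_free(arena);
  return 0;
}
```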

8  generated_for_cmake/google/protobuf/descriptor.upb.c

@ -251,7 +251,7 @@ static const upb_msglayout *const google_protobuf_FileOptions_submsgs[1] = {
&google_protobuf_UninterpretedOption_msginit,
};
static const upb_msglayout_field google_protobuf_FileOptions__fields[19] = {
static const upb_msglayout_field google_protobuf_FileOptions__fields[21] = {
{1, UPB_SIZE(28, 32), 11, 0, 9, 1},
{8, UPB_SIZE(36, 48), 12, 0, 9, 1},
{9, UPB_SIZE(8, 8), 1, 0, 14, 1},
@ -270,13 +270,15 @@ static const upb_msglayout_field google_protobuf_FileOptions__fields[19] = {
{40, UPB_SIZE(76, 128), 17, 0, 9, 1},
{41, UPB_SIZE(84, 144), 18, 0, 9, 1},
{42, UPB_SIZE(24, 24), 10, 0, 8, 1},
{999, UPB_SIZE(92, 160), 0, 0, 11, 3},
{44, UPB_SIZE(92, 160), 19, 0, 9, 1},
{45, UPB_SIZE(100, 176), 20, 0, 9, 1},
{999, UPB_SIZE(108, 192), 0, 0, 11, 3},
};
const upb_msglayout google_protobuf_FileOptions_msginit = {
&google_protobuf_FileOptions_submsgs[0],
&google_protobuf_FileOptions__fields[0],
UPB_SIZE(96, 176), 19, false,
UPB_SIZE(112, 208), 21, false,
};
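The new layout is consistent with the two singular string fields added in this commit (php_metadata_namespace and ruby_package, visible in the descriptor.upb.h hunks below): a `upb_strview` occupies UPB_SIZE(8, 16) bytes on 32-/64-bit targets, so the message size grows from UPB_SIZE(96, 176) to UPB_SIZE(96 + 2*8, 176 + 2*16) = UPB_SIZE(112, 208), the field count goes from 19 to 21, and the repeated uninterpreted_option array shifts from offset UPB_SIZE(92, 160) to UPB_SIZE(108, 192).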
static const upb_msglayout *const google_protobuf_MessageOptions_submsgs[1] = {

53  generated_for_cmake/google/protobuf/descriptor.upb.h

@ -10,12 +10,12 @@
#define GOOGLE_PROTOBUF_DESCRIPTOR_PROTO_UPB_H_
#include "upb/generated_util.h"
#include "upb/msg.h"
#include "upb/decode.h"
#include "upb/encode.h"
#include "upb/port_def.inc"
#ifdef __cplusplus
extern "C" {
#endif
@ -102,8 +102,6 @@ extern const upb_msglayout google_protobuf_SourceCodeInfo_Location_msginit;
extern const upb_msglayout google_protobuf_GeneratedCodeInfo_msginit;
extern const upb_msglayout google_protobuf_GeneratedCodeInfo_Annotation_msginit;
/* Enums */
typedef enum {
google_protobuf_FieldDescriptorProto_LABEL_OPTIONAL = 1,
google_protobuf_FieldDescriptorProto_LABEL_REQUIRED = 2,
@ -186,7 +184,6 @@ UPB_INLINE struct google_protobuf_FileDescriptorProto* google_protobuf_FileDescr
return sub;
}
/* google.protobuf.FileDescriptorProto */
UPB_INLINE google_protobuf_FileDescriptorProto *google_protobuf_FileDescriptorProto_new(upb_arena *arena) {
@ -340,7 +337,6 @@ UPB_INLINE void google_protobuf_FileDescriptorProto_set_syntax(google_protobuf_F
UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(20, 40)) = value;
}
/* google.protobuf.DescriptorProto */
UPB_INLINE google_protobuf_DescriptorProto *google_protobuf_DescriptorProto_new(upb_arena *arena) {
@ -487,7 +483,6 @@ UPB_INLINE bool google_protobuf_DescriptorProto_add_reserved_name(google_protobu
msg, UPB_SIZE(44, 88), UPB_SIZE(8, 16), UPB_TYPE_STRING, &val, arena);
}
/* google.protobuf.DescriptorProto.ExtensionRange */
UPB_INLINE google_protobuf_DescriptorProto_ExtensionRange *google_protobuf_DescriptorProto_ExtensionRange_new(upb_arena *arena) {
@ -531,7 +526,6 @@ UPB_INLINE struct google_protobuf_ExtensionRangeOptions* google_protobuf_Descrip
return sub;
}
/* google.protobuf.DescriptorProto.ReservedRange */
UPB_INLINE google_protobuf_DescriptorProto_ReservedRange *google_protobuf_DescriptorProto_ReservedRange_new(upb_arena *arena) {
@ -560,7 +554,6 @@ UPB_INLINE void google_protobuf_DescriptorProto_ReservedRange_set_end(google_pro
UPB_FIELD_AT(msg, int32_t, UPB_SIZE(8, 8)) = value;
}
/* google.protobuf.ExtensionRangeOptions */
UPB_INLINE google_protobuf_ExtensionRangeOptions *google_protobuf_ExtensionRangeOptions_new(upb_arena *arena) {
@ -591,7 +584,6 @@ UPB_INLINE struct google_protobuf_UninterpretedOption* google_protobuf_Extension
return sub;
}
/* google.protobuf.FieldDescriptorProto */
UPB_INLINE google_protobuf_FieldDescriptorProto *google_protobuf_FieldDescriptorProto_new(upb_arena *arena) {
@ -677,7 +669,6 @@ UPB_INLINE void google_protobuf_FieldDescriptorProto_set_json_name(google_protob
UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(64, 96)) = value;
}
/* google.protobuf.OneofDescriptorProto */
UPB_INLINE google_protobuf_OneofDescriptorProto *google_protobuf_OneofDescriptorProto_new(upb_arena *arena) {
@ -715,7 +706,6 @@ UPB_INLINE struct google_protobuf_OneofOptions* google_protobuf_OneofDescriptorP
return sub;
}
/* google.protobuf.EnumDescriptorProto */
UPB_INLINE google_protobuf_EnumDescriptorProto *google_protobuf_EnumDescriptorProto_new(upb_arena *arena) {
@ -792,7 +782,6 @@ UPB_INLINE bool google_protobuf_EnumDescriptorProto_add_reserved_name(google_pro
msg, UPB_SIZE(24, 48), UPB_SIZE(8, 16), UPB_TYPE_STRING, &val, arena);
}
/* google.protobuf.EnumDescriptorProto.EnumReservedRange */
UPB_INLINE google_protobuf_EnumDescriptorProto_EnumReservedRange *google_protobuf_EnumDescriptorProto_EnumReservedRange_new(upb_arena *arena) {
@ -821,7 +810,6 @@ UPB_INLINE void google_protobuf_EnumDescriptorProto_EnumReservedRange_set_end(go
UPB_FIELD_AT(msg, int32_t, UPB_SIZE(8, 8)) = value;
}
/* google.protobuf.EnumValueDescriptorProto */
UPB_INLINE google_protobuf_EnumValueDescriptorProto *google_protobuf_EnumValueDescriptorProto_new(upb_arena *arena) {
@ -865,7 +853,6 @@ UPB_INLINE struct google_protobuf_EnumValueOptions* google_protobuf_EnumValueDes
return sub;
}
/* google.protobuf.ServiceDescriptorProto */
UPB_INLINE google_protobuf_ServiceDescriptorProto *google_protobuf_ServiceDescriptorProto_new(upb_arena *arena) {
@ -917,7 +904,6 @@ UPB_INLINE struct google_protobuf_ServiceOptions* google_protobuf_ServiceDescrip
return sub;
}
/* google.protobuf.MethodDescriptorProto */
UPB_INLINE google_protobuf_MethodDescriptorProto *google_protobuf_MethodDescriptorProto_new(upb_arena *arena) {
@ -979,7 +965,6 @@ UPB_INLINE void google_protobuf_MethodDescriptorProto_set_server_streaming(googl
UPB_FIELD_AT(msg, bool, UPB_SIZE(2, 2)) = value;
}
/* google.protobuf.FileOptions */
UPB_INLINE google_protobuf_FileOptions *google_protobuf_FileOptions_new(upb_arena *arena) {
@ -1030,7 +1015,11 @@ UPB_INLINE bool google_protobuf_FileOptions_has_php_namespace(const google_proto
UPB_INLINE upb_strview google_protobuf_FileOptions_php_namespace(const google_protobuf_FileOptions *msg) { return UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(84, 144)); }
UPB_INLINE bool google_protobuf_FileOptions_has_php_generic_services(const google_protobuf_FileOptions *msg) { return _upb_has_field(msg, 10); }
UPB_INLINE bool google_protobuf_FileOptions_php_generic_services(const google_protobuf_FileOptions *msg) { return UPB_FIELD_AT(msg, bool, UPB_SIZE(24, 24)); }
UPB_INLINE const google_protobuf_UninterpretedOption* const* google_protobuf_FileOptions_uninterpreted_option(const google_protobuf_FileOptions *msg, size_t *len) { return (const google_protobuf_UninterpretedOption* const*)_upb_array_accessor(msg, UPB_SIZE(92, 160), len); }
UPB_INLINE bool google_protobuf_FileOptions_has_php_metadata_namespace(const google_protobuf_FileOptions *msg) { return _upb_has_field(msg, 19); }
UPB_INLINE upb_strview google_protobuf_FileOptions_php_metadata_namespace(const google_protobuf_FileOptions *msg) { return UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(92, 160)); }
UPB_INLINE bool google_protobuf_FileOptions_has_ruby_package(const google_protobuf_FileOptions *msg) { return _upb_has_field(msg, 20); }
UPB_INLINE upb_strview google_protobuf_FileOptions_ruby_package(const google_protobuf_FileOptions *msg) { return UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(100, 176)); }
UPB_INLINE const google_protobuf_UninterpretedOption* const* google_protobuf_FileOptions_uninterpreted_option(const google_protobuf_FileOptions *msg, size_t *len) { return (const google_protobuf_UninterpretedOption* const*)_upb_array_accessor(msg, UPB_SIZE(108, 192), len); }
UPB_INLINE void google_protobuf_FileOptions_set_java_package(google_protobuf_FileOptions *msg, upb_strview value) {
_upb_sethas(msg, 11);
@ -1104,21 +1093,28 @@ UPB_INLINE void google_protobuf_FileOptions_set_php_generic_services(google_prot
_upb_sethas(msg, 10);
UPB_FIELD_AT(msg, bool, UPB_SIZE(24, 24)) = value;
}
UPB_INLINE void google_protobuf_FileOptions_set_php_metadata_namespace(google_protobuf_FileOptions *msg, upb_strview value) {
_upb_sethas(msg, 19);
UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(92, 160)) = value;
}
UPB_INLINE void google_protobuf_FileOptions_set_ruby_package(google_protobuf_FileOptions *msg, upb_strview value) {
_upb_sethas(msg, 20);
UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(100, 176)) = value;
}
UPB_INLINE google_protobuf_UninterpretedOption** google_protobuf_FileOptions_mutable_uninterpreted_option(google_protobuf_FileOptions *msg, size_t *len) {
return (google_protobuf_UninterpretedOption**)_upb_array_mutable_accessor(msg, UPB_SIZE(92, 160), len);
return (google_protobuf_UninterpretedOption**)_upb_array_mutable_accessor(msg, UPB_SIZE(108, 192), len);
}
UPB_INLINE google_protobuf_UninterpretedOption** google_protobuf_FileOptions_resize_uninterpreted_option(google_protobuf_FileOptions *msg, size_t len, upb_arena *arena) {
return (google_protobuf_UninterpretedOption**)_upb_array_resize_accessor(msg, UPB_SIZE(92, 160), len, UPB_SIZE(4, 8), UPB_TYPE_MESSAGE, arena);
return (google_protobuf_UninterpretedOption**)_upb_array_resize_accessor(msg, UPB_SIZE(108, 192), len, UPB_SIZE(4, 8), UPB_TYPE_MESSAGE, arena);
}
UPB_INLINE struct google_protobuf_UninterpretedOption* google_protobuf_FileOptions_add_uninterpreted_option(google_protobuf_FileOptions *msg, upb_arena *arena) {
struct google_protobuf_UninterpretedOption* sub = (struct google_protobuf_UninterpretedOption*)upb_msg_new(&google_protobuf_UninterpretedOption_msginit, arena);
bool ok = _upb_array_append_accessor(
msg, UPB_SIZE(92, 160), UPB_SIZE(4, 8), UPB_TYPE_MESSAGE, &sub, arena);
msg, UPB_SIZE(108, 192), UPB_SIZE(4, 8), UPB_TYPE_MESSAGE, &sub, arena);
if (!ok) return NULL;
return sub;
}
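/* Example (a minimal sketch, not part of the generated interface): using the
 * setters and repeated-field accessors above.  Scalar and string fields are
 * written in place through UPB_FIELD_AT at the offset chosen by
 * UPB_SIZE(size32, size64) for 32-/64-bit targets; repeated message fields
 * grow through the add_ accessor, which arena-allocates the new element.
 * upb_strview_makez() is assumed from upb/upb.h; the function name and the
 * "Foo::Bar" value are illustrative only. */
UPB_INLINE void example_build_file_options(google_protobuf_FileOptions *opts,
                                           upb_arena *arena) {
  google_protobuf_FileOptions_set_ruby_package(opts, upb_strview_makez("Foo::Bar"));
  struct google_protobuf_UninterpretedOption *opt =
      google_protobuf_FileOptions_add_uninterpreted_option(opts, arena);
  (void)opt; /* Would be filled in via the UninterpretedOption setters. */
}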
/* google.protobuf.MessageOptions */
UPB_INLINE google_protobuf_MessageOptions *google_protobuf_MessageOptions_new(upb_arena *arena) {
@ -1173,7 +1169,6 @@ UPB_INLINE struct google_protobuf_UninterpretedOption* google_protobuf_MessageOp
return sub;
}
/* google.protobuf.FieldOptions */
UPB_INLINE google_protobuf_FieldOptions *google_protobuf_FieldOptions_new(upb_arena *arena) {
@ -1240,7 +1235,6 @@ UPB_INLINE struct google_protobuf_UninterpretedOption* google_protobuf_FieldOpti
return sub;
}
/* google.protobuf.OneofOptions */
UPB_INLINE google_protobuf_OneofOptions *google_protobuf_OneofOptions_new(upb_arena *arena) {
@ -1271,7 +1265,6 @@ UPB_INLINE struct google_protobuf_UninterpretedOption* google_protobuf_OneofOpti
return sub;
}
/* google.protobuf.EnumOptions */
UPB_INLINE google_protobuf_EnumOptions *google_protobuf_EnumOptions_new(upb_arena *arena) {
@ -1314,7 +1307,6 @@ UPB_INLINE struct google_protobuf_UninterpretedOption* google_protobuf_EnumOptio
return sub;
}
/* google.protobuf.EnumValueOptions */
UPB_INLINE google_protobuf_EnumValueOptions *google_protobuf_EnumValueOptions_new(upb_arena *arena) {
@ -1351,7 +1343,6 @@ UPB_INLINE struct google_protobuf_UninterpretedOption* google_protobuf_EnumValue
return sub;
}
/* google.protobuf.ServiceOptions */
UPB_INLINE google_protobuf_ServiceOptions *google_protobuf_ServiceOptions_new(upb_arena *arena) {
@ -1388,7 +1379,6 @@ UPB_INLINE struct google_protobuf_UninterpretedOption* google_protobuf_ServiceOp
return sub;
}
/* google.protobuf.MethodOptions */
UPB_INLINE google_protobuf_MethodOptions *google_protobuf_MethodOptions_new(upb_arena *arena) {
@ -1431,7 +1421,6 @@ UPB_INLINE struct google_protobuf_UninterpretedOption* google_protobuf_MethodOpt
return sub;
}
/* google.protobuf.UninterpretedOption */
UPB_INLINE google_protobuf_UninterpretedOption *google_protobuf_UninterpretedOption_new(upb_arena *arena) {
@ -1498,7 +1487,6 @@ UPB_INLINE void google_protobuf_UninterpretedOption_set_aggregate_value(google_p
UPB_FIELD_AT(msg, upb_strview, UPB_SIZE(48, 64)) = value;
}
/* google.protobuf.UninterpretedOption.NamePart */
UPB_INLINE google_protobuf_UninterpretedOption_NamePart *google_protobuf_UninterpretedOption_NamePart_new(upb_arena *arena) {
@ -1527,7 +1515,6 @@ UPB_INLINE void google_protobuf_UninterpretedOption_NamePart_set_is_extension(go
UPB_FIELD_AT(msg, bool, UPB_SIZE(1, 1)) = value;
}
/* google.protobuf.SourceCodeInfo */
UPB_INLINE google_protobuf_SourceCodeInfo *google_protobuf_SourceCodeInfo_new(upb_arena *arena) {
@ -1558,7 +1545,6 @@ UPB_INLINE struct google_protobuf_SourceCodeInfo_Location* google_protobuf_Sourc
return sub;
}
/* google.protobuf.SourceCodeInfo.Location */
UPB_INLINE google_protobuf_SourceCodeInfo_Location *google_protobuf_SourceCodeInfo_Location_new(upb_arena *arena) {
@ -1620,7 +1606,6 @@ UPB_INLINE bool google_protobuf_SourceCodeInfo_Location_add_leading_detached_com
msg, UPB_SIZE(28, 56), UPB_SIZE(8, 16), UPB_TYPE_STRING, &val, arena);
}
/* google.protobuf.GeneratedCodeInfo */
UPB_INLINE google_protobuf_GeneratedCodeInfo *google_protobuf_GeneratedCodeInfo_new(upb_arena *arena) {
@ -1651,7 +1636,6 @@ UPB_INLINE struct google_protobuf_GeneratedCodeInfo_Annotation* google_protobuf_
return sub;
}
/* google.protobuf.GeneratedCodeInfo.Annotation */
UPB_INLINE google_protobuf_GeneratedCodeInfo_Annotation *google_protobuf_GeneratedCodeInfo_Annotation_new(upb_arena *arena) {
@ -1697,7 +1681,6 @@ UPB_INLINE void google_protobuf_GeneratedCodeInfo_Annotation_set_end(google_prot
UPB_FIELD_AT(msg, int32_t, UPB_SIZE(8, 8)) = value;
}
#ifdef __cplusplus
} /* extern "C" */
#endif

@ -1686,6 +1686,48 @@ static void start_timestamp_zone(upb_json_parser *p, const char *ptr) {
capture_begin(p, ptr);
}
#define EPOCH_YEAR 1970
#define TM_YEAR_BASE 1900
static bool isleap(int year) {
return (year % 4) == 0 && (year % 100 != 0 || (year % 400) == 0);
}
/* Day of year at the start of each month, for normal and leap years. */
static const unsigned short int upb_mon_yday[2][13] = {
/* Normal years. */
{ 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365 },
/* Leap years. */
{ 0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366 }
};
static int64_t epoch(int year, int yday, int hour, int min, int sec) {
  int64_t years = year - EPOCH_YEAR;
  /* Leap days completed before Jan 1 of |year|.  The +1 starts the count at
   * 1973, the first year after a post-epoch leap year (1972); the plain
   * years / 4 - years / 100 + years / 400 undercounts by one day for every
   * year where (year - 1970) % 4 == 3.  Years before 1970 are not handled
   * exactly. */
  int64_t leap_days =
      (years + 1) / 4 - (years + 1) / 100 + (years + 1) / 400;
  int64_t days = years * 365 + yday + leap_days;
int64_t hours = days * 24 + hour;
int64_t mins = hours * 60 + min;
int64_t secs = mins * 60 + sec;
return secs;
}
static int64_t upb_mktime(const struct tm *tp) {
int sec = tp->tm_sec;
int min = tp->tm_min;
int hour = tp->tm_hour;
int mday = tp->tm_mday;
int mon = tp->tm_mon;
int year = tp->tm_year + TM_YEAR_BASE;
  /* Calculate the zero-based day of year from year, month, and day of month. */
  int mon_yday = ((upb_mon_yday[isleap(year)][mon]) - 1);
int yday = mon_yday + mday;
return epoch(year, yday, hour, min, sec);
}
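/* Worked example (a sketch): 2019-01-01T00:00:00Z.  tm_year = 119 gives
 * year = 2019; January 1 gives mon_yday = -1 and yday = 0.  Then
 * years = 49, leap_days = 12 (the completed leap years 1972, 1976, ...,
 * 2016), days = 49 * 365 + 0 + 12 = 17897, and 17897 * 86400 = 1546300800,
 * the Unix time for that instant. */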
static bool end_timestamp_zone(upb_json_parser *p, const char *ptr) {
size_t len;
const char *buf;
@ -1713,7 +1755,7 @@ static bool end_timestamp_zone(upb_json_parser *p, const char *ptr) {
}
/* Normalize tm */
seconds = mktime(&p->tm);
seconds = upb_mktime(&p->tm);
/* Check timestamp boundary */
if (seconds < -62135596800) {
@ -2544,11 +2586,11 @@ static bool does_fieldmask_end(upb_json_parser *p) {
* final state once, when the closing '"' is seen. */
#line 2749 "upb/json/parser.rl"
#line 2791 "upb/json/parser.rl"
#line 2552 "upb/json/parser.c"
#line 2594 "upb/json/parser.c"
static const char _json_actions[] = {
0, 1, 0, 1, 1, 1, 3, 1,
4, 1, 6, 1, 7, 1, 8, 1,
@ -2803,7 +2845,7 @@ static const int json_en_value_machine = 78;
static const int json_en_main = 1;
#line 2752 "upb/json/parser.rl"
#line 2794 "upb/json/parser.rl"
size_t parse(void *closure, const void *hd, const char *buf, size_t size,
const upb_bufhandle *handle) {
@ -2826,7 +2868,7 @@ size_t parse(void *closure, const void *hd, const char *buf, size_t size,
capture_resume(parser, buf);
#line 2830 "upb/json/parser.c"
#line 2872 "upb/json/parser.c"
{
int _klen;
unsigned int _trans;
@ -2901,147 +2943,147 @@ _match:
switch ( *_acts++ )
{
case 1:
#line 2557 "upb/json/parser.rl"
#line 2599 "upb/json/parser.rl"
{ p--; {cs = stack[--top]; goto _again;} }
break;
case 2:
#line 2559 "upb/json/parser.rl"
#line 2601 "upb/json/parser.rl"
{ p--; {stack[top++] = cs; cs = 23;goto _again;} }
break;
case 3:
#line 2563 "upb/json/parser.rl"
#line 2605 "upb/json/parser.rl"
{ start_text(parser, p); }
break;
case 4:
#line 2564 "upb/json/parser.rl"
#line 2606 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_text(parser, p)); }
break;
case 5:
#line 2570 "upb/json/parser.rl"
#line 2612 "upb/json/parser.rl"
{ start_hex(parser); }
break;
case 6:
#line 2571 "upb/json/parser.rl"
#line 2613 "upb/json/parser.rl"
{ hexdigit(parser, p); }
break;
case 7:
#line 2572 "upb/json/parser.rl"
#line 2614 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_hex(parser)); }
break;
case 8:
#line 2578 "upb/json/parser.rl"
#line 2620 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(escape(parser, p)); }
break;
case 9:
#line 2584 "upb/json/parser.rl"
#line 2626 "upb/json/parser.rl"
{ p--; {cs = stack[--top]; goto _again;} }
break;
case 10:
#line 2589 "upb/json/parser.rl"
#line 2631 "upb/json/parser.rl"
{ start_year(parser, p); }
break;
case 11:
#line 2590 "upb/json/parser.rl"
#line 2632 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_year(parser, p)); }
break;
case 12:
#line 2594 "upb/json/parser.rl"
#line 2636 "upb/json/parser.rl"
{ start_month(parser, p); }
break;
case 13:
#line 2595 "upb/json/parser.rl"
#line 2637 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_month(parser, p)); }
break;
case 14:
#line 2599 "upb/json/parser.rl"
#line 2641 "upb/json/parser.rl"
{ start_day(parser, p); }
break;
case 15:
#line 2600 "upb/json/parser.rl"
#line 2642 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_day(parser, p)); }
break;
case 16:
#line 2604 "upb/json/parser.rl"
#line 2646 "upb/json/parser.rl"
{ start_hour(parser, p); }
break;
case 17:
#line 2605 "upb/json/parser.rl"
#line 2647 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_hour(parser, p)); }
break;
case 18:
#line 2609 "upb/json/parser.rl"
#line 2651 "upb/json/parser.rl"
{ start_minute(parser, p); }
break;
case 19:
#line 2610 "upb/json/parser.rl"
#line 2652 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_minute(parser, p)); }
break;
case 20:
#line 2614 "upb/json/parser.rl"
#line 2656 "upb/json/parser.rl"
{ start_second(parser, p); }
break;
case 21:
#line 2615 "upb/json/parser.rl"
#line 2657 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_second(parser, p)); }
break;
case 22:
#line 2620 "upb/json/parser.rl"
#line 2662 "upb/json/parser.rl"
{ start_duration_base(parser, p); }
break;
case 23:
#line 2621 "upb/json/parser.rl"
#line 2663 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_duration_base(parser, p)); }
break;
case 24:
#line 2623 "upb/json/parser.rl"
#line 2665 "upb/json/parser.rl"
{ p--; {cs = stack[--top]; goto _again;} }
break;
case 25:
#line 2628 "upb/json/parser.rl"
#line 2670 "upb/json/parser.rl"
{ start_timestamp_base(parser); }
break;
case 26:
#line 2630 "upb/json/parser.rl"
#line 2672 "upb/json/parser.rl"
{ start_timestamp_fraction(parser, p); }
break;
case 27:
#line 2631 "upb/json/parser.rl"
#line 2673 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_timestamp_fraction(parser, p)); }
break;
case 28:
#line 2633 "upb/json/parser.rl"
#line 2675 "upb/json/parser.rl"
{ start_timestamp_zone(parser, p); }
break;
case 29:
#line 2634 "upb/json/parser.rl"
#line 2676 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_timestamp_zone(parser, p)); }
break;
case 30:
#line 2636 "upb/json/parser.rl"
#line 2678 "upb/json/parser.rl"
{ p--; {cs = stack[--top]; goto _again;} }
break;
case 31:
#line 2641 "upb/json/parser.rl"
#line 2683 "upb/json/parser.rl"
{ start_fieldmask_path_text(parser, p); }
break;
case 32:
#line 2642 "upb/json/parser.rl"
#line 2684 "upb/json/parser.rl"
{ end_fieldmask_path_text(parser, p); }
break;
case 33:
#line 2647 "upb/json/parser.rl"
#line 2689 "upb/json/parser.rl"
{ start_fieldmask_path(parser); }
break;
case 34:
#line 2648 "upb/json/parser.rl"
#line 2690 "upb/json/parser.rl"
{ end_fieldmask_path(parser); }
break;
case 35:
#line 2654 "upb/json/parser.rl"
#line 2696 "upb/json/parser.rl"
{ p--; {cs = stack[--top]; goto _again;} }
break;
case 36:
#line 2659 "upb/json/parser.rl"
#line 2701 "upb/json/parser.rl"
{
if (is_wellknown_msg(parser, UPB_WELLKNOWN_TIMESTAMP)) {
{stack[top++] = cs; cs = 47;goto _again;}
@ -3055,11 +3097,11 @@ _match:
}
break;
case 37:
#line 2672 "upb/json/parser.rl"
#line 2714 "upb/json/parser.rl"
{ p--; {stack[top++] = cs; cs = 78;goto _again;} }
break;
case 38:
#line 2677 "upb/json/parser.rl"
#line 2719 "upb/json/parser.rl"
{
if (is_wellknown_msg(parser, UPB_WELLKNOWN_ANY)) {
start_any_member(parser, p);
@ -3069,11 +3111,11 @@ _match:
}
break;
case 39:
#line 2684 "upb/json/parser.rl"
#line 2726 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_membername(parser)); }
break;
case 40:
#line 2687 "upb/json/parser.rl"
#line 2729 "upb/json/parser.rl"
{
if (is_wellknown_msg(parser, UPB_WELLKNOWN_ANY)) {
end_any_member(parser, p);
@ -3083,7 +3125,7 @@ _match:
}
break;
case 41:
#line 2698 "upb/json/parser.rl"
#line 2740 "upb/json/parser.rl"
{
if (is_wellknown_msg(parser, UPB_WELLKNOWN_ANY)) {
start_any_object(parser, p);
@ -3093,7 +3135,7 @@ _match:
}
break;
case 42:
#line 2707 "upb/json/parser.rl"
#line 2749 "upb/json/parser.rl"
{
if (is_wellknown_msg(parser, UPB_WELLKNOWN_ANY)) {
CHECK_RETURN_TOP(end_any_object(parser, p));
@ -3103,54 +3145,54 @@ _match:
}
break;
case 43:
#line 2719 "upb/json/parser.rl"
#line 2761 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(start_array(parser)); }
break;
case 44:
#line 2723 "upb/json/parser.rl"
#line 2765 "upb/json/parser.rl"
{ end_array(parser); }
break;
case 45:
#line 2728 "upb/json/parser.rl"
#line 2770 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(start_number(parser, p)); }
break;
case 46:
#line 2729 "upb/json/parser.rl"
#line 2771 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_number(parser, p)); }
break;
case 47:
#line 2731 "upb/json/parser.rl"
#line 2773 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(start_stringval(parser)); }
break;
case 48:
#line 2732 "upb/json/parser.rl"
#line 2774 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_stringval(parser)); }
break;
case 49:
#line 2734 "upb/json/parser.rl"
#line 2776 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_bool(parser, true)); }
break;
case 50:
#line 2736 "upb/json/parser.rl"
#line 2778 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_bool(parser, false)); }
break;
case 51:
#line 2738 "upb/json/parser.rl"
#line 2780 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_null(parser)); }
break;
case 52:
#line 2740 "upb/json/parser.rl"
#line 2782 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(start_subobject_full(parser)); }
break;
case 53:
#line 2741 "upb/json/parser.rl"
#line 2783 "upb/json/parser.rl"
{ end_subobject_full(parser); }
break;
case 54:
#line 2746 "upb/json/parser.rl"
#line 2788 "upb/json/parser.rl"
{ p--; {cs = stack[--top]; goto _again;} }
break;
#line 3154 "upb/json/parser.c"
#line 3196 "upb/json/parser.c"
}
}
@ -3167,32 +3209,32 @@ _again:
while ( __nacts-- > 0 ) {
switch ( *__acts++ ) {
case 0:
#line 2555 "upb/json/parser.rl"
#line 2597 "upb/json/parser.rl"
{ p--; {cs = stack[--top]; if ( p == pe )
goto _test_eof;
goto _again;} }
break;
case 46:
#line 2729 "upb/json/parser.rl"
#line 2771 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_number(parser, p)); }
break;
case 49:
#line 2734 "upb/json/parser.rl"
#line 2776 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_bool(parser, true)); }
break;
case 50:
#line 2736 "upb/json/parser.rl"
#line 2778 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_bool(parser, false)); }
break;
case 51:
#line 2738 "upb/json/parser.rl"
#line 2780 "upb/json/parser.rl"
{ CHECK_RETURN_TOP(end_null(parser)); }
break;
case 53:
#line 2741 "upb/json/parser.rl"
#line 2783 "upb/json/parser.rl"
{ end_subobject_full(parser); }
break;
#line 3196 "upb/json/parser.c"
#line 3238 "upb/json/parser.c"
}
}
}
@ -3200,7 +3242,7 @@ goto _again;} }
_out: {}
}
#line 2774 "upb/json/parser.rl"
#line 2816 "upb/json/parser.rl"
if (p != pe) {
upb_status_seterrf(parser->status, "Parse error at '%.*s'\n", (int)(pe - p), p);
@ -3243,13 +3285,13 @@ static void json_parser_reset(upb_json_parser *p) {
/* Emit Ragel initialization of the parser. */
#line 3247 "upb/json/parser.c"
#line 3289 "upb/json/parser.c"
{
cs = json_start;
top = 0;
}
#line 2816 "upb/json/parser.rl"
#line 2858 "upb/json/parser.rl"
p->current_state = cs;
p->parser_top = top;
accumulate_clear(p);

Binary file not shown.

@ -1,870 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Author: kenton@google.com (Kenton Varda)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
//
// The messages in this file describe the definitions found in .proto files.
// A valid .proto file can be translated directly to a FileDescriptorProto
// without any other information (e.g. without reading its imports).
syntax = "proto2";
package google.protobuf;
option go_package = "github.com/golang/protobuf/protoc-gen-go/descriptor;descriptor";
option java_package = "com.google.protobuf";
option java_outer_classname = "DescriptorProtos";
option csharp_namespace = "Google.Protobuf.Reflection";
option objc_class_prefix = "GPB";
option cc_enable_arenas = true;
// descriptor.proto must be optimized for speed because reflection-based
// algorithms don't work during bootstrapping.
option optimize_for = SPEED;
// The protocol compiler can output a FileDescriptorSet containing the .proto
// files it parses.
message FileDescriptorSet {
repeated FileDescriptorProto file = 1;
}
// Describes a complete .proto file.
message FileDescriptorProto {
optional string name = 1; // file name, relative to root of source tree
optional string package = 2; // e.g. "foo", "foo.bar", etc.
// Names of files imported by this file.
repeated string dependency = 3;
// Indexes of the public imported files in the dependency list above.
repeated int32 public_dependency = 10;
// Indexes of the weak imported files in the dependency list.
// For Google-internal migration only. Do not use.
repeated int32 weak_dependency = 11;
// All top-level definitions in this file.
repeated DescriptorProto message_type = 4;
repeated EnumDescriptorProto enum_type = 5;
repeated ServiceDescriptorProto service = 6;
repeated FieldDescriptorProto extension = 7;
optional FileOptions options = 8;
// This field contains optional information about the original source code.
// You may safely remove this entire field without harming runtime
// functionality of the descriptors -- the information is needed only by
// development tools.
optional SourceCodeInfo source_code_info = 9;
// The syntax of the proto file.
// The supported values are "proto2" and "proto3".
optional string syntax = 12;
}
// Describes a message type.
message DescriptorProto {
optional string name = 1;
repeated FieldDescriptorProto field = 2;
repeated FieldDescriptorProto extension = 6;
repeated DescriptorProto nested_type = 3;
repeated EnumDescriptorProto enum_type = 4;
message ExtensionRange {
optional int32 start = 1;
optional int32 end = 2;
optional ExtensionRangeOptions options = 3;
}
repeated ExtensionRange extension_range = 5;
repeated OneofDescriptorProto oneof_decl = 8;
optional MessageOptions options = 7;
// Range of reserved tag numbers. Reserved tag numbers may not be used by
// fields or extension ranges in the same message. Reserved ranges may
// not overlap.
message ReservedRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Exclusive.
}
repeated ReservedRange reserved_range = 9;
// Reserved field names, which may not be used by fields in the same message.
// A given name may only be reserved once.
repeated string reserved_name = 10;
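// For example (a sketch), both lists above are populated from "reserved"
// statements such as:
//   message Foo {
//     reserved 2, 15, 9 to 11;
//     reserved "foo", "bar";
//   }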
}
message ExtensionRangeOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
// Describes a field within a message.
message FieldDescriptorProto {
enum Type {
// 0 is reserved for errors.
// Order is weird for historical reasons.
TYPE_DOUBLE = 1;
TYPE_FLOAT = 2;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
// negative values are likely.
TYPE_INT64 = 3;
TYPE_UINT64 = 4;
// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
// negative values are likely.
TYPE_INT32 = 5;
TYPE_FIXED64 = 6;
TYPE_FIXED32 = 7;
TYPE_BOOL = 8;
TYPE_STRING = 9;
// Tag-delimited aggregate.
// Group type is deprecated and not supported in proto3. However, Proto3
// implementations should still be able to parse the group wire format and
// treat group fields as unknown fields.
TYPE_GROUP = 10;
TYPE_MESSAGE = 11; // Length-delimited aggregate.
// New in version 2.
TYPE_BYTES = 12;
TYPE_UINT32 = 13;
TYPE_ENUM = 14;
TYPE_SFIXED32 = 15;
TYPE_SFIXED64 = 16;
TYPE_SINT32 = 17; // Uses ZigZag encoding.
TYPE_SINT64 = 18; // Uses ZigZag encoding.
};
enum Label {
// 0 is reserved for errors
LABEL_OPTIONAL = 1;
LABEL_REQUIRED = 2;
LABEL_REPEATED = 3;
};
optional string name = 1;
optional int32 number = 3;
optional Label label = 4;
// If type_name is set, this need not be set. If both this and type_name
// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
optional Type type = 5;
// For message and enum types, this is the name of the type. If the name
// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
// rules are used to find the type (i.e. first the nested types within this
// message are searched, then within the parent, on up to the root
// namespace).
optional string type_name = 6;
// For extensions, this is the name of the type being extended. It is
// resolved in the same manner as type_name.
optional string extendee = 2;
// For numeric types, contains the original text representation of the value.
// For booleans, "true" or "false".
// For strings, contains the default text contents (not escaped in any way).
// For bytes, contains the C escaped value. All bytes >= 128 are escaped.
// TODO(kenton): Base-64 encode?
optional string default_value = 7;
// If set, gives the index of a oneof in the containing type's oneof_decl
// list. This field is a member of that oneof.
optional int32 oneof_index = 9;
// JSON name of this field. The value is set by protocol compiler. If the
// user has set a "json_name" option on this field, that option's value
// will be used. Otherwise, it's deduced from the field's name by converting
// it to camelCase.
optional string json_name = 10;
optional FieldOptions options = 8;
}
// Describes a oneof.
message OneofDescriptorProto {
optional string name = 1;
optional OneofOptions options = 2;
}
// Describes an enum type.
message EnumDescriptorProto {
optional string name = 1;
repeated EnumValueDescriptorProto value = 2;
optional EnumOptions options = 3;
// Range of reserved numeric values. Reserved values may not be used by
// entries in the same enum. Reserved ranges may not overlap.
//
// Note that this is distinct from DescriptorProto.ReservedRange in that it
// is inclusive such that it can appropriately represent the entire int32
// domain.
message EnumReservedRange {
optional int32 start = 1; // Inclusive.
optional int32 end = 2; // Inclusive.
}
// Range of reserved numeric values. Reserved numeric values may not be used
// by enum values in the same enum declaration. Reserved ranges may not
// overlap.
repeated EnumReservedRange reserved_range = 4;
// Reserved enum value names, which may not be reused. A given name may only
// be reserved once.
repeated string reserved_name = 5;
}
// Describes a value within an enum.
message EnumValueDescriptorProto {
optional string name = 1;
optional int32 number = 2;
optional EnumValueOptions options = 3;
}
// Describes a service.
message ServiceDescriptorProto {
optional string name = 1;
repeated MethodDescriptorProto method = 2;
optional ServiceOptions options = 3;
}
// Describes a method of a service.
message MethodDescriptorProto {
optional string name = 1;
// Input and output type names. These are resolved in the same way as
// FieldDescriptorProto.type_name, but must refer to a message type.
optional string input_type = 2;
optional string output_type = 3;
optional MethodOptions options = 4;
// Identifies if client streams multiple client messages
optional bool client_streaming = 5 [default=false];
// Identifies if server streams multiple server messages
optional bool server_streaming = 6 [default=false];
}
// ===================================================================
// Options
// Each of the definitions above may have "options" attached. These are
// just annotations which may cause code to be generated slightly differently
// or may contain hints for code that manipulates protocol messages.
//
// Clients may define custom options as extensions of the *Options messages.
// These extensions may not yet be known at parsing time, so the parser cannot
// store the values in them. Instead it stores them in a field in the *Options
// message called uninterpreted_option. This field must have the same name
// across all *Options messages. We then use this field to populate the
// extensions when we build a descriptor, at which point all protos have been
// parsed and so all extensions are known.
//
// Extension numbers for custom options may be chosen as follows:
// * For options which will only be used within a single application or
// organization, or for experimental options, use field numbers 50000
// through 99999. It is up to you to ensure that you do not use the
// same number for multiple options.
// * For options which will be published and used publicly by multiple
// independent entities, e-mail protobuf-global-extension-registry@google.com
// to reserve extension numbers. Simply provide your project name (e.g.
// Objective-C plugin) and your project website (if available) -- there's no
// need to explain how you intend to use them. Usually you only need one
// extension number. You can declare multiple options with only one extension
// number by putting them in a sub-message. See the Custom Options section of
// the docs for examples:
// https://developers.google.com/protocol-buffers/docs/proto#options
// If this turns out to be popular, a web service will be set up
// to automatically assign option numbers.
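// For example (a sketch; my_file_opt and its number are illustrative), a
// file-level custom option is declared by extending FileOptions:
//   import "google/protobuf/descriptor.proto";
//   extend google.protobuf.FileOptions {
//     optional string my_file_opt = 50000;
//   }
// and set in any .proto file that imports the extension:
//   option (my_file_opt) = "Hello world!";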
message FileOptions {
// Sets the Java package where classes generated from this .proto will be
// placed. By default, the proto package is used, but this is often
// inappropriate because proto packages do not normally start with backwards
// domain names.
optional string java_package = 1;
// If set, all the classes from the .proto file are wrapped in a single
// outer class with the given name. This applies to both Proto1
// (equivalent to the old "--one_java_file" option) and Proto2 (where
// a .proto always translates to a single class, but you may want to
// explicitly choose the class name).
optional string java_outer_classname = 8;
// If set true, then the Java code generator will generate a separate .java
// file for each top-level message, enum, and service defined in the .proto
// file. Thus, these types will *not* be nested inside the outer class
// named by java_outer_classname. However, the outer class will still be
// generated to contain the file's getDescriptor() method as well as any
// top-level extensions defined in the file.
optional bool java_multiple_files = 10 [default=false];
// This option does nothing.
optional bool java_generate_equals_and_hash = 20 [deprecated=true];
// If set true, then the Java2 code generator will generate code that
// throws an exception whenever an attempt is made to assign a non-UTF-8
// byte sequence to a string field.
// Message reflection will do the same.
// However, an extension field still accepts non-UTF-8 byte sequences.
// This option has no effect when used with the lite runtime.
optional bool java_string_check_utf8 = 27 [default=false];
// Generated classes can be optimized for speed or code size.
enum OptimizeMode {
SPEED = 1; // Generate complete code for parsing, serialization,
// etc.
CODE_SIZE = 2; // Use ReflectionOps to implement these methods.
LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime.
}
optional OptimizeMode optimize_for = 9 [default=SPEED];
// Sets the Go package where structs generated from this .proto will be
// placed. If omitted, the Go package will be derived from the following:
// - The basename of the package import path, if provided.
// - Otherwise, the package statement in the .proto file, if present.
// - Otherwise, the basename of the .proto file, without extension.
optional string go_package = 11;
// Should generic services be generated in each language? "Generic" services
// are not specific to any particular RPC system. They are generated by the
// main code generators in each language (without additional plugins).
// Generic services were the only kind of service generation supported by
// early versions of google.protobuf.
//
// Generic services are now considered deprecated in favor of using plugins
// that generate code specific to your particular RPC system. Therefore,
// these default to false. Old code which depends on generic services should
// explicitly set them to true.
optional bool cc_generic_services = 16 [default=false];
optional bool java_generic_services = 17 [default=false];
optional bool py_generic_services = 18 [default=false];
optional bool php_generic_services = 42 [default=false];
// Is this file deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for everything in the file, or it will be completely ignored; in the very
// least, this is a formalization for deprecating files.
optional bool deprecated = 23 [default=false];
// Enables the use of arenas for the proto messages in this file. This applies
// only to generated classes for C++.
optional bool cc_enable_arenas = 31 [default=false];
// Sets the objective c class prefix which is prepended to all objective c
// generated classes from this .proto. There is no default.
optional string objc_class_prefix = 36;
// Namespace for generated classes; defaults to the package.
optional string csharp_namespace = 37;
// By default Swift generators will take the proto package and CamelCase it
// replacing '.' with underscore and use that to prefix the types/symbols
// defined. When this options is provided, they will use this value instead
// to prefix the types/symbols defined.
optional string swift_prefix = 39;
// Sets the php class prefix which is prepended to all php generated classes
// from this .proto. Default is empty.
optional string php_class_prefix = 40;
// Use this option to change the namespace of php generated classes. Default
// is empty. When this option is empty, the package name will be used for
// determining the namespace.
optional string php_namespace = 41;
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
reserved 38;
}
message MessageOptions {
// Set true to use the old proto1 MessageSet wire format for extensions.
// This is provided for backwards-compatibility with the MessageSet wire
// format. You should not use this for any other reason: It's less
// efficient, has fewer features, and is more complicated.
//
// The message must be defined exactly as follows:
// message Foo {
// option message_set_wire_format = true;
// extensions 4 to max;
// }
// Note that the message cannot have any defined fields; MessageSets only
// have extensions.
//
// All extensions of your type must be singular messages; e.g. they cannot
// be int32s, enums, or repeated messages.
//
// Because this is an option, the above two restrictions are not enforced by
// the protocol compiler.
optional bool message_set_wire_format = 1 [default=false];
// Disables the generation of the standard "descriptor()" accessor, which can
// conflict with a field of the same name. This is meant to make migration
// from proto1 easier; new code should avoid fields named "descriptor".
optional bool no_standard_descriptor_accessor = 2 [default=false];
// Is this message deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the message, or it will be completely ignored; in the very least,
// this is a formalization for deprecating messages.
optional bool deprecated = 3 [default=false];
// Whether the message is an automatically generated map entry type for the
// maps field.
//
// For maps fields:
// map<KeyType, ValueType> map_field = 1;
// The parsed descriptor looks like:
// message MapFieldEntry {
// option map_entry = true;
// optional KeyType key = 1;
// optional ValueType value = 2;
// }
// repeated MapFieldEntry map_field = 1;
//
// Implementations may choose not to generate the map_entry=true message, but
// use a native map in the target language to hold the keys and values.
// The reflection APIs in such implementations still need to work as
// if the field is a repeated message field.
//
// NOTE: Do not set the option in .proto files. Always use the maps syntax
// instead. The option should only be implicitly set by the proto compiler
// parser.
optional bool map_entry = 7;
reserved 8; // javalite_serializable
reserved 9; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message FieldOptions {
// The ctype option instructs the C++ code generator to use a different
// representation of the field than it normally would. See the specific
// options below. This option is not yet implemented in the open source
// release -- sorry, we'll try to include it in a future version!
optional CType ctype = 1 [default = STRING];
enum CType {
// Default mode.
STRING = 0;
CORD = 1;
STRING_PIECE = 2;
}
// The packed option can be enabled for repeated primitive fields to enable
// a more efficient representation on the wire. Rather than repeatedly
// writing the tag and type for each element, the entire array is encoded as
// a single length-delimited blob. In proto3, only explicitly setting it to
// false will avoid using packed encoding.
optional bool packed = 2;
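// For example (a sketch), a proto2 field opts into packed encoding with:
//   repeated int32 samples = 4 [packed = true];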
// The jstype option determines the JavaScript type used for values of the
// field. The option is permitted only for 64 bit integral and fixed types
// (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
// is represented as a JavaScript string, which avoids the loss of precision
// that can happen when a large value is converted to a floating point
// JavaScript number. Specifying JS_NUMBER for the jstype causes the generated
// JavaScript code to
// use the JavaScript "number" type. The behavior of the default option
// JS_NORMAL is implementation dependent.
//
// This option is an enum to permit additional types to be added, e.g.
// goog.math.Integer.
optional JSType jstype = 6 [default = JS_NORMAL];
enum JSType {
// Use the default type.
JS_NORMAL = 0;
// Use JavaScript strings.
JS_STRING = 1;
// Use JavaScript numbers.
JS_NUMBER = 2;
}
// Should this field be parsed lazily? Lazy applies only to message-type
// fields. It means that when the outer message is initially parsed, the
// inner message's contents will not be parsed but instead stored in encoded
// form. The inner message will actually be parsed when it is first accessed.
//
// This is only a hint. Implementations are free to choose whether to use
// eager or lazy parsing regardless of the value of this option. However,
// setting this option true suggests that the protocol author believes that
// using lazy parsing on this field is worth the additional bookkeeping
// overhead typically needed to implement it.
//
// This option does not affect the public interface of any generated code;
// all method signatures remain the same. Furthermore, thread-safety of the
// interface is not affected by this option; const methods remain safe to
// call from multiple threads concurrently, while non-const methods continue
// to require exclusive access.
//
//
// Note that implementations may choose not to check required fields within
// a lazy sub-message. That is, calling IsInitialized() on the outer message
// may return true even if the inner message has missing required fields.
// This is necessary because otherwise the inner message would have to be
// parsed in order to perform the check, defeating the purpose of lazy
// parsing. An implementation which chooses not to check required fields
// must be consistent about it. That is, for any particular sub-message, the
// implementation must either *always* check its required fields, or *never*
// check its required fields, regardless of whether or not the message has
// been parsed.
optional bool lazy = 5 [default=false];
// Is this field deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for accessors, or it will be completely ignored; in the very least, this
// is a formalization for deprecating fields.
optional bool deprecated = 3 [default=false];
// For Google-internal migration only. Do not use.
optional bool weak = 10 [default=false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
reserved 4; // removed jtype
}
message OneofOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumOptions {
// Set this option to true to allow mapping different tag names to the same
// value.
optional bool allow_alias = 2;
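// For example (a sketch):
//   enum Status {
//     option allow_alias = true;
//     STATUS_UNKNOWN = 0;
//     STATUS_STARTED = 1;
//     STATUS_RUNNING = 1;  // Alias for STATUS_STARTED.
//   }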
// Is this enum deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum, or it will be completely ignored; in the very least, this
// is a formalization for deprecating enums.
optional bool deprecated = 3 [default=false];
reserved 5; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message EnumValueOptions {
// Is this enum value deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the enum value, or it will be completely ignored; in the very least,
// this is a formalization for deprecating enum values.
optional bool deprecated = 1 [default=false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message ServiceOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this service deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the service, or it will be completely ignored; in the very least,
// this is a formalization for deprecating services.
optional bool deprecated = 33 [default=false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
message MethodOptions {
// Note: Field numbers 1 through 32 are reserved for Google's internal RPC
// framework. We apologize for hoarding these numbers to ourselves, but
// we were already using them long before we decided to release Protocol
// Buffers.
// Is this method deprecated?
// Depending on the target platform, this can emit Deprecated annotations
// for the method, or it will be completely ignored; in the very least,
// this is a formalization for deprecating methods.
optional bool deprecated = 33 [default=false];
// Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
// or neither? HTTP-based RPC implementations may choose the GET verb for safe
// methods, and the PUT verb for idempotent methods, instead of the default POST.
enum IdempotencyLevel {
IDEMPOTENCY_UNKNOWN = 0;
NO_SIDE_EFFECTS = 1; // implies idempotent
IDEMPOTENT = 2; // idempotent, but may have side effects
}
optional IdempotencyLevel idempotency_level =
34 [default=IDEMPOTENCY_UNKNOWN];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
// A message representing an option the parser does not recognize. This only
// appears in options protos created by the compiler::Parser class.
// DescriptorPool resolves these when building Descriptor objects. Therefore,
// options protos in descriptor objects (e.g. returned by Descriptor::options(),
// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
// in them.
message UninterpretedOption {
// The name of the uninterpreted option. Each string represents a segment in
// a dot-separated name. is_extension is true iff a segment represents an
// extension (denoted with parentheses in options specs in .proto files).
// E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents
// "foo.(bar.baz).qux".
message NamePart {
required string name_part = 1;
required bool is_extension = 2;
}
repeated NamePart name = 2;
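// For example (a sketch), "foo.(bar.baz).qux" above is stored as three
// NameParts:
//   name { name_part: "foo"     is_extension: false }
//   name { name_part: "bar.baz" is_extension: true }
//   name { name_part: "qux"     is_extension: false }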
// The value of the uninterpreted option, in whatever type the tokenizer
// identified it as during parsing. Exactly one of these should be set.
optional string identifier_value = 3;
optional uint64 positive_int_value = 4;
optional int64 negative_int_value = 5;
optional double double_value = 6;
optional bytes string_value = 7;
optional string aggregate_value = 8;
}
// ===================================================================
// Optional source code info
// Encapsulates information about the original source file from which a
// FileDescriptorProto was generated.
message SourceCodeInfo {
// A Location identifies a piece of source code in a .proto file which
// corresponds to a particular definition. This information is intended
// to be useful to IDEs, code indexers, documentation generators, and similar
// tools.
//
// For example, say we have a file like:
// message Foo {
// optional string foo = 1;
// }
// Let's look at just the field definition:
//   optional string foo = 1;
//   ^       ^^     ^^  ^  ^^^
//   a       bc     de  f  ghi
// We have the following locations:
//   span   path               represents
//   [a,i)  [ 4, 0, 2, 0 ]     The whole field definition.
//   [a,b)  [ 4, 0, 2, 0, 4 ]  The label (optional).
//   [c,d)  [ 4, 0, 2, 0, 5 ]  The type (string).
//   [e,f)  [ 4, 0, 2, 0, 1 ]  The name (foo).
//   [g,h)  [ 4, 0, 2, 0, 3 ]  The number (1).
//
// Notes:
// - A location may refer to a repeated field itself (i.e. not to any
// particular index within it). This is used whenever a set of elements are
// logically enclosed in a single code segment. For example, an entire
// extend block (possibly containing multiple extension definitions) will
// have an outer location whose path refers to the "extensions" repeated
// field without an index.
// - Multiple locations may have the same path. This happens when a single
// logical declaration is spread out across multiple places. The most
// obvious example is the "extend" block again -- there may be multiple
// extend blocks in the same scope, each of which will have the same path.
// - A location's span is not always a subset of its parent's span. For
// example, the "extendee" of an extension declaration appears at the
// beginning of the "extend" block and is shared by all extensions within
// the block.
// - Just because a location's span is a subset of some other location's span
// does not mean that it is a descendant. For example, a "group" defines
// both a type and a field in a single declaration. Thus, the locations
// corresponding to the type and field and their components will overlap.
// - Code which tries to interpret locations should probably be designed to
// ignore those that it doesn't understand, as more types of locations could
// be recorded in the future.
repeated Location location = 1;
message Location {
// Identifies which part of the FileDescriptorProto was defined at this
// location.
//
// Each element is a field number or an index. They form a path from
// the root FileDescriptorProto to the place where the definition appears. For
// example, this path:
// [ 4, 3, 2, 7, 1 ]
// refers to:
// file.message_type(3) // 4, 3
// .field(7) // 2, 7
// .name() // 1
// This is because FileDescriptorProto.message_type has field number 4:
// repeated DescriptorProto message_type = 4;
// and DescriptorProto.field has field number 2:
// repeated FieldDescriptorProto field = 2;
// and FieldDescriptorProto.name has field number 1:
// optional string name = 1;
//
// Thus, the above path gives the location of a field name. If we removed
// the last element:
// [ 4, 3, 2, 7 ]
// this path refers to the whole field declaration (from the beginning
// of the label to the terminating semicolon).
repeated int32 path = 1 [packed=true];
// Always has exactly three or four elements: start line, start column,
// end line (optional, otherwise assumed same as start line), end column.
// These are packed into a single field for efficiency. Note that line
// and column numbers are zero-based -- typically you will want to add
// 1 to each before displaying to a user.
repeated int32 span = 2 [packed=true];
// If this SourceCodeInfo represents a complete declaration, these are any
// comments appearing before and after the declaration which appear to be
// attached to the declaration.
//
// A series of line comments appearing on consecutive lines, with no other
// tokens appearing on those lines, will be treated as a single comment.
//
// leading_detached_comments will keep paragraphs of comments that appear
// before (but not connected to) the current element. Each paragraph,
// separated by empty lines, will be one comment element in the repeated
// field.
//
// Only the comment content is provided; comment markers (e.g. //) are
// stripped out. For block comments, leading whitespace and an asterisk
// will be stripped from the beginning of each line other than the first.
// Newlines are included in the output.
//
// Examples:
//
// optional int32 foo = 1; // Comment attached to foo.
// // Comment attached to bar.
// optional int32 bar = 2;
//
// optional string baz = 3;
// // Comment attached to baz.
// // Another line attached to baz.
//
// // Comment attached to qux.
// //
// // Another line attached to qux.
// optional double qux = 4;
//
// // Detached comment for corge. This is not leading or trailing comments
// // to qux or corge because there are blank lines separating it from
// // both.
//
// // Detached comment for corge paragraph 2.
//
// optional string corge = 5;
// /* Block comment attached
// * to corge. Leading asterisks
// * will be removed. */
// /* Block comment attached to
// * grault. */
// optional int32 grault = 6;
//
// // ignored detached comments.
optional string leading_comments = 3;
optional string trailing_comments = 4;
repeated string leading_detached_comments = 6;
}
}
// Describes the relationship between generated code and its original source
// file. A GeneratedCodeInfo message is associated with only one generated
// source file, but may contain references to different source .proto files.
message GeneratedCodeInfo {
// An Annotation connects some span of text in generated code to an element
// of its generating .proto file.
repeated Annotation annotation = 1;
message Annotation {
// Identifies the element in the original source .proto file. This field
// is formatted the same as SourceCodeInfo.Location.path.
repeated int32 path = 1 [packed=true];
// Identifies the filesystem path to the original source .proto.
optional string source_file = 2;
// Identifies the starting offset in bytes in the generated code
// that relates to the identified object.
optional int32 begin = 3;
// Identifies the ending offset in bytes in the generated code that
// relates to the identified offset. The end offset should be one past
// the last relevant byte (so the length of the text = end - begin).
optional int32 end = 4;
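// For example (a sketch), begin = 12 and end = 20 would identify the eight
// bytes at offsets 12..19 of the generated file.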
}
}

@ -75,21 +75,16 @@ void DoTest(
break;
}
case conformance_ConformanceRequest_payload_json_payload: {
static const char msg[] = "JSON support not yet implemented.";
conformance_ConformanceResponse_set_skipped(
response, upb_strview_make(msg, sizeof(msg)));
return;
}
case conformance_ConformanceRequest_payload_NOT_SET:
fprintf(stderr, "conformance_upb: Request didn't have payload.\n");
return;
default:
fprintf(stderr, "conformance_upb: Unexpected case: %d\n",
conformance_ConformanceRequest_payload_case(request));
exit(1);
default: {
static const char msg[] = "Unsupported input format.";
conformance_ConformanceResponse_set_skipped(
response, upb_strview_make(msg, sizeof(msg)));
return;
}
}
switch (conformance_ConformanceRequest_requested_output_format(request)) {
@ -113,17 +108,12 @@ void DoTest(
break;
}
case conformance_JSON: {
static const char msg[] = "JSON support not yet implemented.";
default: {
static const char msg[] = "Unsupported output format.";
conformance_ConformanceResponse_set_skipped(
response, upb_strview_make(msg, sizeof(msg)));
break;
return;
}
default:
fprintf(stderr, "conformance_upb: Unknown output format: %d\n",
conformance_ConformanceRequest_requested_output_format(request));
exit(1);
}
return;

@ -0,0 +1,9 @@
syntax = "proto2";
import "tests/json/test.proto";
package upb.test.json;
message ImportEnum {
optional MyEnum e = 1;
}

@ -29,6 +29,11 @@ message TestMessage {
map<string, int32> map_string_int32 = 23;
map<string, bool> map_string_bool = 24;
map<string, SubMessage> map_string_msg = 25;
oneof o {
int32 oneof_int32 = 26;
int64 oneof_int64 = 27;
}
}
message SubMessage {

Binary file not shown.

@ -4,6 +4,7 @@
*/
#include "tests/json/test.upbdefs.h"
#include "tests/json/test.upb.h" // Test that it compiles for C++.
#include "tests/test_util.h"
#include "tests/upb_test.h"
#include "upb/handlers.h"

@ -3,43 +3,25 @@
#include "tests/upb_test.h"
#include "upb/bindings/stdc++/string.h"
#include "google/protobuf/descriptor.upb.h"
#include "google/protobuf/descriptor.upbdefs.h"
#include "upb/pb/decoder.h"
#include "upb/pb/encoder.h"
#include "upb/port_def.inc"
std::string read_string(const char *filename) {
size_t len;
char *str = upb_readfile(filename, &len);
ASSERT(str);
if (!str) { return std::string(); }
std::string ret = std::string(str, len);
free(str);
return ret;
}
#include <iostream>
void test_pb_roundtrip() {
std::string input = read_string("google/protobuf/descriptor.pb");
std::string input(
google_protobuf_descriptor_proto_upbdefinit.descriptor.data,
google_protobuf_descriptor_proto_upbdefinit.descriptor.size);
std::cout << input.size() << "\n";
upb::SymbolTable symtab;
upb::HandlerCache encoder_cache(upb::pb::EncoderPtr::NewCache());
upb::pb::CodeCache decoder_cache(&encoder_cache);
upb::Arena arena;
google_protobuf_FileDescriptorSet *set =
google_protobuf_FileDescriptorSet_parse(input.c_str(), input.size(),
arena.ptr());
ASSERT(set);
size_t n;
const google_protobuf_FileDescriptorProto *const *files =
google_protobuf_FileDescriptorSet_file(set, &n);
ASSERT(n == 1);
upb::Status status;
upb::FileDefPtr file_def = symtab.AddFile(files[0], &status);
if (!file_def) {
fprintf(stderr, "Error building def: %s\n", status.error_message());
ASSERT(false);
}
upb::MessageDefPtr md =
symtab.LookupMessage("google.protobuf.FileDescriptorSet");
upb::MessageDefPtr md(
google_protobuf_FileDescriptorProto_getmsgdef(symtab.ptr()));
ASSERT(md);
const upb::Handlers *encoder_handlers = encoder_cache.Get(md);
ASSERT(encoder_handlers);

@ -12,14 +12,13 @@
#include <sstream>
#include "tests/test_cpp.upbdefs.h"
#include "tests/upb_test.h"
#include "upb/def.h"
#include "upb/handlers.h"
#include "upb/pb/decoder.h"
#include "upb/pb/textprinter.h"
#include "upb/upb.h"
#include "upb_test.h"
#include "upb/port_def.inc"
#include "upb/upb.h"
template <class T>
void AssertInsert(T* const container, const typename T::value_type& val) {

@ -2,6 +2,7 @@
import sys
import re
import os
INCLUDE_RE = re.compile('^#include "([^"]*)"$')
@ -10,8 +11,8 @@ def parse_include(line):
return match.groups()[0] if match else None
class Amalgamator:
def __init__(self, include_path, output_path):
self.include_path = include_path
def __init__(self, output_path):
self.include_paths = ["."]
self.included = set(["upb/port_def.inc", "upb/port_undef.inc"])
self.output_h = open(output_path + "upb.h", "w")
self.output_c = open(output_path + "upb.c", "w")
@ -24,18 +25,32 @@ class Amalgamator:
self.output_h.write('#include <stdint.h>\n')
self.output_h.write(open("upb/port_def.inc").read())
def add_include_path(self, path):
self.include_paths.append(path)
def finish(self):
self.output_c.write(open("upb/port_undef.inc").read())
self.output_h.write(open("upb/port_undef.inc").read())
def _process_file(self, infile_name, outfile):
for line in open(infile_name):
file = None
for path in self.include_paths:
try:
full_path = os.path.join(path, infile_name)
file = open(full_path)
break
except IOError:
pass
if not file:
raise RuntimeError("Couldn't open file " + infile_name)
for line in file:
include = parse_include(line)
if include is not None and (include.startswith("upb") or
include.startswith("google")):
if include not in self.included:
self.included.add(include)
self._add_header(self.include_path + include)
self._add_header(include)
else:
outfile.write(line)
@ -47,12 +62,20 @@ class Amalgamator:
# ---- main ----
include_path = sys.argv[1]
output_path = sys.argv[2]
amalgamator = Amalgamator(include_path, output_path)
output_path = sys.argv[1]
amalgamator = Amalgamator(output_path)
files = []
for arg in sys.argv[2:]:
arg = arg.strip()
if arg.startswith("-I"):
amalgamator.add_include_path(arg[2:])
elif arg.endswith(".h") or arg.endswith(".inc"):
pass
else:
files.append(arg)
for filename in sys.argv[3:]:
if filename.endswith(".h") or filename.endswith(".inc"):
amalgamator.add_src(filename.strip())
for filename in files:
amalgamator.add_src(filename)
amalgamator.finish()
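# Example invocation (a sketch; the paths are illustrative).  The first
# argument is the output prefix, -I flags extend the include search path,
# .h/.inc names are skipped (they are pulled in via #include), and the
# remaining files are amalgamated in order:
#   python tools/amalgamate.py out/ -I. -Igenerated_for_cmake upb/upb.c upb/decode.c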

@ -1,22 +0,0 @@
#!/usr/bin/env bash
set -e
BINDIR=`dirname "$0"`/..
SRCDIR=${CMAKE_CURRENT_SOURCE_DIR}
EXIT=0
function try_copy() {
if [ ! -f $1 ]; then
echo "Can't find $1, skipping..."
EXIT=1
else
cp $1 $2
echo $1
fi
}
try_copy $BINDIR/google/protobuf/descriptor.upb.c $SRCDIR/google/protobuf
try_copy $BINDIR/google/protobuf/descriptor.upb.h $SRCDIR/google/protobuf
try_copy $BINDIR/upb/json/parser.c $SRCDIR/upb/json
try_copy $BINDIR/upb/pb/compile_decoder_x64.h $SRCDIR/upb/pb

@ -11,6 +11,7 @@ from __future__ import print_function
import sys
import textwrap
import os
def StripColons(deps):
return map(lambda x: x[1:], deps)
@ -38,12 +39,20 @@ class BuildFileFunctions(object):
if kwargs["name"] == "amalgamation" or kwargs["name"] == "upbc_generator":
return
files = kwargs.get("srcs", []) + kwargs.get("hdrs", [])
found_files = []
for file in files:
if os.path.isfile(file):
found_files.append(file)
elif os.path.isfile("generated_for_cmake/" + file):
found_files.append("generated_for_cmake/" + file)
else:
print("Warning: no such file: " + file)
if filter(IsSourceFile, files):
# Has sources, make this a normal library.
self.converter.toplevel += "add_library(%s\n %s)\n" % (
kwargs["name"],
"\n ".join(files)
"\n ".join(found_files)
)
self._add_deps(kwargs)
else:
@ -125,6 +134,9 @@ class BuildFileFunctions(object):
def upb_proto_reflection_library(self, **kwargs):
pass
def upb_proto_srcs(self, **kwargs):
pass
def genrule(self, **kwargs):
pass
@ -140,6 +152,9 @@ class BuildFileFunctions(object):
def licenses(self, *args):
pass
def filegroup(self, **kwargs):
pass
def map_dep(self, arg):
return arg
@ -160,6 +175,12 @@ class WorkspaceFileFunctions(object):
def git_repository(self, **kwargs):
pass
def bazel_version_repository(self, **kwargs):
pass
def upb_deps(self):
pass
class Converter(object):
def __init__(self):
@ -224,6 +245,7 @@ class Converter(object):
endif()
include_directories(.)
include_directories(generated_for_cmake)
include_directories(${CMAKE_CURRENT_BINARY_DIR})
if(APPLE)

@ -1,196 +0,0 @@
#!/bin/bash
install_protoc() {
sudo apt-get install protobuf-compiler
protoc --version || true
}
# Bare build: no dependencies installed, no JIT enabled.
bare_install() {
:
}
bare_script() {
make -j12 tests
make test
}
# Bare JIT build: no dependencies installed, but JIT enabled.
barejit_install() {
:
}
barejit_script() {
make -j12 tests WITH_JIT=yes
make test
}
# Build with strict warnings.
warnings_install() {
:
}
warnings_script() {
make -j12 default WITH_MAX_WARNINGS=yes
make -j12 tests WITH_MAX_WARNINGS=yes
make test
}
# A 32-bit build. Can only test the core because any dependencies
# need to be available as 32-bit libs also, which gets hairy fast.
# Can't enable the JIT because it only supports x64.
core32_install() {
sudo apt-get update -qq
sudo apt-get install libc6-dev-i386 g++-multilib
}
core32_script() {
make -j12 tests USER_CPPFLAGS="$USER_CPPFLAGS -m32"
make test
}
# A build of Lua and running of Lua tests.
lua_install() {
sudo apt-get update -qq
sudo apt-get install lua5.2 liblua5.2-dev
}
lua_script() {
make -j12 testlua USER_CPPFLAGS="$USER_CPPFLAGS `pkg-config lua5.2 --cflags`"
}
# Test that generated files don't need to be regenerated.
#
# We would include the Ragel output here too, but we can't really guarantee
# that its output will be stable for multiple versions of the tool, and we
# don't want the test to be brittle.
genfiles_install() {
sudo apt-get update -qq
sudo apt-get install lua5.2 liblua5.2-dev
# Need a recent version of protoc to compile proto3 files.
# .travis.yml will add this to our path
mkdir protoc
cd protoc
wget https://github.com/google/protobuf/releases/download/v3.0.0-beta-2/protoc-3.0.0-beta-2-linux-x86_64.zip
unzip protoc-3.0.0-beta-2-linux-x86_64.zip
cd ..
}
genfiles_script() {
protoc --version || true
# Avoid regenerating descriptor.pb, since its output can vary based on the
# version of protoc.
touch upb/descriptor/descriptor.pb
make -j12 genfiles USER_CPPFLAGS="$USER_CPPFLAGS `pkg-config lua5.2 --cflags`"
# Will fail if any differences were observed.
git diff --exit-code
}
# Tests the ndebug build.
ndebug_install() {
sudo apt-get update -qq
sudo apt-get install lua5.2 liblua5.2-dev libprotobuf-dev
install_protoc
}
ndebug_script() {
# Override of USER_CPPFLAGS removes -UNDEBUG.
export USER_CPPFLAGS="`pkg-config lua5.2 --cflags` -g -fomit-frame-pointer"
make -j12 tests testlua WITH_JIT=yes
make test
}
# Tests the amalgamated build (this ensures that the different .c files
# don't have symbols or macros that conflict with each other).
amalgamated_install() {
:
}
amalgamated_script() {
# Override of USER_CPPFLAGS drops the debugging flags but keeps -UNDEBUG.
export USER_CPPFLAGS="-UNDEBUG"
make amalgamated
}
# A run that executes with coverage support and uploads to coveralls.io
coverage_install() {
sudo apt-get update -qq
sudo apt-get install libprotobuf-dev lua5.2 liblua5.2-dev
install_protoc
sudo pip install cpp-coveralls
}
coverage_script() {
export USER_CPPFLAGS="--coverage -O0 `pkg-config lua5.2 --cflags`"
make -j12 tests testlua WITH_JIT=yes
make test
}
coverage_after_success() {
coveralls --exclude dynasm --exclude tests --exclude upb/bindings/linux --gcov-options '\-lp'
}
set -e
set -x
if [ "$1" == "local" ]; then
run_config() {
make clean
echo
echo "travis.sh: TESTING CONFIGURATION $1 ==============================="
echo
UPB_TRAVIS_BUILD=$1 ./travis.sh script
}
# Run all configurations serially locally to test before pushing a pull
# request.
export CC=gcc
export CXX=g++
run_config "bare"
run_config "barejit"
run_config "core32"
run_config "lua"
run_config "ndebug"
run_config "genfiles"
run_config "amalgamated"
exit
fi
$CC --version
$CXX --version
# Uncomment to enable uploading failure logs to S3.
# UPLOAD_TO_S3=true
if [ "$1" == "after_failure" ] && [ "$UPLOAD_TO_S3" == "true" ]; then
# Upload failing tree to S3.
curl -sL https://raw.githubusercontent.com/travis-ci/artifacts/master/install | bash
PATH="$PATH:$HOME/bin"
export ARTIFACTS_BUCKET=haberman-upb-travis-artifacts2
ARCHIVE=failing-artifacts.tar.gz
tar zcvf $ARCHIVE $(git ls-files -o)
artifacts upload $ARCHIVE
exit
fi
if [ "$1" == "after_success" ] && [ "$UPB_TRAVIS_BUILD" != "coverage" ]; then
# after_success is only used for coverage.
exit
fi
if [ "$CC" != "gcc" ] && [ "$UPB_TRAVIS_BUILD" == "coverage" ]; then
# coverage build only works for GCC.
exit
fi
# Enable asserts and ref debugging (though some configurations override this).
export USER_CPPFLAGS="-UNDEBUG -DUPB_DEBUG_REFS -DUPB_THREAD_UNSAFE -DUPB_DEBUG_TABLE -g"
if [ "$CC" == "gcc" ]; then
# For the GCC build test loading JIT code via SO. For the Clang build test
# loading it in the normal way.
export USER_CPPFLAGS="$USER_CPPFLAGS -DUPB_JIT_LOAD_SO"
fi
# TODO(haberman): Test UPB_DUMP_BYTECODE? We don't right now because it is so
# noisy.
# Enable verbose build.
export Q=
# Make any compiler warning fail the build.
export UPB_FAIL_WARNINGS=true
eval ${UPB_TRAVIS_BUILD}_${1}

@ -249,6 +249,7 @@ static bool upb_decode_addval(upb_decframe *frame,
if (field->label == UPB_LABEL_REPEATED) {
arr = upb_getorcreatearr(frame, field);
CHK(arr);
field_mem = upb_array_reserve(arr, 1, size);
CHK(field_mem);
}
@ -382,6 +383,7 @@ static bool upb_decode_toarray(upb_decstate *d, upb_decframe *frame,
const upb_msglayout_field *field,
upb_strview val) {
upb_array *arr = upb_getorcreatearr(frame, field);
CHK(arr);
#define VARINT_CASE(ctype, decode) \
{ \
@ -512,7 +514,7 @@ static bool upb_decode_field(upb_decstate *d, upb_decframe *frame) {
frame->limit = d->ptr;
return true;
default:
return false;
CHK(false);
}
} else {
CHK(field_number != 0);
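Both added CHK(arr) lines guard against upb_getorcreatearr returning NULL on allocation failure, and the switch's default case now takes the same early-return path as every other failure. CHK is decode.c's local early-return macro; its shape is roughly the following (a sketch, not the verbatim definition):

#define CHK(x) if (!(x)) { return false; }

With that reading, a failed array allocation now aborts the decode instead of passing a NULL arr into upb_array_reserve.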

@ -10,6 +10,8 @@
#include <stddef.h>
#include "upb/handlers.h"
#include "upb/port_def.inc"
#ifdef __cplusplus
/* Type detection and typedefs for integer types.
@ -916,4 +918,6 @@ inline void Handler<T>::AddCleanup(upb_handlers* h) const {
#undef UPB_INT64ALT_T
#undef UPB_UINT64ALT_T
#include "upb/port_undef.inc"
#endif /* UPB_HANDLERS_INL_H_ */

@ -725,8 +725,8 @@ bool upb_msg_getscalarhandlerdata(const upb_handlers *h,
} /* extern "C" */
#endif
#include "upb/handlers-inl.h"
#include "upb/port_undef.inc"
#include "upb/handlers-inl.h"
#endif /* UPB_HANDLERS_H */
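This reorder is safe because handlers-inl.h now wraps itself in its own port_def.inc / port_undef.inc pair (see the hunk above), so handlers.h can pop its platform macros before pulling in the inline header. The two .inc files act as a push/pop pair for upb's portability macros; one representative entry (a sketch, the real files define many more):

/* upb/port_def.inc */
#define UPB_INLINE static inline

/* upb/port_undef.inc */
#undef UPB_INLINE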

@ -1684,6 +1684,48 @@ static void start_timestamp_zone(upb_json_parser *p, const char *ptr) {
capture_begin(p, ptr);
}
#define EPOCH_YEAR 1970
#define TM_YEAR_BASE 1900
static bool isleap(int year) {
return (year % 4) == 0 && (year % 100 != 0 || (year % 400) == 0);
}
const unsigned short int __mon_yday[2][13] = {
/* Normal years. */
{ 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365 },
/* Leap years. */
{ 0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366 }
};
int64_t epoch(int year, int yday, int hour, int min, int sec) {
int64_t years = year - EPOCH_YEAR;
int64_t leap_days = years / 4 - years / 100 + years / 400;
int64_t days = years * 365 + yday + leap_days;
int64_t hours = days * 24 + hour;
int64_t mins = hours * 60 + min;
int64_t secs = mins * 60 + sec;
return secs;
}
static int64_t upb_mktime(const struct tm *tp) {
int sec = tp->tm_sec;
int min = tp->tm_min;
int hour = tp->tm_hour;
int mday = tp->tm_mday;
int mon = tp->tm_mon;
int year = tp->tm_year + TM_YEAR_BASE;
/* Calculate day of year from year, month, and day of month. */
int mon_yday = ((__mon_yday[isleap(year)][mon]) - 1);
int yday = mon_yday + mday;
return epoch(year, yday, hour, min, sec);
}
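/* Illustrative check of the arithmetic above (hand-computed example, not in the
 * original source): 2019-02-01T00:00:00Z gives year = 2019, mon = 1, mday = 1,
 * so yday = (__mon_yday[0][1] - 1) + 1 = 31, and epoch() computes:
 *   years     = 2019 - 1970            = 49
 *   leap_days = 49/4 - 49/100 + 49/400 = 12
 *   days      = 49*365 + 31 + 12       = 17928
 *   secs      = 17928 * 86400          = 1548979200  (the Unix time for that instant)
 */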
static bool end_timestamp_zone(upb_json_parser *p, const char *ptr) {
size_t len;
const char *buf;
@ -1711,7 +1753,7 @@ static bool end_timestamp_zone(upb_json_parser *p, const char *ptr) {
}
/* Normalize tm */
seconds = mktime(&p->tm);
seconds = upb_mktime(&p->tm);
/* Check timestamp boundary */
if (seconds < -62135596800) {
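That magic constant is the proto3 Timestamp lower bound, 0001-01-01T00:00:00Z, and the formula above reproduces it exactly: epoch(1, 0, 0, 0, 0) yields leap_days = -1969/4 + 1969/100 - 1969/400 = -492 + 19 - 4 = -477 under C's truncating division, days = -1969*365 + 0 - 477 = -719162, and -719162 * 86400 = -62135596800.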

@ -1,8 +1,7 @@
#include <unordered_map>
#include <unordered_set>
#include <memory>
#include "absl/container/flat_hash_map.h"
#include "absl/strings/ascii.h"
#include "absl/strings/str_replace.h"
#include "absl/strings/substitute.h"
@ -362,7 +361,7 @@ void GenerateMessageInHeader(const protobuf::Descriptor* message, Output& output
fullname);
output(
"UPB_INLINE $0_oneofcases $1_$2_case(const $1* msg) { "
"return UPB_FIELD_AT(msg, int, $3); }\n"
"return ($0_oneofcases)UPB_FIELD_AT(msg, int32_t, $3); }\n"
"\n",
fullname, msgname, oneof->name(),
GetSizeInit(layout.GetOneofCaseOffset(oneof)));
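Substituting hypothetical names (package foo, message Msg, oneof kind, case offset 12) into that template, the generated accessor now reads:

UPB_INLINE foo_Msg_kind_oneofcases foo_Msg_kind_case(const foo_Msg* msg) { return (foo_Msg_kind_oneofcases)UPB_FIELD_AT(msg, int32_t, 12); }

The discriminant is read as a fixed-width int32_t and cast back to the enum type, which makes the load width platform-independent and silences C++ implicit-conversion warnings.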
@ -499,7 +498,7 @@ void GenerateMessageInHeader(const protobuf::Descriptor* message, Output& output
}
}
output("\n\n");
output("\n");
}
void WriteHeader(const protobuf::FileDescriptor* file, Output& output) {
@ -507,15 +506,30 @@ void WriteHeader(const protobuf::FileDescriptor* file, Output& output) {
output(
"#ifndef $0_UPB_H_\n"
"#define $0_UPB_H_\n\n"
"#include \"upb/generated_util.h\"\n\n"
"#include \"upb/msg.h\"\n\n"
"#include \"upb/generated_util.h\"\n"
"#include \"upb/msg.h\"\n"
"#include \"upb/decode.h\"\n"
"#include \"upb/encode.h\"\n"
"#include \"upb/encode.h\"\n\n",
ToPreproc(file->name()));
for (int i = 0; i < file->public_dependency_count(); i++) {
const auto& name = file->public_dependency(i)->name();
if (i == 0) {
output("/* Public Imports. */\n");
}
output("#include \"$0\"\n", HeaderFilename(name));
if (i == file->public_dependency_count() - 1) {
output("\n");
}
}
output(
"#include \"upb/port_def.inc\"\n"
"\n"
"#ifdef __cplusplus\n"
"extern \"C\" {\n"
"#endif\n\n",
ToPreproc(file->name()));
"#endif\n"
"\n");
std::vector<const protobuf::Descriptor*> this_file_messages =
SortedMessages(file);
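With a single public dependency (hypothetical file names: this_file.proto publicly importing base.proto), the prologue emitted by this code becomes:

#ifndef THIS_FILE_PROTO_UPB_H_
#define THIS_FILE_PROTO_UPB_H_

#include "upb/generated_util.h"
#include "upb/msg.h"
#include "upb/decode.h"
#include "upb/encode.h"

/* Public Imports. */
#include "base.upb.h"

#include "upb/port_def.inc"

#ifdef __cplusplus
extern "C" {
#endif

The doubled newlines after generated_util.h and msg.h moved to after encode.h, so the core includes now form a single block.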
@ -552,12 +566,13 @@ void WriteHeader(const protobuf::FileDescriptor* file, Output& output) {
output("extern const upb_msglayout $0;\n", MessageInit(pair.second));
}
if (!this_file_messages.empty()) {
output("\n");
}
std::vector<const protobuf::EnumDescriptor*> this_file_enums =
SortedEnums(file);
output(
"\n"
"/* Enums */\n\n");
for (auto enumdesc : this_file_enums) {
output("typedef enum {\n");
DumpEnumValues(enumdesc, output);
@ -605,7 +620,7 @@ void WriteSource(const protobuf::FileDescriptor* file, Output& output) {
std::string fields_array_ref = "NULL";
std::string submsgs_array_ref = "NULL";
std::string oneofs_array_ref = "NULL";
std::unordered_map<const protobuf::Descriptor*, int> submsg_indexes;
absl::flat_hash_map<const protobuf::Descriptor*, int> submsg_indexes;
MessageLayout layout(message);
std::vector<const protobuf::FieldDescriptor*> sorted_submsgs =
SortedSubmessages(message);

@ -2,8 +2,8 @@
#ifndef UPBC_MESSAGE_LAYOUT_H
#define UPBC_MESSAGE_LAYOUT_H
#include <unordered_map>
#include "absl/base/macros.h"
#include "absl/container/flat_hash_map.h"
#include "google/protobuf/descriptor.h"
namespace upbc {
@ -70,7 +70,7 @@ class MessageLayout {
Size Place(SizeAndAlign size_and_align);
template <class K, class V>
static V GetMapValue(const std::unordered_map<K, V>& map, K key) {
static V GetMapValue(const absl::flat_hash_map<K, V>& map, K key) {
auto iter = map.find(key);
if (iter == map.end()) {
fprintf(stderr, "No value for field.\n");
@ -92,11 +92,11 @@ class MessageLayout {
static int64_t FieldLayoutRank(
const google::protobuf::FieldDescriptor* field);
std::unordered_map<const google::protobuf::FieldDescriptor*, Size>
absl::flat_hash_map<const google::protobuf::FieldDescriptor*, Size>
field_offsets_;
std::unordered_map<const google::protobuf::FieldDescriptor*, int>
absl::flat_hash_map<const google::protobuf::FieldDescriptor*, int>
hasbit_indexes_;
std::unordered_map<const google::protobuf::OneofDescriptor*, Size>
absl::flat_hash_map<const google::protobuf::OneofDescriptor*, Size>
oneof_case_offsets_;
Size maxalign_;
Size size_;
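absl::flat_hash_map stores its entries in one flat table rather than in per-node heap allocations, a better fit for these small pointer-keyed maps; the swap is mechanical because the find()/end() lookup shape matches std::unordered_map. A minimal sketch (assuming a descriptor pointer `field` is in scope):

absl::flat_hash_map<const google::protobuf::FieldDescriptor*, int> hasbit_indexes;
hasbit_indexes[field] = 0;                 /* insert, same as std::unordered_map */
auto it = hasbit_indexes.find(field);      /* lookup, same as std::unordered_map */
if (it != hasbit_indexes.end()) { /* present */ }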
