#!/usr/bin/python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2023 Google LLC. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google LLC nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A tool to convert {WORKSPACE, BUILD} -> CMakeLists.txt.
This tool is very upb-specific at the moment, and should not be seen as a
generic Bazel -> CMake converter.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import textwrap
import os

def StripFirstChar(deps):
  return [dep[1:] for dep in deps]

def IsSourceFile(name):
  return name.endswith(".c") or name.endswith(".cc")
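
# CMake snippet emitted once per converted cc_library(). The %(...)s
# placeholders are filled in below with the target name, the library type
# ("" or INTERFACE), the include-directory keyword (PUBLIC or INTERFACE), and
# the list of source files.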
ADD_LIBRARY_FORMAT = """
add_library(%(name)s %(type)s
    %(sources)s
)
target_include_directories(%(name)s %(keyword)s
    $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/..>
    $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/../cmake>
    $<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}>
)
"""
class BuildFileFunctions(object):
  def __init__(self, converter):
    self.converter = converter

  def _add_deps(self, kwargs, keyword=""):
    if "deps" not in kwargs:
      return
    self.converter.toplevel += "target_link_libraries(%s%s\n  %s)\n" % (
        kwargs["name"],
        keyword,
        "\n  ".join(StripFirstChar(kwargs["deps"]))
    )

  def load(self, *args):
    pass

  def cc_library(self, **kwargs):
if kwargs["name"].endswith("amalgamation"):
return
if kwargs["name"] == "upbc_generator":
return
if kwargs["name"] == "lupb":
return
if "testonly" in kwargs:
return
files = kwargs.get("srcs", []) + kwargs.get("hdrs", [])
found_files = []
pregenerated_files = [
"CMakeLists.txt", "descriptor.upb.h", "descriptor.upb.c"
]
for file in files:
if os.path.basename(file) in pregenerated_files:
found_files.append("../cmake/" + file)
else:
found_files.append("../" + file)
if list(filter(IsSourceFile, files)):
# Has sources, make this a normal library.
self.converter.toplevel += ADD_LIBRARY_FORMAT % {
"name": kwargs["name"],
"type": "",
"keyword": "PUBLIC",
"sources": "\n ".join(found_files),
}
self._add_deps(kwargs)
else:
# Header-only library, have to do a couple things differently.
# For some info, see:
# http://mariobadr.com/creating-a-header-only-library-with-cmake.html
self.converter.toplevel += ADD_LIBRARY_FORMAT % {
"name": kwargs["name"],
"type": "INTERFACE",
"keyword": "INTERFACE",
"sources": "",
}
self._add_deps(kwargs, " INTERFACE")

  def cc_binary(self, **kwargs):
    pass

  def cc_test(self, **kwargs):
    # Disable this until we properly support upb_proto_library().
    # self.converter.toplevel += "add_executable(%s\n  %s)\n" % (
    #     kwargs["name"],
    #     "\n  ".join(kwargs["srcs"])
    # )
    # self.converter.toplevel += "add_test(NAME %s COMMAND %s)\n" % (
    #     kwargs["name"],
    #     kwargs["name"],
    # )
    # if "data" in kwargs:
    #   for data_dep in kwargs["data"]:
    #     self.converter.toplevel += textwrap.dedent("""\
    #       add_custom_command(
    #           TARGET %s POST_BUILD
    #           COMMAND ${CMAKE_COMMAND} -E copy
    #                   ${CMAKE_SOURCE_DIR}/%s
    #                   ${CMAKE_CURRENT_BINARY_DIR}/%s)\n""" % (
    #       kwargs["name"], data_dep, data_dep
    #     ))
    # self._add_deps(kwargs)
    pass

  def cc_fuzz_test(self, **kwargs):
    pass

  def pkg_files(self, **kwargs):
    pass

  def py_library(self, **kwargs):
    pass

  def py_binary(self, **kwargs):
    pass

  def lua_proto_library(self, **kwargs):
    pass

  def sh_test(self, **kwargs):
    pass

  def make_shell_script(self, **kwargs):
    pass

  def exports_files(self, files, **kwargs):
    pass

  def proto_library(self, **kwargs):
    pass

  def cc_proto_library(self, **kwargs):
    pass

  def staleness_test(self, **kwargs):
    pass

  def upb_amalgamation(self, **kwargs):
    pass

  def upb_proto_library(self, **kwargs):
    pass

  def upb_proto_library_copts(self, **kwargs):
    pass

  def upb_proto_reflection_library(self, **kwargs):
    pass

  def upb_proto_srcs(self, **kwargs):
    pass

  def genrule(self, **kwargs):
    pass

  def config_setting(self, **kwargs):
    pass

  def upb_fasttable_enabled(self, **kwargs):
    pass

  def select(self, arg_dict):
    return []

  def glob(self, *args, **kwargs):
    return []

  def licenses(self, *args):
    pass

  def filegroup(self, **kwargs):
    pass

  def map_dep(self, arg):
    return arg

  def package_group(self, **kwargs):
    pass

  def bool_flag(self, **kwargs):
    pass

  def bootstrap_upb_proto_library(self, **kwargs):
    pass

  def bootstrap_cc_library(self, **kwargs):
    pass

  def alias(self, **kwargs):
    pass
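
# Handlers for the WORKSPACE file and workspace_deps.bzl. Only workspace() and
# maybe() contribute to the output: the project name, the C standard, and the
# pinned utf8_range commit; everything else is ignored.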
class WorkspaceFileFunctions(object):
  def __init__(self, converter):
    self.converter = converter

  def load(self, *args, **kwargs):
    pass

  def workspace(self, **kwargs):
    self.converter.prelude += "project(%s)\n" % (kwargs["name"])
    self.converter.prelude += "set(CMAKE_C_STANDARD 99)\n"

  def maybe(self, rule, **kwargs):
    if kwargs["name"] == "utf8_range":
      self.converter.utf8_range_commit = kwargs["commit"]
    pass

  def http_archive(self, **kwargs):
    pass

  def git_repository(self, **kwargs):
    pass

  def new_git_repository(self, **kwargs):
    pass

  def bazel_version_repository(self, **kwargs):
    pass

  def protobuf_deps(self):
    pass

  def utf8_range_deps(self):
    pass

  def pip_parse(self, **kwargs):
    pass

  def rules_fuzzing_dependencies(self):
    pass

  def rules_fuzzing_init(self):
    pass

  def rules_pkg_dependencies(self):
    pass

  def system_python(self, **kwargs):
    pass

  def register_system_python(self, **kwargs):
    pass

  def register_toolchains(self, toolchain):
    pass

  def python_source_archive(self, **kwargs):
    pass

  def python_nuget_package(self, **kwargs):
    pass

  def install_deps(self):
    pass

  def fuzzing_py_install_deps(self):
    pass

  def googletest_deps(self):
    pass

  def local_repository(self, **kwargs):
    pass
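
# Accumulates the generated CMake fragments and renders them into the
# CMakeLists.txt template below.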
class Converter(object):
  def __init__(self):
    self.prelude = ""
    self.toplevel = ""
    self.if_lua = ""
    self.utf8_range_commit = ""

  def convert(self):
    return self.template % {
        "prelude": converter.prelude,
        "toplevel": converter.toplevel,
        "utf8_range_commit": converter.utf8_range_commit,
    }
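
  # Skeleton of the generated CMakeLists.txt; %(prelude)s, %(toplevel)s, and
  # %(utf8_range_commit)s are substituted in convert() above.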
  template = textwrap.dedent("""\
    # This file was generated from BUILD using tools/make_cmakelists.py.

    cmake_minimum_required(VERSION 3.10...3.24)

    %(prelude)s

    # Prevent CMake from setting -rdynamic on Linux (!!).
    SET(CMAKE_SHARED_LIBRARY_LINK_C_FLAGS "")
    SET(CMAKE_SHARED_LIBRARY_LINK_CXX_FLAGS "")

    # Set default build type.
    if(NOT CMAKE_BUILD_TYPE)
      message(STATUS "Setting build type to 'RelWithDebInfo' as none was specified.")
      set(CMAKE_BUILD_TYPE "RelWithDebInfo" CACHE STRING
          "Choose the type of build, options are: Debug Release RelWithDebInfo MinSizeRel."
          FORCE)
    endif()

    # When using Ninja, compiler output won't be colorized without this.
    include(CheckCXXCompilerFlag)
    CHECK_CXX_COMPILER_FLAG(-fdiagnostics-color=always SUPPORTS_COLOR_ALWAYS)
    if(SUPPORTS_COLOR_ALWAYS)
      set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-color=always")
    endif()

    # Implement ASAN/UBSAN options
    if(UPB_ENABLE_ASAN)
      set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address")
      set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fsanitize=address")
      set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=address")
      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fsanitize=address")
    endif()
    if(UPB_ENABLE_UBSAN)
      set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=undefined")
      set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fsanitize=undefined")
      set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=undefined")
      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fsanitize=undefined")
    endif()
    if(NOT TARGET utf8_range)
      if(EXISTS ../external/utf8_range)
        # utf8_range is already installed
        include_directories(../external/utf8_range)
      elseif(EXISTS ../../utf8_range)
        include_directories(../../utf8_range)
      else()
        include(FetchContent)
        FetchContent_Declare(
          utf8_range
          GIT_REPOSITORY "https://github.com/protocolbuffers/utf8_range.git"
          GIT_TAG "%(utf8_range_commit)s"
        )
        FetchContent_GetProperties(utf8_range)
        if(NOT utf8_range_POPULATED)
          FetchContent_Populate(utf8_range)
          include_directories(${utf8_range_SOURCE_DIR})
        endif()
      endif()
    endif()

    if(APPLE)
      set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -undefined dynamic_lookup -flat_namespace")
    elseif(UNIX)
      set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--build-id")
    endif()

    enable_testing()

    %(toplevel)s
  """)
data = {}
converter = Converter()

def GetDict(obj):
  ret = {}
  ret["UPB_DEFAULT_COPTS"] = []  # HACK
  ret["UPB_DEFAULT_CPPOPTS"] = []  # HACK
  for k in dir(obj):
    if not k.startswith("_"):
      ret[k] = getattr(obj, k)
  return ret

globs = GetDict(converter)
workspace_dict = GetDict(WorkspaceFileFunctions(converter))

# We take all file paths as command-line arguments to ensure that we can find
# each file regardless of how exactly Bazel was invoked.
exec(open(sys.argv[1]).read(), workspace_dict)  # workspace_deps.bzl
exec(open(sys.argv[2]).read(), workspace_dict)  # WORKSPACE
exec(open(sys.argv[3]).read(), GetDict(BuildFileFunctions(converter)))  # BUILD

with open(sys.argv[4], "w") as f:
  f.write(converter.convert())