generate build metadata from bazel BUILD

pull/21929/head
Jan Tattermusch 5 years ago
parent 8994302049
commit 41ac287f7b
  1. bazel/grpc_build_system.bzl (2)
  2. build.yaml (6369)
  3. build_handwritten.yaml (263)
  4. templates/gRPC-C++.podspec.template (31)
  5. templates/gRPC-Core.podspec.template (38)
  6. templates/src/core/plugin_registry.template (39)
  7. test/core/bad_client/gen_build_yaml.py (80)
  8. test/core/bad_ssl/gen_build_yaml.py (69)
  9. test/core/end2end/gen_build_yaml.py (140)
  10. test/cpp/naming/gen_build_yaml.py (97)
  11. test/cpp/qps/gen_build_yaml.py (15)
  12. tools/buildgen/build_cleaner.py (36)
  13. tools/buildgen/extract_metadata_from_bazel_xml.py (943)
  14. tools/buildgen/generate_build_additions.sh (7)
  15. tools/buildgen/generate_projects.py (2)
  16. tools/buildgen/generate_projects.sh (19)
  17. tools/buildgen/plugins/make_fuzzer_tests.py (8)
  18. tools/distrib/sanitize.sh (2)
  19. tools/internal_ci/linux/grpc_publish_packages.sh (2)
  20. tools/run_tests/sanity/check_version.py (2)

@@ -221,7 +221,7 @@ def grpc_cc_test(name, srcs = [], deps = [], external_deps = [], args = [], data
)
else:
# the test behavior doesn't depend on polling, just generate the test
native.cc_test(name = name, tags = tags, **args)
native.cc_test(name = name, tags = tags + ["no_uses_polling"], **args)
ios_cc_test(
name = name,
tags = tags,
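The new "no_uses_polling" tag added above lets downstream tooling tell non-polling tests apart once rules are parsed out of bazel query output. A minimal sketch of such a consumer, assuming the rule-dict shape used by the extraction script later in this diff (the target name is made up):

def uses_polling(rule):
    # a test is assumed to use polling unless it carries the tag
    return 'no_uses_polling' not in rule.get('tags', [])

rule = {'name': '//test/core/gpr:alloc_test', 'tags': ['no_uses_polling']}
assert not uses_polling(rule)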

File diff suppressed because it is too large.

@@ -0,0 +1,263 @@
'#1': This file describes the list of targets and dependencies.
'#2': It is used among other things to generate all of our project files.
'#3': Please refer to the templates directory for more information.
settings:
'#01': The public version number of the library.
'#02': ===
'#03': Please update the 'g_stands_for' field periodically with a new g word
'#04': not listed in doc/g_stands_for.md - and update that document to list the
'#05': new word. When doing so, please also update BUILD.
'#06': ===
'#07': Master always has a "-dev" suffix
'#08': Use "-preN" suffixes to identify pre-release versions
'#09': Per-language overrides are possible with (eg) ruby_version tag here
'#10': See the expand_version.py for all the quirks here
core_version: 9.0.0
csharp_major_version: 2
g_stands_for: gringotts
version: 1.29.0-dev
targets:
- name: check_epollexclusive
build: tool
language: c
src:
- test/build/check_epollexclusive.c
deps:
- grpc
- gpr
- name: gen_hpack_tables
build: tool
language: c++
src:
- tools/codegen/core/gen_hpack_tables.cc
deps:
- grpc
- gpr
uses_polling: false
- name: gen_legal_metadata_characters
build: tool
language: c++
src:
- tools/codegen/core/gen_legal_metadata_characters.cc
deps: []
- name: gen_percent_encoding_tables
build: tool
language: c++
src:
- tools/codegen/core/gen_percent_encoding_tables.cc
deps: []
uses_polling: false
vspackages:
- linkage: static
name: grpc.dependencies.zlib
props: false
redist: true
version: 1.2.8.10
- linkage: static
name: grpc.dependencies.openssl
props: true
redist: true
version: 1.0.204.1
- name: gflags
props: false
redist: false
version: 2.1.2.1
- name: gtest
props: false
redist: false
version: 1.7.0.1
configs:
asan:
CC: clang
CPPFLAGS: -O0 -fsanitize-coverage=edge,trace-pc-guard -fsanitize=address -fno-omit-frame-pointer
-Wno-unused-command-line-argument -DGPR_NO_DIRECT_SYSCALLS
CXX: clang++
LD: clang++
LDFLAGS: -fsanitize=address
LDXX: clang++
compile_the_world: true
test_environ:
ASAN_OPTIONS: detect_leaks=1:color=always
LSAN_OPTIONS: suppressions=test/core/util/lsan_suppressions.txt:report_objects=1
asan-noleaks:
CC: clang
CPPFLAGS: -O0 -fsanitize-coverage=edge,trace-pc-guard -fsanitize=address -fno-omit-frame-pointer
-Wno-unused-command-line-argument -DGPR_NO_DIRECT_SYSCALLS
CXX: clang++
LD: clang++
LDFLAGS: -fsanitize=address
LDXX: clang++
compile_the_world: true
test_environ:
ASAN_OPTIONS: detect_leaks=0:color=always
asan-trace-cmp:
CC: clang
CPPFLAGS: -O0 -fsanitize-coverage=edge,trace-pc-guard -fsanitize-coverage=trace-cmp
-fsanitize=address -fno-omit-frame-pointer -Wno-unused-command-line-argument
-DGPR_NO_DIRECT_SYSCALLS
CXX: clang++
LD: clang++
LDFLAGS: -fsanitize=address
LDXX: clang++
compile_the_world: true
test_environ:
ASAN_OPTIONS: detect_leaks=1:color=always
LSAN_OPTIONS: suppressions=test/core/util/lsan_suppressions.txt:report_objects=1
basicprof:
CPPFLAGS: -O2 -DGRPC_BASIC_PROFILER -DGRPC_TIMERS_RDTSC
DEFINES: NDEBUG
c++-compat:
CFLAGS: -Wc++-compat
CPPFLAGS: -O0
DEFINES: _DEBUG DEBUG
counters:
CPPFLAGS: -O2 -DGPR_LOW_LEVEL_COUNTERS
DEFINES: NDEBUG
counters_with_memory_counter:
CPPFLAGS: -O2 -DGPR_LOW_LEVEL_COUNTERS -DGPR_WRAP_MEMORY_COUNTER
DEFINES: NDEBUG
LDFLAGS: -Wl,--wrap=malloc -Wl,--wrap=calloc -Wl,--wrap=realloc -Wl,--wrap=free
dbg:
CPPFLAGS: -O0
DEFINES: _DEBUG DEBUG
gcov:
CC: gcc
CPPFLAGS: -O0 -fprofile-arcs -ftest-coverage -Wno-return-type
CXX: g++
DEFINES: _DEBUG DEBUG GPR_GCOV
LD: gcc
LDFLAGS: -fprofile-arcs -ftest-coverage -rdynamic -lstdc++
LDXX: g++
helgrind:
CPPFLAGS: -O0
DEFINES: _DEBUG DEBUG
LDFLAGS: -rdynamic
valgrind: --tool=helgrind
lto:
CPPFLAGS: -O2
DEFINES: NDEBUG
memcheck:
CPPFLAGS: -O0
DEFINES: _DEBUG DEBUG
LDFLAGS: -rdynamic
valgrind: --tool=memcheck --leak-check=full
msan:
CC: clang
CPPFLAGS: -O0 -stdlib=libc++ -fsanitize-coverage=edge,trace-pc-guard -fsanitize=memory
-fsanitize-memory-track-origins -fsanitize-memory-use-after-dtor -fno-omit-frame-pointer
-DGTEST_HAS_TR1_TUPLE=0 -DGTEST_USE_OWN_TR1_TUPLE=1 -Wno-unused-command-line-argument
-fPIE -pie -DGPR_NO_DIRECT_SYSCALLS
CXX: clang++
DEFINES: NDEBUG
LD: clang++
LDFLAGS: -stdlib=libc++ -fsanitize=memory -DGTEST_HAS_TR1_TUPLE=0 -DGTEST_USE_OWN_TR1_TUPLE=1
-fPIE -pie $(if $(JENKINS_BUILD),-Wl$(comma)-Ttext-segment=0x7e0000000000,)
LDXX: clang++
compile_the_world: true
test_environ:
MSAN_OPTIONS: poison_in_dtor=1
mutrace:
CPPFLAGS: -O3 -fno-omit-frame-pointer
DEFINES: NDEBUG
LDFLAGS: -rdynamic
noexcept:
CPPFLAGS: -O2 -Wframe-larger-than=16384
CXXFLAGS: -fno-exceptions
DEFINES: NDEBUG
opt:
CPPFLAGS: -O2 -Wframe-larger-than=16384
DEFINES: NDEBUG
stapprof:
CPPFLAGS: -O2 -DGRPC_STAP_PROFILER
DEFINES: NDEBUG
tsan:
CC: clang
CPPFLAGS: -O0 -fsanitize=thread -fno-omit-frame-pointer -Wno-unused-command-line-argument
-DGPR_NO_DIRECT_SYSCALLS
CXX: clang++
DEFINES: GRPC_TSAN
LD: clang++
LDFLAGS: -fsanitize=thread
LDXX: clang++
compile_the_world: true
test_environ:
TSAN_OPTIONS: suppressions=test/core/util/tsan_suppressions.txt:halt_on_error=1:second_deadlock_stack=1
ubsan:
CC: clang
CPPFLAGS: -O0 -stdlib=libc++ -fsanitize-coverage=edge,trace-pc-guard -fsanitize=undefined
-fno-omit-frame-pointer -Wno-unused-command-line-argument -Wvarargs
CXX: clang++
DEFINES: NDEBUG GRPC_UBSAN
LD: clang++
LDFLAGS: -stdlib=libc++ -fsanitize=undefined,unsigned-integer-overflow
LDXX: clang++
compile_the_world: true
test_environ:
UBSAN_OPTIONS: halt_on_error=1:print_stacktrace=1:suppressions=test/core/util/ubsan_suppressions.txt
defaults:
ares:
CFLAGS: -g
CPPFLAGS: -Ithird_party/cares -Ithird_party/cares/cares -fvisibility=hidden -D_GNU_SOURCE
$(if $(subst Darwin,,$(SYSTEM)),,-Ithird_party/cares/config_darwin) $(if $(subst
FreeBSD,,$(SYSTEM)),,-Ithird_party/cares/config_freebsd) $(if $(subst Linux,,$(SYSTEM)),,-Ithird_party/cares/config_linux)
$(if $(subst OpenBSD,,$(SYSTEM)),,-Ithird_party/cares/config_openbsd) -DWIN32_LEAN_AND_MEAN
-D_HAS_EXCEPTIONS=0 -DNOMINMAX $(if $(subst MINGW32,,$(SYSTEM)),-DHAVE_CONFIG_H,)
benchmark:
CPPFLAGS: -Ithird_party/benchmark/include -DHAVE_POSIX_REGEX
boringssl:
CFLAGS: -g
CPPFLAGS: -Ithird_party/boringssl-with-bazel/src/include -fvisibility=hidden -DOPENSSL_NO_ASM
-D_GNU_SOURCE -DWIN32_LEAN_AND_MEAN -D_HAS_EXCEPTIONS=0 -DNOMINMAX
CXXFLAGS: -fno-exceptions
global:
CFLAGS: -g
COREFLAGS: -fno-exceptions
CPPFLAGS: -g -Wall -Wextra -DOSATOMIC_USE_INLINED=1 -Ithird_party/abseil-cpp -Ithird_party/upb
-Isrc/core/ext/upb-generated
LDFLAGS: -g
zlib:
CFLAGS: -fvisibility=hidden
php_config_m4:
deps:
- grpc
- address_sorting
- boringssl
- z
headers:
- src/php/ext/grpc/byte_buffer.h
- src/php/ext/grpc/call.h
- src/php/ext/grpc/call_credentials.h
- src/php/ext/grpc/channel.h
- src/php/ext/grpc/channel_credentials.h
- src/php/ext/grpc/completion_queue.h
- src/php/ext/grpc/php7_wrapper.h
- src/php/ext/grpc/php_grpc.h
- src/php/ext/grpc/server.h
- src/php/ext/grpc/server_credentials.h
- src/php/ext/grpc/timeval.h
- src/php/ext/grpc/version.h
src:
- src/php/ext/grpc/byte_buffer.c
- src/php/ext/grpc/call.c
- src/php/ext/grpc/call_credentials.c
- src/php/ext/grpc/channel.c
- src/php/ext/grpc/channel_credentials.c
- src/php/ext/grpc/completion_queue.c
- src/php/ext/grpc/php_grpc.c
- src/php/ext/grpc/server.c
- src/php/ext/grpc/server_credentials.c
- src/php/ext/grpc/timeval.c
python_dependencies:
deps:
- grpc
- address_sorting
- ares
- boringssl
- z
ruby_gem:
deps:
- grpc
- address_sorting
- ares
- boringssl
- z
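A minimal sketch of reading the settings block above with PyYAML, in the spirit of the check_version.py change later in this diff; the inline snippet is a stand-in for build_handwritten.yaml:

import yaml

snippet = """
settings:
  version: 1.29.0-dev
  g_stands_for: gringotts
"""
settings = yaml.safe_load(snippet)['settings']
assert settings['version'].endswith('-dev')  # master always has a "-dev" suffix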

@@ -60,42 +60,43 @@
files.update(lib.get(field, []))
return list(sorted(files))
def list_filegroup_files(expect_filegroups, groups):
out = []
for filegroup_name in expect_filegroups:
filegroup = filegroup_maps[filegroup_name]
for group in groups:
out += filegroup.get(group, [])
return list(sorted(set(out)))
def filter_grpcpp(files):
return sorted([file for file in files if not file.startswith("include/grpc++")])
grpc_private_files = list_lib_files("grpc", ("headers", "src"))
grpc_public_headers = list_lib_files("grpc", ("public_headers",))
grpc_private_headers = list_lib_files("grpc", ("headers",))
# ObjectiveC doesn't use c-ares so we don't need address_sorting files at all
address_sorting_unwanted_files = list_lib_files("address_sorting", ("public_headers", "headers", "src"))
grpc_private_files = list(sorted(set(list_lib_files("grpc", ("headers", "src"))) - set(address_sorting_unwanted_files)))
grpc_public_headers = list(sorted(set(list_lib_files("grpc", ("public_headers",))) - set(address_sorting_unwanted_files)))
grpc_private_headers = list(sorted(set(list_lib_files("grpc", ("headers",))) - set(address_sorting_unwanted_files)))
# TODO(jtattermusch): build.yaml no longer has filegroups, so the files here are just hand-listed
# This template shouldn't be touching the filegroups anyway, so this is only a bit more fragile.
grpcpp_proto_files = ['include/grpcpp/impl/codegen/config_protobuf.h',
'include/grpcpp/impl/codegen/proto_buffer_reader.h',
'include/grpcpp/impl/codegen/proto_buffer_writer.h',
'include/grpcpp/impl/codegen/proto_utils.h']
grpcpp_proto_files = filter_grpcpp(
set(list_filegroup_files(("grpc++_codegen_proto", "grpc++_config_proto"), ("headers", "src", "public_headers")))
- set(list_filegroup_files(("grpc++_codegen_base",), ("headers", "src", "public_headers")))
)
grpcpp_private_files = filter_grpcpp(
set(list_lib_files("grpc++", ("headers", "src")))
- set(grpc_private_files)
# We exclude proto related files in this particular podspec so that we can provide a protobuf-independent lib
- set(grpcpp_proto_files)
- set(address_sorting_unwanted_files)
)
grpcpp_private_headers = filter_grpcpp(
set(list_lib_files("grpc++", ("headers",)))
- set(grpc_private_headers)
# We exclude proto related files in this particular podspec so that we can provide a protobuf-independent lib
- set(grpcpp_proto_files)
- set(address_sorting_unwanted_files)
)
grpcpp_public_headers = filter_grpcpp(
set(list_lib_files("grpc++", ("public_headers",)))
- set(grpc_public_headers)
# We exclude proto related files in this particular podspec so that we can provide a protobuf-independent lib
- set(grpcpp_proto_files)
- set(address_sorting_unwanted_files)
)
grpcpp_abseil_specs = list_abseil_specs("grpc++")
%>
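The template above derives its file lists by plain set arithmetic. The same computation in miniature, with made-up file names:

grpc_private = {'src/core/lib/surface/call.cc'}
grpcpp_all = {'src/core/lib/surface/call.cc', 'src/cpp/server/server_cc.cc',
              'include/grpc++/server.h'}
proto_files = set()  # stand-in for the grpcpp_proto_files computed above

# subtract what grpc already ships, drop protos, and apply the same
# filter as filter_grpcpp
grpcpp_private = sorted(f for f in (grpcpp_all - grpc_private - proto_files)
                        if not f.startswith('include/grpc++'))
assert grpcpp_private == ['src/cpp/server/server_cc.cc']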

@@ -59,27 +59,35 @@
files.update(lib.get(field, []))
return list(sorted(files))
grpc_private_files = list_lib_files("grpc", ("headers", "src"))
grpc_public_headers = list_lib_files("grpc", ("public_headers",))
grpc_private_headers = list_lib_files("grpc", ("headers",))
# ObjectiveC doesn't use c-ares so we don't need address_sorting files at all
address_sorting_unwanted_files = list_lib_files("address_sorting", ("public_headers", "headers", "src"))
grpc_private_files = list(sorted(set(list_lib_files("grpc", ("headers", "src"))) - set(address_sorting_unwanted_files)))
grpc_public_headers = list(sorted(set(list_lib_files("grpc", ("public_headers",))) - set(address_sorting_unwanted_files)))
grpc_private_headers = list(sorted(set(list_lib_files("grpc", ("headers",))) - set(address_sorting_unwanted_files)))
grpc_abseil_specs = list_abseil_specs("grpc")
grpc_cronet_files = list(sorted(
set(list_lib_files("grpc_cronet", ("src", "headers")))
- set(grpc_private_files)
- set([
# We do not need cronet dedicated plugin registry
"src/core/ext/transport/cronet/plugin_registry/grpc_cronet_plugin_registry.cc",
# We do not need dummy cronet API for ObjC
"src/core/ext/transport/cronet/transport/cronet_api_dummy.cc",
])))
grpc_cronet_public_headers = list(sorted(
set(list_lib_files("grpc_cronet", ("public_headers",)))
- set(grpc_public_headers)))
# TODO(jtattermusch): build.yaml is now generated from bazel build
# which doesn't have an explicit "grpc_cronet" target. Until it exists
# we construct the list of files by taking what's in the "grpc" target
# and adding a few files on top of that.
grpc_cronet_extra_public_headers = ['include/grpc/grpc_cronet.h']
grpc_cronet_extra_impl_files = [
'src/core/ext/transport/cronet/client/secure/cronet_channel_create.cc',
'src/core/ext/transport/cronet/client/secure/cronet_channel_create.h',
'src/core/ext/transport/cronet/transport/cronet_transport.cc',
'src/core/ext/transport/cronet/transport/cronet_transport.h',
'third_party/objective_c/Cronet/bidirectional_stream_c.h'
]
grpc_cronet_files = list(sorted(grpc_cronet_extra_impl_files))
grpc_cronet_public_headers = list(sorted(grpc_cronet_extra_public_headers))
grpc_test_util_files = list(sorted(
set(list_lib_files("end2end_tests", ("src", "headers")))
- set(grpc_private_files)
- set(address_sorting_unwanted_files)
- set([
# Subprocess is not supported in tvOS and not needed by our tests.
"test/core/util/subprocess_posix.cc",

@@ -1,39 +0,0 @@
%YAML 1.2
---
foreach: libs
cond: selected.get('generate_plugin_registry', False)
output_name: ${selected.name}_plugin_registry.cc
template: |
/*
*
* Copyright 2016 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <grpc/support/port_platform.h>
#include <grpc/grpc.h>
%for plugin in selected.plugins:
void ${plugin}_init(void);
void ${plugin}_shutdown(void);
%endfor
void grpc_register_built_in_plugins(void) {
%for plugin in selected.plugins:
grpc_register_plugin(${plugin}_init,
${plugin}_shutdown);
%endfor
}

@@ -1,80 +0,0 @@
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates the appropriate build.json data for all the bad_client tests."""
from __future__ import print_function
import collections
import yaml
TestOptions = collections.namedtuple('TestOptions', 'flaky cpu_cost')
default_test_options = TestOptions(False, 1.0)
# maps test names to options
BAD_CLIENT_TESTS = {
'badreq': default_test_options,
'bad_streaming_id': default_test_options,
'connection_prefix': default_test_options._replace(cpu_cost=0.2),
'duplicate_header': default_test_options,
'headers': default_test_options._replace(cpu_cost=0.2),
'initial_settings_frame': default_test_options._replace(cpu_cost=0.2),
'head_of_line_blocking': default_test_options,
'large_metadata': default_test_options,
'out_of_bounds': default_test_options,
'server_registered_method': default_test_options,
'simple_request': default_test_options,
'window_overflow': default_test_options,
'unknown_frame': default_test_options,
}
def main():
json = {
'#':
'generated with test/bad_client/gen_build_json.py',
'libs': [{
'name': 'bad_client_test',
'build': 'private',
'language': 'c++',
'src': ['test/core/bad_client/bad_client.cc'],
'headers': ['test/core/bad_client/bad_client.h'],
'vs_proj_dir': 'test/bad_client',
'deps': ['grpc_test_util_unsecure', 'grpc_unsecure', 'gpr']
}],
'targets': [{
'name':
'%s_bad_client_test' % t,
'cpu_cost':
BAD_CLIENT_TESTS[t].cpu_cost,
'build':
'test',
'language':
'c++',
'secure':
False,
'src': ['test/core/bad_client/tests/%s.cc' % t],
'vs_proj_dir':
'test',
'exclude_iomgrs': ['uv'],
'deps': [
'bad_client_test', 'grpc_test_util_unsecure', 'grpc_unsecure',
'gpr'
]
} for t in sorted(BAD_CLIENT_TESTS.keys())]
}
print(yaml.dump(json))
if __name__ == '__main__':
main()

@@ -1,69 +0,0 @@
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates the appropriate build.json data for all the end2end tests."""
import collections
import yaml
TestOptions = collections.namedtuple('TestOptions', 'flaky cpu_cost')
default_test_options = TestOptions(False, 1.0)
# maps test names to options
BAD_CLIENT_TESTS = {
'cert': default_test_options._replace(cpu_cost=0.1),
# Disabling this test because it does not link correctly as written
# 'alpn': default_test_options._replace(cpu_cost=0.1),
}
def main():
json = {
'#':
'generated with test/bad_ssl/gen_build_json.py',
'libs': [{
'name': 'bad_ssl_test_server',
'build': 'private',
'language': 'c',
'src': ['test/core/bad_ssl/server_common.cc'],
'headers': ['test/core/bad_ssl/server_common.h'],
'vs_proj_dir': 'test',
'platforms': ['linux', 'posix', 'mac'],
'deps': ['grpc_test_util', 'grpc', 'gpr']
}],
'targets': [{
'name': 'bad_ssl_%s_server' % t,
'build': 'test',
'language': 'c',
'run': False,
'src': ['test/core/bad_ssl/servers/%s.cc' % t],
'vs_proj_dir': 'test/bad_ssl',
'platforms': ['linux', 'posix', 'mac'],
'deps': ['bad_ssl_test_server', 'grpc_test_util', 'grpc', 'gpr']
} for t in sorted(BAD_CLIENT_TESTS.keys())] + [{
'name': 'bad_ssl_%s_test' % t,
'cpu_cost': BAD_CLIENT_TESTS[t].cpu_cost,
'build': 'test',
'language': 'c',
'src': ['test/core/bad_ssl/bad_ssl_test.cc'],
'vs_proj_dir': 'test',
'platforms': ['linux', 'posix', 'mac'],
'deps': ['grpc_test_util', 'grpc', 'gpr']
} for t in sorted(BAD_CLIENT_TESTS.keys())]
}
print yaml.dump(json)
if __name__ == '__main__':
main()

@@ -401,140 +401,16 @@ def without(l, e):
return l
# Originally, this method was used to generate end2end test cases for build.yaml,
# but since the test cases are now extracted from the bazel BUILD file,
# this is not used for generating run_tests.py test cases anymore.
# Nevertheless, a subset of the output is still used by end2end_tests.cc.template
# and end2end_nosec_tests.cc.template.
# TODO(jtattermusch): clean up this file so that it only generates the data we need.
# Right now there's some duplication between generate_tests.bzl and this file.
def main():
sec_deps = ['grpc_test_util', 'grpc', 'gpr']
unsec_deps = ['grpc_test_util_unsecure', 'grpc_unsecure', 'gpr']
json = {
'#':
'generated with test/end2end/gen_build_json.py',
'libs': [{
'name':
'end2end_tests',
'build':
'private',
'language':
'c',
'secure':
True,
'src': [
'test/core/end2end/end2end_tests.cc',
'test/core/end2end/end2end_test_utils.cc'
] + [
'test/core/end2end/tests/%s.cc' % t
for t in sorted(END2END_TESTS.keys())
],
'headers': [
'test/core/end2end/tests/cancel_test_helpers.h',
'test/core/end2end/end2end_tests.h'
],
'deps':
sec_deps,
'vs_proj_dir':
'test/end2end/tests',
}] + [{
'name':
'end2end_nosec_tests',
'build':
'private',
'language':
'c',
'secure':
False,
'src': [
'test/core/end2end/end2end_nosec_tests.cc',
'test/core/end2end/end2end_test_utils.cc'
] + [
'test/core/end2end/tests/%s.cc' % t
for t in sorted(END2END_TESTS.keys())
if not END2END_TESTS[t].secure
],
'headers': [
'test/core/end2end/tests/cancel_test_helpers.h',
'test/core/end2end/end2end_tests.h'
],
'deps':
unsec_deps,
'vs_proj_dir':
'test/end2end/tests',
}],
'targets': [{
'name': '%s_test' % f,
'build': 'test',
'language': 'c',
'run': False,
'src': ['test/core/end2end/fixtures/%s.cc' % f],
'platforms': END2END_FIXTURES[f].platforms,
'ci_platforms':
(END2END_FIXTURES[f].platforms if END2END_FIXTURES[f].ci_mac
else without(END2END_FIXTURES[f].platforms, 'mac')),
'deps': ['end2end_tests'] + sec_deps,
'vs_proj_dir': 'test/end2end/fixtures',
} for f in sorted(END2END_FIXTURES.keys())] + [{
'name': '%s_nosec_test' % f,
'build': 'test',
'language': 'c',
'secure': False,
'src': ['test/core/end2end/fixtures/%s.cc' % f],
'run': False,
'platforms': END2END_FIXTURES[f].platforms,
'ci_platforms':
(END2END_FIXTURES[f].platforms if END2END_FIXTURES[f].ci_mac
else without(END2END_FIXTURES[f].platforms, 'mac')),
'deps': ['end2end_nosec_tests'] + unsec_deps,
'vs_proj_dir': 'test/end2end/fixtures',
} for f in sorted(
END2END_FIXTURES.keys()) if not END2END_FIXTURES[f].secure],
'tests': [{
'name':
'%s_test' % f,
'args': [t],
'exclude_configs':
END2END_FIXTURES[f].exclude_configs,
'exclude_iomgrs':
list(
set(END2END_FIXTURES[f].exclude_iomgrs) |
set(END2END_TESTS[t].exclude_iomgrs)),
'platforms':
END2END_FIXTURES[f].platforms,
'ci_platforms':
(END2END_FIXTURES[f].platforms if END2END_FIXTURES[f].ci_mac
else without(END2END_FIXTURES[f].platforms, 'mac')),
'flaky':
END2END_TESTS[t].flaky,
'language':
'c',
'cpu_cost':
END2END_TESTS[t].cpu_cost,
}
for f in sorted(END2END_FIXTURES.keys())
for t in sorted(END2END_TESTS.keys())
if compatible(f, t)] +
[{
'name':
'%s_nosec_test' % f,
'args': [t],
'exclude_configs':
END2END_FIXTURES[f].exclude_configs,
'exclude_iomgrs':
list(
set(END2END_FIXTURES[f].exclude_iomgrs) |
set(END2END_TESTS[t].exclude_iomgrs)),
'platforms':
END2END_FIXTURES[f].platforms,
'ci_platforms':
(END2END_FIXTURES[f].platforms
if END2END_FIXTURES[f].ci_mac else without(
END2END_FIXTURES[f].platforms, 'mac')),
'flaky':
END2END_TESTS[t].flaky,
'language':
'c',
'cpu_cost':
END2END_TESTS[t].cpu_cost,
} for f in sorted(END2END_FIXTURES.keys())
if not END2END_FIXTURES[f].secure
for t in sorted(END2END_TESTS.keys())
if compatible(f, t) and not END2END_TESTS[t].secure],
# needed by end2end_tests.cc.template and end2end_nosec_tests.cc.template
'core_end2end_tests':
dict((t, END2END_TESTS[t].secure) for t in END2END_TESTS.keys())
}

@@ -68,104 +68,9 @@ def main():
json = {
'resolver_tests_common_zone_name':
resolver_component_data['resolver_tests_common_zone_name'],
# this data is required by the resolver_component_tests_runner.py.template
'resolver_component_test_cases':
_resolver_test_cases(resolver_component_data),
'targets': [{
'name':
'resolver_component_test' + unsecure_build_config_suffix,
'build':
'test',
'language':
'c++',
'gtest':
False,
'run':
False,
'src': ['test/cpp/naming/resolver_component_test.cc'],
'platforms': ['linux', 'posix', 'mac', 'windows'],
'deps': [
'dns_test_util',
'grpc++_test_util' + unsecure_build_config_suffix,
'grpc_test_util' + unsecure_build_config_suffix,
'grpc++' + unsecure_build_config_suffix,
'grpc' + unsecure_build_config_suffix,
'gpr',
'grpc++_test_config',
],
} for unsecure_build_config_suffix in ['_unsecure', '']] + [{
'name':
'resolver_component_tests_runner_invoker' +
unsecure_build_config_suffix,
'build':
'test',
'language':
'c++',
'gtest':
False,
'run':
True,
'src':
['test/cpp/naming/resolver_component_tests_runner_invoker.cc'],
'platforms': ['linux', 'posix', 'mac'],
'deps': [
'grpc++_test_util',
'grpc_test_util',
'grpc++',
'grpc',
'gpr',
'grpc++_test_config',
],
'args': [
'--test_bin_name=resolver_component_test%s' %
unsecure_build_config_suffix,
'--running_under_bazel=false',
],
} for unsecure_build_config_suffix in ['_unsecure', '']] + [{
'name':
'address_sorting_test' + unsecure_build_config_suffix,
'build':
'test',
'language':
'c++',
'gtest':
True,
'run':
True,
'src': ['test/cpp/naming/address_sorting_test.cc'],
'platforms': ['linux', 'posix', 'mac', 'windows'],
'deps': [
'grpc++_test_util' + unsecure_build_config_suffix,
'grpc_test_util' + unsecure_build_config_suffix,
'grpc++' + unsecure_build_config_suffix,
'grpc' + unsecure_build_config_suffix,
'gpr',
'grpc++_test_config',
],
} for unsecure_build_config_suffix in ['_unsecure', '']] + [
{
'name':
'cancel_ares_query_test',
'build':
'test',
'language':
'c++',
'gtest':
True,
'run':
True,
'src': ['test/cpp/naming/cancel_ares_query_test.cc'],
'platforms': ['linux', 'posix', 'mac', 'windows'],
'deps': [
'dns_test_util',
'grpc++_test_util',
'grpc_test_util',
'grpc++',
'grpc',
'gpr',
'grpc++_test_config',
],
},
]
}
print(yaml.dump(json))

@@ -29,8 +29,9 @@ sys.path.append(run_tests_root)
import performance.scenario_config as scenario_config
configs_from_yaml = yaml.load(
open(os.path.join(os.path.dirname(sys.argv[0]),
'../../../build.yaml')))['configs'].keys()
open(
os.path.join(os.path.dirname(sys.argv[0]),
'../../../build_handwritten.yaml')))['configs'].keys()
def mutate_scenario(scenario_json, is_tsan):
@@ -87,6 +88,13 @@ def maybe_exclude_gcov(scenario_json):
return []
# Originally, this method was used to generate qps test cases for build.yaml,
# but since the test cases are now extracted from the bazel BUILD file,
# this is not used for generating run_tests.py test cases anymore.
# Nevertheless, the output is still used by json_run_localhost_scenario_gen.py
# and qps_json_driver_scenario_gen.py to generate the scenario list for bazel.
# TODO(jtattermusch): clean up this file so that it only generates the data
# needed by bazel.
def generate_yaml():
return {
'tests':
@@ -187,6 +195,3 @@ def generate_yaml():
for scenario_json in scenario_config.CXXLanguage().scenarios()
if 'scalable' in scenario_json.get('CATEGORIES', [])]
}
print(yaml.dump(generate_yaml()))

@@ -38,7 +38,7 @@ def repr_ordered_dict(dumper, odict):
yaml.add_representer(collections.OrderedDict, repr_ordered_dict)
def rebuild_as_ordered_dict(indict, special_keys):
def _rebuild_as_ordered_dict(indict, special_keys):
outdict = collections.OrderedDict()
for key in sorted(indict.keys()):
if '#' in key:
@@ -53,23 +53,22 @@ def rebuild_as_ordered_dict(indict, special_keys):
return outdict
def clean_elem(indict):
def _clean_elem(indict):
for name in ['public_headers', 'headers', 'src']:
if name not in indict: continue
inlist = indict[name]
protos = list(x for x in inlist if os.path.splitext(x)[1] == '.proto')
others = set(x for x in inlist if x not in protos)
indict[name] = protos + sorted(others)
return rebuild_as_ordered_dict(indict, _ELEM_KEYS)
return _rebuild_as_ordered_dict(indict, _ELEM_KEYS)
for filename in sys.argv[1:]:
with open(filename) as f:
js = yaml.load(f)
js = rebuild_as_ordered_dict(js, _TOP_LEVEL_KEYS)
def cleaned_build_yaml_dict_as_string(indict):
"""Takes dictionary which represents yaml file and returns the cleaned-up yaml string"""
js = _rebuild_as_ordered_dict(indict, _TOP_LEVEL_KEYS)
for grp in ['filegroups', 'libs', 'targets']:
if grp not in js: continue
js[grp] = sorted([clean_elem(x) for x in js[grp]],
js[grp] = sorted([_clean_elem(x) for x in js[grp]],
key=lambda x: (x.get('language', '_'), x['name']))
output = yaml.dump(js, indent=2, width=80, default_flow_style=False)
# massage out trailing whitespace
@@ -77,9 +76,20 @@ for filename in sys.argv[1:]:
for line in output.splitlines():
lines.append(line.rstrip() + '\n')
output = ''.join(lines)
if TEST:
return output
if __name__ == '__main__':
for filename in sys.argv[1:]:
with open(filename) as f:
assert f.read() == output
else:
with open(filename, 'w') as f:
f.write(output)
js = yaml.load(f)
output = cleaned_build_yaml_dict_as_string(js)
if TEST:
with open(filename) as f:
if not f.read() == output:
raise Exception(
'Looks like build-cleaner.py has not been run for file "%s"?'
% filename)
else:
with open(filename, 'w') as f:
f.write(output)

@@ -0,0 +1,943 @@
#!/usr/bin/env python
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import yaml
import xml.etree.ElementTree as ET
import os
import sys
import build_cleaner
_ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
os.chdir(_ROOT)
def _bazel_query_xml_tree(query):
"""Get xml output of bazel query invocation, parsed as XML tree"""
output = subprocess.check_output(
['tools/bazel', 'query', '--noimplicit_deps', '--output', 'xml', query])
return ET.fromstring(output)
def _rule_dict_from_xml_node(rule_xml_node):
result = {
'class': rule_xml_node.attrib.get('class'),
'name': rule_xml_node.attrib.get('name'),
'srcs': [],
'hdrs': [],
'deps': [],
'data': [],
'tags': [],
'args': [],
'generator_function': None,
'size': None,
}
for child in rule_xml_node:
# all the metadata we want is stored under "list" tags
if child.tag == 'list':
list_name = child.attrib['name']
if list_name in ['srcs', 'hdrs', 'deps', 'data', 'tags', 'args']:
result[list_name] += [item.attrib['value'] for item in child]
if child.tag == 'string':
string_name = child.attrib['name']
if string_name in ['generator_function', 'size']:
result[string_name] = child.attrib['value']
return result
def _extract_rules_from_bazel_xml(xml_tree):
result = {}
for child in xml_tree:
if child.tag == 'rule':
rule_dict = _rule_dict_from_xml_node(child)
rule_clazz = rule_dict['class']
rule_name = rule_dict['name']
if rule_clazz in [
'cc_library', 'cc_binary', 'cc_test', 'cc_proto_library',
'proto_library'
]:
if rule_name in result:
raise Exception('Rule %s already present' % rule_name)
result[rule_name] = rule_dict
return result
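A self-contained sketch of what these two helpers do to a hand-written fragment of bazel query --output xml (rule name and label values are made up):

import xml.etree.ElementTree as ET

xml_snippet = """
<query>
  <rule class="cc_library" name="//:gpr">
    <list name="srcs"><label value="//:src/core/lib/gpr/alloc.cc"/></list>
    <list name="deps"><label value="//:gpr_platform"/></list>
  </rule>
</query>
"""
tree = ET.fromstring(xml_snippet)
for rule in tree:
    if rule.tag == 'rule':
        # pull the 'srcs' list the same way _rule_dict_from_xml_node does
        srcs = [item.attrib['value']
                for child in rule
                if child.tag == 'list' and child.attrib['name'] == 'srcs'
                for item in child]
        print(rule.attrib['name'], srcs)  # //:gpr ['//:src/core/lib/gpr/alloc.cc']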
def _get_bazel_label(target_name):
if ':' in target_name:
return '//%s' % target_name
else:
return '//:%s' % target_name
def _extract_source_file_path(label):
"""Gets relative path to source file from bazel deps listing"""
if label.startswith('//'):
label = label[len('//'):]
# labels in form //:src/core/lib/surface/call_test_only.h
if label.startswith(':'):
label = label[len(':'):]
# labels in form //test/core/util:port.cc
label = label.replace(':', '/')
return label
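A runnable check of the label-to-path conversion, with the expected results taken from the comments above (the function is copied so the snippet stands alone):

def _extract_source_file_path(label):  # copied from above
    if label.startswith('//'):
        label = label[len('//'):]
    if label.startswith(':'):
        label = label[len(':'):]
    return label.replace(':', '/')

assert (_extract_source_file_path('//:src/core/lib/surface/call_test_only.h')
        == 'src/core/lib/surface/call_test_only.h')
assert _extract_source_file_path('//test/core/util:port.cc') == 'test/core/util/port.cc'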
def _extract_public_headers(bazel_rule):
"""Gets list of public headers from a bazel rule"""
result = []
for dep in bazel_rule['hdrs']:
if dep.startswith('//:include/') and dep.endswith('.h'):
result.append(_extract_source_file_path(dep))
return list(sorted(result))
def _extract_nonpublic_headers(bazel_rule):
"""Gets list of non-public headers from a bazel rule"""
result = []
for dep in bazel_rule['hdrs']:
if dep.startswith('//') and not dep.startswith(
'//:include/') and dep.endswith('.h'):
result.append(_extract_source_file_path(dep))
return list(sorted(result))
def _extract_sources(bazel_rule):
"""Gets list of source files from a bazel rule"""
result = []
for dep in bazel_rule['srcs']:
if dep.startswith('//') and (dep.endswith('.cc') or dep.endswith('.c')
or dep.endswith('.proto')):
result.append(_extract_source_file_path(dep))
return list(sorted(result))
def _extract_deps(bazel_rule):
"""Gets list of deps from from a bazel rule"""
return list(sorted(bazel_rule['deps']))
def _create_target_from_bazel_rule(target_name, bazel_rules):
# extract the deps from bazel
bazel_rule = bazel_rules[_get_bazel_label(target_name)]
result = {
'name': target_name,
'_PUBLIC_HEADERS_BAZEL': _extract_public_headers(bazel_rule),
'_HEADERS_BAZEL': _extract_nonpublic_headers(bazel_rule),
'_SRC_BAZEL': _extract_sources(bazel_rule),
'_DEPS_BAZEL': _extract_deps(bazel_rule),
}
return result
def _sort_by_build_order(lib_names, lib_dict, deps_key_name, verbose=False):
"""Sort library names to form correct build order. Use metadata from lib_dict"""
# we find correct build order by performing a topological sort
# expected output: if library B depends on A, A should be listed first
# all libs that are not in the dictionary are considered external.
external_deps = list(
sorted(filter(lambda lib_name: lib_name not in lib_dict, lib_names)))
if verbose:
print('topo_ordering ' + str(lib_names))
print(' external_deps ' + str(external_deps))
result = list(external_deps) # external deps will be listed first
while len(result) < len(lib_names):
more_results = []
for lib in lib_names:
if lib not in result:
dep_set = set(lib_dict[lib].get(deps_key_name, []))
dep_set = dep_set.intersection(lib_names)
# if lib only depends on what's already built, add it to the results
if not dep_set.difference(set(result)):
more_results.append(lib)
if not more_results:
raise Exception(
'Cannot sort topologically, there seems to be a cyclic dependency'
)
if verbose:
print(' adding ' + str(more_results))
result = result + list(
sorted(more_results
)) # when build order doesn't matter, sort lexicographically
return result
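A quick sanity check for the sort above (assumes _sort_by_build_order is in scope; the library names are made up). 'zlib' is absent from lib_dict, so it is treated as external and listed first:

lib_dict = {'gpr': {'deps': []}, 'grpc': {'deps': ['gpr', 'zlib']}}
order = _sort_by_build_order(['grpc', 'gpr', 'zlib'], lib_dict, 'deps')
assert order == ['zlib', 'gpr', 'grpc']  # dependencies before dependents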
# TODO(jtattermusch): deduplicate with transitive_dependencies.py (which has a slightly different logic)
def _populate_transitive_deps(bazel_rules):
"""Add 'transitive_deps' field for each of the rules"""
transitive_deps = {}
for rule_name in bazel_rules.iterkeys():
transitive_deps[rule_name] = set(bazel_rules[rule_name]['deps'])
while True:
deps_added = 0
for rule_name in bazel_rules.iterkeys():
old_deps = transitive_deps[rule_name]
new_deps = set(old_deps)
for dep_name in old_deps:
new_deps.update(transitive_deps.get(dep_name, set()))
deps_added += len(new_deps) - len(old_deps)
transitive_deps[rule_name] = new_deps
# if none of the transitive dep sets has changed, we're done
if deps_added == 0:
break
for rule_name, bazel_rule in bazel_rules.iteritems():
bazel_rule['transitive_deps'] = list(sorted(transitive_deps[rule_name]))
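A toy run of the fixpoint above (assumes _populate_transitive_deps is in scope and, like the rest of this script, Python 2 for iterkeys/iteritems): a -> b -> c collapses into a's transitive deps.

rules = {'a': {'deps': ['b']}, 'b': {'deps': ['c']}, 'c': {'deps': []}}
_populate_transitive_deps(rules)
assert rules['a']['transitive_deps'] == ['b', 'c']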
def _external_dep_name_from_bazel_dependency(bazel_dep):
"""Returns name of dependency if external bazel dependency is provided or None"""
if bazel_dep.startswith('@com_google_absl//'):
# special case for adding a dependency on one of the absl libraries (there is not just one absl library)
prefixlen = len('@com_google_absl//')
return bazel_dep[prefixlen:]
elif bazel_dep == '//external:upb_lib':
return 'upb'
elif bazel_dep == '//external:benchmark':
return 'benchmark'
else:
# all the other external deps such as gflags, protobuf, cares, zlib
# don't need to be listed explicitly, they are handled automatically
# by the build system (make, cmake)
return None
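Expected mappings, derived from the branches above (assumes the function is in scope):

assert (_external_dep_name_from_bazel_dependency('@com_google_absl//absl/strings')
        == 'absl/strings')
assert _external_dep_name_from_bazel_dependency('//external:upb_lib') == 'upb'
# gflags, protobuf, cares, zlib etc. are handled by the build system itself
assert _external_dep_name_from_bazel_dependency('//external:cares') is None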
def _expand_intermediate_deps(target_dict, public_dep_names, bazel_rules):
# Some of the libraries defined by bazel won't be exposed in build.yaml.
# We call these "intermediate" dependencies. This method expands
# the intermediate deps for the given target (populates the library's
# headers, sources and deps as if the intermediate dependency never existed).
# use this dictionary to translate from bazel labels to dep names
bazel_label_to_dep_name = {}
for dep_name in public_dep_names:
bazel_label_to_dep_name[_get_bazel_label(dep_name)] = dep_name
target_name = target_dict['name']
bazel_deps = target_dict['_DEPS_BAZEL']
# initial values
public_headers = set(target_dict['_PUBLIC_HEADERS_BAZEL'])
headers = set(target_dict['_HEADERS_BAZEL'])
src = set(target_dict['_SRC_BAZEL'])
deps = set()
expansion_blacklist = set()
to_expand = set(bazel_deps)
while to_expand:
# start with the last dependency to be built
build_order = _sort_by_build_order(list(to_expand), bazel_rules,
'transitive_deps')
bazel_dep = build_order[-1]
to_expand.remove(bazel_dep)
is_public = bazel_dep in bazel_label_to_dep_name
external_dep_name_maybe = _external_dep_name_from_bazel_dependency(
bazel_dep)
if is_public:
# this is not an intermediate dependency, so we add it
# to the list of public dependencies, in the right format
deps.add(bazel_label_to_dep_name[bazel_dep])
# we do not want to expand any intermediate libraries that are already included
# by the dependency we just added
expansion_blacklist.update(
bazel_rules[bazel_dep]['transitive_deps'])
elif external_dep_name_maybe:
deps.add(external_dep_name_maybe)
elif bazel_dep.startswith(
'//external:') or not bazel_dep.startswith('//'):
# all the other external deps can be skipped
pass
elif bazel_dep in expansion_blacklist:
# do not expand if a public dependency that depends on this has already been expanded
pass
else:
if bazel_dep in bazel_rules:
# this is an intermediate library, expand it
public_headers.update(
_extract_public_headers(bazel_rules[bazel_dep]))
headers.update(
_extract_nonpublic_headers(bazel_rules[bazel_dep]))
src.update(_extract_sources(bazel_rules[bazel_dep]))
new_deps = _extract_deps(bazel_rules[bazel_dep])
to_expand.update(new_deps)
else:
raise Exception(bazel_dep + ' not in bazel_rules')
# make the 'deps' field transitive, but only list non-intermediate deps and selected external deps
bazel_transitive_deps = bazel_rules[_get_bazel_label(
target_name)]['transitive_deps']
for transitive_bazel_dep in bazel_transitive_deps:
public_name = bazel_label_to_dep_name.get(transitive_bazel_dep, None)
if public_name:
deps.add(public_name)
external_dep_name_maybe = _external_dep_name_from_bazel_dependency(
transitive_bazel_dep)
if external_dep_name_maybe:
# expanding all absl libraries is technically correct but creates too much noise
if not external_dep_name_maybe.startswith('absl'):
deps.add(external_dep_name_maybe)
target_dict['public_headers'] = list(sorted(public_headers))
target_dict['headers'] = list(sorted(headers))
target_dict['src'] = list(sorted(src))
target_dict['deps'] = list(sorted(deps))
def _generate_build_metadata(build_extra_metadata, bazel_rules):
lib_names = build_extra_metadata.keys()
result = {}
for lib_name in lib_names:
lib_dict = _create_target_from_bazel_rule(lib_name, bazel_rules)
_expand_intermediate_deps(lib_dict, lib_names, bazel_rules)
# populate extra properties from build metadata
lib_dict.update(build_extra_metadata.get(lib_name, {}))
# store to results
result[lib_name] = lib_dict
# rename some targets to something else
# this needs to be done after we're done with most of the processing logic,
# otherwise the already-renamed libraries will have different names than expected
for lib_name in lib_names:
to_name = build_extra_metadata.get(lib_name, {}).get('_RENAME', None)
if to_name:
# store lib under the new name and also change its 'name' property
if to_name in result:
raise Exception('Cannot rename target ' + lib_name + ', ' +
to_name + ' already exists.')
lib_dict = result.pop(lib_name)
lib_dict['name'] = to_name
result[to_name] = lib_dict
# dep names need to be updated as well
for lib_dict_to_update in result.values():
lib_dict_to_update['deps'] = list(
map(lambda dep: to_name if dep == lib_name else dep,
lib_dict_to_update['deps']))
# make sure deps are listed in reverse topological order (e.g. "grpc gpr" and not "gpr grpc")
for lib_dict in result.itervalues():
lib_dict['deps'] = list(
reversed(_sort_by_build_order(lib_dict['deps'], result, 'deps')))
return result
def _convert_to_build_yaml_like(lib_dict):
lib_names = list(
filter(
lambda lib_name: lib_dict[lib_name].get('_TYPE', 'library') ==
'library', lib_dict.keys()))
target_names = list(
filter(
lambda lib_name: lib_dict[lib_name].get('_TYPE', 'library') ==
'target', lib_dict.keys()))
test_names = list(
filter(
lambda lib_name: lib_dict[lib_name].get('_TYPE', 'library') ==
'test', lib_dict.keys()))
# list libraries and targets in predefined order
lib_list = list(map(lambda lib_name: lib_dict[lib_name], lib_names))
target_list = list(map(lambda lib_name: lib_dict[lib_name], target_names))
test_list = list(map(lambda lib_name: lib_dict[lib_name], test_names))
# get rid of temporary private fields prefixed with "_" and some other useless fields
for lib in lib_list:
for field_to_remove in filter(lambda k: k.startswith('_'), lib.keys()):
lib.pop(field_to_remove, None)
for target in target_list:
for field_to_remove in filter(lambda k: k.startswith('_'),
target.keys()):
target.pop(field_to_remove, None)
target.pop('public_headers',
None) # public headers make no sense for targets
for test in test_list:
for field_to_remove in filter(lambda k: k.startswith('_'), test.keys()):
test.pop(field_to_remove, None)
test.pop('public_headers',
None) # public headers make no sense for tests
build_yaml_like = {
'libs': lib_list,
'filegroups': [],
'targets': target_list,
'tests': test_list,
}
return build_yaml_like
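The _TYPE partitioning above in miniature; entries default to 'library' when the field is absent (the names are made up):

lib_dict = {'gpr': {}, 'grpc_cli': {'_TYPE': 'target'}, 'foo_test': {'_TYPE': 'test'}}
libs = [n for n in lib_dict if lib_dict[n].get('_TYPE', 'library') == 'library']
assert libs == ['gpr']  # only entries without an explicit _TYPE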
def _extract_cc_tests(bazel_rules):
"""Gets list of cc_test tests from bazel rules"""
result = []
for bazel_rule in bazel_rules.itervalues():
if bazel_rule['class'] == 'cc_test':
test_name = bazel_rule['name']
if test_name.startswith('//'):
prefixlen = len('//')
result.append(test_name[prefixlen:])
return list(sorted(result))
def _filter_cc_tests(tests):
"""Filters out tests that we don't want or we cannot build them reasonably"""
# most qps tests are autogenerated, we are fine without them
tests = list(
filter(lambda test: not test.startswith('test/cpp/qps:'), tests))
# we have trouble with census dependency outside of bazel
tests = list(
filter(lambda test: not test.startswith('test/cpp/ext/filters/census:'),
tests))
tests = list(
filter(
lambda test: not test.startswith(
'test/cpp/microbenchmarks:bm_opencensus_plugin'), tests))
# missing opencensus/stats/stats.h
tests = list(
filter(
lambda test: not test.startswith(
'test/cpp/end2end:server_load_reporting_end2end_test'), tests))
tests = list(
filter(
lambda test: not test.startswith(
'test/cpp/server/load_reporter:lb_load_reporter_test'), tests))
# The test uses the --running_under_bazel cmdline argument.
# To avoid the trouble of needing to adjust it, we just skip the test
tests = list(
filter(
lambda test: not test.startswith(
'test/cpp/naming:resolver_component_tests_runner_invoker'),
tests))
# the test requires 'client_crash_test_server' to be built
tests = list(
filter(
lambda test: not test.startswith('test/cpp/end2end:time_change_test'
), tests))
# the test requires 'client_crash_test_server' to be built
tests = list(
filter(
lambda test: not test.startswith(
'test/cpp/end2end:client_crash_test'), tests))
# the test requires 'server_crash_test_client' to be built
tests = list(
filter(
lambda test: not test.startswith(
'test/cpp/end2end:server_crash_test'), tests))
# test never existed under build.yaml and it fails -> skip it
tests = list(
filter(
lambda test: not test.startswith(
'test/core/tsi:ssl_session_cache_test'), tests))
return tests
def _generate_build_extra_metadata_for_tests(tests, bazel_rules):
test_metadata = {}
for test in tests:
test_dict = {'build': 'test', '_TYPE': 'target'}
bazel_rule = bazel_rules[_get_bazel_label(test)]
bazel_tags = bazel_rule['tags']
if 'manual' in bazel_tags:
# don't run the tests marked as "manual"
test_dict['run'] = False
if 'no_uses_polling' in bazel_tags:
test_dict['uses_polling'] = False
if 'grpc_fuzzer' == bazel_rule['generator_function']:
# currently we hand-list fuzzers instead of generating them automatically
# because there's no way to obtain maxlen property from bazel BUILD file.
print('skipping fuzzer ' + test)
continue
# if any tags that restrict platform compatibility are present,
# generate the "platforms" field accordingly
# TODO(jtattermusch): there is also a "no_linux" tag, but we cannot take
# it into account as it is applied by grpc_cc_test when poller expansion
# is made (for tests where uses_polling=True). So for now, we just
# assume all tests are compatible with linux and ignore the "no_linux" tag
# completely.
known_platform_tags = set(['no_windows', 'no_mac'])
if set(bazel_tags).intersection(known_platform_tags):
platforms = []
# assume all tests are compatible with linux and posix
platforms.append('linux')
platforms.append(
'posix') # there is no posix-specific tag in bazel BUILD
if not 'no_mac' in bazel_tags:
platforms.append('mac')
if not 'no_windows' in bazel_tags:
platforms.append('windows')
test_dict['platforms'] = platforms
if '//external:benchmark' in bazel_rule['transitive_deps']:
test_dict['benchmark'] = True
test_dict['defaults'] = 'benchmark'
cmdline_args = bazel_rule['args']
if cmdline_args:
test_dict['args'] = list(cmdline_args)
uses_gtest = '//external:gtest' in bazel_rule['transitive_deps']
if uses_gtest:
test_dict['gtest'] = True
if test.startswith('test/cpp') or uses_gtest:
test_dict['language'] = 'c++'
elif test.startswith('test/core'):
test_dict['language'] = 'c'
else:
raise Exception('wrong test: ' + test)
# short test name without the path.
# There can be name collisions, but we will resolve them later
simple_test_name = os.path.basename(_extract_source_file_path(test))
test_dict['_RENAME'] = simple_test_name
test_metadata[test] = test_dict
# detect duplicate test names
tests_by_simple_name = {}
for test_name, test_dict in test_metadata.iteritems():
simple_test_name = test_dict['_RENAME']
if not simple_test_name in tests_by_simple_name:
tests_by_simple_name[simple_test_name] = []
tests_by_simple_name[simple_test_name].append(test_name)
# choose alternative names for tests with a name collision
for collision_list in tests_by_simple_name.itervalues():
if len(collision_list) > 1:
for test_name in collision_list:
long_name = test_name.replace('/', '_').replace(':', '_')
print(
'short name of "%s" collides with another test, renaming to %s'
% (test_name, long_name))
test_metadata[test_name]['_RENAME'] = long_name
# TODO(jtattermusch): in bazel, add "_test" suffix to the test names
# test does not have "_test" suffix: fling
# test does not have "_test" suffix: fling_stream
# test does not have "_test" suffix: client_ssl
# test does not have "_test" suffix: handshake_server_with_readahead_handshaker
# test does not have "_test" suffix: handshake_verify_peer_options
# test does not have "_test" suffix: server_ssl
return test_metadata
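The collision fallback above in isolation: a fully qualified bazel target name is flattened into a unique test name (the target is made up):

long_name = 'test/core/a:foo_test'.replace('/', '_').replace(':', '_')
assert long_name == 'test_core_a_foo_test'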
# extra metadata that will be used to construct build.yaml
# these are mostly extra properties that we weren't able to obtain from the bazel build
# _TYPE: whether this is library, target or test
# _RENAME: whether this target should be renamed to a different name (to match expectations of make and cmake builds)
# NOTE: secure is 'check' by default, so setting secure = False below does matter
_BUILD_EXTRA_METADATA = {
'third_party/address_sorting:address_sorting': {
'language': 'c',
'build': 'all',
'secure': False,
'_RENAME': 'address_sorting'
},
'gpr': {
'language': 'c',
'build': 'all',
'secure': False
},
'grpc': {
'language': 'c',
'build': 'all',
'baselib': True,
'secure': True,
'dll': True,
'generate_plugin_registry': True
},
'grpc++': {
'language': 'c++',
'build': 'all',
'baselib': True,
'dll': True
},
'grpc++_alts': {
'language': 'c++',
'build': 'all',
'baselib': True
},
'grpc++_error_details': {
'language': 'c++',
'build': 'all'
},
'grpc++_reflection': {
'language': 'c++',
'build': 'all'
},
'grpc++_unsecure': {
'language': 'c++',
'build': 'all',
'baselib': True,
'secure': False,
'dll': True
},
# TODO(jtattermusch): do we need to set grpc_csharp_ext's LDFLAGS for wrapping memcpy in the same way as in build.yaml?
'grpc_csharp_ext': {
'language': 'c',
'build': 'all',
'dll': 'only'
},
'grpc_unsecure': {
'language': 'c',
'build': 'all',
'baselib': True,
'secure': False,
'dll': True,
'generate_plugin_registry': True
},
'grpcpp_channelz': {
'language': 'c++',
'build': 'all'
},
'src/compiler:grpc_plugin_support': {
'language': 'c++',
'build': 'protoc',
'secure': False,
'_RENAME': 'grpc_plugin_support'
},
'src/compiler:grpc_cpp_plugin': {
'language': 'c++',
'build': 'protoc',
'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_cpp_plugin'
},
'src/compiler:grpc_csharp_plugin': {
'language': 'c++',
'build': 'protoc',
'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_csharp_plugin'
},
'src/compiler:grpc_node_plugin': {
'language': 'c++',
'build': 'protoc',
'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_node_plugin'
},
'src/compiler:grpc_objective_c_plugin': {
'language': 'c++',
'build': 'protoc',
'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_objective_c_plugin'
},
'src/compiler:grpc_php_plugin': {
'language': 'c++',
'build': 'protoc',
'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_php_plugin'
},
'src/compiler:grpc_python_plugin': {
'language': 'c++',
'build': 'protoc',
'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_python_plugin'
},
'src/compiler:grpc_ruby_plugin': {
'language': 'c++',
'build': 'protoc',
'secure': False,
'_TYPE': 'target',
'_RENAME': 'grpc_ruby_plugin'
},
# TODO(jtattermusch): consider adding grpc++_core_stats
# test support libraries
'test/core/util:grpc_test_util': {
'language': 'c',
'build': 'private',
'_RENAME': 'grpc_test_util'
},
'test/core/util:grpc_test_util_unsecure': {
'language': 'c',
'build': 'private',
'secure': False,
'_RENAME': 'grpc_test_util_unsecure'
},
# TODO(jtattermusch): consider adding grpc++_test_util_unsecure - it doesn't seem to be used by bazel build (don't forget to set secure: False)
'test/cpp/util:test_config': {
'language': 'c++',
'build': 'private',
'_RENAME': 'grpc++_test_config'
},
'test/cpp/util:test_util': {
'language': 'c++',
'build': 'private',
'_RENAME': 'grpc++_test_util'
},
# end2end test support libraries
'test/core/end2end:end2end_tests': {
'language': 'c',
'build': 'private',
'secure': True,
'_RENAME': 'end2end_tests'
},
'test/core/end2end:end2end_nosec_tests': {
'language': 'c',
'build': 'private',
'secure': False,
'_RENAME': 'end2end_nosec_tests'
},
# benchmark support libraries
'test/cpp/microbenchmarks:helpers': {
'language': 'c++',
'build': 'test',
'defaults': 'benchmark',
'_RENAME': 'benchmark_helpers'
},
'test/cpp/interop:interop_client': {
'language': 'c++',
'build': 'test',
'run': False,
'_TYPE': 'target',
'_RENAME': 'interop_client'
},
'test/cpp/interop:interop_server': {
'language': 'c++',
'build': 'test',
'run': False,
'_TYPE': 'target',
'_RENAME': 'interop_server'
},
'test/cpp/interop:http2_client': {
'language': 'c++',
'build': 'test',
'run': False,
'_TYPE': 'target',
'_RENAME': 'http2_client'
},
'test/cpp/qps:qps_json_driver': {
'language': 'c++',
'build': 'test',
'run': False,
'_TYPE': 'target',
'_RENAME': 'qps_json_driver'
},
'test/cpp/qps:qps_worker': {
'language': 'c++',
'build': 'test',
'run': False,
'_TYPE': 'target',
'_RENAME': 'qps_worker'
},
'test/cpp/util:grpc_cli': {
'language': 'c++',
'build': 'test',
'run': False,
'_TYPE': 'target',
'_RENAME': 'grpc_cli'
},
# TODO(jtattermusch): create_jwt and verify_jwt break distribtests because they depend on grpc_test_utils and thus require tests to be built
# For now it's ok to disable them as these binaries aren't very useful anyway.
#'test/core/security:create_jwt': { 'language': 'c', 'build': 'tool', '_TYPE': 'target', '_RENAME': 'grpc_create_jwt' },
#'test/core/security:verify_jwt': { 'language': 'c', 'build': 'tool', '_TYPE': 'target', '_RENAME': 'grpc_verify_jwt' },
# TODO(jtattermusch): add remaining tools such as grpc_print_google_default_creds_token (they are not used by bazel build)
# Fuzzers
'test/core/security:alts_credentials_fuzzer': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/security/corpus/alts_credentials_corpus'],
'maxlen': 2048,
'_TYPE': 'target',
'_RENAME': 'alts_credentials_fuzzer'
},
'test/core/end2end/fuzzers:client_fuzzer': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/end2end/fuzzers/client_fuzzer_corpus'],
'maxlen': 2048,
'dict': 'test/core/end2end/fuzzers/hpack.dictionary',
'_TYPE': 'target',
'_RENAME': 'client_fuzzer'
},
'test/core/transport/chttp2:hpack_parser_fuzzer': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/transport/chttp2/hpack_parser_corpus'],
'maxlen': 512,
'dict': 'test/core/end2end/fuzzers/hpack.dictionary',
'_TYPE': 'target',
'_RENAME': 'hpack_parser_fuzzer_test'
},
'test/core/http:request_fuzzer': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/http/request_corpus'],
'maxlen': 2048,
'_TYPE': 'target',
'_RENAME': 'http_request_fuzzer_test'
},
'test/core/http:response_fuzzer': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/http/response_corpus'],
'maxlen': 2048,
'_TYPE': 'target',
'_RENAME': 'http_response_fuzzer_test'
},
'test/core/json:json_fuzzer': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/json/corpus'],
'maxlen': 512,
'_TYPE': 'target',
'_RENAME': 'json_fuzzer_test'
},
'test/core/nanopb:fuzzer_response': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/nanopb/corpus_response'],
'maxlen': 128,
'_TYPE': 'target',
'_RENAME': 'nanopb_fuzzer_response_test'
},
'test/core/nanopb:fuzzer_serverlist': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/nanopb/corpus_serverlist'],
'maxlen': 128,
'_TYPE': 'target',
'_RENAME': 'nanopb_fuzzer_serverlist_test'
},
'test/core/slice:percent_decode_fuzzer': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/slice/percent_decode_corpus'],
'maxlen': 32,
'_TYPE': 'target',
'_RENAME': 'percent_decode_fuzzer'
},
'test/core/slice:percent_encode_fuzzer': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/slice/percent_encode_corpus'],
'maxlen': 32,
'_TYPE': 'target',
'_RENAME': 'percent_encode_fuzzer'
},
'test/core/end2end/fuzzers:server_fuzzer': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/end2end/fuzzers/server_fuzzer_corpus'],
'maxlen': 2048,
'dict': 'test/core/end2end/fuzzers/hpack.dictionary',
'_TYPE': 'target',
'_RENAME': 'server_fuzzer'
},
'test/core/security:ssl_server_fuzzer': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/security/corpus/ssl_server_corpus'],
'maxlen': 2048,
'_TYPE': 'target',
'_RENAME': 'ssl_server_fuzzer'
},
'test/core/client_channel:uri_fuzzer_test': {
'language': 'c++',
'build': 'fuzzer',
'corpus_dirs': ['test/core/client_channel/uri_corpus'],
'maxlen': 128,
'_TYPE': 'target',
'_RENAME': 'uri_fuzzer_test'
},
# TODO(jtattermusch): these fuzzers had no build.yaml equivalent
# test/core/compression:message_compress_fuzzer
# test/core/compression:message_decompress_fuzzer
# test/core/compression:stream_compression_fuzzer
# test/core/compression:stream_decompression_fuzzer
# test/core/slice:b64_decode_fuzzer
# test/core/slice:b64_encode_fuzzer
}
# We need a complete picture of all the targets and dependencies we're interested in
# so we run multiple bazel queries and merge the results.
_BAZEL_DEPS_QUERIES = [
'deps("//test/...")',
'deps("//:all")',
'deps("//src/compiler/...")',
'deps("//src/proto/...")',
]
bazel_rules = {}
for query in _BAZEL_DEPS_QUERIES:
bazel_rules.update(
_extract_rules_from_bazel_xml(_bazel_query_xml_tree(query)))
_populate_transitive_deps(bazel_rules)
tests = _filter_cc_tests(_extract_cc_tests(bazel_rules))
test_metadata = _generate_build_extra_metadata_for_tests(tests, bazel_rules)
all_metadata = {}
all_metadata.update(_BUILD_EXTRA_METADATA)
all_metadata.update(test_metadata)
all_targets_dict = _generate_build_metadata(all_metadata, bazel_rules)
build_yaml_like = _convert_to_build_yaml_like(all_targets_dict)
# if a test uses source files from src/ directly, it's a little bit suspicious
for tgt in build_yaml_like['targets']:
if tgt['build'] == 'test':
for src in tgt['src']:
if src.startswith('src/') and not src.endswith('.proto'):
print('source file from under "src/" tree used in test ' +
tgt['name'] + ': ' + src)
build_yaml_string = build_cleaner.cleaned_build_yaml_dict_as_string(
build_yaml_like)
with open('build_autogenerated.yaml', 'w') as file:
file.write(build_yaml_string)

@@ -23,12 +23,11 @@ gen_build_yaml_dirs=" \
src/upb \
src/zlib \
src/c-ares \
test/core/bad_client \
test/core/bad_ssl \
test/core/end2end \
test/core/end2end \
test/cpp/naming \
test/cpp/qps \
tools/run_tests/lb_interop_tests"
gen_build_files=""
for gen_build_yaml in $gen_build_yaml_dirs
do

@@ -39,7 +39,7 @@ args = argp.parse_args()
json = args.build_files
test = {} if 'TEST' in os.environ else None
test = {} if os.environ.get('TEST', 'false') == 'true' else None
plugins = sorted(glob.glob('tools/buildgen/plugins/*.py'))
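The semantics of this change in isolation: previously any non-empty TEST enabled check mode; now only the literal string 'true' does:

import os

os.environ['TEST'] = 'false'
test = {} if os.environ.get('TEST', 'false') == 'true' else None
assert test is None  # 'false' no longer triggers check mode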

@@ -16,20 +16,25 @@
set -e
if [ "x$TEST" = "x" ] ; then
TEST=false
fi
export TEST=${TEST:-false}
echo "Generating build_autogenerated.yaml from bazel BUILD file"
rm -f build_autogenerated.yaml
python tools/buildgen/extract_metadata_from_bazel_xml.py
cd `dirname $0`/../..
mako_renderer=tools/buildgen/mako_renderer.py
if [ "x$TEST" != "x" ] ; then
tools/buildgen/build-cleaner.py build.yaml
fi
tools/buildgen/build_cleaner.py build_handwritten.yaml
# check build_autogenerated.yaml is already in its "clean" form
TEST=true tools/buildgen/build_cleaner.py build_autogenerated.yaml
. tools/buildgen/generate_build_additions.sh
python tools/buildgen/generate_projects.py build.yaml $gen_build_files $*
# Instead of generating from a single build.yaml, we've split it into
# - build_handwritten.yaml: manually written metadata
# - build_autogenerated.yaml: generated from bazel BUILD file
python tools/buildgen/generate_projects.py build_handwritten.yaml build_autogenerated.yaml $gen_build_files $*
rm $gen_build_files
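generate_projects.py now consumes both yaml files. A hypothetical sketch of loading the two inputs into one dictionary before template expansion (the real merge logic lives in buildgen and is not shown in this diff):

import yaml

merged = {}
for fname in ['build_handwritten.yaml', 'build_autogenerated.yaml']:
    with open(fname) as f:
        data = yaml.safe_load(f)
    for key, value in data.items():
        merged.setdefault(key, value)  # naive merge; the real logic may differ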

@@ -30,6 +30,14 @@ def mako_plugin(dictionary):
'test/core/util/one_corpus_entry_fuzzer.cc')
new_target['own_src'].append(
'test/core/util/one_corpus_entry_fuzzer.cc')
# avoid having two main() methods
to_remove = 'test/core/util/fuzzer_corpus_test.cc'
if to_remove in new_target['src']:
new_target['src'].remove(to_remove)
if to_remove in new_target['own_src']:
new_target['own_src'].remove(to_remove)
targets.append(new_target)
for corpus in new_target['corpus_dirs']:
for fn in sorted(glob.glob('%s/*' % corpus)):

@@ -20,7 +20,7 @@ cd $(dirname $0)/../..
DIFF_COMMAND="git diff --name-only HEAD | grep -v ^third_party/"
if [ "x$1" == 'x--pre-commit' ]; then
if eval $DIFF_COMMAND | grep '^build.yaml$'; then
if eval $DIFF_COMMAND | grep '^build_handwritten.yaml$'; then
./tools/buildgen/generate_projects.sh
else
templates=$(eval $DIFF_COMMAND | grep '\.template$' || true)

@@ -19,7 +19,7 @@ shopt -s nullglob
cd "$(dirname "$0")/../../.."
GRPC_VERSION=$(grep -e "^ *version: " build.yaml | head -n 1 | sed 's/.*: //')
GRPC_VERSION=$(grep -e "^ *version: " build_handwritten.yaml | head -n 1 | sed 's/.*: //')
INPUT_ARTIFACTS=$KOKORO_GFILE_DIR/github/grpc/artifacts
INDEX_FILENAME=index.xml

@@ -56,7 +56,7 @@ if branch_name is not None:
else:
check_version = lambda version: True
with open('build.yaml', 'r') as f:
with open('build_handwritten.yaml', 'r') as f:
build_yaml = yaml.load(f.read())
settings = build_yaml['settings']
