extract_metadata_from_bazel_xml.py changes from #25272 (#25758)

* tools/buildgen/extract_metadata_from_bazel_xml.py changes from #25272

* regenerate projects

* get rid of useless _deduplicate_append

* extract_deps worked for both collapsed_sources and collapsed_deps only due to a mismatch between bazel labels and target names

* special handling for absl has no effect

* improve comments

* remove changes that are not needed yet
Branch: pull/25761/head
Author: Jan Tattermusch (committed by GitHub, 4 years ago)
Parent: a737ff8612
Commit: 8cae49174c
  1. BUILD.gn (32 changed lines)
  2. CMakeLists.txt (2016 changed lines)
  3. Makefile (27 changed lines)
  4. build_autogenerated.yaml (2000 changed lines)
  5. grpc.gyp (109 changed lines)
  6. tools/buildgen/extract_metadata_from_bazel_xml.py (419 changed lines)

@ -176,14 +176,14 @@ config("grpc_config") {
"src/core/lib/profiling/timers.h",
]
deps = [
":absl/types:optional",
":absl/time:time",
":absl/synchronization:synchronization",
":absl/strings:strings",
":absl/strings:str_format",
":absl/status:status",
":absl/memory:memory",
":absl/base:base",
":absl/memory:memory",
":absl/status:status",
":absl/strings:str_format",
":absl/strings:strings",
":absl/synchronization:synchronization",
":absl/time:time",
":absl/types:optional",
]
public_configs = [
@ -1239,17 +1239,14 @@ config("grpc_config") {
]
deps = [
"//third_party/zlib",
":absl/container:flat_hash_map",
":absl/container:inlined_vector",
":absl/functional:bind_front",
":absl/status:statusor",
":gpr",
"//third_party/boringssl",
":address_sorting",
":upb",
"//third_party/boringssl",
":absl/types:optional",
":absl/strings:strings",
":absl/status:statusor",
":absl/status:status",
":absl/functional:bind_front",
":absl/container:inlined_vector",
":absl/container:flat_hash_map",
"//third_party/cares",
":address_sorting",
]
@ -1517,11 +1514,6 @@ config("grpc_config") {
deps = [
"//third_party/protobuf:protobuf_lite",
":grpc",
":gpr",
":address_sorting",
":upb",
"//third_party/boringssl",
":absl/synchronization:synchronization",
]
public_configs = [

File diff suppressed because it is too large.

@ -1651,17 +1651,7 @@ PUBLIC_HEADERS_C += \
LIBGRPC_CSHARP_EXT_OBJS = $(addprefix $(OBJDIR)/$(CONFIG)/, $(addsuffix .o, $(basename $(LIBGRPC_CSHARP_EXT_SRC))))
ifeq ($(NO_SECURE),true)
# You can't build secure libraries if you don't have OpenSSL.
$(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext.a: openssl_dep_error
$(LIBDIR)/$(CONFIG)/$(SHARED_PREFIX)grpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE): openssl_dep_error
else
$(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext.a: $(ZLIB_DEP) $(OPENSSL_DEP) $(CARES_DEP) $(ADDRESS_SORTING_DEP) $(RE2_DEP) $(UPB_DEP) $(GRPC_ABSEIL_DEP) $(LIBGRPC_CSHARP_EXT_OBJS)
$(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext.a: $(ZLIB_DEP) $(CARES_DEP) $(ADDRESS_SORTING_DEP) $(RE2_DEP) $(UPB_DEP) $(GRPC_ABSEIL_DEP) $(LIBGRPC_CSHARP_EXT_OBJS)
$(E) "[AR] Creating $@"
$(Q) mkdir -p `dirname $@`
$(Q) rm -f $(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext.a
@ -1673,30 +1663,26 @@ endif
ifeq ($(SYSTEM),MINGW32)
$(LIBDIR)/$(CONFIG)/grpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE): $(LIBGRPC_CSHARP_EXT_OBJS) $(ZLIB_DEP) $(CARES_DEP) $(ADDRESS_SORTING_DEP) $(RE2_DEP) $(UPB_DEP) $(GRPC_ABSEIL_DEP) $(LIBDIR)/$(CONFIG)/libgrpc.a $(LIBDIR)/$(CONFIG)/libgpr.a $(LIBDIR)/$(CONFIG)/libaddress_sorting.a $(LIBDIR)/$(CONFIG)/libupb.a $(OPENSSL_DEP)
$(LIBDIR)/$(CONFIG)/grpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE): $(LIBGRPC_CSHARP_EXT_OBJS) $(ZLIB_DEP) $(CARES_DEP) $(ADDRESS_SORTING_DEP) $(RE2_DEP) $(UPB_DEP) $(GRPC_ABSEIL_DEP) $(LIBDIR)/$(CONFIG)/libgrpc.a
$(E) "[LD] Linking $@"
$(Q) mkdir -p `dirname $@`
$(Q) $(LDXX) $(LDFLAGS) -L$(LIBDIR)/$(CONFIG) -shared -Wl,--output-def=$(LIBDIR)/$(CONFIG)/grpc_csharp_ext$(SHARED_VERSION_CORE).def -Wl,--out-implib=$(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext$(SHARED_VERSION_CORE)-dll.a -o $(LIBDIR)/$(CONFIG)/grpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE) $(LIBGRPC_CSHARP_EXT_OBJS) $(LIBDIR)/$(CONFIG)/libgrpc.a $(LIBDIR)/$(CONFIG)/libgpr.a $(LIBDIR)/$(CONFIG)/libaddress_sorting.a $(LIBDIR)/$(CONFIG)/libupb.a $(OPENSSL_MERGE_LIBS) $(LDLIBS_SECURE) $(ZLIB_MERGE_LIBS) $(CARES_MERGE_LIBS) $(ADDRESS_SORTING_MERGE_LIBS) $(RE2_MERGE_LIBS) $(UPB_MERGE_LIBS) $(GRPC_ABSEIL_MERGE_LIBS) $(LDLIBS)
$(Q) $(LDXX) $(LDFLAGS) -L$(LIBDIR)/$(CONFIG) -shared -Wl,--output-def=$(LIBDIR)/$(CONFIG)/grpc_csharp_ext$(SHARED_VERSION_CORE).def -Wl,--out-implib=$(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext$(SHARED_VERSION_CORE)-dll.a -o $(LIBDIR)/$(CONFIG)/grpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE) $(LIBGRPC_CSHARP_EXT_OBJS) $(LIBDIR)/$(CONFIG)/libgrpc.a $(ZLIB_MERGE_LIBS) $(CARES_MERGE_LIBS) $(ADDRESS_SORTING_MERGE_LIBS) $(RE2_MERGE_LIBS) $(UPB_MERGE_LIBS) $(GRPC_ABSEIL_MERGE_LIBS) $(LDLIBS)
else
$(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE): $(LIBGRPC_CSHARP_EXT_OBJS) $(ZLIB_DEP) $(CARES_DEP) $(ADDRESS_SORTING_DEP) $(RE2_DEP) $(UPB_DEP) $(GRPC_ABSEIL_DEP) $(LIBDIR)/$(CONFIG)/libgrpc.a $(LIBDIR)/$(CONFIG)/libgpr.a $(LIBDIR)/$(CONFIG)/libaddress_sorting.a $(LIBDIR)/$(CONFIG)/libupb.a $(OPENSSL_DEP)
$(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE): $(LIBGRPC_CSHARP_EXT_OBJS) $(ZLIB_DEP) $(CARES_DEP) $(ADDRESS_SORTING_DEP) $(RE2_DEP) $(UPB_DEP) $(GRPC_ABSEIL_DEP) $(LIBDIR)/$(CONFIG)/libgrpc.a
$(E) "[LD] Linking $@"
$(Q) mkdir -p `dirname $@`
ifeq ($(SYSTEM),Darwin)
$(Q) $(LDXX) $(LDFLAGS) -L$(LIBDIR)/$(CONFIG) -install_name $(SHARED_PREFIX)grpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE) -dynamiclib -o $(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE) $(LIBGRPC_CSHARP_EXT_OBJS) $(LIBDIR)/$(CONFIG)/libgrpc.a $(LIBDIR)/$(CONFIG)/libgpr.a $(LIBDIR)/$(CONFIG)/libaddress_sorting.a $(LIBDIR)/$(CONFIG)/libupb.a $(OPENSSL_MERGE_LIBS) $(LDLIBS_SECURE) $(ZLIB_MERGE_LIBS) $(CARES_MERGE_LIBS) $(ADDRESS_SORTING_MERGE_LIBS) $(RE2_MERGE_LIBS) $(UPB_MERGE_LIBS) $(GRPC_ABSEIL_MERGE_LIBS) $(LDLIBS)
$(Q) $(LDXX) $(LDFLAGS) -L$(LIBDIR)/$(CONFIG) -install_name $(SHARED_PREFIX)grpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE) -dynamiclib -o $(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE) $(LIBGRPC_CSHARP_EXT_OBJS) $(LIBDIR)/$(CONFIG)/libgrpc.a $(ZLIB_MERGE_LIBS) $(CARES_MERGE_LIBS) $(ADDRESS_SORTING_MERGE_LIBS) $(RE2_MERGE_LIBS) $(UPB_MERGE_LIBS) $(GRPC_ABSEIL_MERGE_LIBS) $(LDLIBS)
else
$(Q) $(LDXX) $(LDFLAGS) -L$(LIBDIR)/$(CONFIG) -shared -Wl,-soname,libgrpc_csharp_ext.so.15 -o $(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE) $(LIBGRPC_CSHARP_EXT_OBJS) $(LIBDIR)/$(CONFIG)/libgrpc.a $(LIBDIR)/$(CONFIG)/libgpr.a $(LIBDIR)/$(CONFIG)/libaddress_sorting.a $(LIBDIR)/$(CONFIG)/libupb.a $(OPENSSL_MERGE_LIBS) $(LDLIBS_SECURE) $(ZLIB_MERGE_LIBS) $(CARES_MERGE_LIBS) $(ADDRESS_SORTING_MERGE_LIBS) $(RE2_MERGE_LIBS) $(UPB_MERGE_LIBS) $(GRPC_ABSEIL_MERGE_LIBS) $(LDLIBS)
$(Q) $(LDXX) $(LDFLAGS) -L$(LIBDIR)/$(CONFIG) -shared -Wl,-soname,libgrpc_csharp_ext.so.15 -o $(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE) $(LIBGRPC_CSHARP_EXT_OBJS) $(LIBDIR)/$(CONFIG)/libgrpc.a $(ZLIB_MERGE_LIBS) $(CARES_MERGE_LIBS) $(ADDRESS_SORTING_MERGE_LIBS) $(RE2_MERGE_LIBS) $(UPB_MERGE_LIBS) $(GRPC_ABSEIL_MERGE_LIBS) $(LDLIBS)
$(Q) ln -sf $(SHARED_PREFIX)grpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE) $(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext$(SHARED_VERSION_CORE).so.15
$(Q) ln -sf $(SHARED_PREFIX)grpc_csharp_ext$(SHARED_VERSION_CORE).$(SHARED_EXT_CORE) $(LIBDIR)/$(CONFIG)/libgrpc_csharp_ext$(SHARED_VERSION_CORE).so
endif
endif
endif
ifneq ($(NO_SECURE),true)
ifneq ($(NO_DEPS),true)
-include $(LIBGRPC_CSHARP_EXT_OBJS:.o=.dep)
endif
endif
# end of build recipe for library "grpc_csharp_ext"
@ -2926,7 +2912,6 @@ src/core/tsi/ssl/session_cache/ssl_session_openssl.cc: $(OPENSSL_DEP)
src/core/tsi/ssl_transport_security.cc: $(OPENSSL_DEP)
src/core/tsi/transport_security.cc: $(OPENSSL_DEP)
src/core/tsi/transport_security_grpc.cc: $(OPENSSL_DEP)
src/csharp/ext/grpc_csharp_ext.c: $(OPENSSL_DEP)
endif
.PHONY: all strip tools dep_error openssl_dep_error openssl_dep_message git_update stop buildtests buildtests_c buildtests_cxx test test_c test_cxx install install_c install_cxx install_csharp install-static install-certs strip strip-shared strip-static strip_c strip-shared_c strip-static_c strip_cxx strip-shared_cxx strip-static_cxx dep_c dep_cxx bins_dep_c bins_dep_cxx clean

File diff suppressed because it is too large.

@ -178,10 +178,6 @@
'type': 'static_library',
'dependencies': [
'grpc_test_util',
'grpc',
'gpr',
'address_sorting',
'upb',
],
'sources': [
'test/core/end2end/cq_verifier.cc',
@ -280,10 +276,6 @@
'type': 'static_library',
'dependencies': [
'grpc_test_util',
'grpc',
'gpr',
'address_sorting',
'upb',
],
'sources': [
'test/core/end2end/cq_verifier.cc',
@ -382,14 +374,14 @@
'target_name': 'gpr',
'type': 'static_library',
'dependencies': [
'absl/types:optional',
'absl/time:time',
'absl/synchronization:synchronization',
'absl/strings:strings',
'absl/strings:str_format',
'absl/status:status',
'absl/memory:memory',
'absl/base:base',
'absl/memory:memory',
'absl/status:status',
'absl/strings:str_format',
'absl/strings:strings',
'absl/synchronization:synchronization',
'absl/time:time',
'absl/types:optional',
],
'sources': [
'src/core/lib/gpr/alloc.cc',
@ -443,16 +435,13 @@
'target_name': 'grpc',
'type': 'static_library',
'dependencies': [
'absl/container:flat_hash_map',
'absl/container:inlined_vector',
'absl/functional:bind_front',
'absl/status:statusor',
'gpr',
'address_sorting',
'upb',
'absl/types:optional',
'absl/strings:strings',
'absl/status:statusor',
'absl/status:status',
'absl/functional:bind_front',
'absl/container:inlined_vector',
'absl/container:flat_hash_map',
],
'sources': [
'src/core/ext/filters/census/grpc_context.cc',
@ -1001,9 +990,6 @@
'type': 'static_library',
'dependencies': [
'grpc',
'gpr',
'address_sorting',
'upb',
],
'sources': [
'src/csharp/ext/grpc_csharp_ext.c',
@ -1013,13 +999,10 @@
'target_name': 'grpc_test_util',
'type': 'static_library',
'dependencies': [
'grpc',
'gpr',
'address_sorting',
'upb',
'absl/debugging:symbolize',
'absl/debugging:stacktrace',
'absl/debugging:failure_signal_handler',
'absl/debugging:stacktrace',
'absl/debugging:symbolize',
'grpc',
],
'sources': [
'test/core/util/cmdline.cc',
@ -1051,13 +1034,10 @@
'target_name': 'grpc_test_util_unsecure',
'type': 'static_library',
'dependencies': [
'grpc_unsecure',
'gpr',
'address_sorting',
'upb',
'absl/debugging:symbolize',
'absl/debugging:stacktrace',
'absl/debugging:failure_signal_handler',
'absl/debugging:stacktrace',
'absl/debugging:symbolize',
'grpc_unsecure',
],
'sources': [
'test/core/util/cmdline.cc',
@ -1088,15 +1068,12 @@
'target_name': 'grpc_unsecure',
'type': 'static_library',
'dependencies': [
'absl/container:flat_hash_map',
'absl/container:inlined_vector',
'absl/status:statusor',
'gpr',
'address_sorting',
'upb',
'absl/types:optional',
'absl/strings:strings',
'absl/status:statusor',
'absl/status:status',
'absl/container:inlined_vector',
'absl/container:flat_hash_map',
],
'sources': [
'src/core/ext/filters/census/grpc_context.cc',
@ -1385,14 +1362,10 @@
'target_name': 'benchmark_helpers',
'type': 'static_library',
'dependencies': [
'grpc_test_util_unsecure',
'benchmark',
'grpc++_unsecure',
'grpc_unsecure',
'grpc_test_util_unsecure',
'grpc++_test_config',
'gpr',
'address_sorting',
'upb',
'benchmark',
],
'sources': [
'src/proto/grpc/testing/echo.proto',
@ -1406,10 +1379,6 @@
'type': 'static_library',
'dependencies': [
'grpc',
'gpr',
'address_sorting',
'upb',
'absl/synchronization:synchronization',
],
'sources': [
'src/cpp/client/channel_cc.cc',
@ -1469,10 +1438,6 @@
'type': 'static_library',
'dependencies': [
'grpc++',
'grpc',
'gpr',
'address_sorting',
'upb',
],
'sources': [
'src/cpp/common/alts_context.cc',
@ -1484,10 +1449,6 @@
'type': 'static_library',
'dependencies': [
'grpc++',
'grpc',
'gpr',
'address_sorting',
'upb',
],
'sources': [
'src/cpp/util/error_details.cc',
@ -1498,10 +1459,6 @@
'type': 'static_library',
'dependencies': [
'grpc++',
'grpc',
'gpr',
'address_sorting',
'upb',
],
'sources': [
'src/proto/grpc/reflection/v1alpha/reflection.proto',
@ -1514,10 +1471,6 @@
'type': 'static_library',
'dependencies': [
'grpc++',
'grpc',
'gpr',
'address_sorting',
'upb',
],
'sources': [
'src/cpp/client/channel_test_peer.cc',
@ -1527,8 +1480,8 @@
'target_name': 'grpc++_test_config',
'type': 'static_library',
'dependencies': [
'gpr',
'absl/flags:parse',
'gpr',
],
'sources': [
'test/cpp/util/test_config_cc.cc',
@ -1538,13 +1491,9 @@
'target_name': 'grpc++_test_util',
'type': 'static_library',
'dependencies': [
'grpc_test_util',
'grpc++',
'grpc',
'gpr',
'address_sorting',
'upb',
'absl/flags:flag',
'grpc++',
'grpc_test_util',
],
'sources': [
'test/core/end2end/data/client_certs.cc',
@ -1563,10 +1512,6 @@
'type': 'static_library',
'dependencies': [
'grpc_unsecure',
'gpr',
'address_sorting',
'upb',
'absl/synchronization:synchronization',
],
'sources': [
'src/cpp/client/channel_cc.cc',
@ -1631,10 +1576,6 @@
'type': 'static_library',
'dependencies': [
'grpc++',
'grpc',
'gpr',
'address_sorting',
'upb',
],
'sources': [
'src/proto/grpc/channelz/channelz.proto',

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -34,14 +34,18 @@ import subprocess
import yaml
import xml.etree.ElementTree as ET
import os
import collections
import sys
import re
from typing import List, Any, Dict, Optional, Iterable
import build_cleaner
_ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
os.chdir(_ROOT)
BuildMetadata = Dict[str, Any]
BuildDict = Dict[str, BuildMetadata]
BuildYaml = Dict[str, Any]
def _bazel_query_xml_tree(query):
def _bazel_query_xml_tree(query: str) -> ET.Element:
"""Get xml output of bazel query invocation, parsed as XML tree"""
output = subprocess.check_output(
['tools/bazel', 'query', '--noimplicit_deps', '--output', 'xml', query])
@ -98,14 +102,14 @@ def _extract_rules_from_bazel_xml(xml_tree):
return result
def _get_bazel_label(target_name):
def _get_bazel_label(target_name: str) -> str:
if ':' in target_name:
return '//%s' % target_name
else:
return '//:%s' % target_name
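# Editor's usage sketch (not part of the script): the target-name -> bazel-label
# mapping performed by _get_bazel_label above. 'some/pkg:lib' is a hypothetical
# name used only for illustration; 'gpr' is one of the real top-level targets.
# The commit message notes that _extract_deps previously relied on a mismatch
# between these labels and plain target names.
assert _get_bazel_label('gpr') == '//:gpr'
assert _get_bazel_label('some/pkg:lib') == '//some/pkg:lib'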
def _extract_source_file_path(label):
def _extract_source_file_path(label: str) -> str:
"""Gets relative path to source file from bazel deps listing"""
if label.startswith('//'):
label = label[len('//'):]
@ -117,7 +121,7 @@ def _extract_source_file_path(label):
return label
def _extract_public_headers(bazel_rule):
def _extract_public_headers(bazel_rule: BuildMetadata) -> List[str]:
"""Gets list of public headers from a bazel rule"""
result = []
for dep in bazel_rule['hdrs']:
@ -126,7 +130,7 @@ def _extract_public_headers(bazel_rule):
return list(sorted(result))
def _extract_nonpublic_headers(bazel_rule):
def _extract_nonpublic_headers(bazel_rule: BuildMetadata) -> List[str]:
"""Gets list of non-public headers from a bazel rule"""
result = []
for dep in bazel_rule['hdrs']:
@ -136,7 +140,7 @@ def _extract_nonpublic_headers(bazel_rule):
return list(sorted(result))
def _extract_sources(bazel_rule):
def _extract_sources(bazel_rule: BuildMetadata) -> List[str]:
"""Gets list of source files from a bazel rule"""
result = []
for dep in bazel_rule['srcs']:
@ -146,12 +150,14 @@ def _extract_sources(bazel_rule):
return list(sorted(result))
def _extract_deps(bazel_rule):
def _extract_deps(bazel_rule: BuildMetadata,
bazel_rules: BuildDict) -> List[str]:
"""Gets list of deps from from a bazel rule"""
return list(sorted(bazel_rule['deps']))
def _create_target_from_bazel_rule(target_name, bazel_rules):
def _create_target_from_bazel_rule(target_name: str,
bazel_rules: BuildDict) -> BuildMetadata:
"""Create build.yaml-like target definition from bazel metadata"""
bazel_rule = bazel_rules[_get_bazel_label(target_name)]
@ -164,71 +170,16 @@ def _create_target_from_bazel_rule(target_name, bazel_rules):
'_PUBLIC_HEADERS_BAZEL': _extract_public_headers(bazel_rule),
'_HEADERS_BAZEL': _extract_nonpublic_headers(bazel_rule),
'_SRC_BAZEL': _extract_sources(bazel_rule),
'_DEPS_BAZEL': _extract_deps(bazel_rule),
'_DEPS_BAZEL': _extract_deps(bazel_rule, bazel_rules),
'public_headers': bazel_rule['_COLLAPSED_PUBLIC_HEADERS'],
'headers': bazel_rule['_COLLAPSED_HEADERS'],
'src': bazel_rule['_COLLAPSED_SRCS'],
'deps': bazel_rule['_COLLAPSED_DEPS'],
}
return result
def _sort_by_build_order(lib_names, lib_dict, deps_key_name, verbose=False):
"""Sort library names to form correct build order. Use metadata from lib_dict"""
# we find correct build order by performing a topological sort
# expected output: if library B depends on A, A should be listed first
# all libs that are not in the dictionary are considered external.
external_deps = list(
sorted([lib_name for lib_name in lib_names if lib_name not in lib_dict
]))
if verbose:
print('topo_ordering ' + str(lib_names))
print(' external_deps ' + str(external_deps))
result = list(external_deps) # external deps will be listed first
while len(result) < len(lib_names):
more_results = []
for lib in lib_names:
if lib not in result:
dep_set = set(lib_dict[lib].get(deps_key_name, []))
dep_set = dep_set.intersection(lib_names)
# if lib only depends on what's already built, add it to the results
if not dep_set.difference(set(result)):
more_results.append(lib)
if not more_results:
raise Exception(
'Cannot sort topologically, there seems to be a cyclic dependency'
)
if verbose:
print(' adding ' + str(more_results))
result = result + list(
sorted(more_results
)) # when build order doesn't matter, sort lexicographically
return result
# TODO(jtattermusch): deduplicate with transitive_dependencies.py (which has a slightly different logic)
def _populate_transitive_deps(bazel_rules):
"""Add 'transitive_deps' field for each of the rules"""
transitive_deps = {}
for rule_name in bazel_rules.keys():
transitive_deps[rule_name] = set(bazel_rules[rule_name]['deps'])
while True:
deps_added = 0
for rule_name in bazel_rules.keys():
old_deps = transitive_deps[rule_name]
new_deps = set(old_deps)
for dep_name in old_deps:
new_deps.update(transitive_deps.get(dep_name, set()))
deps_added += len(new_deps) - len(old_deps)
transitive_deps[rule_name] = new_deps
# if none of the transitive dep sets has changed, we're done
if deps_added == 0:
break
for rule_name, bazel_rule in bazel_rules.items():
bazel_rule['transitive_deps'] = list(sorted(transitive_deps[rule_name]))
def _external_dep_name_from_bazel_dependency(bazel_dep):
def _external_dep_name_from_bazel_dependency(bazel_dep: str) -> Optional[str]:
"""Returns name of dependency if external bazel dependency is provided or None"""
if bazel_dep.startswith('@com_google_absl//'):
# special case for add dependency on one of the absl libraries (there is not just one absl library)
@ -247,98 +198,186 @@ def _external_dep_name_from_bazel_dependency(bazel_dep):
return None
def _expand_intermediate_deps(target_dict, public_dep_names, bazel_rules):
# Some of the libraries defined by bazel won't be exposed in build.yaml
# We call these "intermediate" dependencies. This method expands
# the intermediate deps for given target (populates library's
# headers, sources and dicts as if the intermediate dependency never existed)
def _compute_transitive_metadata(
rule_name: str, bazel_rules: Any,
bazel_label_to_dep_name: Dict[str, str]) -> None:
"""Computes the final build metadata for Bazel target with rule_name.
The dependencies that will appear on the deps list are:
* Public build targets including binaries and tests;
* External targets, like absl, re2.
All other intermediate dependencies will be merged, which means their
source files, headers, etc. will be collected into one build target. This
step of processing will greatly reduce the complexity of the generated
build specifications for other build systems, like CMake, Make, setuptools.
The final build metadata are:
* _TRANSITIVE_DEPS: all the transitive dependencies including intermediate
targets;
* _COLLAPSED_DEPS: dependencies that fit the requirements above, with
                   duplicates removed, producing the shortest
                   possible dependency list in alphabetical order;
* _COLLAPSED_SRCS: the merged source files;
* _COLLAPSED_PUBLIC_HEADERS: the merged public headers;
* _COLLAPSED_HEADERS: the merged non-public headers;
* _EXCLUDE_DEPS: intermediate targets to exclude when performing collapsing
of sources and dependencies.
For collapsed_deps, the algorithm improves on cases like the following:
The result in the past:
end2end_tests -> [grpc_test_util, grpc, gpr, address_sorting, upb]
grpc_test_util -> [grpc, gpr, address_sorting, upb, ...]
grpc -> [gpr, address_sorting, upb, ...]
The result of the algorithm:
end2end_tests -> [grpc_test_util]
grpc_test_util -> [grpc]
grpc -> [gpr, address_sorting, upb, ...]
"""
bazel_rule = bazel_rules[rule_name]
direct_deps = _extract_deps(bazel_rule, bazel_rules)
transitive_deps = set()
collapsed_deps = set()
exclude_deps = set()
collapsed_srcs = set(_extract_sources(bazel_rule))
collapsed_public_headers = set(_extract_public_headers(bazel_rule))
collapsed_headers = set(_extract_nonpublic_headers(bazel_rule))
for dep in direct_deps:
external_dep_name_maybe = _external_dep_name_from_bazel_dependency(dep)
if dep in bazel_rules:
# Descend recursively, but no need to do that for external deps
if external_dep_name_maybe is None:
if "_PROCESSING_DONE" not in bazel_rules[dep]:
# This item is not processed before, compute now
_compute_transitive_metadata(dep, bazel_rules,
bazel_label_to_dep_name)
transitive_deps.update(bazel_rules[dep].get(
'_TRANSITIVE_DEPS', []))
collapsed_deps.update(
collapsed_deps, bazel_rules[dep].get('_COLLAPSED_DEPS', []))
exclude_deps.update(bazel_rules[dep].get('_EXCLUDE_DEPS', []))
# This dep is a public target, add it as a dependency
if dep in bazel_label_to_dep_name:
transitive_deps.update([bazel_label_to_dep_name[dep]])
collapsed_deps.update(collapsed_deps,
[bazel_label_to_dep_name[dep]])
# Add all the transitive deps of each of our public deps to the exclude
# list, since we want to avoid building sources that are already
# built by our dependencies
exclude_deps.update(bazel_rules[dep]['_TRANSITIVE_DEPS'])
continue
# use this dictionary to translate from bazel labels to dep names
bazel_label_to_dep_name = {}
for dep_name in public_dep_names:
bazel_label_to_dep_name[_get_bazel_label(dep_name)] = dep_name
# This dep is an external target, add it as a dependency
if external_dep_name_maybe is not None:
transitive_deps.update([external_dep_name_maybe])
collapsed_deps.update(collapsed_deps, [external_dep_name_maybe])
continue
target_name = target_dict['name']
bazel_deps = target_dict['_DEPS_BAZEL']
# Direct dependencies are part of transitive dependencies
transitive_deps.update(direct_deps)
# initial values
public_headers = set(target_dict['_PUBLIC_HEADERS_BAZEL'])
headers = set(target_dict['_HEADERS_BAZEL'])
src = set(target_dict['_SRC_BAZEL'])
deps = set()
# Calculate transitive public deps (needed for collapsing sources)
transitive_public_deps = set(
filter(lambda x: x in bazel_label_to_dep_name, transitive_deps))
expansion_blocklist = set()
to_expand = set(bazel_deps)
while to_expand:
# Remove intermediate targets that our public dependencies already depend
# on. This is the step that further shortens the deps list.
collapsed_deps = set(filter(lambda x: x not in exclude_deps,
collapsed_deps))
# start with the last dependency to be built
build_order = _sort_by_build_order(list(to_expand), bazel_rules,
'transitive_deps')
# Compute the final source files and headers for this build target whose
# name is `rule_name` (input argument of this function).
#
# Imagine a public target PX has transitive deps [IA, IB, PY, IC, PZ]. PX,
# PY and PZ are public build targets. And IA, IB, IC are intermediate
# targets. In addition, PY depends on IC.
#
# Translate the condition into dependency graph:
# PX -> [IA, IB, PY, IC, PZ]
# PY -> [IC]
# Public targets: [PX, PY, PZ]
#
# The collapsed dependencies of PX: [PY, PZ].
# The excluded dependencies of PX: [PY, IC, PZ].
# (IC is excluded as a dependency of PX. It is already included in PY, hence
# it would be redundant to include it again.)
#
# Target PX should include source files and headers of [PX, IA, IB] as final
# build metadata.
for dep in transitive_deps:
if dep not in exclude_deps and dep not in transitive_public_deps:
if dep in bazel_rules:
collapsed_srcs.update(_extract_sources(bazel_rules[dep]))
collapsed_public_headers.update(
_extract_public_headers(bazel_rules[dep]))
collapsed_headers.update(
_extract_nonpublic_headers(bazel_rules[dep]))
# This item is a "visited" flag
bazel_rule['_PROCESSING_DONE'] = True
# The following items are described in the docstring.
bazel_rule['_TRANSITIVE_DEPS'] = list(sorted(transitive_deps))
bazel_rule['_COLLAPSED_DEPS'] = list(sorted(collapsed_deps))
bazel_rule['_COLLAPSED_SRCS'] = list(sorted(collapsed_srcs))
bazel_rule['_COLLAPSED_PUBLIC_HEADERS'] = list(
sorted(collapsed_public_headers))
bazel_rule['_COLLAPSED_HEADERS'] = list(sorted(collapsed_headers))
bazel_rule['_EXCLUDE_DEPS'] = list(sorted(exclude_deps))
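# --- Editor's illustrative sketch (not part of the script) -------------------
# A tiny, self-contained model of the "collapsing" described in the docstring
# above: only public targets survive in the collapsed deps list, and a public
# dep is dropped if another public dep already pulls it in. The toy graph below
# mirrors the end2end_tests example from the docstring; it does not use the
# real bazel_rules structures.
def _toy_collapsed_deps(rule_name, deps_graph, public_targets):
    """Return the collapsed (shortest) public deps list for rule_name."""

    def _transitive(name, seen=None):
        # plain depth-first transitive closure over the toy graph
        seen = set() if seen is None else seen
        for dep in deps_graph.get(name, []):
            if dep not in seen:
                seen.add(dep)
                _transitive(dep, seen)
        return seen

    candidates = {d for d in _transitive(rule_name) if d in public_targets}
    # drop any candidate that some other candidate already depends on transitively
    return sorted(dep for dep in candidates
                  if not any(dep in _transitive(other)
                             for other in candidates - {dep}))

_toy_graph = {
    'end2end_tests': ['grpc_test_util', 'grpc', 'gpr', 'address_sorting', 'upb'],
    'grpc_test_util': ['grpc', 'gpr', 'address_sorting', 'upb'],
    'grpc': ['gpr', 'address_sorting', 'upb'],
    'gpr': [], 'address_sorting': [], 'upb': [],
}
# Matches the docstring: the long transitive lists collapse to the shortest form.
assert _toy_collapsed_deps('end2end_tests', _toy_graph, set(_toy_graph)) == ['grpc_test_util']
assert _toy_collapsed_deps('grpc_test_util', _toy_graph, set(_toy_graph)) == ['grpc']
assert _toy_collapsed_deps('grpc', _toy_graph, set(_toy_graph)) == ['address_sorting', 'gpr', 'upb']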
# TODO(jtattermusch): deduplicate with transitive_dependencies.py (which has a slightly different logic)
# TODO(jtattermusch): This is done to avoid introducing too many intermediate
# libraries into the build.yaml-based builds (which might cause issues when
# building language-specific artifacts) and also because the libraries
# in the build.yaml-based build are generally considered units of distribution
# (= public libraries that are visible to the user and are installable),
# while in bazel builds it is customary to define a larger number of smaller
# "sublibraries". The need for elision (and expansion)
# of intermediate libraries can be re-evaluated in the future.
def _populate_transitive_metadata(bazel_rules: Any,
public_dep_names: Iterable[str]) -> None:
"""Add 'transitive_deps' field for each of the rules"""
# Create the map between Bazel label and public dependency name
bazel_label_to_dep_name = {}
for dep_name in public_dep_names:
bazel_label_to_dep_name[_get_bazel_label(dep_name)] = dep_name
bazel_dep = build_order[-1]
to_expand.remove(bazel_dep)
# Make sure we reached all the Bazel rules
# TODO(lidiz) potentially we could only update a subset of rules
for rule_name in bazel_rules:
if '_PROCESSING_DONE' not in bazel_rules[rule_name]:
_compute_transitive_metadata(rule_name, bazel_rules,
bazel_label_to_dep_name)
is_public = bazel_dep in bazel_label_to_dep_name
external_dep_name_maybe = _external_dep_name_from_bazel_dependency(
bazel_dep)
if is_public:
# this is not an intermediate dependency we so we add it
# to the list of public dependencies to the list, in the right format
deps.add(bazel_label_to_dep_name[bazel_dep])
def update_test_metadata_with_transitive_metadata(
all_extra_metadata: BuildDict, bazel_rules: BuildDict) -> None:
"""Patches test build metadata with transitive metadata."""
for lib_name, lib_dict in all_extra_metadata.items():
# Skip if it isn't a test
if lib_dict.get('build') != 'test' or lib_dict.get('_TYPE') != 'target':
continue
# we do not want to expand any intermediate libraries that are already included
# by the dependency we just added
expansion_blocklist.update(
bazel_rules[bazel_dep]['transitive_deps'])
bazel_rule = bazel_rules[_get_bazel_label(lib_name)]
elif external_dep_name_maybe:
deps.add(external_dep_name_maybe)
if '//external:benchmark' in bazel_rule['_TRANSITIVE_DEPS']:
lib_dict['benchmark'] = True
lib_dict['defaults'] = 'benchmark'
elif bazel_dep.startswith(
'//external:') or not bazel_dep.startswith('//'):
# all the other external deps can be skipped
pass
if '//external:gtest' in bazel_rule['_TRANSITIVE_DEPS']:
lib_dict['gtest'] = True
lib_dict['language'] = 'c++'
elif bazel_dep in expansion_blocklist:
# do not expand if a public dependency that depends on this has already been expanded
pass
else:
if bazel_dep in bazel_rules:
# this is an intermediate library, expand it
public_headers.update(
_extract_public_headers(bazel_rules[bazel_dep]))
headers.update(
_extract_nonpublic_headers(bazel_rules[bazel_dep]))
src.update(_extract_sources(bazel_rules[bazel_dep]))
new_deps = _extract_deps(bazel_rules[bazel_dep])
to_expand.update(new_deps)
else:
raise Exception(bazel_dep + ' not in bazel_rules')
# make the 'deps' field transitive, but only list non-intermediate deps and selected external deps
bazel_transitive_deps = bazel_rules[_get_bazel_label(
target_name)]['transitive_deps']
for transitive_bazel_dep in bazel_transitive_deps:
public_name = bazel_label_to_dep_name.get(transitive_bazel_dep, None)
if public_name:
deps.add(public_name)
external_dep_name_maybe = _external_dep_name_from_bazel_dependency(
transitive_bazel_dep)
if external_dep_name_maybe:
# expanding all absl libraries is technically correct but creates too much noise
if not external_dep_name_maybe.startswith('absl'):
deps.add(external_dep_name_maybe)
target_dict['public_headers'] = list(sorted(public_headers))
target_dict['headers'] = list(sorted(headers))
target_dict['src'] = list(sorted(src))
target_dict['deps'] = list(sorted(deps))
def _generate_build_metadata(build_extra_metadata, bazel_rules):
def _generate_build_metadata(build_extra_metadata: BuildDict,
bazel_rules: BuildDict) -> BuildDict:
"""Generate build metadata in build.yaml-like format bazel build metadata and build.yaml-specific "extra metadata"."""
lib_names = list(build_extra_metadata.keys())
result = {}
@ -346,20 +385,6 @@ def _generate_build_metadata(build_extra_metadata, bazel_rules):
for lib_name in lib_names:
lib_dict = _create_target_from_bazel_rule(lib_name, bazel_rules)
# Figure out the final list of headers and sources for given target.
# While this is mostly based on bazel build metadata, build.yaml does
# not necessarily expose all the targets that are present in bazel build.
# These "intermediate dependencies" might get flattened.
# TODO(jtattermusch): This is done to avoid introducing too many intermediate
# libraries into the build.yaml-based builds (which might in cause issues
# building language-specific artifacts) and also because the libraries
# in build.yaml-based build are generally considered units of distributions
# (= public libraries that are visible to the user and are installable),
# while in bazel builds it is customary to define larger number of smaller
# "sublibraries". The need for elision (and expansion)
# of intermediate libraries can be re-evaluated in the future.
_expand_intermediate_deps(lib_dict, lib_names, bazel_rules)
# populate extra properties from the build.yaml-specific "extra metadata"
lib_dict.update(build_extra_metadata.get(lib_name, {}))
@ -376,8 +401,8 @@ def _generate_build_metadata(build_extra_metadata, bazel_rules):
if to_name:
# store lib under the new name and also change its 'name' property
if to_name in result:
raise Exception('Cannot rename target ' + lib_name + ', ' +
to_name + ' already exists.')
raise Exception('Cannot rename target ' + str(lib_name) + ', ' +
str(to_name) + ' already exists.')
lib_dict = result.pop(lib_name)
lib_dict['name'] = to_name
result[to_name] = lib_dict
@ -389,15 +414,10 @@ def _generate_build_metadata(build_extra_metadata, bazel_rules):
for dep in lib_dict_to_update['deps']
])
# make sure deps are listed in reverse topological order (e.g. "grpc gpr" and not "gpr grpc")
for lib_dict in result.values():
lib_dict['deps'] = list(
reversed(_sort_by_build_order(lib_dict['deps'], result, 'deps')))
return result
def _convert_to_build_yaml_like(lib_dict):
def _convert_to_build_yaml_like(lib_dict: BuildMetadata) -> BuildYaml:
lib_names = [
lib_name for lib_name in list(lib_dict.keys())
if lib_dict[lib_name].get('_TYPE', 'library') == 'library'
@ -440,7 +460,7 @@ def _convert_to_build_yaml_like(lib_dict):
return build_yaml_like
def _extract_cc_tests(bazel_rules):
def _extract_cc_tests(bazel_rules: BuildDict) -> List[str]:
"""Gets list of cc_test tests from bazel rules"""
result = []
for bazel_rule in bazel_rules.values():
@ -452,7 +472,7 @@ def _extract_cc_tests(bazel_rules):
return list(sorted(result))
def _exclude_unwanted_cc_tests(tests):
def _exclude_unwanted_cc_tests(tests: List[str]) -> List[str]:
"""Filters out bazel tests that we don't want to run with other build systems or we cannot build them reasonably"""
# most qps tests are autogenerated, we are fine without them
@ -518,7 +538,8 @@ def _exclude_unwanted_cc_tests(tests):
return tests
def _generate_build_extra_metadata_for_tests(tests, bazel_rules):
def _generate_build_extra_metadata_for_tests(
tests: List[str], bazel_rules: BuildDict) -> BuildDict:
"""For given tests, generate the "extra metadata" that we need for our "build.yaml"-like output. The extra metadata is generated from the bazel rule metadata by using a bunch of heuristics."""
test_metadata = {}
for test in tests:
@ -567,19 +588,11 @@ def _generate_build_extra_metadata_for_tests(tests, bazel_rules):
platforms.append('windows')
test_dict['platforms'] = platforms
if '//external:benchmark' in bazel_rule['transitive_deps']:
test_dict['benchmark'] = True
test_dict['defaults'] = 'benchmark'
cmdline_args = bazel_rule['args']
if cmdline_args:
test_dict['args'] = list(cmdline_args)
uses_gtest = '//external:gtest' in bazel_rule['transitive_deps']
if uses_gtest:
test_dict['gtest'] = True
if test.startswith('test/cpp') or uses_gtest:
if test.startswith('test/cpp'):
test_dict['language'] = 'c++'
elif test.startswith('test/core'):
@ -615,7 +628,7 @@ def _generate_build_extra_metadata_for_tests(tests, bazel_rules):
return test_metadata
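# Editor's illustrative sketch (not part of the script): rough shape of one
# test_metadata entry produced by the heuristics above. The test name is
# hypothetical; the fields shown are those referenced elsewhere in this file
# ('build'/'_TYPE' are checked in update_test_metadata_with_transitive_metadata).
#
#   test_metadata['test/cpp/some/fake_test'] = {
#       'build': 'test',
#       '_TYPE': 'target',
#       'platforms': [..., 'windows'],
#       'args': [...],         # only when the bazel rule declares cmdline args
#       'language': 'c++',     # tests under test/cpp are marked as C++
#   }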
def _detect_and_print_issues(build_yaml_like):
def _detect_and_print_issues(build_yaml_like: BuildYaml) -> None:
"""Try detecting some unusual situations and warn about them."""
for tgt in build_yaml_like['targets']:
if tgt['build'] == 'test':
@ -968,6 +981,8 @@ _BAZEL_DEPS_QUERIES = [
'deps("//:all")',
'deps("//src/compiler/...")',
'deps("//src/proto/...")',
# The ^ is needed to differentiate proto_library from go_proto_library
'deps(kind("^proto_library", @envoy_api//envoy/...))',
]
# Step 1: run a bunch of "bazel query --output xml" queries to collect
@ -987,14 +1002,6 @@ for query in _BAZEL_DEPS_QUERIES:
bazel_rules.update(
_extract_rules_from_bazel_xml(_bazel_query_xml_tree(query)))
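# Editor's note (illustrative, not part of the script): each query string in
# _BAZEL_DEPS_QUERIES is passed to _bazel_query_xml_tree above, i.e. it is
# roughly equivalent to running
#   tools/bazel query --noimplicit_deps --output xml '<QUERY>'
# and parsing the XML output into bazel_rules. In the last query, the "^"
# anchors the rule-kind regex so that kind() matches proto_library rules but
# not go_proto_library rules.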
# Step 1a: Knowing the transitive closure of dependencies will make
# the postprocessing simpler, so compute the info for all our rules.
#
# Example:
# '//:grpc' : { ...,
# 'transitive_deps': ['//:gpr_base', ...] }
_populate_transitive_deps(bazel_rules)
# Step 2: Extract the known bazel cc_test tests. While most tests
# will be buildable with other build systems just fine, some of these tests
# would be too difficult to build and run with other build systems,
@ -1049,7 +1056,25 @@ all_extra_metadata.update(_BUILD_EXTRA_METADATA)
all_extra_metadata.update(
_generate_build_extra_metadata_for_tests(tests, bazel_rules))
# Step 4: Generate the final metadata for all the targets.
# Step 4: Compute the build metadata that will be used in the final build.yaml.
# The final build metadata includes transitive dependencies, with the sources/headers
# of intermediate dependencies merged in (so the intermediate targets are not needed).
# Example:
# '//:grpc' : { ...,
# '_TRANSITIVE_DEPS': ['//:gpr_base', ...],
# '_COLLAPSED_DEPS': ['gpr', ...],
# '_COLLAPSED_SRCS': [...],
# '_COLLAPSED_PUBLIC_HEADERS': [...],
# '_COLLAPSED_HEADERS': [...]
# }
_populate_transitive_metadata(bazel_rules, all_extra_metadata.keys())
# Step 4a: Update the existing test metadata with the updated build metadata.
# Some build metadata of test targets depends on the transitive
# metadata that wasn't available earlier.
update_test_metadata_with_transitive_metadata(all_extra_metadata, bazel_rules)
# Step 5: Generate the final metadata for all the targets.
# This is done by combining the bazel build metadata and the "extra metadata"
# we obtained in the previous step.
# In this step, we also perform some interesting massaging of the target metadata
@ -1079,7 +1104,7 @@ all_extra_metadata.update(
# ... }
all_targets_dict = _generate_build_metadata(all_extra_metadata, bazel_rules)
# Step 5: convert the dictionary with all the targets to a dict that has
# Step 6: convert the dictionary with all the targets to a dict that has
# the desired "build.yaml"-like layout.
# TODO(jtattermusch): We use the custom "build.yaml"-like layout because
# currently all other build systems use that format as their source of truth.
@ -1096,7 +1121,7 @@ build_yaml_like = _convert_to_build_yaml_like(all_targets_dict)
# detect and report some suspicious situations we've seen before
_detect_and_print_issues(build_yaml_like)
# Step 6: Store the build_autogenerated.yaml in a deterministic (=sorted)
# Step 7: Store the build_autogenerated.yaml in a deterministic (=sorted)
# and cleaned-up form.
# A basic overview of the resulting "build.yaml"-like format is here:
# https://github.com/grpc/grpc/blob/master/templates/README.md
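# ---------------------------------------------------------------------------
# Editor's summary sketch (not part of the script): the overall flow after this
# change, written as pseudocode mirroring the numbered steps above. The exact
# composition of the `tests` line is an assumption based on the Step 2 comment;
# the other calls appear verbatim in this file.
#
#   bazel_rules = {}
#   for query in _BAZEL_DEPS_QUERIES:
#       bazel_rules.update(
#           _extract_rules_from_bazel_xml(_bazel_query_xml_tree(query)))
#   tests = _exclude_unwanted_cc_tests(_extract_cc_tests(bazel_rules))
#   all_extra_metadata = {}
#   all_extra_metadata.update(_BUILD_EXTRA_METADATA)
#   all_extra_metadata.update(
#       _generate_build_extra_metadata_for_tests(tests, bazel_rules))
#   _populate_transitive_metadata(bazel_rules, all_extra_metadata.keys())
#   update_test_metadata_with_transitive_metadata(all_extra_metadata, bazel_rules)
#   all_targets_dict = _generate_build_metadata(all_extra_metadata, bazel_rules)
#   build_yaml_like = _convert_to_build_yaml_like(all_targets_dict)
#   _detect_and_print_issues(build_yaml_like)
#   ...  # Step 7: write build_autogenerated.yaml in deterministic, cleaned-up form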
