Revert "Run 2to3 on tools directory (#26002)" (#27183)

This reverts commit a388361c3d.
Richard Belleville committed via GitHub (3 years ago)
parent 76e95f6afd
commit b6cc72f129
61 changed files:

  4  tools/buildgen/_utils.py
 39  tools/buildgen/extract_metadata_from_bazel_xml.py
  2  tools/buildgen/plugins/check_attrs.py
  5  tools/buildgen/plugins/list_api.py
  2  tools/buildgen/plugins/transitive_dependencies.py
  4  tools/codegen/core/gen_header_frame.py
  7  tools/codegen/core/gen_server_registered_method_bad_client_test_body.py
111  tools/codegen/core/gen_settings_ids.py
321  tools/codegen/core/gen_static_metadata.py
267  tools/codegen/core/gen_stats_data.py
  4  tools/debug/core/chttp2_ref_leak.py
  2  tools/debug/core/error_ref_leak.py
 14  tools/distrib/c-ish/check_documentation.py
  6  tools/distrib/check_copyright.py
 12  tools/distrib/check_include_guards.py
  4  tools/distrib/run_clang_tidy.py
 13  tools/gcp/github_stats_tracking/app.yaml
 19  tools/gcp/github_stats_tracking/appengine_config.py
  4  tools/gcp/github_stats_tracking/cron.yaml
 96  tools/gcp/github_stats_tracking/fetch_data.py
 28  tools/gcp/github_stats_tracking/main.py
211  tools/github/pr_latency.py
  6  tools/interop_matrix/create_matrix_images.py
  8  tools/interop_matrix/run_interop_matrix_tests.py
 42  tools/line_count/collect-history.py
 38  tools/line_count/summarize-history.py
 10  tools/mkowners/mkowners.py
 10  tools/profiling/ios_bin/parse_link_map.py
 12  tools/profiling/latency_profile/profile_analyzer.py
  2  tools/profiling/microbenchmarks/bm2bq.py
  6  tools/profiling/microbenchmarks/bm_diff/bm_diff.py
  2  tools/profiling/microbenchmarks/bm_diff/bm_main.py
  4  tools/profiling/microbenchmarks/bm_diff/bm_speedup.py
  2  tools/profiling/microbenchmarks/bm_json.py
 16  tools/profiling/qps/qps_diff.py
 35  tools/release/release_notes.py
  2  tools/run_tests/artifacts/artifact_targets.py
  2  tools/run_tests/artifacts/distribtest_targets.py
  2  tools/run_tests/artifacts/package_targets.py
  4  tools/run_tests/lb_interop_tests/gen_build_yaml.py
  2  tools/run_tests/performance/bq_upload_result.py
  5  tools/run_tests/performance/loadtest_config.py
  5  tools/run_tests/performance/loadtest_template.py
  2  tools/run_tests/performance/massage_qps_stats.py
  2  tools/run_tests/python_utils/dockerjob.py
  2  tools/run_tests/python_utils/filter_pull_request_tests.py
  4  tools/run_tests/python_utils/jobset.py
  2  tools/run_tests/python_utils/port_server.py
 11  tools/run_tests/python_utils/report_utils.py
 18  tools/run_tests/python_utils/upload_rbe_results.py
 12  tools/run_tests/run_grpclb_interop_tests.py
 14  tools/run_tests/run_interop_tests.py
  5  tools/run_tests/run_performance_tests.py
  4  tools/run_tests/run_tests.py
 17  tools/run_tests/run_xds_tests.py
  8  tools/run_tests/sanity/check_bazel_workspace.py
 10  tools/run_tests/sanity/check_deprecated_grpc++.py
  4  tools/run_tests/sanity/check_tracer_sanity.py
 14  tools/run_tests/sanity/check_version.py
  4  tools/run_tests/sanity/core_banned_functions.py
  4  tools/run_tests/task_runner.py

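The hunks below undo mechanical 2to3 rewrites across these scripts: print() calls go back to Python 2 print statements, dict views are no longer wrapped in list(...), and Python 2 idioms such as iteritems() and has_key() return. As a rough, hypothetical illustration of the pattern being reverted (not taken from the diff; written in Python 3 syntax, with the restored Python 2 forms noted in comments):

    import sys

    stats = {"requests": 10, "errors": 1}

    # 2to3 output, which this commit removes: print() function calls and
    # list()-wrapped dict views.
    for name, count in list(stats.items()):  # Python 2 form restored by the revert: stats.iteritems()
        print("%s: %d" % (name, count), file=sys.stderr)  # Python 2 form: print >> sys.stderr, ...
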
@@ -45,7 +45,7 @@ def to_bunch(var: Any) -> Any:
return [to_bunch(i) for i in var]
if isinstance(var, dict):
ret = {}
for k, v in list(var.items()):
for k, v in var.items():
if isinstance(v, (list, dict)):
v = to_bunch(v)
ret[k] = v
@@ -57,7 +57,7 @@ def to_bunch(var: Any) -> Any:
def merge_json(dst: Union[Mapping, List], add: Union[Mapping, List]) -> None:
"""Merges JSON objects recursively."""
if isinstance(dst, dict) and isinstance(add, dict):
for k, v in list(add.items()):
for k, v in add.items():
if k in dst:
if k.startswith('#'):
continue

@@ -286,11 +286,12 @@ def _compute_transitive_metadata(
# Calculate transitive public deps (needed for collapsing sources)
transitive_public_deps = set(
[x for x in transitive_deps if x in bazel_label_to_dep_name])
filter(lambda x: x in bazel_label_to_dep_name, transitive_deps))
# Remove intermediate targets that our public dependencies already depend
# on. This is the step that further shorten the deps list.
collapsed_deps = set([x for x in collapsed_deps if x not in exclude_deps])
collapsed_deps = set(filter(lambda x: x not in exclude_deps,
collapsed_deps))
# Compute the final source files and headers for this build target whose
# name is `rule_name` (input argument of this function).
@@ -360,7 +361,7 @@ def _populate_transitive_metadata(bazel_rules: Any,
def update_test_metadata_with_transitive_metadata(
all_extra_metadata: BuildDict, bazel_rules: BuildDict) -> None:
"""Patches test build metadata with transitive metadata."""
for lib_name, lib_dict in list(all_extra_metadata.items()):
for lib_name, lib_dict in all_extra_metadata.items():
# Skip if it isn't not an test
if lib_dict.get('build') != 'test' or lib_dict.get('_TYPE') != 'target':
continue
@@ -408,7 +409,7 @@ def _generate_build_metadata(build_extra_metadata: BuildDict,
result[to_name] = lib_dict
# dep names need to be updated as well
for lib_dict_to_update in list(result.values()):
for lib_dict_to_update in result.values():
lib_dict_to_update['deps'] = list([
to_name if dep == lib_name else dep
for dep in lib_dict_to_update['deps']
@@ -438,21 +439,15 @@ def _convert_to_build_yaml_like(lib_dict: BuildMetadata) -> BuildYaml:
# get rid of temporary private fields prefixed with "_" and some other useless fields
for lib in lib_list:
for field_to_remove in [
k for k in list(lib.keys()) if k.startswith('_')
]:
for field_to_remove in [k for k in lib.keys() if k.startswith('_')]:
lib.pop(field_to_remove, None)
for target in target_list:
for field_to_remove in [
k for k in list(target.keys()) if k.startswith('_')
]:
for field_to_remove in [k for k in target.keys() if k.startswith('_')]:
target.pop(field_to_remove, None)
target.pop('public_headers',
None) # public headers make no sense for targets
for test in test_list:
for field_to_remove in [
k for k in list(test.keys()) if k.startswith('_')
]:
for field_to_remove in [k for k in test.keys() if k.startswith('_')]:
test.pop(field_to_remove, None)
test.pop('public_headers',
None) # public headers make no sense for tests
@@ -469,7 +464,7 @@ def _convert_to_build_yaml_like(lib_dict: BuildMetadata) -> BuildYaml:
def _extract_cc_tests(bazel_rules: BuildDict) -> List[str]:
"""Gets list of cc_test tests from bazel rules"""
result = []
for bazel_rule in list(bazel_rules.values()):
for bazel_rule in bazel_rules.values():
if bazel_rule['class'] == 'cc_test':
test_name = bazel_rule['name']
if test_name.startswith('//'):
@@ -571,7 +566,7 @@ def _generate_build_extra_metadata_for_tests(
if 'grpc_fuzzer' == bazel_rule['generator_function']:
# currently we hand-list fuzzers instead of generating them automatically
# because there's no way to obtain maxlen property from bazel BUILD file.
print(('skipping fuzzer ' + test))
print('skipping fuzzer ' + test)
continue
# if any tags that restrict platform compatibility are present,
@@ -615,20 +610,20 @@ def _generate_build_extra_metadata_for_tests(
# detect duplicate test names
tests_by_simple_name = {}
for test_name, test_dict in list(test_metadata.items()):
for test_name, test_dict in test_metadata.items():
simple_test_name = test_dict['_RENAME']
if not simple_test_name in tests_by_simple_name:
tests_by_simple_name[simple_test_name] = []
tests_by_simple_name[simple_test_name].append(test_name)
# choose alternative names for tests with a name collision
for collision_list in list(tests_by_simple_name.values()):
for collision_list in tests_by_simple_name.values():
if len(collision_list) > 1:
for test_name in collision_list:
long_name = test_name.replace('/', '_').replace(':', '_')
print((
print(
'short name of "%s" collides with another test, renaming to %s'
% (test_name, long_name)))
% (test_name, long_name))
test_metadata[test_name]['_RENAME'] = long_name
return test_metadata
@@ -640,8 +635,8 @@ def _detect_and_print_issues(build_yaml_like: BuildYaml) -> None:
if tgt['build'] == 'test':
for src in tgt['src']:
if src.startswith('src/') and not src.endswith('.proto'):
print(('source file from under "src/" tree used in test ' +
tgt['name'] + ': ' + src))
print('source file from under "src/" tree used in test ' +
tgt['name'] + ': ' + src)
# extra metadata that will be used to construct build.yaml
@@ -1073,7 +1068,7 @@ all_extra_metadata.update(
# '_COLLAPSED_PUBLIC_HEADERS': [...],
# '_COLLAPSED_HEADERS': [...]
# }
_populate_transitive_metadata(bazel_rules, list(all_extra_metadata.keys()))
_populate_transitive_metadata(bazel_rules, all_extra_metadata.keys())
# Step 4a: Update the existing test metadata with the updated build metadata.
# Certain build metadata of certain test targets depend on the transitive

@@ -95,7 +95,7 @@ VALID_ATTRIBUTE_KEYS_MAP = {
def check_attributes(entity, kind, errors):
attributes = VALID_ATTRIBUTE_KEYS_MAP[kind]
name = entity.get('name', anything())
for key, value in list(entity.items()):
for key, value in entity.items():
if key == 'name':
continue
validator = attributes.get(key)

@@ -65,6 +65,5 @@ def mako_plugin(dictionary):
if __name__ == '__main__':
print(
(yaml.dump([api for api in list_c_apis(headers_under('include/grpc'))
])))
print(yaml.dump([api for api in list_c_apis(headers_under('include/grpc'))
]))

@@ -53,7 +53,7 @@ def mako_plugin(dictionary):
"""
lib_map = {lib['name']: lib for lib in dictionary.get('libs')}
for target_name, target_list in list(dictionary.items()):
for target_name, target_list in dictionary.items():
for target in target_list:
if isinstance(target, dict):
if 'deps' in target or target_name == 'libs':

@@ -141,7 +141,7 @@ if args.hex:
all_bytes = []
for line in payload_bytes:
all_bytes.extend(line)
print(('{%s}' % ', '.join('0x%02x' % c for c in all_bytes)))
print '{%s}' % ', '.join('0x%02x' % c for c in all_bytes)
else:
for line in payload_bytes:
print((esc_c(line)))
print esc_c(line)

@@ -49,8 +49,7 @@ for message_length in range(0, 3):
] + payload[0:frame_length]
text = esc_c(frame)
if text not in done:
print(
('GRPC_RUN_BAD_CLIENT_TEST(verifier_%s, PFX_STR %s, %s);' %
('succeeds' if is_end else 'fails', text,
'0' if is_end else 'GRPC_BAD_CLIENT_DISCONNECT')))
print 'GRPC_RUN_BAD_CLIENT_TEST(verifier_%s, PFX_STR %s, %s);' % (
'succeeds' if is_end else 'fails', text,
'0' if is_end else 'GRPC_BAD_CLIENT_DISCONNECT')
done.add(text)

@@ -57,11 +57,11 @@ C = open('src/core/ext/transport/chttp2/transport/http2_settings.c', 'w')
# utility: print a big comment block into a set of files
def put_banner(files, banner):
for f in files:
print('/*', file=f)
print >> f, '/*'
for line in banner:
print(' * %s' % line, file=f)
print(' */', file=f)
print(file=f)
print >> f, ' * %s' % line
print >> f, ' */'
print >> f
# copy-paste copyright notice from this file
@@ -84,24 +84,21 @@ put_banner(
[H, C],
["Automatically generated by tools/codegen/core/gen_settings_ids.py"])
print("#ifndef GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H",
file=H)
print("#define GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H",
file=H)
print(file=H)
print("#include <stdint.h>", file=H)
print("#include <stdbool.h>", file=H)
print(file=H)
print >> H, "#ifndef GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H"
print >> H, "#define GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H"
print >> H
print >> H, "#include <stdint.h>"
print >> H, "#include <stdbool.h>"
print >> H
print("#include \"src/core/ext/transport/chttp2/transport/http2_settings.h\"",
file=C)
print(file=C)
print("#include <grpc/support/useful.h>", file=C)
print("#include \"src/core/lib/transport/http2_errors.h\"", file=C)
print(file=C)
print >> C, "#include \"src/core/ext/transport/chttp2/transport/http2_settings.h\""
print >> C
print >> C, "#include <grpc/support/useful.h>"
print >> C, "#include \"src/core/lib/transport/http2_errors.h\""
print >> C
p = perfection.hash_parameters(sorted(x.id for x in list(_SETTINGS.values())))
print(p)
p = perfection.hash_parameters(sorted(x.id for x in _SETTINGS.values()))
print p
def hash(i):
@@ -113,60 +110,53 @@ def hash(i):
decorated_settings = [
DecoratedSetting(hash(setting.id), name, setting)
for name, setting in _SETTINGS.items()
for name, setting in _SETTINGS.iteritems()
]
print('typedef enum {', file=H)
print >> H, 'typedef enum {'
for decorated_setting in sorted(decorated_settings):
print(' GRPC_CHTTP2_SETTINGS_%s = %d, /* wire id %d */' %
(decorated_setting.name, decorated_setting.enum,
decorated_setting.setting.id),
file=H)
print('} grpc_chttp2_setting_id;', file=H)
print(file=H)
print('#define GRPC_CHTTP2_NUM_SETTINGS %d' %
(max(x.enum for x in decorated_settings) + 1),
file=H)
print('extern const uint16_t grpc_setting_id_to_wire_id[];', file=H)
print('const uint16_t grpc_setting_id_to_wire_id[] = {%s};' %
','.join('%d' % s for s in p.slots),
file=C)
print(file=H)
print(
"bool grpc_wire_id_to_setting_id(uint32_t wire_id, grpc_chttp2_setting_id *out);",
file=H)
print >> H, ' GRPC_CHTTP2_SETTINGS_%s = %d, /* wire id %d */' % (
decorated_setting.name, decorated_setting.enum,
decorated_setting.setting.id)
print >> H, '} grpc_chttp2_setting_id;'
print >> H
print >> H, '#define GRPC_CHTTP2_NUM_SETTINGS %d' % (
max(x.enum for x in decorated_settings) + 1)
print >> H, 'extern const uint16_t grpc_setting_id_to_wire_id[];'
print >> C, 'const uint16_t grpc_setting_id_to_wire_id[] = {%s};' % ','.join(
'%d' % s for s in p.slots)
print >> H
print >> H, "bool grpc_wire_id_to_setting_id(uint32_t wire_id, grpc_chttp2_setting_id *out);"
cgargs = {
'r': ','.join('%d' % (r if r is not None else 0) for r in p.r),
't': p.t,
'offset': abs(p.offset),
'offset_sign': '+' if p.offset > 0 else '-'
}
print("""
print >> C, """
bool grpc_wire_id_to_setting_id(uint32_t wire_id, grpc_chttp2_setting_id *out) {
uint32_t i = wire_id %(offset_sign)s %(offset)d;
uint32_t x = i %% %(t)d;
uint32_t y = i / %(t)d;
uint32_t h = x;
switch (y) {
""" % cgargs,
file=C)
""" % cgargs
for i, r in enumerate(p.r):
if not r:
continue
if r < 0:
print('case %d: h -= %d; break;' % (i, -r), file=C)
print >> C, 'case %d: h -= %d; break;' % (i, -r)
else:
print('case %d: h += %d; break;' % (i, r), file=C)
print("""
print >> C, 'case %d: h += %d; break;' % (i, r)
print >> C, """
}
*out = (grpc_chttp2_setting_id)h;
return h < GPR_ARRAY_SIZE(grpc_setting_id_to_wire_id) && grpc_setting_id_to_wire_id[h] == wire_id;
}
""" % cgargs,
file=C)
""" % cgargs
print("""
print >> H, """
typedef enum {
GRPC_CHTTP2_CLAMP_INVALID_VALUE,
GRPC_CHTTP2_DISCONNECT_ON_INVALID_VALUE
@@ -182,33 +172,26 @@ typedef struct {
} grpc_chttp2_setting_parameters;
extern const grpc_chttp2_setting_parameters grpc_chttp2_settings_parameters[GRPC_CHTTP2_NUM_SETTINGS];
""",
file=H)
print(
"const grpc_chttp2_setting_parameters grpc_chttp2_settings_parameters[GRPC_CHTTP2_NUM_SETTINGS] = {",
file=C)
"""
print >> C, "const grpc_chttp2_setting_parameters grpc_chttp2_settings_parameters[GRPC_CHTTP2_NUM_SETTINGS] = {"
i = 0
for decorated_setting in sorted(decorated_settings):
while i < decorated_setting.enum:
print(
"{NULL, 0, 0, 0, GRPC_CHTTP2_DISCONNECT_ON_INVALID_VALUE, GRPC_HTTP2_PROTOCOL_ERROR},",
file=C)
print >> C, "{NULL, 0, 0, 0, GRPC_CHTTP2_DISCONNECT_ON_INVALID_VALUE, GRPC_HTTP2_PROTOCOL_ERROR},"
i += 1
print("{\"%s\", %du, %du, %du, GRPC_CHTTP2_%s, GRPC_HTTP2_%s}," % (
print >> C, "{\"%s\", %du, %du, %du, GRPC_CHTTP2_%s, GRPC_HTTP2_%s}," % (
decorated_setting.name,
decorated_setting.setting.default,
decorated_setting.setting.min,
decorated_setting.setting.max,
decorated_setting.setting.on_error.behavior,
decorated_setting.setting.on_error.code,
),
file=C)
)
i += 1
print("};", file=C)
print >> C, "};"
print(file=H)
print("#endif /* GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H */",
file=H)
print >> H
print >> H, "#endif /* GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H */"
H.close()
C.close()

@@ -249,11 +249,11 @@ def fake_hash(elem):
# utility: print a big comment block into a set of files
def put_banner(files, banner):
for f in files:
print('/*', file=f)
print >> f, '/*'
for line in banner:
print(' * %s' % line, file=f)
print(' */', file=f)
print(file=f)
print >> f, ' * %s' % line
print >> f, ' */'
print >> f
# build a list of all the strings we need
@@ -372,21 +372,21 @@ See metadata.h for an explanation of the interface here, and metadata.cc for
an explanation of what's going on.
""".splitlines())
print('#ifndef GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H', file=H)
print('#define GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H', file=H)
print(file=H)
print('#include <grpc/support/port_platform.h>', file=H)
print(file=H)
print('#include <cstdint>', file=H)
print(file=H)
print('#include "src/core/lib/transport/metadata.h"', file=H)
print(file=H)
print('#include <grpc/support/port_platform.h>', file=C)
print(file=C)
print('#include "src/core/lib/transport/static_metadata.h"', file=C)
print(file=C)
print('#include "src/core/lib/slice/slice_internal.h"', file=C)
print(file=C)
print >> H, '#ifndef GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H'
print >> H, '#define GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H'
print >> H
print >> H, '#include <grpc/support/port_platform.h>'
print >> H
print >> H, '#include <cstdint>'
print >> H
print >> H, '#include "src/core/lib/transport/metadata.h"'
print >> H
print >> C, '#include <grpc/support/port_platform.h>'
print >> C
print >> C, '#include "src/core/lib/transport/static_metadata.h"'
print >> C
print >> C, '#include "src/core/lib/slice/slice_internal.h"'
print >> C
str_ofs = 0
id2strofs = {}
@@ -420,9 +420,9 @@ static_slice_dest_assert = (
'static_assert(std::is_trivially_destructible' +
'<grpc_core::StaticMetadataSlice>::value, '
'"grpc_core::StaticMetadataSlice must be trivially destructible.");')
print(static_slice_dest_assert, file=H)
print('#define GRPC_STATIC_MDSTR_COUNT %d' % len(all_strs), file=H)
print('''
print >> H, static_slice_dest_assert
print >> H, '#define GRPC_STATIC_MDSTR_COUNT %d' % len(all_strs)
print >> H, '''
void grpc_init_static_metadata_ctx(void);
void grpc_destroy_static_metadata_ctx(void);
namespace grpc_core {
@@ -438,19 +438,16 @@ inline const grpc_core::StaticMetadataSlice* grpc_static_slice_table() {
GPR_DEBUG_ASSERT(grpc_core::g_static_metadata_slice_table != nullptr);
return grpc_core::g_static_metadata_slice_table;
}
''',
file=H)
'''
for i, elem in enumerate(all_strs):
print('/* "%s" */' % elem, file=H)
print('#define %s (grpc_static_slice_table()[%d])' %
(mangle(elem).upper(), i),
file=H)
print(file=H)
print('static constexpr uint8_t g_bytes[] = {%s};' %
(','.join('%d' % ord(c) for c in ''.join(all_strs))),
file=C)
print(file=C)
print('''
print >> H, '/* "%s" */' % elem
print >> H, '#define %s (grpc_static_slice_table()[%d])' % (
mangle(elem).upper(), i)
print >> H
print >> C, 'static constexpr uint8_t g_bytes[] = {%s};' % (','.join(
'%d' % ord(c) for c in ''.join(all_strs)))
print >> C
print >> H, '''
namespace grpc_core {
struct StaticSliceRefcount;
extern StaticSliceRefcount* g_static_metadata_slice_refcounts;
@@ -461,11 +458,9 @@ inline grpc_core::StaticSliceRefcount* grpc_static_metadata_refcounts() {
GPR_DEBUG_ASSERT(grpc_core::g_static_metadata_slice_refcounts != nullptr);
return grpc_core::g_static_metadata_slice_refcounts;
}
''',
file=H)
print('grpc_slice_refcount grpc_core::StaticSliceRefcount::kStaticSubRefcount;',
file=C)
print('''
'''
print >> C, 'grpc_slice_refcount grpc_core::StaticSliceRefcount::kStaticSubRefcount;'
print >> C, '''
namespace grpc_core {
struct StaticMetadataCtx {
#ifndef NDEBUG
@@ -473,40 +468,34 @@ struct StaticMetadataCtx {
#endif
StaticSliceRefcount
refcounts[GRPC_STATIC_MDSTR_COUNT] = {
''',
file=C)
'''
for i, elem in enumerate(all_strs):
print(' StaticSliceRefcount(%d), ' % i, file=C)
print('};', file=C) # static slice refcounts
print(file=C)
print('''
print >> C, ' StaticSliceRefcount(%d), ' % i
print >> C, '};' # static slice refcounts
print >> C
print >> C, '''
const StaticMetadataSlice
slices[GRPC_STATIC_MDSTR_COUNT] = {
''',
file=C)
'''
for i, elem in enumerate(all_strs):
print(slice_def_for_ctx(i) + ',', file=C)
print('};', file=C) # static slices
print('StaticMetadata static_mdelem_table[GRPC_STATIC_MDELEM_COUNT] = {',
file=C)
print >> C, slice_def_for_ctx(i) + ','
print >> C, '};' # static slices
print >> C, 'StaticMetadata static_mdelem_table[GRPC_STATIC_MDELEM_COUNT] = {'
for idx, (a, b) in enumerate(all_elems):
print('StaticMetadata(%s,%s, %d),' %
(slice_def_for_ctx(str_idx(a)), slice_def_for_ctx(str_idx(b)), idx),
file=C)
print('};', file=C) # static_mdelem_table
print(('''
print >> C, 'StaticMetadata(%s,%s, %d),' % (slice_def_for_ctx(
str_idx(a)), slice_def_for_ctx(str_idx(b)), idx)
print >> C, '};' # static_mdelem_table
print >> C, ('''
/* Warning: the core static metadata currently operates under the soft constraint
that the first GRPC_CHTTP2_LAST_STATIC_ENTRY (61) entries must contain
metadata specified by the http2 hpack standard. The CHTTP2 transport reads the
core metadata with this assumption in mind. If the order of the core static
metadata is to be changed, then the CHTTP2 transport must be changed as well to
stop relying on the core metadata. */
'''),
file=C)
print(('grpc_mdelem '
'static_mdelem_manifested[GRPC_STATIC_MDELEM_COUNT] = {'),
file=C)
print('// clang-format off', file=C)
''')
print >> C, ('grpc_mdelem '
'static_mdelem_manifested[GRPC_STATIC_MDELEM_COUNT] = {')
print >> C, '// clang-format off'
static_mds = []
for i, elem in enumerate(all_elems):
md_name = mangle(elem).upper()
@@ -516,12 +505,12 @@ for i, elem in enumerate(all_elems):
md_spec += ((' &static_mdelem_table[%d].data(),\n' % i) +
' GRPC_MDELEM_STORAGE_STATIC)')
static_mds.append(md_spec)
print(',\n'.join(static_mds), file=C)
print('// clang-format on', file=C)
print(('};'), file=C) # static_mdelem_manifested
print('};', file=C) # struct StaticMetadataCtx
print('}', file=C) # namespace grpc_core
print('''
print >> C, ',\n'.join(static_mds)
print >> C, '// clang-format on'
print >> C, ('};') # static_mdelem_manifested
print >> C, '};' # struct StaticMetadataCtx
print >> C, '}' # namespace grpc_core
print >> C, '''
namespace grpc_core {
static StaticMetadataCtx* g_static_metadata_slice_ctx = nullptr;
const StaticMetadataSlice* g_static_metadata_slice_table = nullptr;
@@ -557,32 +546,27 @@ void grpc_destroy_static_metadata_ctx(void) {
grpc_core::g_static_mdelem_manifested = nullptr;
}
''',
file=C)
print(file=C)
print('#define GRPC_IS_STATIC_METADATA_STRING(slice) \\', file=H)
print((' ((slice).refcount != NULL && (slice).refcount->GetType() == '
'grpc_slice_refcount::Type::STATIC)'),
file=H)
print(file=H)
print(file=C)
print('#define GRPC_STATIC_METADATA_INDEX(static_slice) \\', file=H)
print(
'(reinterpret_cast<grpc_core::StaticSliceRefcount*>((static_slice).refcount)->index)',
file=H)
print(file=H)
print('# hpack fuzzing dictionary', file=D)
'''
print >> C
print >> H, '#define GRPC_IS_STATIC_METADATA_STRING(slice) \\'
print >> H, (' ((slice).refcount != NULL && (slice).refcount->GetType() == '
'grpc_slice_refcount::Type::STATIC)')
print >> H
print >> C
print >> H, '#define GRPC_STATIC_METADATA_INDEX(static_slice) \\'
print >> H, '(reinterpret_cast<grpc_core::StaticSliceRefcount*>((static_slice).refcount)->index)'
print >> H
print >> D, '# hpack fuzzing dictionary'
for i, elem in enumerate(all_strs):
print('%s' % (esc_dict([len(elem)] + [ord(c) for c in elem])), file=D)
print >> D, '%s' % (esc_dict([len(elem)] + [ord(c) for c in elem]))
for i, elem in enumerate(all_elems):
print('%s' % (esc_dict([0, len(elem[0])] + [ord(c) for c in elem[0]] +
[len(elem[1])] + [ord(c) for c in elem[1]])),
file=D)
print >> D, '%s' % (esc_dict([0, len(elem[0])] + [ord(c) for c in elem[0]] +
[len(elem[1])] + [ord(c) for c in elem[1]]))
print('#define GRPC_STATIC_MDELEM_COUNT %d' % len(all_elems), file=H)
print('''
print >> H, '#define GRPC_STATIC_MDELEM_COUNT %d' % len(all_elems)
print >> H, '''
namespace grpc_core {
extern StaticMetadata* g_static_mdelem_table;
extern grpc_mdelem* g_static_mdelem_manifested;
@@ -599,27 +583,23 @@ inline grpc_mdelem* grpc_static_mdelem_manifested() {
GPR_DEBUG_ASSERT(grpc_core::g_static_mdelem_manifested != nullptr);
return grpc_core::g_static_mdelem_manifested;
}
''',
file=H)
print(('extern uintptr_t '
'grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT];'),
file=H)
'''
print >> H, ('extern uintptr_t '
'grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT];')
for i, elem in enumerate(all_elems):
md_name = mangle(elem).upper()
print('/* "%s": "%s" */' % elem, file=H)
print(('#define %s (grpc_static_mdelem_manifested()[%d])' % (md_name, i)),
file=H)
print(file=H)
print >> H, '/* "%s": "%s" */' % elem
print >> H, ('#define %s (grpc_static_mdelem_manifested()[%d])' %
(md_name, i))
print >> H
print(('uintptr_t grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT] '
'= {'),
file=C)
print(' %s' %
','.join('%d' % static_userdata.get(elem, 0) for elem in all_elems),
file=C)
print('};', file=C)
print(file=C)
print >> C, ('uintptr_t grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT] '
'= {')
print >> C, ' %s' % ','.join(
'%d' % static_userdata.get(elem, 0) for elem in all_elems)
print >> C, '};'
print >> C
def md_idx(m):
@@ -677,7 +657,7 @@ elem_keys = [
str_idx(elem[0]) * len(all_strs) + str_idx(elem[1]) for elem in all_elems
]
elem_hash = perfect_hash(elem_keys, 'elems')
print(elem_hash['code'], file=C)
print >> C, elem_hash['code']
keys = [0] * int(elem_hash['PHASHNKEYS'])
idxs = [255] * int(elem_hash['PHASHNKEYS'])
@@ -686,44 +666,35 @@ for i, k in enumerate(elem_keys):
assert keys[h] == 0
keys[h] = k
idxs[h] = i
print('static const uint16_t elem_keys[] = {%s};' %
','.join('%d' % k for k in keys),
file=C)
print('static const uint8_t elem_idxs[] = {%s};' %
','.join('%d' % i for i in idxs),
file=C)
print(file=C)
print(
'grpc_mdelem grpc_static_mdelem_for_static_strings(intptr_t a, intptr_t b);',
file=H)
print(
'grpc_mdelem grpc_static_mdelem_for_static_strings(intptr_t a, intptr_t b) {',
file=C)
print(' if (a == -1 || b == -1) return GRPC_MDNULL;', file=C)
print(' uint32_t k = static_cast<uint32_t>(a * %d + b);' % len(all_strs),
file=C)
print(' uint32_t h = elems_phash(k);', file=C)
print(
' return h < GPR_ARRAY_SIZE(elem_keys) && elem_keys[h] == k && elem_idxs[h] != 255 ? GRPC_MAKE_MDELEM(&grpc_static_mdelem_table()[elem_idxs[h]].data(), GRPC_MDELEM_STORAGE_STATIC) : GRPC_MDNULL;',
file=C)
print('}', file=C)
print(file=C)
print('typedef enum {', file=H)
print >> C, 'static const uint16_t elem_keys[] = {%s};' % ','.join(
'%d' % k for k in keys)
print >> C, 'static const uint8_t elem_idxs[] = {%s};' % ','.join(
'%d' % i for i in idxs)
print >> C
print >> H, 'grpc_mdelem grpc_static_mdelem_for_static_strings(intptr_t a, intptr_t b);'
print >> C, 'grpc_mdelem grpc_static_mdelem_for_static_strings(intptr_t a, intptr_t b) {'
print >> C, ' if (a == -1 || b == -1) return GRPC_MDNULL;'
print >> C, ' uint32_t k = static_cast<uint32_t>(a * %d + b);' % len(all_strs)
print >> C, ' uint32_t h = elems_phash(k);'
print >> C, ' return h < GPR_ARRAY_SIZE(elem_keys) && elem_keys[h] == k && elem_idxs[h] != 255 ? GRPC_MAKE_MDELEM(&grpc_static_mdelem_table()[elem_idxs[h]].data(), GRPC_MDELEM_STORAGE_STATIC) : GRPC_MDNULL;'
print >> C, '}'
print >> C
print >> H, 'typedef enum {'
for elem in METADATA_BATCH_CALLOUTS:
print(' %s,' % mangle(elem, 'batch').upper(), file=H)
print(' GRPC_BATCH_CALLOUTS_COUNT', file=H)
print('} grpc_metadata_batch_callouts_index;', file=H)
print(file=H)
print('typedef union {', file=H)
print(' struct grpc_linked_mdelem *array[GRPC_BATCH_CALLOUTS_COUNT];', file=H)
print(' struct {', file=H)
print >> H, ' %s,' % mangle(elem, 'batch').upper()
print >> H, ' GRPC_BATCH_CALLOUTS_COUNT'
print >> H, '} grpc_metadata_batch_callouts_index;'
print >> H
print >> H, 'typedef union {'
print >> H, ' struct grpc_linked_mdelem *array[GRPC_BATCH_CALLOUTS_COUNT];'
print >> H, ' struct {'
for elem in METADATA_BATCH_CALLOUTS:
print(' struct grpc_linked_mdelem *%s;' % mangle(elem, '').lower(), file=H)
print(' } named;', file=H)
print('} grpc_metadata_batch_callouts;', file=H)
print(file=H)
print >> H, ' struct grpc_linked_mdelem *%s;' % mangle(elem, '').lower()
print >> H, ' } named;'
print >> H, '} grpc_metadata_batch_callouts;'
print >> H
batch_idx_of_hdr = '#define GRPC_BATCH_INDEX_OF(slice) \\'
static_slice = 'GRPC_IS_STATIC_METADATA_STRING((slice))'
@@ -743,41 +714,31 @@ batch_idx_of_pieces = [
batch_invalid_u32, '?', slice_ref_idx_to_batch_idx, ':', batch_invalid_idx,
')'
]
print(''.join(batch_idx_of_pieces), file=H)
print(file=H)
print('extern const uint8_t grpc_static_accept_encoding_metadata[%d];' %
(1 << len(COMPRESSION_ALGORITHMS)),
file=H)
print('const uint8_t grpc_static_accept_encoding_metadata[%d] = {' %
(1 << len(COMPRESSION_ALGORITHMS)),
file=C)
print('0,%s' % ','.join('%d' % md_idx(elem) for elem in compression_elems),
file=C)
print('};', file=C)
print(file=C)
print(
'#define GRPC_MDELEM_ACCEPT_ENCODING_FOR_ALGORITHMS(algs) (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table()[grpc_static_accept_encoding_metadata[(algs)]].data(), GRPC_MDELEM_STORAGE_STATIC))',
file=H)
print(file=H)
print('extern const uint8_t grpc_static_accept_stream_encoding_metadata[%d];' %
(1 << len(STREAM_COMPRESSION_ALGORITHMS)),
file=H)
print('const uint8_t grpc_static_accept_stream_encoding_metadata[%d] = {' %
(1 << len(STREAM_COMPRESSION_ALGORITHMS)),
file=C)
print('0,%s' %
','.join('%d' % md_idx(elem) for elem in stream_compression_elems),
file=C)
print('};', file=C)
print(
'#define GRPC_MDELEM_ACCEPT_STREAM_ENCODING_FOR_ALGORITHMS(algs) (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table()[grpc_static_accept_stream_encoding_metadata[(algs)]].data(), GRPC_MDELEM_STORAGE_STATIC))',
file=H)
print('#endif /* GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H */', file=H)
print >> H, ''.join(batch_idx_of_pieces)
print >> H
print >> H, 'extern const uint8_t grpc_static_accept_encoding_metadata[%d];' % (
1 << len(COMPRESSION_ALGORITHMS))
print >> C, 'const uint8_t grpc_static_accept_encoding_metadata[%d] = {' % (
1 << len(COMPRESSION_ALGORITHMS))
print >> C, '0,%s' % ','.join('%d' % md_idx(elem) for elem in compression_elems)
print >> C, '};'
print >> C
print >> H, '#define GRPC_MDELEM_ACCEPT_ENCODING_FOR_ALGORITHMS(algs) (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table()[grpc_static_accept_encoding_metadata[(algs)]].data(), GRPC_MDELEM_STORAGE_STATIC))'
print >> H
print >> H, 'extern const uint8_t grpc_static_accept_stream_encoding_metadata[%d];' % (
1 << len(STREAM_COMPRESSION_ALGORITHMS))
print >> C, 'const uint8_t grpc_static_accept_stream_encoding_metadata[%d] = {' % (
1 << len(STREAM_COMPRESSION_ALGORITHMS))
print >> C, '0,%s' % ','.join(
'%d' % md_idx(elem) for elem in stream_compression_elems)
print >> C, '};'
print >> H, '#define GRPC_MDELEM_ACCEPT_STREAM_ENCODING_FOR_ALGORITHMS(algs) (GRPC_MAKE_MDELEM(&grpc_static_mdelem_table()[grpc_static_accept_stream_encoding_metadata[(algs)]].data(), GRPC_MDELEM_STORAGE_STATIC))'
print >> H, '#endif /* GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H */'
H.close()
C.close()

@@ -34,7 +34,7 @@ def make_type(name, fields):
def c_str(s, encoding='ascii'):
if isinstance(s, str):
if isinstance(s, unicode):
s = s.encode(encoding)
result = ''
for c in s:
@@ -81,7 +81,7 @@ def shift_works_until(mapped_bounds, shift_bits):
def find_ideal_shift(mapped_bounds, max_size):
best = None
for shift_bits in reversed(list(range(0, 64))):
for shift_bits in reversed(range(0, 64)):
n = shift_works_until(mapped_bounds, shift_bits)
if n == 0:
continue
@@ -94,16 +94,16 @@ def find_ideal_shift(mapped_bounds, max_size):
best = (shift_bits, n, table_size)
elif best[1] < n:
best = (shift_bits, n, table_size)
print(best)
print best
return best
def gen_map_table(mapped_bounds, shift_data):
tbl = []
cur = 0
print(mapped_bounds)
print mapped_bounds
mapped_bounds = [x >> shift_data[0] for x in mapped_bounds]
print(mapped_bounds)
print mapped_bounds
for i in range(0, mapped_bounds[shift_data[1] - 1]):
while i > mapped_bounds[cur]:
cur += 1
@@ -120,7 +120,7 @@ def decl_static_table(values, type):
for i, vp in enumerate(static_tables):
if v == vp:
return i
print("ADD TABLE: %s %r" % (type, values))
print "ADD TABLE: %s %r" % (type, values)
r = len(static_tables)
static_tables.append(v)
return r
@@ -205,11 +205,11 @@ def gen_bucket_code(histogram):
# utility: print a big comment block into a set of files
def put_banner(files, banner):
for f in files:
print('/*', file=f)
print >> f, '/*'
for line in banner:
print(' * %s' % line, file=f)
print(' */', file=f)
print(file=f)
print >> f, ' * %s' % line
print >> f, ' */'
print >> f
with open('src/core/lib/debug/stats_data.h', 'w') as H:
@@ -233,90 +233,77 @@ with open('src/core/lib/debug/stats_data.h', 'w') as H:
[H],
["Automatically generated by tools/codegen/core/gen_stats_data.py"])
print("#ifndef GRPC_CORE_LIB_DEBUG_STATS_DATA_H", file=H)
print("#define GRPC_CORE_LIB_DEBUG_STATS_DATA_H", file=H)
print(file=H)
print("#include <grpc/support/port_platform.h>", file=H)
print(file=H)
print("#include <inttypes.h>", file=H)
print("#include \"src/core/lib/iomgr/exec_ctx.h\"", file=H)
print(file=H)
print >> H, "#ifndef GRPC_CORE_LIB_DEBUG_STATS_DATA_H"
print >> H, "#define GRPC_CORE_LIB_DEBUG_STATS_DATA_H"
print >> H
print >> H, "#include <grpc/support/port_platform.h>"
print >> H
print >> H, "#include <inttypes.h>"
print >> H, "#include \"src/core/lib/iomgr/exec_ctx.h\""
print >> H
for typename, instances in sorted(inst_map.items()):
print("typedef enum {", file=H)
print >> H, "typedef enum {"
for inst in instances:
print(" GRPC_STATS_%s_%s," % (typename.upper(), inst.name.upper()),
file=H)
print(" GRPC_STATS_%s_COUNT" % (typename.upper()), file=H)
print("} grpc_stats_%ss;" % (typename.lower()), file=H)
print("extern const char *grpc_stats_%s_name[GRPC_STATS_%s_COUNT];" %
(typename.lower(), typename.upper()),
file=H)
print("extern const char *grpc_stats_%s_doc[GRPC_STATS_%s_COUNT];" %
(typename.lower(), typename.upper()),
file=H)
print >> H, " GRPC_STATS_%s_%s," % (typename.upper(),
inst.name.upper())
print >> H, " GRPC_STATS_%s_COUNT" % (typename.upper())
print >> H, "} grpc_stats_%ss;" % (typename.lower())
print >> H, "extern const char *grpc_stats_%s_name[GRPC_STATS_%s_COUNT];" % (
typename.lower(), typename.upper())
print >> H, "extern const char *grpc_stats_%s_doc[GRPC_STATS_%s_COUNT];" % (
typename.lower(), typename.upper())
histo_start = []
histo_buckets = []
histo_bucket_boundaries = []
print("typedef enum {", file=H)
print >> H, "typedef enum {"
first_slot = 0
for histogram in inst_map['Histogram']:
histo_start.append(first_slot)
histo_buckets.append(histogram.buckets)
print(" GRPC_STATS_HISTOGRAM_%s_FIRST_SLOT = %d," %
(histogram.name.upper(), first_slot),
file=H)
print(" GRPC_STATS_HISTOGRAM_%s_BUCKETS = %d," %
(histogram.name.upper(), histogram.buckets),
file=H)
print >> H, " GRPC_STATS_HISTOGRAM_%s_FIRST_SLOT = %d," % (
histogram.name.upper(), first_slot)
print >> H, " GRPC_STATS_HISTOGRAM_%s_BUCKETS = %d," % (
histogram.name.upper(), histogram.buckets)
first_slot += histogram.buckets
print(" GRPC_STATS_HISTOGRAM_BUCKETS = %d" % first_slot, file=H)
print("} grpc_stats_histogram_constants;", file=H)
print >> H, " GRPC_STATS_HISTOGRAM_BUCKETS = %d" % first_slot
print >> H, "} grpc_stats_histogram_constants;"
print("#if defined(GRPC_COLLECT_STATS) || !defined(NDEBUG)", file=H)
print >> H, "#if defined(GRPC_COLLECT_STATS) || !defined(NDEBUG)"
for ctr in inst_map['Counter']:
print(("#define GRPC_STATS_INC_%s() " +
"GRPC_STATS_INC_COUNTER(GRPC_STATS_COUNTER_%s)") %
(ctr.name.upper(), ctr.name.upper()),
file=H)
print >> H, ("#define GRPC_STATS_INC_%s() " +
"GRPC_STATS_INC_COUNTER(GRPC_STATS_COUNTER_%s)") % (
ctr.name.upper(), ctr.name.upper())
for histogram in inst_map['Histogram']:
print(
"#define GRPC_STATS_INC_%s(value) grpc_stats_inc_%s( (int)(value))"
% (histogram.name.upper(), histogram.name.lower()),
file=H)
print("void grpc_stats_inc_%s(int x);" % histogram.name.lower(), file=H)
print >> H, "#define GRPC_STATS_INC_%s(value) grpc_stats_inc_%s( (int)(value))" % (
histogram.name.upper(), histogram.name.lower())
print >> H, "void grpc_stats_inc_%s(int x);" % histogram.name.lower()
print("#else", file=H)
print >> H, "#else"
for ctr in inst_map['Counter']:
print(("#define GRPC_STATS_INC_%s() ") % (ctr.name.upper()), file=H)
print >> H, ("#define GRPC_STATS_INC_%s() ") % (ctr.name.upper())
for histogram in inst_map['Histogram']:
print("#define GRPC_STATS_INC_%s(value)" % (histogram.name.upper()),
file=H)
print("#endif /* defined(GRPC_COLLECT_STATS) || !defined(NDEBUG) */",
file=H)
print >> H, "#define GRPC_STATS_INC_%s(value)" % (
histogram.name.upper())
print >> H, "#endif /* defined(GRPC_COLLECT_STATS) || !defined(NDEBUG) */"
for i, tbl in enumerate(static_tables):
print("extern const %s grpc_stats_table_%d[%d];" %
(tbl[0], i, len(tbl[1])),
file=H)
print("extern const int grpc_stats_histo_buckets[%d];" %
len(inst_map['Histogram']),
file=H)
print("extern const int grpc_stats_histo_start[%d];" %
len(inst_map['Histogram']),
file=H)
print("extern const int *const grpc_stats_histo_bucket_boundaries[%d];" %
len(inst_map['Histogram']),
file=H)
print("extern void (*const grpc_stats_inc_histogram[%d])(int x);" %
len(inst_map['Histogram']),
file=H)
print(file=H)
print("#endif /* GRPC_CORE_LIB_DEBUG_STATS_DATA_H */", file=H)
print >> H, "extern const %s grpc_stats_table_%d[%d];" % (tbl[0], i,
len(tbl[1]))
print >> H, "extern const int grpc_stats_histo_buckets[%d];" % len(
inst_map['Histogram'])
print >> H, "extern const int grpc_stats_histo_start[%d];" % len(
inst_map['Histogram'])
print >> H, "extern const int *const grpc_stats_histo_bucket_boundaries[%d];" % len(
inst_map['Histogram'])
print >> H, "extern void (*const grpc_stats_inc_histogram[%d])(int x);" % len(
inst_map['Histogram'])
print >> H
print >> H, "#endif /* GRPC_CORE_LIB_DEBUG_STATS_DATA_H */"
with open('src/core/lib/debug/stats_data.cc', 'w') as C:
# copy-paste copyright notice from this file
@@ -339,13 +326,13 @@ with open('src/core/lib/debug/stats_data.cc', 'w') as C:
[C],
["Automatically generated by tools/codegen/core/gen_stats_data.py"])
print("#include <grpc/support/port_platform.h>", file=C)
print(file=C)
print("#include \"src/core/lib/debug/stats.h\"", file=C)
print("#include \"src/core/lib/debug/stats_data.h\"", file=C)
print("#include \"src/core/lib/gpr/useful.h\"", file=C)
print("#include \"src/core/lib/iomgr/exec_ctx.h\"", file=C)
print(file=C)
print >> C, "#include <grpc/support/port_platform.h>"
print >> C
print >> C, "#include \"src/core/lib/debug/stats.h\""
print >> C, "#include \"src/core/lib/debug/stats_data.h\""
print >> C, "#include \"src/core/lib/gpr/useful.h\""
print >> C, "#include \"src/core/lib/iomgr/exec_ctx.h\""
print >> C
histo_code = []
for histogram in inst_map['Histogram']:
@@ -354,45 +341,36 @@ with open('src/core/lib/debug/stats_data.cc', 'w') as C:
histo_code.append(code)
for typename, instances in sorted(inst_map.items()):
print("const char *grpc_stats_%s_name[GRPC_STATS_%s_COUNT] = {" %
(typename.lower(), typename.upper()),
file=C)
print >> C, "const char *grpc_stats_%s_name[GRPC_STATS_%s_COUNT] = {" % (
typename.lower(), typename.upper())
for inst in instances:
print(" %s," % c_str(inst.name), file=C)
print("};", file=C)
print("const char *grpc_stats_%s_doc[GRPC_STATS_%s_COUNT] = {" %
(typename.lower(), typename.upper()),
file=C)
print >> C, " %s," % c_str(inst.name)
print >> C, "};"
print >> C, "const char *grpc_stats_%s_doc[GRPC_STATS_%s_COUNT] = {" % (
typename.lower(), typename.upper())
for inst in instances:
print(" %s," % c_str(inst.doc), file=C)
print("};", file=C)
print >> C, " %s," % c_str(inst.doc)
print >> C, "};"
for i, tbl in enumerate(static_tables):
print("const %s grpc_stats_table_%d[%d] = {%s};" %
(tbl[0], i, len(tbl[1]), ','.join('%s' % x for x in tbl[1])),
file=C)
print >> C, "const %s grpc_stats_table_%d[%d] = {%s};" % (
tbl[0], i, len(tbl[1]), ','.join('%s' % x for x in tbl[1]))
for histogram, code in zip(inst_map['Histogram'], histo_code):
print(("void grpc_stats_inc_%s(int value) {%s}") %
(histogram.name.lower(), code),
file=C)
print(
"const int grpc_stats_histo_buckets[%d] = {%s};" %
(len(inst_map['Histogram']), ','.join('%s' % x for x in histo_buckets)),
file=C)
print("const int grpc_stats_histo_start[%d] = {%s};" %
(len(inst_map['Histogram']), ','.join('%s' % x for x in histo_start)),
file=C)
print("const int *const grpc_stats_histo_bucket_boundaries[%d] = {%s};" %
(len(inst_map['Histogram']), ','.join(
'grpc_stats_table_%d' % x for x in histo_bucket_boundaries)),
file=C)
print("void (*const grpc_stats_inc_histogram[%d])(int x) = {%s};" %
(len(inst_map['Histogram']), ','.join(
print >> C, ("void grpc_stats_inc_%s(int value) {%s}") % (
histogram.name.lower(), code)
print >> C, "const int grpc_stats_histo_buckets[%d] = {%s};" % (len(
inst_map['Histogram']), ','.join('%s' % x for x in histo_buckets))
print >> C, "const int grpc_stats_histo_start[%d] = {%s};" % (len(
inst_map['Histogram']), ','.join('%s' % x for x in histo_start))
print >> C, "const int *const grpc_stats_histo_bucket_boundaries[%d] = {%s};" % (
len(inst_map['Histogram']), ','.join(
'grpc_stats_table_%d' % x for x in histo_bucket_boundaries))
print >> C, "void (*const grpc_stats_inc_histogram[%d])(int x) = {%s};" % (
len(inst_map['Histogram']), ','.join(
'grpc_stats_inc_%s' % histogram.name.lower()
for histogram in inst_map['Histogram'])),
file=C)
for histogram in inst_map['Histogram']))
# patch qps_test bigquery schema
RECORD_EXPLICIT_PERCENTILES = [50, 95, 99]
@@ -460,56 +438,39 @@ with open('tools/run_tests/performance/massage_qps_stats.py', 'w') as P:
break
for line in my_source:
if line[0] == '#':
print(line.rstrip(), file=P)
print >> P, line.rstrip()
break
for line in my_source:
if line[0] != '#':
break
print(line.rstrip(), file=P)
print(file=P)
print('# Autogenerated by tools/codegen/core/gen_stats_data.py', file=P)
print(file=P)
print('import massage_qps_stats_helpers', file=P)
print('def massage_qps_stats(scenario_result):', file=P)
print(
' for stats in scenario_result["serverStats"] + scenario_result["clientStats"]:',
file=P)
print(' if "coreStats" in stats:', file=P)
print(
' # Get rid of the "coreStats" element and replace it by statistics',
file=P)
print(' # that correspond to columns in the bigquery schema.', file=P)
print(' core_stats = stats["coreStats"]', file=P)
print(' del stats["coreStats"]', file=P)
print >> P, line.rstrip()
print >> P
print >> P, '# Autogenerated by tools/codegen/core/gen_stats_data.py'
print >> P
print >> P, 'import massage_qps_stats_helpers'
print >> P, 'def massage_qps_stats(scenario_result):'
print >> P, ' for stats in scenario_result["serverStats"] + scenario_result["clientStats"]:'
print >> P, ' if "coreStats" in stats:'
print >> P, ' # Get rid of the "coreStats" element and replace it by statistics'
print >> P, ' # that correspond to columns in the bigquery schema.'
print >> P, ' core_stats = stats["coreStats"]'
print >> P, ' del stats["coreStats"]'
for counter in inst_map['Counter']:
print(
' stats["core_%s"] = massage_qps_stats_helpers.counter(core_stats, "%s")'
% (counter.name, counter.name),
file=P)
print >> P, ' stats["core_%s"] = massage_qps_stats_helpers.counter(core_stats, "%s")' % (
counter.name, counter.name)
for i, histogram in enumerate(inst_map['Histogram']):
print(
' h = massage_qps_stats_helpers.histogram(core_stats, "%s")' %
histogram.name,
file=P)
print(
' stats["core_%s"] = ",".join("%%f" %% x for x in h.buckets)' %
histogram.name,
file=P)
print(
' stats["core_%s_bkts"] = ",".join("%%f" %% x for x in h.boundaries)'
% histogram.name,
file=P)
print >> P, ' h = massage_qps_stats_helpers.histogram(core_stats, "%s")' % histogram.name
print >> P, ' stats["core_%s"] = ",".join("%%f" %% x for x in h.buckets)' % histogram.name
print >> P, ' stats["core_%s_bkts"] = ",".join("%%f" %% x for x in h.boundaries)' % histogram.name
for pctl in RECORD_EXPLICIT_PERCENTILES:
print(
' stats["core_%s_%dp"] = massage_qps_stats_helpers.percentile(h.buckets, %d, h.boundaries)'
% (histogram.name, pctl, pctl),
file=P)
print >> P, ' stats["core_%s_%dp"] = massage_qps_stats_helpers.percentile(h.buckets, %d, h.boundaries)' % (
histogram.name, pctl, pctl)
with open('src/core/lib/debug/stats_data_bq_schema.sql', 'w') as S:
columns = []
for counter in inst_map['Counter']:
columns.append(('%s_per_iteration' % counter.name, 'FLOAT'))
print(',\n'.join('%s:%s' % x for x in columns), file=S)
print >> S, ',\n'.join('%s:%s' % x for x in columns)

@@ -39,6 +39,6 @@ for line in sys.stdin:
else:
outstanding[m.group(2)].remove(m.group(3))
for obj, remaining in list(outstanding.items()):
for obj, remaining in outstanding.items():
if remaining:
print(('LEAKED: %s %r' % (obj, remaining)))
print 'LEAKED: %s %r' % (obj, remaining)

@@ -44,4 +44,4 @@ for line in data:
assert (err in errs)
errs.remove(err)
print(("leaked:", errs))
print "leaked:", errs

@@ -41,13 +41,13 @@ for target_dir in _TARGET_DIRS:
for root, dirs, filenames in os.walk(target_dir):
if 'README.md' not in filenames:
if not printed_banner:
print('Missing README.md')
print('=================')
print 'Missing README.md'
print '================='
printed_banner = True
print(root)
print root
errors += 1
if printed_banner:
print()
print
printed_banner = False
for target_dir in _TARGET_DIRS:
for root, dirs, filenames in os.walk(target_dir):
@@ -59,10 +59,10 @@ for target_dir in _TARGET_DIRS:
contents = f.read()
if '\\file' not in contents:
if not printed_banner:
print('Missing \\file comment')
print('======================')
print 'Missing \\file comment'
print '======================'
printed_banner = True
print(path)
print path
errors += 1
assert errors == 0, 'error count = %d' % errors

@@ -140,7 +140,7 @@ RE_LICENSE = dict(
(k, r'\n'.join(LICENSE_PREFIX_RE[k] +
(RE_YEAR if re.search(RE_YEAR, line) else re.escape(line))
for line in LICENSE_NOTICE))
for k, v in list(LICENSE_PREFIX_RE.items()))
for k, v in LICENSE_PREFIX_RE.items())
YEAR = datetime.datetime.now().year
@@ -168,7 +168,7 @@ LICENSE_TEXT = dict(
(k,
join_license_text(LICENSE_PREFIX_TEXT[k][0], LICENSE_PREFIX_TEXT[k][1],
LICENSE_PREFIX_TEXT[k][2], LICENSE_NOTICE))
for k, v in list(LICENSE_PREFIX_TEXT.items()))
for k, v in LICENSE_PREFIX_TEXT.items())
if args.precommit:
FILE_LIST_COMMAND = 'git status -z | grep -Poz \'(?<=^[MARC][MARCD ] )[^\s]+\''
@@ -195,7 +195,7 @@ def log(cond, why, filename):
if not cond:
return
if args.output == 'details':
print(('%s: %s' % (why, filename)))
print('%s: %s' % (why, filename))
else:
print(filename)

@@ -66,15 +66,15 @@ class GuardValidator(object):
if c_core_header else '#endif // {2}')
if not match_txt:
print(
(invalid_guards_msg_template.format(fpath, regexp.pattern,
build_valid_guard(fpath))))
invalid_guards_msg_template.format(fpath, regexp.pattern,
build_valid_guard(fpath)))
return fcontents
print((('{}: Wrong preprocessor guards (RE {}):'
print(('{}: Wrong preprocessor guards (RE {}):'
'\n\tFound {}, expected {}').format(fpath, regexp.pattern,
match_txt, correct)))
match_txt, correct))
if fix:
print(('Fixing {}...\n'.format(fpath)))
print('Fixing {}...\n'.format(fpath))
fixed_fcontents = re.sub(match_txt, correct, fcontents)
if fixed_fcontents:
self.failed = False
@@ -91,7 +91,7 @@ class GuardValidator(object):
match = self.ifndef_re.search(fcontents)
if not match:
print(('something drastically wrong with: %s' % fpath))
print('something drastically wrong with: %s' % fpath)
return False # failed
if match.lastindex is None:
# No ifndef. Request manual addition with hints

@@ -59,10 +59,10 @@ if args.only_changed:
['git', 'diff', 'origin/master', 'HEAD', '--name-only'])
for line in output.decode('ascii').splitlines(False):
if line in orig_files:
print(("check: %s" % line))
print("check: %s" % line)
actual_files.append(line)
else:
print(("skip: %s - not in the build" % line))
print("skip: %s - not in the build" % line)
args.files = actual_files
jobs = []

@@ -0,0 +1,13 @@
runtime: python27
api_version: 1
threadsafe: true
service: github-stats-tracking
handlers:
- url: /.*
script: main.app
libraries:
- name: ssl
version: latest

@@ -0,0 +1,19 @@
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# appengine_config.py
from google.appengine.ext import vendor
# Add any libraries install in the "lib" folder.
vendor.add('lib')

@@ -0,0 +1,4 @@
cron:
- description: "daily github stats tracking job"
url: /daily
schedule: every 24 hours

@@ -0,0 +1,96 @@
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from datetime import timedelta
from time import time
from github import Github
from github import Label
from google.cloud import bigquery
ACCESS_TOKEN = ""
def get_stats_from_github():
# Please set the access token properly before deploying.
assert ACCESS_TOKEN
g = Github(ACCESS_TOKEN)
print g.rate_limiting
repo = g.get_repo('grpc/grpc')
LABEL_LANG = set(label for label in repo.get_labels()
if label.name.split('/')[0] == 'lang')
LABEL_KIND_BUG = repo.get_label('kind/bug')
LABEL_PRIORITY_P0 = repo.get_label('priority/P0')
LABEL_PRIORITY_P1 = repo.get_label('priority/P1')
LABEL_PRIORITY_P2 = repo.get_label('priority/P2')
def is_untriaged(issue):
key_labels = set()
for label in issue.labels:
label_kind = label.name.split('/')[0]
if label_kind in ('lang', 'kind', 'priority'):
key_labels.add(label_kind)
return len(key_labels) < 3
untriaged_open_issues = [
issue for issue in repo.get_issues(state='open')
if issue.pull_request is None and is_untriaged(issue)
]
total_bugs = [
issue for issue in repo.get_issues(state='all', labels=[LABEL_KIND_BUG])
if issue.pull_request is None
]
lang_to_stats = {}
for lang in LABEL_LANG:
lang_bugs = filter(lambda bug: lang in bug.labels, total_bugs)
closed_bugs = filter(lambda bug: bug.state == 'closed', lang_bugs)
open_bugs = filter(lambda bug: bug.state == 'open', lang_bugs)
open_p0_bugs = filter(lambda bug: LABEL_PRIORITY_P0 in bug.labels,
open_bugs)
open_p1_bugs = filter(lambda bug: LABEL_PRIORITY_P1 in bug.labels,
open_bugs)
open_p2_bugs = filter(lambda bug: LABEL_PRIORITY_P2 in bug.labels,
open_bugs)
lang_to_stats[lang] = [
len(lang_bugs),
len(closed_bugs),
len(open_bugs),
len(open_p0_bugs),
len(open_p1_bugs),
len(open_p2_bugs)
]
return len(untriaged_open_issues), lang_to_stats
def insert_stats_to_db(untriaged_open_issues, lang_to_stats):
timestamp = time()
client = bigquery.Client()
dataset_ref = client.dataset('github_issues')
table_ref = dataset_ref.table('untriaged_issues')
table = client.get_table(table_ref)
errors = client.insert_rows(table, [(timestamp, untriaged_open_issues)])
table_ref = dataset_ref.table('bug_stats')
table = client.get_table(table_ref)
rows = []
for lang, stats in lang_to_stats.iteritems():
rows.append((timestamp, lang.name[5:]) + tuple(stats))
errors = client.insert_rows(table, rows)
def fetch():
untriaged_open_issues, lang_to_stats = get_stats_from_github()
insert_stats_to_db(untriaged_open_issues, lang_to_stats)

@@ -0,0 +1,28 @@
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fetch_data import fetch
import webapp2
class DailyCron(webapp2.RequestHandler):
def get(self):
fetch()
self.response.status = 204
app = webapp2.WSGIApplication([
('/daily', DailyCron),
], debug=True)

@@ -0,0 +1,211 @@
#!/usr/bin/env python
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Measure the time between PR creation and completion of all tests.
You'll need a github API token to avoid being rate-limited. See
https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/
This script goes over the most recent 100 pull requests. For PRs with a single
commit, it uses the PR's creation as the initial time; otherwise, it uses the
date of the last commit. This is somewhat fragile, and imposed by the fact that
GitHub reports a PR's updated timestamp for any event that modifies the PR (e.g.
comments), not just the addition of new commits.
In addition, it ignores latencies greater than five hours, as that's likely due
to a manual re-run of tests.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
from datetime import timedelta
import json
import logging
import pprint
import urllib2
logging.basicConfig(format='%(asctime)s %(message)s')
PRS = 'https://api.github.com/repos/grpc/grpc/pulls?state=open&per_page=100'
COMMITS = 'https://api.github.com/repos/grpc/grpc/pulls/{pr_number}/commits'
def gh(url):
request = urllib2.Request(url)
if TOKEN:
request.add_header('Authorization', 'token {}'.format(TOKEN))
response = urllib2.urlopen(request)
return response.read()
def print_csv_header():
print('pr,base_time,test_time,latency_seconds,successes,failures,errors')
def output(pr,
base_time,
test_time,
diff_time,
successes,
failures,
errors,
mode='human'):
if mode == 'human':
print(
"PR #{} base time: {} UTC, Tests completed at: {} UTC. Latency: {}."
"\n\tSuccesses: {}, Failures: {}, Errors: {}".format(
pr, base_time, test_time, diff_time, successes, failures,
errors))
elif mode == 'csv':
print(','.join([
str(pr),
str(base_time),
str(test_time),
str(int((test_time - base_time).total_seconds())),
str(successes),
str(failures),
str(errors)
]))
def parse_timestamp(datetime_str):
return datetime.strptime(datetime_str, '%Y-%m-%dT%H:%M:%SZ')
def to_posix_timestamp(dt):
return str((dt - datetime(1970, 1, 1)).total_seconds())
def get_pr_data():
latest_prs = json.loads(gh(PRS))
res = [{
'number': pr['number'],
'created_at': parse_timestamp(pr['created_at']),
'updated_at': parse_timestamp(pr['updated_at']),
'statuses_url': pr['statuses_url']
} for pr in latest_prs]
return res
def get_commits_data(pr_number):
commits = json.loads(gh(COMMITS.format(pr_number=pr_number)))
return {
'num_commits':
len(commits),
'most_recent_date':
parse_timestamp(commits[-1]['commit']['author']['date'])
}
def get_status_data(statuses_url, system):
    status_url = statuses_url.replace('statuses', 'status')
    statuses = json.loads(gh(status_url + '?per_page=100'))
    successes = 0
    failures = 0
    errors = 0
    latest_datetime = None
    if not statuses:
        return None
    if system == 'kokoro':
        string_in_target_url = 'kokoro'
    elif system == 'jenkins':
        string_in_target_url = 'grpc-testing'
    for status in statuses['statuses']:
        if not status['target_url'] or string_in_target_url not in status[
                'target_url']:
            continue  # Skip statuses that are not from the selected CI system.
        if status['state'] == 'pending':
            return None
        elif status['state'] == 'success':
            successes += 1
        elif status['state'] == 'failure':
            failures += 1
        elif status['state'] == 'error':
            errors += 1
        if not latest_datetime:
            latest_datetime = parse_timestamp(status['updated_at'])
        else:
            latest_datetime = max(latest_datetime,
                                  parse_timestamp(status['updated_at']))
    # First status is the most recent one.
    if any([successes, failures, errors
           ]) and sum([successes, failures, errors]) > 15:
        return {
            'latest_datetime': latest_datetime,
            'successes': successes,
            'failures': failures,
            'errors': errors
        }
    else:
        return None
def build_args_parser():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--format',
                        type=str,
                        choices=['human', 'csv'],
                        default='human',
                        help='Output format: are you a human or a machine?')
    parser.add_argument('--system',
                        type=str,
                        choices=['jenkins', 'kokoro'],
                        required=True,
                        help='Consider only the given CI system')
    parser.add_argument(
        '--token',
        type=str,
        default='',
        help='GitHub token to use its API with a higher rate limit')
    return parser
def main():
    import sys
    global TOKEN
    args_parser = build_args_parser()
    args = args_parser.parse_args()
    TOKEN = args.token
    if args.format == 'csv':
        print_csv_header()
    for pr_data in get_pr_data():
        commit_data = get_commits_data(pr_data['number'])
        # PR with a single commit -> use the PR's creation time.
        # else -> use the latest commit's date.
        if commit_data['num_commits'] > 1:
            base_timestamp = commit_data['most_recent_date']
        else:
            base_timestamp = pr_data['created_at']
        last_status = get_status_data(pr_data['statuses_url'], args.system)
        if last_status:
            diff = last_status['latest_datetime'] - base_timestamp
            if diff < timedelta(hours=5):
                output(pr_data['number'],
                       base_timestamp,
                       last_status['latest_datetime'],
                       diff,
                       last_status['successes'],
                       last_status['failures'],
                       last_status['errors'],
                       mode=args.format)
if __name__ == '__main__':
    main()

@ -35,13 +35,13 @@ import dockerjob
import jobset
_IMAGE_BUILDER = 'tools/run_tests/dockerize/build_interop_image.sh'
_LANGUAGES = list(client_matrix.LANG_RUNTIME_MATRIX.keys())
_LANGUAGES = client_matrix.LANG_RUNTIME_MATRIX.keys()
# All gRPC release tags, flattened, deduped and sorted.
_RELEASES = sorted(
list(
set(release
for release_dict in list(client_matrix.LANG_RELEASE_MATRIX.values())
for release in list(release_dict.keys()))))
for release_dict in client_matrix.LANG_RELEASE_MATRIX.values()
for release in release_dict.keys())))
# Destination directory inside docker image to keep extra info from build time.
_BUILD_INFO = '/var/local/build_info'

@ -40,13 +40,13 @@ import upload_test_results
_TEST_TIMEOUT_SECONDS = 60
_PULL_IMAGE_TIMEOUT_SECONDS = 15 * 60
_MAX_PARALLEL_DOWNLOADS = 6
_LANGUAGES = list(client_matrix.LANG_RUNTIME_MATRIX.keys())
_LANGUAGES = client_matrix.LANG_RUNTIME_MATRIX.keys()
# All gRPC release tags, flattened, deduped and sorted.
_RELEASES = sorted(
list(
set(release
for release_dict in list(client_matrix.LANG_RELEASE_MATRIX.values())
for release in list(release_dict.keys()))))
for release_dict in client_matrix.LANG_RELEASE_MATRIX.values()
for release in release_dict.keys())))
argp = argparse.ArgumentParser(description='Run interop tests.')
argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
@ -117,7 +117,7 @@ def _get_test_images_for_lang(lang, release_arg, image_path_prefix):
tag)
image_tuple = (tag, image_name)
if runtime not in images:
if not images.has_key(runtime):
images[runtime] = []
images[runtime].append(image_tuple)
return images

@ -0,0 +1,42 @@
#!/usr/bin/env python
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import subprocess
# this script is only of historical interest: it's the script that was used to
# bootstrap the dataset
def daterange(start, end):
    for n in range(int((end - start).days)):
        yield start + datetime.timedelta(n)
start_date = datetime.date(2017, 3, 26)
end_date = datetime.date(2017, 3, 29)
for dt in daterange(start_date, end_date):
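    # For each day: resolve the last commit on master before that date, check
    # it out with submodules in a clean tree, and record per-file line counts
    # with cloc into ../count/<date>.yaml.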
    dmy = dt.strftime('%Y-%m-%d')
    sha1 = subprocess.check_output(
        ['git', 'rev-list', '-n', '1',
         '--before=%s' % dmy, 'master']).strip()
    subprocess.check_call(['git', 'checkout', sha1])
    subprocess.check_call(['git', 'submodule', 'update'])
    subprocess.check_call(['git', 'clean', '-f', '-x', '-d'])
    subprocess.check_call([
        'cloc', '--vcs=git', '--by-file', '--yaml',
        '--out=../count/%s.yaml' % dmy, '.'
    ])

@ -0,0 +1,38 @@
#!/usr/bin/env python
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import subprocess
# this script is only of historical interest: it's the script that was used to
# bootstrap the dataset
def daterange(start, end):
    for n in range(int((end - start).days)):
        yield start + datetime.timedelta(n)
start_date = datetime.date(2017, 3, 26)
end_date = datetime.date(2017, 3, 29)
for dt in daterange(start_date, end_date):
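    # Convert each day's cloc YAML dump into a dated CSV via yaml2csv.py.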
    dmy = dt.strftime('%Y-%m-%d')
    print dmy
    subprocess.check_call([
        'tools/line_count/yaml2csv.py', '-i',
        '../count/%s.yaml' % dmy, '-d', dmy, '-o',
        '../count/%s.csv' % dmy
    ])

@ -158,13 +158,13 @@ def expand_directives(root, directives):
if directive.who not in globs[glob]:
globs[glob].append(directive.who)
# expand owners for intersecting globs
sorted_globs = sorted(list(globs.keys()),
sorted_globs = sorted(globs.keys(),
key=lambda g: len(git_glob(full_dir(root, g))),
reverse=True)
out_globs = collections.OrderedDict()
for glob_add in sorted_globs:
who_add = globs[glob_add]
pre_items = [i for i in list(out_globs.items())]
pre_items = [i for i in out_globs.items()]
out_globs[glob_add] = who_add.copy()
for glob_have, who_have in pre_items:
files_add = git_glob(full_dir(root, glob_add))
@ -186,8 +186,8 @@ def add_parent_to_globs(parent, globs, globs_dir):
for owners in owners_data:
if owners.dir == parent:
owners_globs = expand_directives(owners.dir, owners.directives)
for oglob, oglob_who in list(owners_globs.items()):
for gglob, gglob_who in list(globs.items()):
for oglob, oglob_who in owners_globs.items():
for gglob, gglob_who in globs.items():
files_parent = git_glob(full_dir(owners.dir, oglob))
files_child = git_glob(full_dir(globs_dir, gglob))
intersect = files_parent.intersection(files_child)
@ -220,7 +220,7 @@ with open(args.out, 'w') as out:
continue
globs = expand_directives(head.dir, head.directives)
add_parent_to_globs(head.parent, globs, head.dir)
for glob, owners in list(globs.items()):
for glob, owners in globs.items():
skip = False
for glob1, owners1, dir1 in reversed(written_globs):
files = git_glob(full_dir(head.dir, glob))

@ -95,11 +95,11 @@ def main():
filename = sys.argv[1]
core_size, objc_size, boringssl_size, protobuf_size, total_size = parse_link_map(
filename)
print(('Core size:{:,}'.format(core_size)))
print(('ObjC size:{:,}'.format(objc_size)))
print(('BoringSSL size:{:,}'.format(boringssl_size)))
print(('Protobuf size:{:,}\n'.format(protobuf_size)))
print(('Total size:{:,}'.format(total_size)))
print('Core size:{:,}'.format(core_size))
print('ObjC size:{:,}'.format(objc_size))
print('BoringSSL size:{:,}'.format(boringssl_size))
print('Protobuf size:{:,}\n'.format(protobuf_size))
print('Total size:{:,}'.format(total_size))
if __name__ == "__main__":

@ -137,7 +137,7 @@ class CallStack(object):
self.signature = initial_call_stack_builder.signature
self.lines = initial_call_stack_builder.lines
for line in self.lines:
for key, val in list(line.times.items()):
for key, val in line.times.items():
line.times[key] = [val]
def add(self, call_stack_builder):
@ -146,13 +146,13 @@ class CallStack(object):
assert len(self.lines) == len(call_stack_builder.lines)
for lsum, line in zip(self.lines, call_stack_builder.lines):
assert lsum.tag == line.tag
assert list(lsum.times.keys()) == list(line.times.keys())
for k, lst in list(lsum.times.items()):
assert lsum.times.keys() == line.times.keys()
for k, lst in lsum.times.items():
lst.append(line.times[k])
def finish(self):
for line in self.lines:
for lst in list(line.times.values()):
for lst in line.times.values():
lst.sort()
@ -175,7 +175,7 @@ with open(args.source) as f:
del builder[thd]
time_taken = time.time() - start
call_stacks = sorted(list(call_stacks.values()),
call_stacks = sorted(call_stacks.values(),
key=lambda cs: cs.count,
reverse=True)
total_stacks = 0
@ -260,7 +260,7 @@ for cs in call_stacks:
out.write(BANNER[args.fmt] % {
'count': cs.count,
})
header, _ = list(zip(*FORMAT))
header, _ = zip(*FORMAT)
table = []
for line in cs.lines:
fields = []

@ -41,7 +41,7 @@ SANITIZE = {
}
if sys.argv[1] == '--schema':
print((',\n'.join('%s:%s' % (k, t.upper()) for k, t in columns)))
print(',\n'.join('%s:%s' % (k, t.upper()) for k, t in columns))
sys.exit(0)
with open(sys.argv[1]) as f:

@ -120,7 +120,7 @@ class Benchmark:
if abs(s) > 3:
if mdn_diff > 0.5 or 'trickle' in f:
self.final[f] = '%+d%%' % s
return list(self.final.keys())
return self.final.keys()
def skip(self):
return not self.final
@ -199,7 +199,7 @@ def diff(bms, loops, regex, track, old, new, counters):
benchmarks[name].add_sample(track, row, False)
really_interesting = set()
for name, bm in list(benchmarks.items()):
for name, bm in benchmarks.items():
_maybe_print(name)
really_interesting.update(bm.process(track, new, old))
fields = [f for f in track if f in really_interesting]
@ -231,4 +231,4 @@ if __name__ == '__main__':
args = _args()
diff, note = diff(args.benchmarks, args.loops, args.regex, args.track,
args.old, args.new, args.counters)
print(('%s\n%s' % (note, diff if diff else "No performance differences")))
print('%s\n%s' % (note, diff if diff else "No performance differences"))

@ -144,7 +144,7 @@ def main(args):
text = '[%s] No significant performance differences' % args.pr_comment_name
if note:
text = note + '\n\n' + text
print(('%s' % text))
print('%s' % text)
check_on_pr.check_on_pr('Benchmark', '```\n%s\n```' % text)

@ -64,5 +64,5 @@ def speedup(new, old, threshold=_DEFAULT_THRESHOLD):
if __name__ == "__main__":
new = [0.0, 0.0, 0.0, 0.0]
old = [2.96608e-06, 3.35076e-06, 3.45384e-06, 3.34407e-06]
print((speedup(new, old, 1e-5)))
print((speedup(old, new, 1e-5)))
print(speedup(new, old, 1e-5))
print(speedup(old, new, 1e-5))

@ -174,7 +174,7 @@ def parse_name(name):
out.update(
dict((k, numericalize(v))
for k, v in zip(_BM_SPECS[name]['dyn'], dyn_args)))
out.update(dict(list(zip(_BM_SPECS[name]['tpl'], tpl_args))))
out.update(dict(zip(_BM_SPECS[name]['tpl'], tpl_args)))
return out

@ -69,7 +69,7 @@ def build(name, jobs):
subprocess.check_call(['git', 'submodule', 'update'])
try:
subprocess.check_call(_make_cmd(jobs))
except subprocess.CalledProcessError as e:
except subprocess.CalledProcessError, e:
subprocess.check_call(['make', 'clean'])
subprocess.check_call(_make_cmd(jobs))
os.rename('bins', 'qps_diff_%s' % name)
@ -93,11 +93,11 @@ def _load_qps(fname):
try:
with open(fname) as f:
return json.loads(f.read())['qps']
except IOError as e:
print(("IOError occurred reading file: %s" % fname))
except IOError, e:
print("IOError occurred reading file: %s" % fname)
return None
except ValueError as e:
print(("ValueError occurred reading file: %s" % fname))
except ValueError, e:
print("ValueError occurred reading file: %s" % fname)
return None
@ -128,8 +128,8 @@ def diff(scenarios, loops, old, new):
rows = []
for sn in scenarios:
mdn_diff = abs(_median(new_data[sn]) - _median(old_data[sn]))
print(('%s: %s=%r %s=%r mdn_diff=%r' %
(sn, new, new_data[sn], old, old_data[sn], mdn_diff)))
print('%s: %s=%r %s=%r mdn_diff=%r' %
(sn, new, new_data[sn], old, old_data[sn], mdn_diff))
s = bm_speedup.speedup(new_data[sn], old_data[sn], 10e-5)
if abs(s) > 3 and mdn_diff > 0.5:
rows.append([sn, '%+d%%' % s])
@ -162,7 +162,7 @@ def main(args):
text = '[qps] Performance differences noted:\n%s' % diff_output
else:
text = '[qps] No significant performance differences'
print(('%s' % text))
print('%s' % text)
check_on_pr.check_on_pr('QPS', '```\n%s\n```' % text)

@ -90,7 +90,7 @@ def get_commit_log(prevRelLabel, relBranch):
"git", "log", "--pretty=oneline", "--committer=GitHub",
"%s..%s" % (prevRelLabel, relBranch)
]
print(("Running ", " ".join(glg_command)))
print("Running ", " ".join(glg_command))
return subprocess.check_output(glg_command)
@ -98,13 +98,11 @@ def get_pr_data(pr_num):
"""Get the PR data from github. Return 'error' on exception"""
try:
from urllib.error import HTTPError
from urllib.request import Request
from urllib.request import urlopen
from urllib2 import HTTPError
from urllib2 import Request
from urllib2 import urlopen
except ImportError:
import urllib.error
import urllib.parse
import urllib.request
import urllib
from urllib.request import HTTPError
from urllib.request import Request
from urllib.request import urlopen
@ -118,7 +116,7 @@ def get_pr_data(pr_num):
except HTTPError as e:
response = json.loads(e.fp.read().decode('utf-8'))
if 'message' in response:
print((response['message']))
print(response['message'])
response = "error"
return response
@ -142,11 +140,10 @@ def get_pr_titles(gitLogs):
langs_pr = defaultdict(list)
for pr_num in prlist:
pr_num = str(pr_num)
print(("---------- getting data for PR " + pr_num))
print("---------- getting data for PR " + pr_num)
pr = get_pr_data(pr_num)
if pr == "error":
print(
("\n***ERROR*** Error in getting data for PR " + pr_num + "\n"))
print("\n***ERROR*** Error in getting data for PR " + pr_num + "\n")
error_count += 1
continue
rl_no_found = False
@ -165,7 +162,7 @@ def get_pr_titles(gitLogs):
if not body.endswith("."):
body = body + "."
if not pr["merged_by"]:
print(("\n***ERROR***: No merge_by found for PR " + pr_num + "\n"))
print("\n***ERROR***: No merge_by found for PR " + pr_num + "\n")
error_count += 1
continue
@ -176,17 +173,17 @@ def get_pr_titles(gitLogs):
print(detail)
#if no RL label
if not rl_no_found and not rl_yes_found:
print(("Release notes label missing for " + pr_num))
print("Release notes label missing for " + pr_num)
langs_pr["nolabel"].append(detail)
elif rl_yes_found and not lang_found:
print(("Lang label missing for " + pr_num))
print("Lang label missing for " + pr_num)
langs_pr["nolang"].append(detail)
elif rl_no_found:
print(("'Release notes:no' found for " + pr_num))
print("'Release notes:no' found for " + pr_num)
langs_pr["notinrel"].append(detail)
elif rl_yes_found:
print(("'Release notes:yes' found for " + pr_num + " with lang " +
lang))
print("'Release notes:yes' found for " + pr_num + " with lang " +
lang)
langs_pr["inrel"].append(detail)
langs_pr[lang].append(prline)
@ -363,7 +360,7 @@ def main():
write_draft(langs_pr, file, version, date)
file.truncate()
file.close()
print(("\nDraft notes written to " + filename))
print("\nDraft notes written to " + filename)
filename = os.path.abspath(rel_file)
if os.path.exists(filename):
@ -375,7 +372,7 @@ def main():
write_rel_notes(langs_pr, file, version, name)
file.truncate()
file.close()
print(("\nRelease notes written to " + filename))
print("\nRelease notes written to " + filename)
if error_count > 0:
print("\n\n*** Errors were encountered. See log. *********\n")

@ -38,7 +38,7 @@ def create_docker_jobspec(name,
environ['ARTIFACTS_OUT'] = 'artifacts/%s' % name
docker_args = []
for k, v in list(environ.items()):
for k, v in environ.items():
docker_args += ['-e', '%s=%s' % (k, v)]
docker_env = {
'DOCKERFILE_DIR': dockerfile_dir,

@ -37,7 +37,7 @@ def create_docker_jobspec(name,
environ['RELATIVE_COPY_PATH'] = copy_rel_path
docker_args = []
for k, v in list(environ.items()):
for k, v in environ.items():
docker_args += ['-e', '%s=%s' % (k, v)]
docker_env = {
'DOCKERFILE_DIR': dockerfile_dir,

@ -32,7 +32,7 @@ def create_docker_jobspec(name,
environ['RUN_COMMAND'] = shell_command
docker_args = []
for k, v in list(environ.items()):
for k, v in environ.items():
docker_args += ['-e', '%s=%s' % (k, v)]
docker_env = {
'DOCKERFILE_DIR': dockerfile_dir,

@ -341,6 +341,6 @@ def generate_client_referred_to_backend_multiple_balancers():
all_scenarios += generate_client_referred_to_backend_multiple_balancers()
print((yaml.dump({
print(yaml.dump({
'lb_interop_test_scenarios': all_scenarios,
})))
}))

@ -25,7 +25,7 @@ import sys
import time
import uuid
from . import massage_qps_stats
import massage_qps_stats
gcp_utils_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../../gcp/utils'))

@ -33,11 +33,10 @@ import sys
from typing import Any, Dict, Iterable, Mapping, Optional, Type
import uuid
import scenario_config
import scenario_config_exporter
import yaml
from . import scenario_config
from . import scenario_config_exporter
CONFIGURATION_FILE_HEADER_COMMENT = """
# Load test configurations generated from a template by loadtest_config.py.
# See documentation below:

@ -29,10 +29,9 @@ import argparse
import sys
from typing import Any, Dict, Iterable, List, Mapping, Type
import loadtest_config
import yaml
from . import loadtest_config
TEMPLATE_FILE_HEADER_COMMENT = """
# Template generated from load test configurations by loadtest_template.py.
#
@ -62,7 +61,7 @@ def insert_worker(worker: Dict[str, Any], workers: List[Dict[str,
def uniquify_workers(workermap: Dict[str, List[Dict[str, Any]]]) -> None:
"""Name workers if there is more than one for the same map key."""
for workers in list(workermap.values()):
for workers in workermap.values():
if len(workers) <= 1:
continue
for i, worker in enumerate(workers):

@ -14,7 +14,7 @@
# Autogenerated by tools/codegen/core/gen_stats_data.py
from . import massage_qps_stats_helpers
import massage_qps_stats_helpers
def massage_qps_stats(scenario_result):

@ -22,7 +22,7 @@ import tempfile
import time
import uuid
from . import jobset
import jobset
_DEVNULL = open(os.devnull, 'w')

@ -114,7 +114,7 @@ _ALLOWLIST_DICT = {
}
# Regex that combines all keys in _ALLOWLIST_DICT
_ALL_TRIGGERS = "(" + ")|(".join(list(_ALLOWLIST_DICT.keys())) + ")"
_ALL_TRIGGERS = "(" + ")|(".join(_ALLOWLIST_DICT.keys()) + ")"
# Add all triggers to their respective test suites
for trigger, test_suites in six.iteritems(_ALLOWLIST_DICT):

@ -44,7 +44,7 @@ def strip_non_ascii_chars(s):
def sanitized_environment(env):
sanitized = {}
for key, value in list(env.items()):
for key, value in env.items():
sanitized[strip_non_ascii_chars(key)] = strip_non_ascii_chars(value)
return sanitized
@ -221,7 +221,7 @@ class JobSpec(object):
def __str__(self):
return '%s: %s %s' % (self.shortname, ' '.join(
'%s=%s' % kv for kv in list(self.environ.items())), ' '.join(
'%s=%s' % kv for kv in self.environ.items()), ' '.join(
self.cmdline))

@ -193,7 +193,7 @@ class Handler(BaseHTTPRequestHandler):
now = time.time()
out = yaml.dump({
'pool': pool,
'in_use': dict((k, now - v) for k, v in list(in_use.items()))
'in_use': dict((k, now - v) for k, v in in_use.items())
})
mu.release()
self.wfile.write(out.encode('ascii'))

@ -32,10 +32,9 @@ def _filter_msg(msg, output_format):
if output_format in ['XML', 'HTML']:
# keep whitespaces but remove formfeed and vertical tab characters
# that make XML report unparsable.
filtered_msg = ''.join([
x for x in msg.decode('UTF-8', 'ignore')
if x in string.printable and x != '\f' and x != '\v'
])
filtered_msg = ''.join(
filter(lambda x: x in string.printable and x != '\f' and x != '\v',
msg.decode('UTF-8', 'ignore')))
if output_format == 'HTML':
filtered_msg = filtered_msg.replace('"', '&quot;')
return filtered_msg
@ -137,7 +136,7 @@ def render_interop_html_report(client_langs, server_langs, test_cases,
'Mako template is not installed. Skipping HTML report generation.')
return
except IOError as e:
print(('Failed to find the template %s: %s' % (template_file, e)))
print('Failed to find the template %s: %s' % (template_file, e))
return
sorted_test_cases = sorted(test_cases)
@ -170,7 +169,7 @@ def render_interop_html_report(client_langs, server_langs, test_cases,
with open(html_file_path, 'w') as output_file:
mytemplate.render_context(Context(output_file, **args))
except:
print((exceptions.text_error_template().render()))
print(exceptions.text_error_template().render())
raise

@ -18,11 +18,10 @@ import argparse
import json
import os
import sys
import urllib.error
import urllib.parse
import urllib.request
import uuid
import urllib2
gcp_utils_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../../gcp/utils'))
sys.path.append(gcp_utils_dir)
@ -122,12 +121,12 @@ def _get_resultstore_data(api_key, invocation_id):
# that limit, the 'nextPageToken' field is included in the request to get
# subsequent data, so keep requesting until 'nextPageToken' field is omitted.
while True:
req = urllib.request.Request(
req = urllib2.Request(
url=
'https://resultstore.googleapis.com/v2/invocations/%s/targets/-/configuredTargets/-/actions?key=%s&pageToken=%s&fields=next_page_token,actions.id,actions.status_attributes,actions.timing,actions.test_action'
% (invocation_id, api_key, page_token),
headers={'Content-Type': 'application/json'})
results = json.loads(urllib.request.urlopen(req).read())
results = json.loads(urllib2.urlopen(req).read())
all_actions.extend(results['actions'])
if 'nextPageToken' not in results:
break
@ -171,8 +170,7 @@ if __name__ == "__main__":
if args.resultstore_dump_file:
with open(args.resultstore_dump_file, 'w') as f:
json.dump(resultstore_actions, f, indent=4, sort_keys=True)
print(
('Dumped resultstore data to file %s' % args.resultstore_dump_file))
print('Dumped resultstore data to file %s' % args.resultstore_dump_file)
# google.devtools.resultstore.v2.Action schema:
# https://github.com/googleapis/googleapis/blob/master/google/devtools/resultstore/v2/action.proto
@ -258,8 +256,8 @@ if __name__ == "__main__":
}
})
except Exception as e:
print(('Failed to parse test result. Error: %s' % str(e)))
print((json.dumps(test_case, indent=4)))
print('Failed to parse test result. Error: %s' % str(e))
print(json.dumps(test_case, indent=4))
bq_rows.append({
'insertId': str(uuid.uuid4()),
'json': {
@ -286,7 +284,7 @@ if __name__ == "__main__":
if args.bq_dump_file:
with open(args.bq_dump_file, 'w') as f:
json.dump(bq_rows, f, indent=4, sort_keys=True)
print(('Dumped BQ data to file %s' % args.bq_dump_file))
print('Dumped BQ data to file %s' % args.bq_dump_file)
if not args.skip_upload:
# BigQuery sometimes fails with large uploads, so batch 1,000 rows at a time.

@ -171,7 +171,7 @@ def docker_run_cmdline(cmdline, image, docker_args, cwd, environ=None):
# turn environ into -e docker args
docker_cmdline = 'docker run -i --rm=true'.split()
if environ:
for k, v in list(environ.items()):
for k, v in environ.items():
docker_cmdline += ['-e', '%s=%s' % (k, v)]
return docker_cmdline + ['-w', cwd] + docker_args + [image] + cmdline
@ -403,7 +403,7 @@ docker_images = {}
build_jobs = []
if len(args.language) and args.language[0] == 'all':
languages = list(_LANGUAGES.keys())
languages = _LANGUAGES.keys()
else:
languages = args.language
for lang_name in languages:
@ -501,7 +501,7 @@ def run_one_scenario(scenario_config):
grpclb_ips = []
shortname_prefix = scenario_config['name']
# Start backends
for i in range(len(scenario_config['backend_configs'])):
for i in xrange(len(scenario_config['backend_configs'])):
backend_config = scenario_config['backend_configs'][i]
backend_shortname = shortname(shortname_prefix, 'backend_server', i)
backend_spec = backend_server_jobspec(
@ -511,7 +511,7 @@ def run_one_scenario(scenario_config):
backend_addrs.append(
'%s:%d' % (backend_job.ip_address(), _BACKEND_SERVER_PORT))
# Start fallbacks
for i in range(len(scenario_config['fallback_configs'])):
for i in xrange(len(scenario_config['fallback_configs'])):
fallback_config = scenario_config['fallback_configs'][i]
fallback_shortname = shortname(shortname_prefix, 'fallback_server',
i)
@ -521,7 +521,7 @@ def run_one_scenario(scenario_config):
server_jobs[fallback_shortname] = fallback_job
fallback_ips.append(fallback_job.ip_address())
# Start balancers
for i in range(len(scenario_config['balancer_configs'])):
for i in xrange(len(scenario_config['balancer_configs'])):
balancer_config = scenario_config['balancer_configs'][i]
grpclb_shortname = shortname(shortname_prefix, 'grpclb_server', i)
grpclb_spec = grpclb_jobspec(balancer_config['transport_sec'],
@ -582,7 +582,7 @@ def run_one_scenario(scenario_config):
return num_failures
finally:
# Check if servers are still running.
for server, job in list(server_jobs.items()):
for server, job in server_jobs.items():
if not job.is_running():
print('Server "%s" has exited prematurely.' % server)
suppress_failure = suppress_server_logs and not args.verbose

@ -754,8 +754,8 @@ _GRPC_CLIENT_TEST_CASES_FOR_HTTP2_SERVER_TEST_CASES = {
'no_df_padding_sanity_test': 'large_unary'
}
_HTTP2_SERVER_TEST_CASES_THAT_USE_GRPC_CLIENTS = list(
_GRPC_CLIENT_TEST_CASES_FOR_HTTP2_SERVER_TEST_CASES.keys())
_HTTP2_SERVER_TEST_CASES_THAT_USE_GRPC_CLIENTS = _GRPC_CLIENT_TEST_CASES_FOR_HTTP2_SERVER_TEST_CASES.keys(
)
_LANGUAGES_WITH_HTTP2_CLIENTS_FOR_HTTP2_SERVER_TEST_CASES = [
'java', 'go', 'python', 'c++'
@ -780,7 +780,7 @@ def docker_run_cmdline(cmdline, image, docker_args=[], cwd=None, environ=None):
# turn environ into -e docker args
if environ:
for k, v in list(environ.items()):
for k, v in environ.items():
docker_cmdline += ['-e', '%s=%s' % (k, v)]
# set working directory
@ -1206,7 +1206,7 @@ argp.add_argument('--google_default_creds_use_key_file',
'google_default_credentials test case, e.g. by '
'setting env var GOOGLE_APPLICATION_CREDENTIALS.'))
argp.add_argument('--prod_servers',
choices=list(prod_servers.keys()),
choices=prod_servers.keys(),
default=['default'],
nargs='+',
help=('The servers to run cloud_to_prod and '
@ -1547,7 +1547,7 @@ try:
(server_host, server_port) = server[1].split(':')
server_addresses[server_name] = (server_host, server_port)
for server_name, server_address in list(server_addresses.items()):
for server_name, server_address in server_addresses.items():
(server_host, server_port) = server_address
server_language = _LANGUAGES.get(server_name, None)
skip_server = [] # test cases unimplemented by server
@ -1661,7 +1661,7 @@ try:
report_utils.render_junit_xml_report(resultset, _TESTS_XML_REPORT)
for name, job in list(resultset.items()):
for name, job in resultset.items():
if "http2" in name:
job[0].http2results = aggregate_http2_results(job[0].message)
@ -1674,7 +1674,7 @@ try:
sys.exit(0)
finally:
# Check if servers are still running.
for server, job in list(server_jobs.items()):
for server, job in server_jobs.items():
if not job.is_running():
print('Server "%s" has exited prematurely.' % server)

@ -253,7 +253,7 @@ def prepare_remote_hosts(hosts, prepare_local=False):
def build_on_remote_hosts(hosts,
languages=list(scenario_config.LANGUAGES.keys()),
languages=scenario_config.LANGUAGES.keys(),
build_local=False):
"""Builds performance worker on remote hosts (and maybe also locally)."""
build_timeout = 45 * 60
@ -355,8 +355,7 @@ def create_scenarios(languages,
server_cpu_load=0):
"""Create jobspecs for scenarios to run."""
all_workers = [
worker for workers in list(workers_by_lang.values())
for worker in workers
worker for workers in workers_by_lang.values() for worker in workers
]
scenarios = []
_NO_WORKERS = []

@ -126,7 +126,7 @@ class Config(object):
would like to run
"""
actual_environ = self.environ.copy()
for k, v in list(environ.items()):
for k, v in environ.items():
actual_environ[k] = v
if not flaky and shortname and shortname in flaky_tests:
flaky = True
@ -1707,7 +1707,7 @@ build_steps = list(
if make_targets:
make_commands = itertools.chain.from_iterable(
make_jobspec(build_config, list(targets), makefile)
for (makefile, targets) in list(make_targets.items()))
for (makefile, targets) in make_targets.items())
build_steps.extend(set(make_commands))
build_steps.extend(
set(

@ -128,10 +128,10 @@ def parse_test_cases(arg):
def parse_port_range(port_arg):
try:
port = int(port_arg)
return list(range(port, port + 1))
return range(port, port + 1)
except:
port_min, port_max = port_arg.split(':')
return list(range(int(port_min), int(port_max) + 1))
return range(int(port_min), int(port_max) + 1)
argp = argparse.ArgumentParser(description='Run xDS interop tests on GCP')
@ -613,7 +613,7 @@ def compare_expected_instances(stats, expected_instances):
Returns:
Returns true if the instances are expected. False if not.
"""
for rpc_type, expected_peers in list(expected_instances.items()):
for rpc_type, expected_peers in expected_instances.items():
rpcs_by_peer_for_type = stats.rpcs_by_method[rpc_type]
rpcs_by_peer = rpcs_by_peer_for_type.rpcs_by_peer if rpcs_by_peer_for_type else None
logger.debug('rpc: %s, by_peer: %s', rpc_type, rpcs_by_peer)
@ -1030,7 +1030,7 @@ def test_metadata_filter(gcp, original_backend_service, instance_group,
with open(bootstrap_path) as f:
md = json.load(f)['node']['metadata']
match_labels = []
for k, v in list(md.items()):
for k, v in md.items():
match_labels.append({'name': k, 'value': v})
not_match_labels = [{'name': 'fake', 'value': 'fail'}]
@ -2027,7 +2027,7 @@ def test_timeout(gcp, original_backend_service, instance_group):
after_stats = get_client_accumulated_stats()
success = True
for rpc, status in list(expected_results.items()):
for rpc, status in expected_results.items():
qty = (after_stats.stats_per_method[rpc].result[status] -
before_stats.stats_per_method[rpc].result[status])
want = test_runtime_secs * args.qps
@ -2209,7 +2209,7 @@ def test_fault_injection(gcp, original_backend_service, instance_group):
after_stats = get_client_accumulated_stats()
success = True
for status, pct in list(expected_results.items()):
for status, pct in expected_results.items():
rpc = 'UNARY_CALL'
qty = (after_stats.stats_per_method[rpc].result[status] -
before_stats.stats_per_method[rpc].result[status])
@ -2377,8 +2377,7 @@ def is_primary_instance_group(gcp, instance_group):
# the client's actual locality.
instance_names = get_instance_names(gcp, instance_group)
stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
return all(
peer in instance_names for peer in list(stats.rpcs_by_peer.keys()))
return all(peer in instance_names for peer in stats.rpcs_by_peer.keys())
def get_startup_script(path_to_server_binary, service_port):
@ -2915,7 +2914,7 @@ def patch_url_map_backend_service(gcp,
'weightedBackendServices': [{
'backendService': service.url,
'weight': w,
} for service, w in list(services_with_weights.items())]
} for service, w in services_with_weights.items()]
} if services_with_weights else None
config = {

@ -163,10 +163,10 @@ if len(workspace_git_hashes - git_submodule_hashes) > 0:
print(
"Found discrepancies between git submodules and Bazel WORKSPACE dependencies"
)
print(("workspace_git_hashes: %s" % workspace_git_hashes))
print(("git_submodule_hashes: %s" % git_submodule_hashes))
print(("workspace_git_hashes - git_submodule_hashes: %s" %
(workspace_git_hashes - git_submodule_hashes)))
print("workspace_git_hashes: %s" % workspace_git_hashes)
print("git_submodule_hashes: %s" % git_submodule_hashes)
print("workspace_git_hashes - git_submodule_hashes: %s" %
(workspace_git_hashes - git_submodule_hashes))
sys.exit(1)
# Also check that we can override each dependency

@ -134,9 +134,9 @@ if path_files.sort() != expected_files.sort():
diff_plus = [file for file in path_files if file not in expected_files]
diff_minus = [file for file in expected_files if file not in path_files]
for file in diff_minus:
print(('- ', file))
print('- ', file)
for file in diff_plus:
print(('+ ', file))
print('+ ', file)
errors += 1
if errors > 0:
@ -161,7 +161,7 @@ for path_file in expected_files:
fo.write(expected_content)
if 0 != os.system('diff %s %s' % (path_file_expected, path_file)):
print(('Difference found in file:', path_file))
print('Difference found in file:', path_file)
errors += 1
os.remove(path_file_expected)
@ -177,9 +177,9 @@ for root, dirs, files in os.walk('src'):
with open(path_file, "r") as fi:
content = fi.read()
if '#include <grpc++/' in content:
print((
print(
'Failed: invalid include of deprecated headers in include/grpc++ in %s'
% path_file))
% path_file)
errors += 1
except IOError:
pass

@ -38,9 +38,9 @@ with open('doc/environment_variables.md') as f:
for t in tracers:
if t not in text:
print((
print(
"ERROR: tracer \"%s\" is not mentioned in doc/environment_variables.md"
% t))
% t)
errors += 1
assert errors == 0

@ -63,22 +63,22 @@ settings = build_yaml['settings']
top_version = Version(settings['version'])
if not check_version(top_version):
errors += 1
print((warning % ('version', top_version)))
print(warning % ('version', top_version))
for tag, value in list(settings.items()):
for tag, value in settings.items():
if re.match(r'^[a-z]+_version$', tag):
value = Version(value)
if tag != 'core_version':
if value.major != top_version.major:
errors += 1
print(('major version mismatch on %s: %d vs %d' %
(tag, value.major, top_version.major)))
print('major version mismatch on %s: %d vs %d' %
(tag, value.major, top_version.major))
if value.minor != top_version.minor:
errors += 1
print(('minor version mismatch on %s: %d vs %d' %
(tag, value.minor, top_version.minor)))
print('minor version mismatch on %s: %d vs %d' %
(tag, value.minor, top_version.minor))
if not check_version(value):
errors += 1
print((warning % (tag, value)))
print(warning % (tag, value))
sys.exit(errors)

@ -64,11 +64,11 @@ for root, dirs, files in os.walk('src/core'):
continue
with open(path) as f:
text = f.read()
for banned, exceptions in list(BANNED_EXCEPT.items()):
for banned, exceptions in BANNED_EXCEPT.items():
if path in exceptions:
continue
if banned in text:
print(('Illegal use of "%s" in %s' % (banned, path)))
print('Illegal use of "%s" in %s' % (banned, path))
errors += 1
assert errors == 0

@ -35,7 +35,7 @@ _TARGETS += package_targets.targets()
def _create_build_map():
"""Maps task names and labels to list of tasks to be built."""
target_build_map = dict([(target.name, [target]) for target in _TARGETS])
if len(_TARGETS) > len(list(target_build_map.keys())):
if len(_TARGETS) > len(target_build_map.keys()):
raise Exception('Target names need to be unique')
label_build_map = {}
@ -47,7 +47,7 @@ def _create_build_map():
else:
label_build_map[label] = [target]
if set(target_build_map.keys()).intersection(list(label_build_map.keys())):
if set(target_build_map.keys()).intersection(label_build_map.keys()):
raise Exception('Target names need to be distinct from label names')
return dict(list(target_build_map.items()) + list(label_build_map.items()))
