Migrate Infrastructure Scripts to Python 3 (#27135)

* Run 2to3 on tools directory

* Delete github_stats_tracking

* Re-run 2to3

* Remove unused script

* Remove unused script

* Remove unused line count utility

* Run yapf and isort

* Remove accidentally included file

* Migrate tools/distrib directory to python 3

* Remove unnecessary shebang

* Restore line_count directory

* Immediately convert subprocess.check_output output to string

* Take care of Python 2 shebangs

* Invoke scripts using a Python 3 interpreter

* Run yapf and isort

* Try installing Python 3 first

* See if we have any Python 3 versions installed

* Add Python 3.7 to Windows path

* Try adding a symlink

* Try to symlink differently

* Install six for Python 3

* Run run_interop_tests with python 3

* Try installing six in python3.7 explicitly

* Revert "Try installing six in python3.7 explicitly"

This reverts commit 2cf60d72f3.

* And debug some more

* Fix issue with jobset.py

* Add debug for CI failure

* Revert microbenchmark changes
Richard Belleville (committed via GitHub)
parent c1089d2964
commit 59693b7e72
79 changed files (changed line counts in parentheses):
  1. tools/buildgen/_utils.py (4)
  2. tools/buildgen/extract_metadata_from_bazel_xml.py (39)
  3. tools/buildgen/plugins/check_attrs.py (2)
  4. tools/buildgen/plugins/list_api.py (7)
  5. tools/buildgen/plugins/transitive_dependencies.py (2)
  6. tools/codegen/core/gen_header_frame.py (6)
  7. tools/codegen/core/gen_server_registered_method_bad_client_test_body.py (9)
  8. tools/codegen/core/gen_settings_ids.py (117)
  9. tools/codegen/core/gen_stats_data.py (274)
  10. tools/debug/core/chttp2_ref_leak.py (6)
  11. tools/debug/core/error_ref_leak.py (4)
  12. tools/distrib/c-ish/check_documentation.py (14)
  13. tools/distrib/check_copyright.py (6)
  14. tools/distrib/check_include_guards.py (14)
  15. tools/distrib/python/check_grpcio_tools.py (2)
  16. tools/distrib/python/docgen.py (2)
  17. tools/distrib/python/grpcio_tools/grpc_tools/protoc.py (2)
  18. tools/distrib/python/make_grpcio_tools.py (6)
  19. tools/distrib/python/submit.py (93)
  20. tools/distrib/run_clang_tidy.py (4)
  21. tools/gcp/github_stats_tracking/app.yaml (13)
  22. tools/gcp/github_stats_tracking/appengine_config.py (19)
  23. tools/gcp/github_stats_tracking/cron.yaml (4)
  24. tools/gcp/github_stats_tracking/fetch_data.py (96)
  25. tools/gcp/github_stats_tracking/main.py (28)
  26. tools/gcp/utils/big_query_utils.py (1)
  27. tools/internal_ci/helper_scripts/prepare_build_linux_rc (2)
  28. tools/internal_ci/helper_scripts/prepare_build_windows.bat (14)
  29. tools/internal_ci/linux/aws/grpc_aws_experiment_remote.sh (4)
  30. tools/internal_ci/linux/aws/grpc_bazel_test_c_cpp_aarch64.sh (2)
  31. tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh (2)
  32. tools/internal_ci/linux/grpc_coverage.sh (8)
  33. tools/internal_ci/linux/grpc_xds_k8s.sh (2)
  34. tools/internal_ci/linux/grpc_xds_k8s_install_test_driver.sh (12)
  35. tools/internal_ci/linux/grpc_xds_k8s_lb.sh (2)
  36. tools/internal_ci/linux/grpc_xds_k8s_lb_python.sh (2)
  37. tools/internal_ci/linux/grpc_xds_k8s_python.sh (2)
  38. tools/internal_ci/linux/grpc_xds_url_map.sh (2)
  39. tools/internal_ci/linux/grpc_xds_url_map_python.sh (2)
  40. tools/internal_ci/linux/run_performance_profile_hourly.sh (2)
  41. tools/internal_ci/macos/grpc_run_bazel_c_cpp_tests.sh (2)
  42. tools/internal_ci/windows/bazel_rbe.bat (2)
  43. tools/internal_ci/windows/grpc_run_tests_matrix.bat (2)
  44. tools/interop_matrix/client_matrix.py (1)
  45. tools/interop_matrix/create_matrix_images.py (6)
  46. tools/interop_matrix/run_interop_matrix_tests.py (10)
  47. tools/mkowners/mkowners.py (10)
  48. tools/profiling/bloat/bloat_diff.py (2)
  49. tools/profiling/ios_bin/binary_size.py (2)
  50. tools/profiling/ios_bin/parse_link_map.py (10)
  51. tools/profiling/latency_profile/profile_analyzer.py (14)
  52. tools/profiling/qps/qps_diff.py (20)
  53. tools/release/release_notes.py (35)
  54. tools/run_tests/artifacts/artifact_targets.py (2)
  55. tools/run_tests/artifacts/distribtest_targets.py (2)
  56. tools/run_tests/artifacts/package_targets.py (2)
  57. tools/run_tests/lb_interop_tests/gen_build_yaml.py (6)
  58. tools/run_tests/performance/bq_upload_result.py (2)
  59. tools/run_tests/performance/loadtest_config.py (5)
  60. tools/run_tests/performance/loadtest_template.py (5)
  61. tools/run_tests/performance/massage_qps_stats.py (2)
  62. tools/run_tests/python_utils/dockerjob.py (9)
  63. tools/run_tests/python_utils/filter_pull_request_tests.py (3)
  64. tools/run_tests/python_utils/jobset.py (8)
  65. tools/run_tests/python_utils/port_server.py (4)
  66. tools/run_tests/python_utils/report_utils.py (12)
  67. tools/run_tests/python_utils/start_port_server.py (3)
  68. tools/run_tests/python_utils/upload_rbe_results.py (18)
  69. tools/run_tests/run_grpclb_interop_tests.py (12)
  70. tools/run_tests/run_interop_tests.py (16)
  71. tools/run_tests/run_performance_tests.py (5)
  72. tools/run_tests/run_xds_tests.py (17)
  73. tools/run_tests/sanity/check_bazel_workspace.py (8)
  74. tools/run_tests/sanity/check_deprecated_grpc++.py (10)
  75. tools/run_tests/sanity/check_qps_scenario_changes.py (4)
  76. tools/run_tests/sanity/check_tracer_sanity.py (4)
  77. tools/run_tests/sanity/check_version.py (16)
  78. tools/run_tests/sanity/core_banned_functions.py (4)
  79. tools/run_tests/task_runner.py (4)

@ -45,7 +45,7 @@ def to_bunch(var: Any) -> Any:
return [to_bunch(i) for i in var]
if isinstance(var, dict):
ret = {}
for k, v in var.items():
for k, v in list(var.items()):
if isinstance(v, (list, dict)):
v = to_bunch(v)
ret[k] = v
@ -57,7 +57,7 @@ def to_bunch(var: Any) -> Any:
def merge_json(dst: Union[Mapping, List], add: Union[Mapping, List]) -> None:
"""Merges JSON objects recursively."""
if isinstance(dst, dict) and isinstance(add, dict):
for k, v in add.items():
for k, v in list(add.items()):
if k in dst:
if k.startswith('#'):
continue
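
Note: 2to3 wraps dict.items()/keys()/values() in list() because Python 3 returns lazy views. For read-only loops like the ones above the wrapper is redundant but harmless; it only matters when the dict is mutated while iterating. A minimal sketch of the distinction, illustrative only and not part of this change:

    # Python 3 dict views vs. the list() snapshot that 2to3 inserts.
    d = {'a': 1, 'b': 2}

    # Read-only iteration over the view is fine:
    for k, v in d.items():
        pass

    # Mutating the dict while iterating the view raises RuntimeError, so a
    # snapshot is needed whenever entries are added or removed in the loop:
    for k, v in list(d.items()):
        if v == 1:
            del d[k]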

@ -286,12 +286,11 @@ def _compute_transitive_metadata(
# Calculate transitive public deps (needed for collapsing sources)
transitive_public_deps = set(
filter(lambda x: x in bazel_label_to_dep_name, transitive_deps))
[x for x in transitive_deps if x in bazel_label_to_dep_name])
# Remove intermediate targets that our public dependencies already depend
# on. This is the step that further shorten the deps list.
collapsed_deps = set(filter(lambda x: x not in exclude_deps,
collapsed_deps))
collapsed_deps = set([x for x in collapsed_deps if x not in exclude_deps])
# Compute the final source files and headers for this build target whose
# name is `rule_name` (input argument of this function).
@ -361,7 +360,7 @@ def _populate_transitive_metadata(bazel_rules: Any,
def update_test_metadata_with_transitive_metadata(
all_extra_metadata: BuildDict, bazel_rules: BuildDict) -> None:
"""Patches test build metadata with transitive metadata."""
for lib_name, lib_dict in all_extra_metadata.items():
for lib_name, lib_dict in list(all_extra_metadata.items()):
# Skip if it isn't not an test
if lib_dict.get('build') != 'test' or lib_dict.get('_TYPE') != 'target':
continue
@ -409,7 +408,7 @@ def _generate_build_metadata(build_extra_metadata: BuildDict,
result[to_name] = lib_dict
# dep names need to be updated as well
for lib_dict_to_update in result.values():
for lib_dict_to_update in list(result.values()):
lib_dict_to_update['deps'] = list([
to_name if dep == lib_name else dep
for dep in lib_dict_to_update['deps']
@ -439,15 +438,21 @@ def _convert_to_build_yaml_like(lib_dict: BuildMetadata) -> BuildYaml:
# get rid of temporary private fields prefixed with "_" and some other useless fields
for lib in lib_list:
for field_to_remove in [k for k in lib.keys() if k.startswith('_')]:
for field_to_remove in [
k for k in list(lib.keys()) if k.startswith('_')
]:
lib.pop(field_to_remove, None)
for target in target_list:
for field_to_remove in [k for k in target.keys() if k.startswith('_')]:
for field_to_remove in [
k for k in list(target.keys()) if k.startswith('_')
]:
target.pop(field_to_remove, None)
target.pop('public_headers',
None) # public headers make no sense for targets
for test in test_list:
for field_to_remove in [k for k in test.keys() if k.startswith('_')]:
for field_to_remove in [
k for k in list(test.keys()) if k.startswith('_')
]:
test.pop(field_to_remove, None)
test.pop('public_headers',
None) # public headers make no sense for tests
@ -464,7 +469,7 @@ def _convert_to_build_yaml_like(lib_dict: BuildMetadata) -> BuildYaml:
def _extract_cc_tests(bazel_rules: BuildDict) -> List[str]:
"""Gets list of cc_test tests from bazel rules"""
result = []
for bazel_rule in bazel_rules.values():
for bazel_rule in list(bazel_rules.values()):
if bazel_rule['class'] == 'cc_test':
test_name = bazel_rule['name']
if test_name.startswith('//'):
@ -575,7 +580,7 @@ def _generate_build_extra_metadata_for_tests(
if 'grpc_fuzzer' == bazel_rule['generator_function']:
# currently we hand-list fuzzers instead of generating them automatically
# because there's no way to obtain maxlen property from bazel BUILD file.
print('skipping fuzzer ' + test)
print(('skipping fuzzer ' + test))
continue
# if any tags that restrict platform compatibility are present,
@ -619,20 +624,20 @@ def _generate_build_extra_metadata_for_tests(
# detect duplicate test names
tests_by_simple_name = {}
for test_name, test_dict in test_metadata.items():
for test_name, test_dict in list(test_metadata.items()):
simple_test_name = test_dict['_RENAME']
if not simple_test_name in tests_by_simple_name:
tests_by_simple_name[simple_test_name] = []
tests_by_simple_name[simple_test_name].append(test_name)
# choose alternative names for tests with a name collision
for collision_list in tests_by_simple_name.values():
for collision_list in list(tests_by_simple_name.values()):
if len(collision_list) > 1:
for test_name in collision_list:
long_name = test_name.replace('/', '_').replace(':', '_')
print(
print((
'short name of "%s" collides with another test, renaming to %s'
% (test_name, long_name))
% (test_name, long_name)))
test_metadata[test_name]['_RENAME'] = long_name
return test_metadata
@ -644,8 +649,8 @@ def _detect_and_print_issues(build_yaml_like: BuildYaml) -> None:
if tgt['build'] == 'test':
for src in tgt['src']:
if src.startswith('src/') and not src.endswith('.proto'):
print('source file from under "src/" tree used in test ' +
tgt['name'] + ': ' + src)
print(('source file from under "src/" tree used in test ' +
tgt['name'] + ': ' + src))
# extra metadata that will be used to construct build.yaml
@ -968,7 +973,7 @@ all_extra_metadata.update(
# '_COLLAPSED_PUBLIC_HEADERS': [...],
# '_COLLAPSED_HEADERS': [...]
# }
_populate_transitive_metadata(bazel_rules, all_extra_metadata.keys())
_populate_transitive_metadata(bazel_rules, list(all_extra_metadata.keys()))
# Step 4a: Update the existing test metadata with the updated build metadata.
# Certain build metadata of certain test targets depend on the transitive

@ -95,7 +95,7 @@ VALID_ATTRIBUTE_KEYS_MAP = {
def check_attributes(entity, kind, errors):
attributes = VALID_ATTRIBUTE_KEYS_MAP[kind]
name = entity.get('name', anything())
for key, value in entity.items():
for key, value in list(entity.items()):
if key == 'name':
continue
validator = attributes.get(key)

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
# Copyright 2016 gRPC authors.
#
@ -65,5 +65,6 @@ def mako_plugin(dictionary):
if __name__ == '__main__':
print(yaml.dump([api for api in list_c_apis(headers_under('include/grpc'))
]))
print(
(yaml.dump([api for api in list_c_apis(headers_under('include/grpc'))
])))

@ -53,7 +53,7 @@ def mako_plugin(dictionary):
"""
lib_map = {lib['name']: lib for lib in dictionary.get('libs')}
for target_name, target_list in dictionary.items():
for target_name, target_list in list(dictionary.items()):
for target in target_list:
if isinstance(target, dict):
if 'deps' in target or target_name == 'libs':

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
# Copyright 2015 gRPC authors.
#
@ -141,7 +141,7 @@ if args.hex:
all_bytes = []
for line in payload_bytes:
all_bytes.extend(line)
print '{%s}' % ', '.join('0x%02x' % c for c in all_bytes)
print(('{%s}' % ', '.join('0x%02x' % c for c in all_bytes)))
else:
for line in payload_bytes:
print esc_c(line)
print((esc_c(line)))

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
# Copyright 2015 gRPC authors.
#
@ -49,7 +49,8 @@ for message_length in range(0, 3):
] + payload[0:frame_length]
text = esc_c(frame)
if text not in done:
print 'GRPC_RUN_BAD_CLIENT_TEST(verifier_%s, PFX_STR %s, %s);' % (
'succeeds' if is_end else 'fails', text,
'0' if is_end else 'GRPC_BAD_CLIENT_DISCONNECT')
print(
('GRPC_RUN_BAD_CLIENT_TEST(verifier_%s, PFX_STR %s, %s);' %
('succeeds' if is_end else 'fails', text,
'0' if is_end else 'GRPC_BAD_CLIENT_DISCONNECT')))
done.add(text)

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
# Copyright 2017 gRPC authors.
#
@ -14,6 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import collections
import sys
@ -57,11 +59,11 @@ C = open('src/core/ext/transport/chttp2/transport/http2_settings.c', 'w')
# utility: print a big comment block into a set of files
def put_banner(files, banner):
for f in files:
print >> f, '/*'
print('/*', file=f)
for line in banner:
print >> f, ' * %s' % line
print >> f, ' */'
print >> f
print(' * %s' % line, file=f)
print(' */', file=f)
print(file=f)
# copy-paste copyright notice from this file
@ -84,79 +86,89 @@ put_banner(
[H, C],
["Automatically generated by tools/codegen/core/gen_settings_ids.py"])
print >> H, "#ifndef GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H"
print >> H, "#define GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H"
print >> H
print >> H, "#include <stdint.h>"
print >> H, "#include <stdbool.h>"
print >> H
print("#ifndef GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H",
file=H)
print("#define GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H",
file=H)
print(file=H)
print("#include <stdint.h>", file=H)
print("#include <stdbool.h>", file=H)
print(file=H)
print >> C, "#include \"src/core/ext/transport/chttp2/transport/http2_settings.h\""
print >> C
print >> C, "#include <grpc/support/useful.h>"
print >> C, "#include \"src/core/lib/transport/http2_errors.h\""
print >> C
print("#include \"src/core/ext/transport/chttp2/transport/http2_settings.h\"",
file=C)
print(file=C)
print("#include <grpc/support/useful.h>", file=C)
print("#include \"src/core/lib/transport/http2_errors.h\"", file=C)
print(file=C)
p = perfection.hash_parameters(sorted(x.id for x in _SETTINGS.values()))
print p
p = perfection.hash_parameters(sorted(x.id for x in list(_SETTINGS.values())))
print(p)
def hash(i):
i += p.offset
x = i % p.t
y = i / p.t
y = i // p.t
return x + p.r[y]
decorated_settings = [
DecoratedSetting(hash(setting.id), name, setting)
for name, setting in _SETTINGS.iteritems()
for name, setting in _SETTINGS.items()
]
print >> H, 'typedef enum {'
print('typedef enum {', file=H)
for decorated_setting in sorted(decorated_settings):
print >> H, ' GRPC_CHTTP2_SETTINGS_%s = %d, /* wire id %d */' % (
decorated_setting.name, decorated_setting.enum,
decorated_setting.setting.id)
print >> H, '} grpc_chttp2_setting_id;'
print >> H
print >> H, '#define GRPC_CHTTP2_NUM_SETTINGS %d' % (
max(x.enum for x in decorated_settings) + 1)
print >> H, 'extern const uint16_t grpc_setting_id_to_wire_id[];'
print >> C, 'const uint16_t grpc_setting_id_to_wire_id[] = {%s};' % ','.join(
'%d' % s for s in p.slots)
print >> H
print >> H, "bool grpc_wire_id_to_setting_id(uint32_t wire_id, grpc_chttp2_setting_id *out);"
print(' GRPC_CHTTP2_SETTINGS_%s = %d, /* wire id %d */' %
(decorated_setting.name, decorated_setting.enum,
decorated_setting.setting.id),
file=H)
print('} grpc_chttp2_setting_id;', file=H)
print(file=H)
print('#define GRPC_CHTTP2_NUM_SETTINGS %d' %
(max(x.enum for x in decorated_settings) + 1),
file=H)
print('extern const uint16_t grpc_setting_id_to_wire_id[];', file=H)
print('const uint16_t grpc_setting_id_to_wire_id[] = {%s};' %
','.join('%d' % s for s in p.slots),
file=C)
print(file=H)
print(
"bool grpc_wire_id_to_setting_id(uint32_t wire_id, grpc_chttp2_setting_id *out);",
file=H)
cgargs = {
'r': ','.join('%d' % (r if r is not None else 0) for r in p.r),
't': p.t,
'offset': abs(p.offset),
'offset_sign': '+' if p.offset > 0 else '-'
}
print >> C, """
print("""
bool grpc_wire_id_to_setting_id(uint32_t wire_id, grpc_chttp2_setting_id *out) {
uint32_t i = wire_id %(offset_sign)s %(offset)d;
uint32_t x = i %% %(t)d;
uint32_t y = i / %(t)d;
uint32_t h = x;
switch (y) {
""" % cgargs
""" % cgargs,
file=C)
for i, r in enumerate(p.r):
if not r:
continue
if r < 0:
print >> C, 'case %d: h -= %d; break;' % (i, -r)
print('case %d: h -= %d; break;' % (i, -r), file=C)
else:
print >> C, 'case %d: h += %d; break;' % (i, r)
print >> C, """
print('case %d: h += %d; break;' % (i, r), file=C)
print("""
}
*out = (grpc_chttp2_setting_id)h;
return h < GPR_ARRAY_SIZE(grpc_setting_id_to_wire_id) && grpc_setting_id_to_wire_id[h] == wire_id;
}
""" % cgargs
""" % cgargs,
file=C)
print >> H, """
print("""
typedef enum {
GRPC_CHTTP2_CLAMP_INVALID_VALUE,
GRPC_CHTTP2_DISCONNECT_ON_INVALID_VALUE
@ -172,26 +184,33 @@ typedef struct {
} grpc_chttp2_setting_parameters;
extern const grpc_chttp2_setting_parameters grpc_chttp2_settings_parameters[GRPC_CHTTP2_NUM_SETTINGS];
"""
print >> C, "const grpc_chttp2_setting_parameters grpc_chttp2_settings_parameters[GRPC_CHTTP2_NUM_SETTINGS] = {"
""",
file=H)
print(
"const grpc_chttp2_setting_parameters grpc_chttp2_settings_parameters[GRPC_CHTTP2_NUM_SETTINGS] = {",
file=C)
i = 0
for decorated_setting in sorted(decorated_settings):
while i < decorated_setting.enum:
print >> C, "{NULL, 0, 0, 0, GRPC_CHTTP2_DISCONNECT_ON_INVALID_VALUE, GRPC_HTTP2_PROTOCOL_ERROR},"
print(
"{NULL, 0, 0, 0, GRPC_CHTTP2_DISCONNECT_ON_INVALID_VALUE, GRPC_HTTP2_PROTOCOL_ERROR},",
file=C)
i += 1
print >> C, "{\"%s\", %du, %du, %du, GRPC_CHTTP2_%s, GRPC_HTTP2_%s}," % (
print("{\"%s\", %du, %du, %du, GRPC_CHTTP2_%s, GRPC_HTTP2_%s}," % (
decorated_setting.name,
decorated_setting.setting.default,
decorated_setting.setting.min,
decorated_setting.setting.max,
decorated_setting.setting.on_error.behavior,
decorated_setting.setting.on_error.code,
)
),
file=C)
i += 1
print >> C, "};"
print("};", file=C)
print >> H
print >> H, "#endif /* GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H */"
print(file=H)
print("#endif /* GRPC_CORE_EXT_TRANSPORT_CHTTP2_TRANSPORT_HTTP2_SETTINGS_H */",
file=H)
H.close()
C.close()
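
Note: the two recurring rewrites in this generator are print redirection (print >> f, ... becomes print(..., file=f)) and division (y = i / p.t becomes y = i // p.t, since / is true division in Python 3). A minimal sketch of both, illustrative only and not part of the patch:

    import sys

    # Python 2:  print >> sys.stderr, "message"
    print("message", file=sys.stderr)  # Python 3 spelling

    # Division: / is true division in Python 3, // is floor division.
    assert 7 / 2 == 3.5
    assert 7 // 2 == 3  # matches the old Python 2 int/int behaviour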

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
# Copyright 2017 gRPC authors.
#
@ -14,6 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import collections
import ctypes
import json
@ -34,10 +36,11 @@ def make_type(name, fields):
def c_str(s, encoding='ascii'):
if isinstance(s, unicode):
if isinstance(s, str):
s = s.encode(encoding)
result = ''
for c in s:
c = chr(c) if isinstance(c, int) else c
if not (32 <= ord(c) < 127) or c in ('\\', '"'):
result += '\\%03o' % ord(c)
else:
@ -81,7 +84,7 @@ def shift_works_until(mapped_bounds, shift_bits):
def find_ideal_shift(mapped_bounds, max_size):
best = None
for shift_bits in reversed(range(0, 64)):
for shift_bits in reversed(list(range(0, 64))):
n = shift_works_until(mapped_bounds, shift_bits)
if n == 0:
continue
@ -94,16 +97,16 @@ def find_ideal_shift(mapped_bounds, max_size):
best = (shift_bits, n, table_size)
elif best[1] < n:
best = (shift_bits, n, table_size)
print best
print(best)
return best
def gen_map_table(mapped_bounds, shift_data):
tbl = []
cur = 0
print mapped_bounds
print(mapped_bounds)
mapped_bounds = [x >> shift_data[0] for x in mapped_bounds]
print mapped_bounds
print(mapped_bounds)
for i in range(0, mapped_bounds[shift_data[1] - 1]):
while i > mapped_bounds[cur]:
cur += 1
@ -120,7 +123,7 @@ def decl_static_table(values, type):
for i, vp in enumerate(static_tables):
if v == vp:
return i
print "ADD TABLE: %s %r" % (type, values)
print("ADD TABLE: %s %r" % (type, values))
r = len(static_tables)
static_tables.append(v)
return r
@ -205,11 +208,11 @@ def gen_bucket_code(histogram):
# utility: print a big comment block into a set of files
def put_banner(files, banner):
for f in files:
print >> f, '/*'
print('/*', file=f)
for line in banner:
print >> f, ' * %s' % line
print >> f, ' */'
print >> f
print(' * %s' % line, file=f)
print(' */', file=f)
print(file=f)
with open('src/core/lib/debug/stats_data.h', 'w') as H:
@ -233,77 +236,90 @@ with open('src/core/lib/debug/stats_data.h', 'w') as H:
[H],
["Automatically generated by tools/codegen/core/gen_stats_data.py"])
print >> H, "#ifndef GRPC_CORE_LIB_DEBUG_STATS_DATA_H"
print >> H, "#define GRPC_CORE_LIB_DEBUG_STATS_DATA_H"
print >> H
print >> H, "#include <grpc/support/port_platform.h>"
print >> H
print >> H, "#include <inttypes.h>"
print >> H, "#include \"src/core/lib/iomgr/exec_ctx.h\""
print >> H
print("#ifndef GRPC_CORE_LIB_DEBUG_STATS_DATA_H", file=H)
print("#define GRPC_CORE_LIB_DEBUG_STATS_DATA_H", file=H)
print(file=H)
print("#include <grpc/support/port_platform.h>", file=H)
print(file=H)
print("#include <inttypes.h>", file=H)
print("#include \"src/core/lib/iomgr/exec_ctx.h\"", file=H)
print(file=H)
for typename, instances in sorted(inst_map.items()):
print >> H, "typedef enum {"
print("typedef enum {", file=H)
for inst in instances:
print >> H, " GRPC_STATS_%s_%s," % (typename.upper(),
inst.name.upper())
print >> H, " GRPC_STATS_%s_COUNT" % (typename.upper())
print >> H, "} grpc_stats_%ss;" % (typename.lower())
print >> H, "extern const char *grpc_stats_%s_name[GRPC_STATS_%s_COUNT];" % (
typename.lower(), typename.upper())
print >> H, "extern const char *grpc_stats_%s_doc[GRPC_STATS_%s_COUNT];" % (
typename.lower(), typename.upper())
print(" GRPC_STATS_%s_%s," % (typename.upper(), inst.name.upper()),
file=H)
print(" GRPC_STATS_%s_COUNT" % (typename.upper()), file=H)
print("} grpc_stats_%ss;" % (typename.lower()), file=H)
print("extern const char *grpc_stats_%s_name[GRPC_STATS_%s_COUNT];" %
(typename.lower(), typename.upper()),
file=H)
print("extern const char *grpc_stats_%s_doc[GRPC_STATS_%s_COUNT];" %
(typename.lower(), typename.upper()),
file=H)
histo_start = []
histo_buckets = []
histo_bucket_boundaries = []
print >> H, "typedef enum {"
print("typedef enum {", file=H)
first_slot = 0
for histogram in inst_map['Histogram']:
histo_start.append(first_slot)
histo_buckets.append(histogram.buckets)
print >> H, " GRPC_STATS_HISTOGRAM_%s_FIRST_SLOT = %d," % (
histogram.name.upper(), first_slot)
print >> H, " GRPC_STATS_HISTOGRAM_%s_BUCKETS = %d," % (
histogram.name.upper(), histogram.buckets)
print(" GRPC_STATS_HISTOGRAM_%s_FIRST_SLOT = %d," %
(histogram.name.upper(), first_slot),
file=H)
print(" GRPC_STATS_HISTOGRAM_%s_BUCKETS = %d," %
(histogram.name.upper(), histogram.buckets),
file=H)
first_slot += histogram.buckets
print >> H, " GRPC_STATS_HISTOGRAM_BUCKETS = %d" % first_slot
print >> H, "} grpc_stats_histogram_constants;"
print(" GRPC_STATS_HISTOGRAM_BUCKETS = %d" % first_slot, file=H)
print("} grpc_stats_histogram_constants;", file=H)
print >> H, "#if defined(GRPC_COLLECT_STATS) || !defined(NDEBUG)"
print("#if defined(GRPC_COLLECT_STATS) || !defined(NDEBUG)", file=H)
for ctr in inst_map['Counter']:
print >> H, ("#define GRPC_STATS_INC_%s() " +
"GRPC_STATS_INC_COUNTER(GRPC_STATS_COUNTER_%s)") % (
ctr.name.upper(), ctr.name.upper())
print(("#define GRPC_STATS_INC_%s() " +
"GRPC_STATS_INC_COUNTER(GRPC_STATS_COUNTER_%s)") %
(ctr.name.upper(), ctr.name.upper()),
file=H)
for histogram in inst_map['Histogram']:
print >> H, "#define GRPC_STATS_INC_%s(value) grpc_stats_inc_%s( (int)(value))" % (
histogram.name.upper(), histogram.name.lower())
print >> H, "void grpc_stats_inc_%s(int x);" % histogram.name.lower()
print(
"#define GRPC_STATS_INC_%s(value) grpc_stats_inc_%s( (int)(value))"
% (histogram.name.upper(), histogram.name.lower()),
file=H)
print("void grpc_stats_inc_%s(int x);" % histogram.name.lower(), file=H)
print >> H, "#else"
print("#else", file=H)
for ctr in inst_map['Counter']:
print >> H, ("#define GRPC_STATS_INC_%s() ") % (ctr.name.upper())
print(("#define GRPC_STATS_INC_%s() ") % (ctr.name.upper()), file=H)
for histogram in inst_map['Histogram']:
print >> H, "#define GRPC_STATS_INC_%s(value)" % (
histogram.name.upper())
print >> H, "#endif /* defined(GRPC_COLLECT_STATS) || !defined(NDEBUG) */"
print("#define GRPC_STATS_INC_%s(value)" % (histogram.name.upper()),
file=H)
print("#endif /* defined(GRPC_COLLECT_STATS) || !defined(NDEBUG) */",
file=H)
for i, tbl in enumerate(static_tables):
print >> H, "extern const %s grpc_stats_table_%d[%d];" % (tbl[0], i,
len(tbl[1]))
print >> H, "extern const int grpc_stats_histo_buckets[%d];" % len(
inst_map['Histogram'])
print >> H, "extern const int grpc_stats_histo_start[%d];" % len(
inst_map['Histogram'])
print >> H, "extern const int *const grpc_stats_histo_bucket_boundaries[%d];" % len(
inst_map['Histogram'])
print >> H, "extern void (*const grpc_stats_inc_histogram[%d])(int x);" % len(
inst_map['Histogram'])
print >> H
print >> H, "#endif /* GRPC_CORE_LIB_DEBUG_STATS_DATA_H */"
print("extern const %s grpc_stats_table_%d[%d];" %
(tbl[0], i, len(tbl[1])),
file=H)
print("extern const int grpc_stats_histo_buckets[%d];" %
len(inst_map['Histogram']),
file=H)
print("extern const int grpc_stats_histo_start[%d];" %
len(inst_map['Histogram']),
file=H)
print("extern const int *const grpc_stats_histo_bucket_boundaries[%d];" %
len(inst_map['Histogram']),
file=H)
print("extern void (*const grpc_stats_inc_histogram[%d])(int x);" %
len(inst_map['Histogram']),
file=H)
print(file=H)
print("#endif /* GRPC_CORE_LIB_DEBUG_STATS_DATA_H */", file=H)
with open('src/core/lib/debug/stats_data.cc', 'w') as C:
# copy-paste copyright notice from this file
@ -326,13 +342,13 @@ with open('src/core/lib/debug/stats_data.cc', 'w') as C:
[C],
["Automatically generated by tools/codegen/core/gen_stats_data.py"])
print >> C, "#include <grpc/support/port_platform.h>"
print >> C
print >> C, "#include \"src/core/lib/debug/stats.h\""
print >> C, "#include \"src/core/lib/debug/stats_data.h\""
print >> C, "#include \"src/core/lib/gpr/useful.h\""
print >> C, "#include \"src/core/lib/iomgr/exec_ctx.h\""
print >> C
print("#include <grpc/support/port_platform.h>", file=C)
print(file=C)
print("#include \"src/core/lib/debug/stats.h\"", file=C)
print("#include \"src/core/lib/debug/stats_data.h\"", file=C)
print("#include \"src/core/lib/gpr/useful.h\"", file=C)
print("#include \"src/core/lib/iomgr/exec_ctx.h\"", file=C)
print(file=C)
histo_code = []
for histogram in inst_map['Histogram']:
@ -341,36 +357,45 @@ with open('src/core/lib/debug/stats_data.cc', 'w') as C:
histo_code.append(code)
for typename, instances in sorted(inst_map.items()):
print >> C, "const char *grpc_stats_%s_name[GRPC_STATS_%s_COUNT] = {" % (
typename.lower(), typename.upper())
print("const char *grpc_stats_%s_name[GRPC_STATS_%s_COUNT] = {" %
(typename.lower(), typename.upper()),
file=C)
for inst in instances:
print >> C, " %s," % c_str(inst.name)
print >> C, "};"
print >> C, "const char *grpc_stats_%s_doc[GRPC_STATS_%s_COUNT] = {" % (
typename.lower(), typename.upper())
print(" %s," % c_str(inst.name), file=C)
print("};", file=C)
print("const char *grpc_stats_%s_doc[GRPC_STATS_%s_COUNT] = {" %
(typename.lower(), typename.upper()),
file=C)
for inst in instances:
print >> C, " %s," % c_str(inst.doc)
print >> C, "};"
print(" %s," % c_str(inst.doc), file=C)
print("};", file=C)
for i, tbl in enumerate(static_tables):
print >> C, "const %s grpc_stats_table_%d[%d] = {%s};" % (
tbl[0], i, len(tbl[1]), ','.join('%s' % x for x in tbl[1]))
print("const %s grpc_stats_table_%d[%d] = {%s};" %
(tbl[0], i, len(tbl[1]), ','.join('%s' % x for x in tbl[1])),
file=C)
for histogram, code in zip(inst_map['Histogram'], histo_code):
print >> C, ("void grpc_stats_inc_%s(int value) {%s}") % (
histogram.name.lower(), code)
print >> C, "const int grpc_stats_histo_buckets[%d] = {%s};" % (len(
inst_map['Histogram']), ','.join('%s' % x for x in histo_buckets))
print >> C, "const int grpc_stats_histo_start[%d] = {%s};" % (len(
inst_map['Histogram']), ','.join('%s' % x for x in histo_start))
print >> C, "const int *const grpc_stats_histo_bucket_boundaries[%d] = {%s};" % (
len(inst_map['Histogram']), ','.join(
'grpc_stats_table_%d' % x for x in histo_bucket_boundaries))
print >> C, "void (*const grpc_stats_inc_histogram[%d])(int x) = {%s};" % (
len(inst_map['Histogram']), ','.join(
'grpc_stats_inc_%s' % histogram.name.lower()
for histogram in inst_map['Histogram']))
print(("void grpc_stats_inc_%s(int value) {%s}") %
(histogram.name.lower(), code),
file=C)
print(
"const int grpc_stats_histo_buckets[%d] = {%s};" %
(len(inst_map['Histogram']), ','.join('%s' % x for x in histo_buckets)),
file=C)
print("const int grpc_stats_histo_start[%d] = {%s};" %
(len(inst_map['Histogram']), ','.join('%s' % x for x in histo_start)),
file=C)
print("const int *const grpc_stats_histo_bucket_boundaries[%d] = {%s};" %
(len(inst_map['Histogram']), ','.join(
'grpc_stats_table_%d' % x for x in histo_bucket_boundaries)),
file=C)
print("void (*const grpc_stats_inc_histogram[%d])(int x) = {%s};" %
(len(inst_map['Histogram']), ','.join(
'grpc_stats_inc_%s' % histogram.name.lower()
for histogram in inst_map['Histogram'])),
file=C)
# patch qps_test bigquery schema
RECORD_EXPLICIT_PERCENTILES = [50, 95, 99]
@ -438,39 +463,56 @@ with open('tools/run_tests/performance/massage_qps_stats.py', 'w') as P:
break
for line in my_source:
if line[0] == '#':
print >> P, line.rstrip()
print(line.rstrip(), file=P)
break
for line in my_source:
if line[0] != '#':
break
print >> P, line.rstrip()
print >> P
print >> P, '# Autogenerated by tools/codegen/core/gen_stats_data.py'
print >> P
print >> P, 'import massage_qps_stats_helpers'
print >> P, 'def massage_qps_stats(scenario_result):'
print >> P, ' for stats in scenario_result["serverStats"] + scenario_result["clientStats"]:'
print >> P, ' if "coreStats" in stats:'
print >> P, ' # Get rid of the "coreStats" element and replace it by statistics'
print >> P, ' # that correspond to columns in the bigquery schema.'
print >> P, ' core_stats = stats["coreStats"]'
print >> P, ' del stats["coreStats"]'
print(line.rstrip(), file=P)
print(file=P)
print('# Autogenerated by tools/codegen/core/gen_stats_data.py', file=P)
print(file=P)
print('import massage_qps_stats_helpers', file=P)
print('def massage_qps_stats(scenario_result):', file=P)
print(
' for stats in scenario_result["serverStats"] + scenario_result["clientStats"]:',
file=P)
print(' if "coreStats" in stats:', file=P)
print(
' # Get rid of the "coreStats" element and replace it by statistics',
file=P)
print(' # that correspond to columns in the bigquery schema.', file=P)
print(' core_stats = stats["coreStats"]', file=P)
print(' del stats["coreStats"]', file=P)
for counter in inst_map['Counter']:
print >> P, ' stats["core_%s"] = massage_qps_stats_helpers.counter(core_stats, "%s")' % (
counter.name, counter.name)
print(
' stats["core_%s"] = massage_qps_stats_helpers.counter(core_stats, "%s")'
% (counter.name, counter.name),
file=P)
for i, histogram in enumerate(inst_map['Histogram']):
print >> P, ' h = massage_qps_stats_helpers.histogram(core_stats, "%s")' % histogram.name
print >> P, ' stats["core_%s"] = ",".join("%%f" %% x for x in h.buckets)' % histogram.name
print >> P, ' stats["core_%s_bkts"] = ",".join("%%f" %% x for x in h.boundaries)' % histogram.name
print(
' h = massage_qps_stats_helpers.histogram(core_stats, "%s")' %
histogram.name,
file=P)
print(
' stats["core_%s"] = ",".join("%%f" %% x for x in h.buckets)' %
histogram.name,
file=P)
print(
' stats["core_%s_bkts"] = ",".join("%%f" %% x for x in h.boundaries)'
% histogram.name,
file=P)
for pctl in RECORD_EXPLICIT_PERCENTILES:
print >> P, ' stats["core_%s_%dp"] = massage_qps_stats_helpers.percentile(h.buckets, %d, h.boundaries)' % (
histogram.name, pctl, pctl)
print(
' stats["core_%s_%dp"] = massage_qps_stats_helpers.percentile(h.buckets, %d, h.boundaries)'
% (histogram.name, pctl, pctl),
file=P)
with open('src/core/lib/debug/stats_data_bq_schema.sql', 'w') as S:
columns = []
for counter in inst_map['Counter']:
columns.append(('%s_per_iteration' % counter.name, 'FLOAT'))
print >> S, ',\n'.join('%s:%s' % x for x in columns)
print(',\n'.join('%s:%s' % x for x in columns), file=S)

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -39,6 +39,6 @@ for line in sys.stdin:
else:
outstanding[m.group(2)].remove(m.group(3))
for obj, remaining in outstanding.items():
for obj, remaining in list(outstanding.items()):
if remaining:
print 'LEAKED: %s %r' % (obj, remaining)
print(('LEAKED: %s %r' % (obj, remaining)))

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
#
# Copyright 2017 gRPC authors.
#
@ -44,4 +44,4 @@ for line in data:
assert (err in errs)
errs.remove(err)
print "leaked:", errs
print(("leaked:", errs))

@ -41,13 +41,13 @@ for target_dir in _TARGET_DIRS:
for root, dirs, filenames in os.walk(target_dir):
if 'README.md' not in filenames:
if not printed_banner:
print 'Missing README.md'
print '================='
print('Missing README.md')
print('=================')
printed_banner = True
print root
print(root)
errors += 1
if printed_banner:
print
print()
printed_banner = False
for target_dir in _TARGET_DIRS:
for root, dirs, filenames in os.walk(target_dir):
@ -59,10 +59,10 @@ for target_dir in _TARGET_DIRS:
contents = f.read()
if '\\file' not in contents:
if not printed_banner:
print 'Missing \\file comment'
print '======================'
print('Missing \\file comment')
print('======================')
printed_banner = True
print path
print(path)
errors += 1
assert errors == 0, 'error count = %d' % errors

@ -140,7 +140,7 @@ RE_LICENSE = dict(
(k, r'\n'.join(LICENSE_PREFIX_RE[k] +
(RE_YEAR if re.search(RE_YEAR, line) else re.escape(line))
for line in LICENSE_NOTICE))
for k, v in LICENSE_PREFIX_RE.items())
for k, v in list(LICENSE_PREFIX_RE.items()))
YEAR = datetime.datetime.now().year
@ -168,7 +168,7 @@ LICENSE_TEXT = dict(
(k,
join_license_text(LICENSE_PREFIX_TEXT[k][0], LICENSE_PREFIX_TEXT[k][1],
LICENSE_PREFIX_TEXT[k][2], LICENSE_NOTICE))
for k, v in LICENSE_PREFIX_TEXT.items())
for k, v in list(LICENSE_PREFIX_TEXT.items()))
if args.precommit:
FILE_LIST_COMMAND = 'git status -z | grep -Poz \'(?<=^[MARC][MARCD ] )[^\s]+\''
@ -195,7 +195,7 @@ def log(cond, why, filename):
if not cond:
return
if args.output == 'details':
print('%s: %s' % (why, filename))
print(('%s: %s' % (why, filename)))
else:
print(filename)

@ -66,15 +66,15 @@ class GuardValidator(object):
if c_core_header else '#endif // {2}')
if not match_txt:
print(
invalid_guards_msg_template.format(fpath, regexp.pattern,
build_valid_guard(fpath)))
(invalid_guards_msg_template.format(fpath, regexp.pattern,
build_valid_guard(fpath))))
return fcontents
print(('{}: Wrong preprocessor guards (RE {}):'
'\n\tFound {}, expected {}').format(fpath, regexp.pattern,
match_txt, correct))
print((('{}: Wrong preprocessor guards (RE {}):'
'\n\tFound {}, expected {}').format(fpath, regexp.pattern,
match_txt, correct)))
if fix:
print('Fixing {}...\n'.format(fpath))
print(('Fixing {}...\n'.format(fpath)))
fixed_fcontents = re.sub(match_txt, correct, fcontents)
if fixed_fcontents:
self.failed = False
@ -91,7 +91,7 @@ class GuardValidator(object):
match = self.ifndef_re.search(fcontents)
if not match:
print('something drastically wrong with: %s' % fpath)
print(('something drastically wrong with: %s' % fpath))
return False # failed
if match.lastindex is None:
# No ifndef. Request manual addition with hints

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# Copyright 2015 gRPC authors.
#

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# Copyright 2016 gRPC authors.
#

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# Copyright 2016 gRPC authors.
#
@ -94,13 +94,13 @@ def protobuf_submodule_commit_hash():
os.chdir(GRPC_PROTOBUF_SUBMODULE_ROOT)
output = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
os.chdir(cwd)
return output.splitlines()[0].strip()
return output.decode("ascii").splitlines()[0].strip()
def bazel_query(query):
print('Running "bazel query %s"' % query)
output = subprocess.check_output([BAZEL_DEPS, query])
return output.splitlines()
return output.decode("ascii").splitlines()
def get_deps():
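
Note: subprocess.check_output() returns bytes on Python 3, hence the immediate .decode("ascii") before splitlines()/strip(). An equivalent option, shown only as an illustration, is to ask subprocess for text output directly:

    import subprocess

    # As in the patch: decode the bytes immediately.
    out = subprocess.check_output(['git', 'rev-parse', 'HEAD'])
    head = out.decode('ascii').splitlines()[0].strip()

    # Alternative: request str output up front.
    head = subprocess.check_output(['git', 'rev-parse', 'HEAD'],
                                   universal_newlines=True).strip()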

@ -1,93 +0,0 @@
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import shutil
import subprocess
parser = argparse.ArgumentParser(
description='Submit the package to a PyPI repository.')
parser.add_argument('--repository',
'-r',
metavar='r',
type=str,
default='pypi',
help='The repository to push the package to. '
'Ensure the value appears in your .pypirc file. '
'Defaults to "pypi".')
parser.add_argument('--identity',
'-i',
metavar='i',
type=str,
help='GPG identity to sign the files with.')
parser.add_argument(
'--username',
'-u',
metavar='u',
type=str,
help='Username to authenticate with the repository. Not needed if you have '
'configured your .pypirc to include your username.')
parser.add_argument(
'--password',
'-p',
metavar='p',
type=str,
help='Password to authenticate with the repository. Not needed if you have '
'configured your .pypirc to include your password.')
parser.add_argument(
'--bdist',
'-b',
action='store_true',
help='Generate a binary distribution (wheel) for the current OS.')
parser.add_argument(
'--dist-args',
type=str,
help='Additional arguments to pass to the *dist setup.py command.')
args = parser.parse_args()
# Move to the root directory of Python GRPC.
pkgdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../../')
# Remove previous distributions; they somehow confuse twine.
try:
shutil.rmtree(os.path.join(pkgdir, 'dist/'))
except:
pass
# Build the Cython C files
build_env = os.environ.copy()
build_env['GRPC_PYTHON_BUILD_WITH_CYTHON'] = "1"
cmd = ['python', 'setup.py', 'build_ext', '--inplace']
subprocess.call(cmd, cwd=pkgdir, env=build_env)
# Make the push.
if args.bdist:
cmd = ['python', 'setup.py', 'bdist_wheel']
else:
cmd = ['python', 'setup.py', 'sdist']
if args.dist_args:
cmd += args.dist_args.split()
subprocess.call(cmd, cwd=pkgdir)
cmd = ['twine', 'upload', '-r', args.repository]
if args.identity is not None:
cmd.extend(['-i', args.identity])
if args.username is not None:
cmd.extend(['-u', args.username])
if args.password is not None:
cmd.extend(['-p', args.password])
cmd.append('dist/*')
subprocess.call(cmd, cwd=pkgdir)

@ -59,10 +59,10 @@ if args.only_changed:
['git', 'diff', 'origin/master', 'HEAD', '--name-only'])
for line in output.decode('ascii').splitlines(False):
if line in orig_files:
print("check: %s" % line)
print(("check: %s" % line))
actual_files.append(line)
else:
print("skip: %s - not in the build" % line)
print(("skip: %s - not in the build" % line))
args.files = actual_files
jobs = []

@ -1,13 +0,0 @@
runtime: python27
api_version: 1
threadsafe: true
service: github-stats-tracking
handlers:
- url: /.*
script: main.app
libraries:
- name: ssl
version: latest

@ -1,19 +0,0 @@
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# appengine_config.py
from google.appengine.ext import vendor
# Add any libraries install in the "lib" folder.
vendor.add('lib')

@ -1,4 +0,0 @@
cron:
- description: "daily github stats tracking job"
url: /daily
schedule: every 24 hours

@ -1,96 +0,0 @@
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from datetime import timedelta
from time import time
from github import Github
from github import Label
from google.cloud import bigquery
ACCESS_TOKEN = ""
def get_stats_from_github():
# Please set the access token properly before deploying.
assert ACCESS_TOKEN
g = Github(ACCESS_TOKEN)
print g.rate_limiting
repo = g.get_repo('grpc/grpc')
LABEL_LANG = set(label for label in repo.get_labels()
if label.name.split('/')[0] == 'lang')
LABEL_KIND_BUG = repo.get_label('kind/bug')
LABEL_PRIORITY_P0 = repo.get_label('priority/P0')
LABEL_PRIORITY_P1 = repo.get_label('priority/P1')
LABEL_PRIORITY_P2 = repo.get_label('priority/P2')
def is_untriaged(issue):
key_labels = set()
for label in issue.labels:
label_kind = label.name.split('/')[0]
if label_kind in ('lang', 'kind', 'priority'):
key_labels.add(label_kind)
return len(key_labels) < 3
untriaged_open_issues = [
issue for issue in repo.get_issues(state='open')
if issue.pull_request is None and is_untriaged(issue)
]
total_bugs = [
issue for issue in repo.get_issues(state='all', labels=[LABEL_KIND_BUG])
if issue.pull_request is None
]
lang_to_stats = {}
for lang in LABEL_LANG:
lang_bugs = filter(lambda bug: lang in bug.labels, total_bugs)
closed_bugs = filter(lambda bug: bug.state == 'closed', lang_bugs)
open_bugs = filter(lambda bug: bug.state == 'open', lang_bugs)
open_p0_bugs = filter(lambda bug: LABEL_PRIORITY_P0 in bug.labels,
open_bugs)
open_p1_bugs = filter(lambda bug: LABEL_PRIORITY_P1 in bug.labels,
open_bugs)
open_p2_bugs = filter(lambda bug: LABEL_PRIORITY_P2 in bug.labels,
open_bugs)
lang_to_stats[lang] = [
len(lang_bugs),
len(closed_bugs),
len(open_bugs),
len(open_p0_bugs),
len(open_p1_bugs),
len(open_p2_bugs)
]
return len(untriaged_open_issues), lang_to_stats
def insert_stats_to_db(untriaged_open_issues, lang_to_stats):
timestamp = time()
client = bigquery.Client()
dataset_ref = client.dataset('github_issues')
table_ref = dataset_ref.table('untriaged_issues')
table = client.get_table(table_ref)
errors = client.insert_rows(table, [(timestamp, untriaged_open_issues)])
table_ref = dataset_ref.table('bug_stats')
table = client.get_table(table_ref)
rows = []
for lang, stats in lang_to_stats.iteritems():
rows.append((timestamp, lang.name[5:]) + tuple(stats))
errors = client.insert_rows(table, rows)
def fetch():
untriaged_open_issues, lang_to_stats = get_stats_from_github()
insert_stats_to_db(untriaged_open_issues, lang_to_stats)

@ -1,28 +0,0 @@
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fetch_data import fetch
import webapp2
class DailyCron(webapp2.RequestHandler):
def get(self):
fetch()
self.response.status = 204
app = webapp2.WSGIApplication([
('/daily', DailyCron),
], debug=True)

@ -1,4 +1,3 @@
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");

@ -40,3 +40,5 @@ fi
export DOCKERHUB_ORGANIZATION=grpctesting
git submodule update --init
python3 -m pip install six

@ -15,7 +15,14 @@
@rem make sure msys binaries are preferred over cygwin binaries
@rem set path to python 2.7
@rem set path to CMake
set PATH=C:\tools\msys64\usr\bin;C:\Python27;C:\Program Files\CMake\bin;%PATH%
set PATH=C:\tools\msys64\usr\bin;C:\Python37;C:\Python27;C:\Program Files\CMake\bin;%PATH%
dir C:\Python37\
mklink C:\Python37\python3.exe C:\Python37\python.exe
python --version
python3 --version
@rem If this is a PR using RUN_TESTS_FLAGS var, then add flags to filter tests
if defined KOKORO_GITHUB_PULL_REQUEST_NUMBER if defined RUN_TESTS_FLAGS (
@ -29,8 +36,6 @@ netsh interface ip set dns "Local Area Connection 8" static 169.254.169.254 prim
netsh interface ip add dnsservers "Local Area Connection 8" 8.8.8.8 index=2
netsh interface ip add dnsservers "Local Area Connection 8" 8.8.4.4 index=3
@rem Needed for big_query_utils
python -m pip install google-api-python-client || goto :error
@rem C# prerequisites: Install dotnet SDK
powershell -File src\csharp\install_dotnet_sdk.ps1 || goto :error
@ -46,6 +51,9 @@ If "%PREPARE_BUILD_INSTALL_DEPS_PYTHON%" == "true" (
powershell -File tools\internal_ci\helper_scripts\install_python_interpreters.ps1 || goto :error
)
@rem Needed for big_query_utils
python -m pip install google-api-python-client || goto :error
git submodule update --init || goto :error
goto :EOF

@ -17,13 +17,13 @@ set -ex
#install ubuntu pre-requisites
sudo apt update
sudo apt install -y build-essential autoconf libtool pkg-config cmake python python-pip clang
sudo apt install -y build-essential autoconf libtool pkg-config cmake python3 python3-pip clang
sudo pip install six
cd grpc
# without port server running, many tests will fail
python tools/run_tests/start_port_server.py
python3 tools/run_tests/start_port_server.py
# build with bazel
tools/bazel build --config=opt //test/...

@ -17,7 +17,7 @@ set -ex
# install pre-requisites for gRPC C core build
sudo apt update
sudo apt install -y build-essential autoconf libtool pkg-config cmake python python-pip clang
sudo apt install -y build-essential autoconf libtool pkg-config cmake python3 python3-pip clang
sudo pip install six
# install python3.6 and pip

@ -41,7 +41,7 @@ if [ "$UPLOAD_TEST_RESULTS" != "" ]
then
# Sleep to let ResultStore finish writing results before querying
sleep 60
python ./tools/run_tests/python_utils/upload_rbe_results.py
python3 ./tools/run_tests/python_utils/upload_rbe_results.py
fi
if [ "$FAILED" != "" ]

@ -20,19 +20,19 @@ cd $(dirname $0)/../../..
source tools/internal_ci/helper_scripts/prepare_build_linux_rc
python tools/run_tests/run_tests.py \
python3 tools/run_tests/run_tests.py \
-l c c++ -x coverage_cpp/sponge_log.xml \
--use_docker -t -c gcov -j 2 || FAILED="true"
python tools/run_tests/run_tests.py \
python3 tools/run_tests/run_tests.py \
-l python -x coverage_python/sponge_log.xml \
--use_docker -t -c gcov -j 2 || FAILED="true"
python tools/run_tests/run_tests.py \
python3 tools/run_tests/run_tests.py \
-l ruby -x coverage_ruby/sponge_log.xml \
--use_docker -t -c gcov -j 2 || FAILED="true"
python tools/run_tests/run_tests.py \
python3 tools/run_tests/run_tests.py \
-l php -x coverage_php/sponge_log.xml \
--use_docker -t -c gcov -j 2 || FAILED="true"

@ -100,7 +100,7 @@ run_test() {
# https://github.com/grpc/grpc/tree/master/tools/run_tests/xds_k8s_test_driver#basic-usage
local test_name="${1:?Usage: run_test test_name}"
set -x
python -m "tests.${test_name}" \
python3 -m "tests.${test_name}" \
--flagfile="${TEST_DRIVER_FLAGFILE}" \
--kube_context="${KUBE_CONTEXT}" \
--server_image="${SERVER_IMAGE_NAME}:${GIT_COMMIT}" \

@ -226,9 +226,9 @@ test_driver_pip_install() {
source "${venv_dir}/bin/activate"
fi
pip install -r requirements.txt
python3 -m pip install -r requirements.txt
echo "Installed Python packages:"
pip list
python3 -m pip list
}
#######################################
@ -254,7 +254,7 @@ test_driver_compile_protos() {
)
echo "Generate python code from grpc.testing protos: ${protos[*]}"
cd "${TEST_DRIVER_REPO_DIR}"
python -m grpc_tools.protoc \
python3 -m grpc_tools.protoc \
--proto_path=. \
--python_out="${TEST_DRIVER_FULL_DIR}" \
--grpc_python_out="${TEST_DRIVER_FULL_DIR}" \
@ -345,12 +345,12 @@ kokoro_setup_python_virtual_environment() {
pyenv virtualenv --no-pip "${py_latest_patch}" k8s_xds_test_runner
pyenv local k8s_xds_test_runner
pyenv activate k8s_xds_test_runner
python -m ensurepip
python3 -m ensurepip
# pip is fixed to 21.0.1 due to issue https://github.com/pypa/pip/pull/9835
# internal details: b/186411224
# TODO(sergiitk): revert https://github.com/grpc/grpc/pull/26087 when 21.1.1 released
python -m pip install -U pip==21.0.1
pip --version
python3 -m pip install -U pip==21.0.1
python3 -m pip --version
}
#######################################

@ -95,7 +95,7 @@ run_test() {
# Test driver usage:
# https://github.com/grpc/grpc/tree/master/tools/run_tests/xds_k8s_test_driver#basic-usage
local test_name="${1:?Usage: run_test test_name}"
python -m "tests.${test_name}" \
python3 -m "tests.${test_name}" \
--flagfile="${TEST_DRIVER_FLAGFILE}" \
--kube_context="${KUBE_CONTEXT}" \
--secondary_kube_context="${SECONDARY_KUBE_CONTEXT}" \

@ -96,7 +96,7 @@ run_test() {
# Test driver usage:
# https://github.com/grpc/grpc/tree/master/tools/run_tests/xds_k8s_test_driver#basic-usage
local test_name="${1:?Usage: run_test test_name}"
python -m "tests.${test_name}" \
python3 -m "tests.${test_name}" \
--flagfile="${TEST_DRIVER_FLAGFILE}" \
--kube_context="${KUBE_CONTEXT}" \
--secondary_kube_context="${SECONDARY_KUBE_CONTEXT}" \

@ -115,7 +115,7 @@ run_test() {
# https://github.com/grpc/grpc/tree/master/tools/run_tests/xds_k8s_test_driver#basic-usage
local test_name="${1:?Usage: run_test test_name}"
set -x
python -m "tests.${test_name}" \
python3 -m "tests.${test_name}" \
--flagfile="${TEST_DRIVER_FLAGFILE}" \
--kube_context="${KUBE_CONTEXT}" \
--server_image="${SERVER_IMAGE_NAME}:${GIT_COMMIT}" \

@ -85,7 +85,7 @@ run_test() {
# https://github.com/grpc/grpc/tree/master/tools/run_tests/xds_k8s_test_driver#basic-usage
local test_name="${1:?Usage: run_test test_name}"
set -x
python -m "tests.${test_name}" \
python3 -m "tests.${test_name}" \
--flagfile="${TEST_DRIVER_FLAGFILE}" \
--kube_context="${KUBE_CONTEXT}" \
--client_image="${CLIENT_IMAGE_NAME}:${GIT_COMMIT}" \

@ -95,7 +95,7 @@ run_test() {
# https://github.com/grpc/grpc/tree/master/tools/run_tests/xds_k8s_test_driver#basic-usage
local test_name="${1:?Usage: run_test test_name}"
set -x
python -m "tests.${test_name}" \
python3 -m "tests.${test_name}" \
--flagfile="${TEST_DRIVER_FLAGFILE}" \
--kube_context="${KUBE_CONTEXT}" \
--client_image="${CLIENT_IMAGE_NAME}:${GIT_COMMIT}" \

@ -19,6 +19,6 @@ cd $(dirname $0)/../../..
./tools/run_tests/start_port_server.py || true
CPUS=`python -c 'import multiprocessing; print multiprocessing.cpu_count()'`
CPUS=`python3 -c 'import multiprocessing; print(multiprocessing.cpu_count())'`
tools/run_tests/run_microbenchmark.py --collect summary --bigquery_upload

@ -48,7 +48,7 @@ if [ "$UPLOAD_TEST_RESULTS" != "" ]
then
# Sleep to let ResultStore finish writing results before querying
sleep 60
PYTHONHTTPSVERIFY=0 python ./tools/run_tests/python_utils/upload_rbe_results.py
PYTHONHTTPSVERIFY=0 python3 ./tools/run_tests/python_utils/upload_rbe_results.py
fi
if [ "$FAILED" != "" ]

@ -30,7 +30,7 @@ set BAZEL_EXITCODE=%errorlevel%
if not "%UPLOAD_TEST_RESULTS%"=="" (
@rem Sleep to let ResultStore finish writing results before querying
timeout /t 60 /nobreak
python ./tools/run_tests/python_utils/upload_rbe_results.py
python3 ./tools/run_tests/python_utils/upload_rbe_results.py
)
exit /b %BAZEL_EXITCODE%

@ -20,7 +20,7 @@ If Not "%RUN_TESTS_FLAGS%"=="%RUN_TESTS_FLAGS:python=%" (
)
call tools/internal_ci/helper_scripts/prepare_build_windows.bat || exit /b 1
python tools/run_tests/run_tests_matrix.py %RUN_TESTS_FLAGS%
python3 tools/run_tests/run_tests_matrix.py %RUN_TESTS_FLAGS%
set RUNTESTS_EXITCODE=%errorlevel%
bash tools/internal_ci/helper_scripts/delete_nonartifacts.sh

@ -1,4 +1,3 @@
#!/usr/bin/env python2.7
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");

@ -35,13 +35,13 @@ import dockerjob
import jobset
_IMAGE_BUILDER = 'tools/run_tests/dockerize/build_interop_image.sh'
_LANGUAGES = client_matrix.LANG_RUNTIME_MATRIX.keys()
_LANGUAGES = list(client_matrix.LANG_RUNTIME_MATRIX.keys())
# All gRPC release tags, flattened, deduped and sorted.
_RELEASES = sorted(
list(
set(release
for release_dict in client_matrix.LANG_RELEASE_MATRIX.values()
for release in release_dict.keys())))
for release_dict in list(client_matrix.LANG_RELEASE_MATRIX.values())
for release in list(release_dict.keys()))))
# Destination directory inside docker image to keep extra info from build time.
_BUILD_INFO = '/var/local/build_info'

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -40,13 +40,13 @@ import upload_test_results
_TEST_TIMEOUT_SECONDS = 60
_PULL_IMAGE_TIMEOUT_SECONDS = 15 * 60
_MAX_PARALLEL_DOWNLOADS = 6
_LANGUAGES = client_matrix.LANG_RUNTIME_MATRIX.keys()
_LANGUAGES = list(client_matrix.LANG_RUNTIME_MATRIX.keys())
# All gRPC release tags, flattened, deduped and sorted.
_RELEASES = sorted(
list(
set(release
for release_dict in client_matrix.LANG_RELEASE_MATRIX.values()
for release in release_dict.keys())))
for release_dict in list(client_matrix.LANG_RELEASE_MATRIX.values())
for release in list(release_dict.keys()))))
argp = argparse.ArgumentParser(description='Run interop tests.')
argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
@ -117,7 +117,7 @@ def _get_test_images_for_lang(lang, release_arg, image_path_prefix):
tag)
image_tuple = (tag, image_name)
if not images.has_key(runtime):
if runtime not in images:
images[runtime] = []
images[runtime].append(image_tuple)
return images
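
Note: dict.has_key() no longer exists in Python 3; membership tests use the in operator, which also works on Python 2. Minimal illustration only (the key and values below are hypothetical, not taken from the patch):

    images = {}
    runtime = 'go1.11'  # hypothetical key, for illustration

    # Python 2 only:  if not images.has_key(runtime): ...
    if runtime not in images:
        images[runtime] = []
    images[runtime].append(('v1.41.0', 'example-image'))  # hypothetical tuple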

@ -158,13 +158,13 @@ def expand_directives(root, directives):
if directive.who not in globs[glob]:
globs[glob].append(directive.who)
# expand owners for intersecting globs
sorted_globs = sorted(globs.keys(),
sorted_globs = sorted(list(globs.keys()),
key=lambda g: len(git_glob(full_dir(root, g))),
reverse=True)
out_globs = collections.OrderedDict()
for glob_add in sorted_globs:
who_add = globs[glob_add]
pre_items = [i for i in out_globs.items()]
pre_items = [i for i in list(out_globs.items())]
out_globs[glob_add] = who_add.copy()
for glob_have, who_have in pre_items:
files_add = git_glob(full_dir(root, glob_add))
@ -186,8 +186,8 @@ def add_parent_to_globs(parent, globs, globs_dir):
for owners in owners_data:
if owners.dir == parent:
owners_globs = expand_directives(owners.dir, owners.directives)
for oglob, oglob_who in owners_globs.items():
for gglob, gglob_who in globs.items():
for oglob, oglob_who in list(owners_globs.items()):
for gglob, gglob_who in list(globs.items()):
files_parent = git_glob(full_dir(owners.dir, oglob))
files_child = git_glob(full_dir(globs_dir, gglob))
intersect = files_parent.intersection(files_child)
@ -220,7 +220,7 @@ with open(args.out, 'w') as out:
continue
globs = expand_directives(head.dir, head.directives)
add_parent_to_globs(head.parent, globs, head.dir)
for glob, owners in globs.items():
for glob, owners in list(globs.items()):
skip = False
for glob1, owners1, dir1 in reversed(written_globs):
files = git_glob(full_dir(head.dir, glob))

@ -78,7 +78,7 @@ _build('bloat_diff_new')
if args.diff_base:
where_am_i = subprocess.check_output(
['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode().strip()
# checkout the diff base (="old")
subprocess.check_call(['git', 'checkout', args.diff_base])
subprocess.check_call(['git', 'submodule', 'update'])

@ -96,7 +96,7 @@ for frameworks in [False, True]:
if args.diff_base:
old = 'old'
where_am_i = subprocess.check_output(
['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode().strip()
subprocess.check_call(['git', 'checkout', '--', '.'])
subprocess.check_call(['git', 'checkout', args.diff_base])
subprocess.check_call(['git', 'submodule', 'update', '--force'])
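Note: subprocess.check_output() returns bytes on Python 3, so its result is decoded before str operations like strip() or comparison; this is the pattern behind every .decode() added in this PR. Sketch (assumes it runs inside a git checkout):

import subprocess

branch = subprocess.check_output(
    ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode().strip()   # bytes -> str before strip()
print(branch)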

@ -95,11 +95,11 @@ def main():
filename = sys.argv[1]
core_size, objc_size, boringssl_size, protobuf_size, total_size = parse_link_map(
filename)
print('Core size:{:,}'.format(core_size))
print('ObjC size:{:,}'.format(objc_size))
print('BoringSSL size:{:,}'.format(boringssl_size))
print('Protobuf size:{:,}\n'.format(protobuf_size))
print('Total size:{:,}'.format(total_size))
print(('Core size:{:,}'.format(core_size)))
print(('ObjC size:{:,}'.format(objc_size)))
print(('BoringSSL size:{:,}'.format(boringssl_size)))
print(('Protobuf size:{:,}\n'.format(protobuf_size)))
print(('Total size:{:,}'.format(total_size)))
if __name__ == "__main__":

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -137,7 +137,7 @@ class CallStack(object):
self.signature = initial_call_stack_builder.signature
self.lines = initial_call_stack_builder.lines
for line in self.lines:
for key, val in line.times.items():
for key, val in list(line.times.items()):
line.times[key] = [val]
def add(self, call_stack_builder):
@ -146,13 +146,13 @@ class CallStack(object):
assert len(self.lines) == len(call_stack_builder.lines)
for lsum, line in zip(self.lines, call_stack_builder.lines):
assert lsum.tag == line.tag
assert lsum.times.keys() == line.times.keys()
for k, lst in lsum.times.items():
assert list(lsum.times.keys()) == list(line.times.keys())
for k, lst in list(lsum.times.items()):
lst.append(line.times[k])
def finish(self):
for line in self.lines:
for lst in line.times.values():
for lst in list(line.times.values()):
lst.sort()
@ -175,7 +175,7 @@ with open(args.source) as f:
del builder[thd]
time_taken = time.time() - start
call_stacks = sorted(call_stacks.values(),
call_stacks = sorted(list(call_stacks.values()),
key=lambda cs: cs.count,
reverse=True)
total_stacks = 0
@ -260,7 +260,7 @@ for cs in call_stacks:
out.write(BANNER[args.fmt] % {
'count': cs.count,
})
header, _ = zip(*FORMAT)
header, _ = list(zip(*FORMAT))
table = []
for line in cs.lines:
fields = []
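Note: zip(), map() and filter() return one-shot iterators in Python 3, so 2to3 wraps them in list() wherever the result is stored or reused; a second pass over an exhausted iterator silently yields nothing. Sketch with a made-up FORMAT column spec:

FORMAT = [('TAG', str), ('COUNT', int)]    # hypothetical column spec
header, _ = list(zip(*FORMAT))             # header == ('TAG', 'COUNT')

pairs = zip('ab', [1, 2])                  # lazy iterator in Python 3
print(list(pairs))                         # [('a', 1), ('b', 2)]
print(list(pairs))                         # []  -- already exhausted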

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
#
# Copyright 2017 gRPC authors.
#
@ -69,7 +69,7 @@ def build(name, jobs):
subprocess.check_call(['git', 'submodule', 'update'])
try:
subprocess.check_call(_make_cmd(jobs))
except subprocess.CalledProcessError, e:
except subprocess.CalledProcessError as e:
subprocess.check_call(['make', 'clean'])
subprocess.check_call(_make_cmd(jobs))
os.rename('bins', 'qps_diff_%s' % name)
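Note: the `except ExceptionType, name` comma form is Python 2-only syntax; Python 3 requires `as`, which is what the hunk above switches to. Minimal sketch (uses the POSIX `false` command as a stand-in for the make invocation):

import subprocess

try:
    subprocess.check_call(['false'])
except subprocess.CalledProcessError as e:     # the comma form is a SyntaxError in Python 3
    print('command failed with exit code', e.returncode)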
@ -93,11 +93,11 @@ def _load_qps(fname):
try:
with open(fname) as f:
return json.loads(f.read())['qps']
except IOError, e:
print("IOError occurred reading file: %s" % fname)
except IOError as e:
print(("IOError occurred reading file: %s" % fname))
return None
except ValueError, e:
print("ValueError occurred reading file: %s" % fname)
except ValueError as e:
print(("ValueError occurred reading file: %s" % fname))
return None
@ -128,8 +128,8 @@ def diff(scenarios, loops, old, new):
rows = []
for sn in scenarios:
mdn_diff = abs(_median(new_data[sn]) - _median(old_data[sn]))
print('%s: %s=%r %s=%r mdn_diff=%r' %
(sn, new, new_data[sn], old, old_data[sn], mdn_diff))
print(('%s: %s=%r %s=%r mdn_diff=%r' %
(sn, new, new_data[sn], old, old_data[sn], mdn_diff)))
s = bm_speedup.speedup(new_data[sn], old_data[sn], 10e-5)
if abs(s) > 3 and mdn_diff > 0.5:
rows.append([sn, '%+d%%' % s])
@ -145,7 +145,7 @@ def main(args):
if args.diff_base:
where_am_i = subprocess.check_output(
['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
['git', 'rev-parse', '--abbrev-ref', 'HEAD']).decode().strip()
subprocess.check_call(['git', 'checkout', args.diff_base])
try:
build('old', args.jobs)
@ -162,7 +162,7 @@ def main(args):
text = '[qps] Performance differences noted:\n%s' % diff_output
else:
text = '[qps] No significant performance differences'
print('%s' % text)
print(('%s' % text))
check_on_pr.check_on_pr('QPS', '```\n%s\n```' % text)

@ -90,7 +90,7 @@ def get_commit_log(prevRelLabel, relBranch):
"git", "log", "--pretty=oneline", "--committer=GitHub",
"%s..%s" % (prevRelLabel, relBranch)
]
print("Running ", " ".join(glg_command))
print(("Running ", " ".join(glg_command)))
return subprocess.check_output(glg_command)
@ -98,11 +98,13 @@ def get_pr_data(pr_num):
"""Get the PR data from github. Return 'error' on exception"""
try:
from urllib2 import HTTPError
from urllib2 import Request
from urllib2 import urlopen
from urllib.error import HTTPError
from urllib.request import Request
from urllib.request import urlopen
except ImportError:
import urllib
import urllib.error
import urllib.parse
import urllib.request
from urllib.request import HTTPError
from urllib.request import Request
from urllib.request import urlopen
@ -116,7 +118,7 @@ def get_pr_data(pr_num):
except HTTPError as e:
response = json.loads(e.fp.read().decode('utf-8'))
if 'message' in response:
print(response['message'])
print((response['message']))
response = "error"
return response
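Note: Python 3 split urllib2 into urllib.request and urllib.error, so the import fallback above now resolves to the new module paths in both branches. A minimal Python-3-only sketch of the same request pattern (the URL is illustrative, not the one used by the script):

import json
from urllib.error import HTTPError
from urllib.request import Request, urlopen

req = Request('https://api.github.com/repos/grpc/grpc',
              headers={'Accept': 'application/json'})
try:
    data = json.loads(urlopen(req).read().decode('utf-8'))
except HTTPError as e:
    data = 'error'                              # mirrors the script's 'error' sentinel
print(type(data))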
@ -140,10 +142,11 @@ def get_pr_titles(gitLogs):
langs_pr = defaultdict(list)
for pr_num in prlist:
pr_num = str(pr_num)
print("---------- getting data for PR " + pr_num)
print(("---------- getting data for PR " + pr_num))
pr = get_pr_data(pr_num)
if pr == "error":
print("\n***ERROR*** Error in getting data for PR " + pr_num + "\n")
print(
("\n***ERROR*** Error in getting data for PR " + pr_num + "\n"))
error_count += 1
continue
rl_no_found = False
@ -162,7 +165,7 @@ def get_pr_titles(gitLogs):
if not body.endswith("."):
body = body + "."
if not pr["merged_by"]:
print("\n***ERROR***: No merge_by found for PR " + pr_num + "\n")
print(("\n***ERROR***: No merge_by found for PR " + pr_num + "\n"))
error_count += 1
continue
@ -173,17 +176,17 @@ def get_pr_titles(gitLogs):
print(detail)
#if no RL label
if not rl_no_found and not rl_yes_found:
print("Release notes label missing for " + pr_num)
print(("Release notes label missing for " + pr_num))
langs_pr["nolabel"].append(detail)
elif rl_yes_found and not lang_found:
print("Lang label missing for " + pr_num)
print(("Lang label missing for " + pr_num))
langs_pr["nolang"].append(detail)
elif rl_no_found:
print("'Release notes:no' found for " + pr_num)
print(("'Release notes:no' found for " + pr_num))
langs_pr["notinrel"].append(detail)
elif rl_yes_found:
print("'Release notes:yes' found for " + pr_num + " with lang " +
lang)
print(("'Release notes:yes' found for " + pr_num + " with lang " +
lang))
langs_pr["inrel"].append(detail)
langs_pr[lang].append(prline)
@ -360,7 +363,7 @@ def main():
write_draft(langs_pr, file, version, date)
file.truncate()
file.close()
print("\nDraft notes written to " + filename)
print(("\nDraft notes written to " + filename))
filename = os.path.abspath(rel_file)
if os.path.exists(filename):
@ -372,7 +375,7 @@ def main():
write_rel_notes(langs_pr, file, version, name)
file.truncate()
file.close()
print("\nRelease notes written to " + filename)
print(("\nRelease notes written to " + filename))
if error_count > 0:
print("\n\n*** Errors were encountered. See log. *********\n")

@ -38,7 +38,7 @@ def create_docker_jobspec(name,
environ['ARTIFACTS_OUT'] = 'artifacts/%s' % name
docker_args = []
for k, v in environ.items():
for k, v in list(environ.items()):
docker_args += ['-e', '%s=%s' % (k, v)]
docker_env = {
'DOCKERFILE_DIR': dockerfile_dir,

@ -37,7 +37,7 @@ def create_docker_jobspec(name,
environ['RELATIVE_COPY_PATH'] = copy_rel_path
docker_args = []
for k, v in environ.items():
for k, v in list(environ.items()):
docker_args += ['-e', '%s=%s' % (k, v)]
docker_env = {
'DOCKERFILE_DIR': dockerfile_dir,

@ -32,7 +32,7 @@ def create_docker_jobspec(name,
environ['RUN_COMMAND'] = shell_command
docker_args = []
for k, v in environ.items():
for k, v in list(environ.items()):
docker_args += ['-e', '%s=%s' % (k, v)]
docker_env = {
'DOCKERFILE_DIR': dockerfile_dir,

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -341,6 +341,6 @@ def generate_client_referred_to_backend_multiple_balancers():
all_scenarios += generate_client_referred_to_backend_multiple_balancers()
print(yaml.dump({
print((yaml.dump({
'lb_interop_test_scenarios': all_scenarios,
}))
})))

@ -25,7 +25,7 @@ import sys
import time
import uuid
import massage_qps_stats
from . import massage_qps_stats
gcp_utils_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../../gcp/utils'))
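Note: Python 3 removed implicit relative imports (PEP 328), so a bare `import massage_qps_stats` no longer finds a sibling module inside the same package; these modules switch to the explicit `from . import ...` form. Sketch of the idea with a hypothetical package layout:

# mypackage/
#   __init__.py
#   driver.py
#   helpers.py
#
# inside mypackage/driver.py:
from . import helpers          # explicit relative import, required on Python 3
# import helpers               # found the sibling only under Python 2's implicit rules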

@ -33,10 +33,11 @@ import sys
from typing import Any, Dict, Iterable, Mapping, Optional, Type
import uuid
import scenario_config
import scenario_config_exporter
import yaml
from . import scenario_config
from . import scenario_config_exporter
CONFIGURATION_FILE_HEADER_COMMENT = """
# Load test configurations generated from a template by loadtest_config.py.
# See documentation below:

@ -29,9 +29,10 @@ import argparse
import sys
from typing import Any, Dict, Iterable, List, Mapping, Type
import loadtest_config
import yaml
from . import loadtest_config
TEMPLATE_FILE_HEADER_COMMENT = """
# Template generated from load test configurations by loadtest_template.py.
#
@ -61,7 +62,7 @@ def insert_worker(worker: Dict[str, Any], workers: List[Dict[str,
def uniquify_workers(workermap: Dict[str, List[Dict[str, Any]]]) -> None:
"""Name workers if there is more than one for the same map key."""
for workers in workermap.values():
for workers in list(workermap.values()):
if len(workers) <= 1:
continue
for i, worker in enumerate(workers):

@ -14,7 +14,7 @@
# Autogenerated by tools/codegen/core/gen_stats_data.py
import massage_qps_stats_helpers
from . import massage_qps_stats_helpers
def massage_qps_stats(scenario_result):

@ -22,7 +22,7 @@ import tempfile
import time
import uuid
import jobset
from . import jobset
_DEVNULL = open(os.devnull, 'w')
@ -47,7 +47,7 @@ def docker_mapped_port(cid, port, timeout_seconds=15):
try:
output = subprocess.check_output('docker port %s %s' % (cid, port),
stderr=_DEVNULL,
shell=True)
shell=True).decode()
return int(output.split(':', 2)[1])
except subprocess.CalledProcessError as e:
pass
@ -61,7 +61,8 @@ def docker_ip_address(cid, timeout_seconds=15):
while time.time() - started < timeout_seconds:
cmd = 'docker inspect %s' % cid
try:
output = subprocess.check_output(cmd, stderr=_DEVNULL, shell=True)
output = subprocess.check_output(cmd, stderr=_DEVNULL,
shell=True).decode()
json_info = json.loads(output)
assert len(json_info) == 1
out = json_info[0]['NetworkSettings']['IPAddress']
@ -82,7 +83,7 @@ def wait_for_healthy(cid, shortname, timeout_seconds):
output = subprocess.check_output([
'docker', 'inspect', '--format="{{.State.Health.Status}}"', cid
],
stderr=_DEVNULL)
stderr=_DEVNULL).decode()
if output.strip('\n') == 'healthy':
return
except subprocess.CalledProcessError as e:

@ -1,4 +1,3 @@
#!/usr/bin/env python
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -114,7 +113,7 @@ _ALLOWLIST_DICT = {
}
# Regex that combines all keys in _ALLOWLIST_DICT
_ALL_TRIGGERS = "(" + ")|(".join(_ALLOWLIST_DICT.keys()) + ")"
_ALL_TRIGGERS = "(" + ")|(".join(list(_ALLOWLIST_DICT.keys())) + ")"
# Add all triggers to their respective test suites
for trigger, test_suites in six.iteritems(_ALLOWLIST_DICT):
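Note: the remaining six.iteritems() call still works, since six maps it to dict.items() on Python 3; in Python-3-only code the plain .items() view is equivalent, so a later cleanup could drop the six dependency here. Sketch with stand-in allowlist contents:

_ALLOWLIST_DICT = {'src/core/.*': ['core_tests']}     # stand-in contents

for trigger, test_suites in _ALLOWLIST_DICT.items():  # same pairs six.iteritems() would yield
    print(trigger, test_suites)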

@ -44,7 +44,7 @@ def strip_non_ascii_chars(s):
def sanitized_environment(env):
sanitized = {}
for key, value in env.items():
for key, value in list(env.items()):
sanitized[strip_non_ascii_chars(key)] = strip_non_ascii_chars(value)
return sanitized
@ -130,7 +130,9 @@ def message(tag, msg, explanatory_text=None, do_newline=False):
try:
if platform_string() == 'windows' or not sys.stdout.isatty():
if explanatory_text:
logging.info(explanatory_text.decode('utf8'))
if isinstance(explanatory_text, bytes):
explanatory_text = explanatory_text.decode('utf8')
logging.info(explanatory_text)
logging.info('%s: %s', tag, msg)
else:
sys.stdout.write(
@ -221,7 +223,7 @@ class JobSpec(object):
def __str__(self):
return '%s: %s %s' % (self.shortname, ' '.join(
'%s=%s' % kv for kv in self.environ.items()), ' '.join(
'%s=%s' % kv for kv in list(self.environ.items())), ' '.join(
self.cmdline))
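Note: the message() change above lets the helper accept either bytes (e.g. raw subprocess output) or str, decoding only when a bytes payload arrives, instead of unconditionally calling .decode(). A minimal sketch of that pattern (log_text is an illustrative stand-in for the jobset helper):

import logging

logging.basicConfig(level=logging.INFO)

def log_text(explanatory_text):
    if isinstance(explanatory_text, bytes):            # decode only when needed
        explanatory_text = explanatory_text.decode('utf8')
    logging.info(explanatory_text)

log_text(b'raw subprocess output')
log_text('already a str')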

@ -1,4 +1,4 @@
#!/usr/bin/env python2.7
#!/usr/bin/env python3
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -193,7 +193,7 @@ class Handler(BaseHTTPRequestHandler):
now = time.time()
out = yaml.dump({
'pool': pool,
'in_use': dict((k, now - v) for k, v in in_use.items())
'in_use': dict((k, now - v) for k, v in list(in_use.items()))
})
mu.release()
self.wfile.write(out.encode('ascii'))

@ -30,13 +30,15 @@ import six
def _filter_msg(msg, output_format):
"""Filters out nonprintable and illegal characters from the message."""
if output_format in ['XML', 'HTML']:
if isinstance(msg, bytes):
decoded_msg = msg.decode('UTF-8', 'ignore')
else:
decoded_msg = msg
# keep whitespaces but remove formfeed and vertical tab characters
# that make XML report unparsable.
if isinstance(msg, bytes):
msg = msg.decode('UTF-8', 'ignore')
filtered_msg = ''.join(
filter(lambda x: x in string.printable and x != '\f' and x != '\v',
msg))
decoded_msg))
if output_format == 'HTML':
filtered_msg = filtered_msg.replace('"', '&quot;')
return filtered_msg
@ -138,7 +140,7 @@ def render_interop_html_report(client_langs, server_langs, test_cases,
'Mako template is not installed. Skipping HTML report generation.')
return
except IOError as e:
print('Failed to find the template %s: %s' % (template_file, e))
print(('Failed to find the template %s: %s' % (template_file, e)))
return
sorted_test_cases = sorted(test_cases)
@ -171,7 +173,7 @@ def render_interop_html_report(client_langs, server_langs, test_cases,
with open(html_file_path, 'w') as output_file:
mytemplate.render_context(Context(output_file, **args))
except:
print(exceptions.text_error_template().render())
print((exceptions.text_error_template().render()))
raise

@ -1,4 +1,3 @@
#!/usr/bin/env python3
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -51,7 +50,7 @@ def start_port_server():
sys.executable, # use the same python binary as this process
os.path.abspath('tools/run_tests/python_utils/port_server.py'),
'dump_version'
]))
]).decode())
logging.info('my port server is version %d', current_version)
running = (version >= current_version)
if not running:

@ -18,10 +18,11 @@ import argparse
import json
import os
import sys
import urllib.error
import urllib.parse
import urllib.request
import uuid
import urllib2
gcp_utils_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../../gcp/utils'))
sys.path.append(gcp_utils_dir)
@ -121,12 +122,12 @@ def _get_resultstore_data(api_key, invocation_id):
# that limit, the 'nextPageToken' field is included in the request to get
# subsequent data, so keep requesting until 'nextPageToken' field is omitted.
while True:
req = urllib2.Request(
req = urllib.request.Request(
url=
'https://resultstore.googleapis.com/v2/invocations/%s/targets/-/configuredTargets/-/actions?key=%s&pageToken=%s&fields=next_page_token,actions.id,actions.status_attributes,actions.timing,actions.test_action'
% (invocation_id, api_key, page_token),
headers={'Content-Type': 'application/json'})
results = json.loads(urllib2.urlopen(req).read())
results = json.loads(urllib.request.urlopen(req).read())
all_actions.extend(results['actions'])
if 'nextPageToken' not in results:
break
@ -170,7 +171,8 @@ if __name__ == "__main__":
if args.resultstore_dump_file:
with open(args.resultstore_dump_file, 'w') as f:
json.dump(resultstore_actions, f, indent=4, sort_keys=True)
print('Dumped resultstore data to file %s' % args.resultstore_dump_file)
print(
('Dumped resultstore data to file %s' % args.resultstore_dump_file))
# google.devtools.resultstore.v2.Action schema:
# https://github.com/googleapis/googleapis/blob/master/google/devtools/resultstore/v2/action.proto
@ -256,8 +258,8 @@ if __name__ == "__main__":
}
})
except Exception as e:
print('Failed to parse test result. Error: %s' % str(e))
print(json.dumps(test_case, indent=4))
print(('Failed to parse test result. Error: %s' % str(e)))
print((json.dumps(test_case, indent=4)))
bq_rows.append({
'insertId': str(uuid.uuid4()),
'json': {
@ -284,7 +286,7 @@ if __name__ == "__main__":
if args.bq_dump_file:
with open(args.bq_dump_file, 'w') as f:
json.dump(bq_rows, f, indent=4, sort_keys=True)
print('Dumped BQ data to file %s' % args.bq_dump_file)
print(('Dumped BQ data to file %s' % args.bq_dump_file))
if not args.skip_upload:
# BigQuery sometimes fails with large uploads, so batch 1,000 rows at a time.

@ -171,7 +171,7 @@ def docker_run_cmdline(cmdline, image, docker_args, cwd, environ=None):
# turn environ into -e docker args
docker_cmdline = 'docker run -i --rm=true'.split()
if environ:
for k, v in environ.items():
for k, v in list(environ.items()):
docker_cmdline += ['-e', '%s=%s' % (k, v)]
return docker_cmdline + ['-w', cwd] + docker_args + [image] + cmdline
@ -403,7 +403,7 @@ docker_images = {}
build_jobs = []
if len(args.language) and args.language[0] == 'all':
languages = _LANGUAGES.keys()
languages = list(_LANGUAGES.keys())
else:
languages = args.language
for lang_name in languages:
@ -501,7 +501,7 @@ def run_one_scenario(scenario_config):
grpclb_ips = []
shortname_prefix = scenario_config['name']
# Start backends
for i in xrange(len(scenario_config['backend_configs'])):
for i in range(len(scenario_config['backend_configs'])):
backend_config = scenario_config['backend_configs'][i]
backend_shortname = shortname(shortname_prefix, 'backend_server', i)
backend_spec = backend_server_jobspec(
@ -511,7 +511,7 @@ def run_one_scenario(scenario_config):
backend_addrs.append(
'%s:%d' % (backend_job.ip_address(), _BACKEND_SERVER_PORT))
# Start fallbacks
for i in xrange(len(scenario_config['fallback_configs'])):
for i in range(len(scenario_config['fallback_configs'])):
fallback_config = scenario_config['fallback_configs'][i]
fallback_shortname = shortname(shortname_prefix, 'fallback_server',
i)
@ -521,7 +521,7 @@ def run_one_scenario(scenario_config):
server_jobs[fallback_shortname] = fallback_job
fallback_ips.append(fallback_job.ip_address())
# Start balancers
for i in xrange(len(scenario_config['balancer_configs'])):
for i in range(len(scenario_config['balancer_configs'])):
balancer_config = scenario_config['balancer_configs'][i]
grpclb_shortname = shortname(shortname_prefix, 'grpclb_server', i)
grpclb_spec = grpclb_jobspec(balancer_config['transport_sec'],
@ -582,7 +582,7 @@ def run_one_scenario(scenario_config):
return num_failures
finally:
# Check if servers are still running.
for server, job in server_jobs.items():
for server, job in list(server_jobs.items()):
if not job.is_running():
print('Server "%s" has exited prematurely.' % server)
suppress_failure = suppress_server_logs and not args.verbose
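Note: xrange() does not exist in Python 3; range() is now the lazy, constant-memory equivalent, so the straight rename in these index loops is behavior-preserving. Sketch with made-up backend configs:

backend_configs = [{'port': 8080}, {'port': 8081}]     # illustrative data

for i in range(len(backend_configs)):                  # range() replaces Python 2's xrange()
    print(i, backend_configs[i]['port'])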

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -756,8 +756,8 @@ _GRPC_CLIENT_TEST_CASES_FOR_HTTP2_SERVER_TEST_CASES = {
'no_df_padding_sanity_test': 'large_unary'
}
_HTTP2_SERVER_TEST_CASES_THAT_USE_GRPC_CLIENTS = _GRPC_CLIENT_TEST_CASES_FOR_HTTP2_SERVER_TEST_CASES.keys(
)
_HTTP2_SERVER_TEST_CASES_THAT_USE_GRPC_CLIENTS = list(
_GRPC_CLIENT_TEST_CASES_FOR_HTTP2_SERVER_TEST_CASES.keys())
_LANGUAGES_WITH_HTTP2_CLIENTS_FOR_HTTP2_SERVER_TEST_CASES = [
'java', 'go', 'python', 'c++'
@ -782,7 +782,7 @@ def docker_run_cmdline(cmdline, image, docker_args=[], cwd=None, environ=None):
# turn environ into -e docker args
if environ:
for k, v in environ.items():
for k, v in list(environ.items()):
docker_cmdline += ['-e', '%s=%s' % (k, v)]
# set working directory
@ -1208,7 +1208,7 @@ argp.add_argument('--google_default_creds_use_key_file',
'google_default_credentials test case, e.g. by '
'setting env var GOOGLE_APPLICATION_CREDENTIALS.'))
argp.add_argument('--prod_servers',
choices=prod_servers.keys(),
choices=list(prod_servers.keys()),
default=['default'],
nargs='+',
help=('The servers to run cloud_to_prod and '
@ -1549,7 +1549,7 @@ try:
(server_host, server_port) = server[1].split(':')
server_addresses[server_name] = (server_host, server_port)
for server_name, server_address in server_addresses.items():
for server_name, server_address in list(server_addresses.items()):
(server_host, server_port) = server_address
server_language = _LANGUAGES.get(server_name, None)
skip_server = [] # test cases unimplemented by server
@ -1663,7 +1663,7 @@ try:
report_utils.render_junit_xml_report(resultset, _TESTS_XML_REPORT)
for name, job in resultset.items():
for name, job in list(resultset.items()):
if "http2" in name:
job[0].http2results = aggregate_http2_results(job[0].message)
@ -1676,7 +1676,7 @@ try:
sys.exit(0)
finally:
# Check if servers are still running.
for server, job in server_jobs.items():
for server, job in list(server_jobs.items()):
if not job.is_running():
print('Server "%s" has exited prematurely.' % server)

@ -253,7 +253,7 @@ def prepare_remote_hosts(hosts, prepare_local=False):
def build_on_remote_hosts(hosts,
languages=scenario_config.LANGUAGES.keys(),
languages=list(scenario_config.LANGUAGES.keys()),
build_local=False):
"""Builds performance worker on remote hosts (and maybe also locally)."""
build_timeout = 45 * 60
@ -355,7 +355,8 @@ def create_scenarios(languages,
server_cpu_load=0):
"""Create jobspecs for scenarios to run."""
all_workers = [
worker for workers in workers_by_lang.values() for worker in workers
worker for workers in list(workers_by_lang.values())
for worker in workers
]
scenarios = []
_NO_WORKERS = []

@ -129,10 +129,10 @@ def parse_test_cases(arg):
def parse_port_range(port_arg):
try:
port = int(port_arg)
return range(port, port + 1)
return list(range(port, port + 1))
except:
port_min, port_max = port_arg.split(':')
return range(int(port_min), int(port_max) + 1)
return list(range(int(port_min), int(port_max) + 1))
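Note: callers of this helper expect an indexable sequence of ports, and Python 3's range() returns a lazy range object rather than a list, so 2to3 wraps it in list(); range objects already support len(), indexing and `in`, so the wrap mainly matters if the result is later mutated or concatenated. Sketch mirroring the helper above (ValueError narrows the original bare except):

def parse_port_range(port_arg):
    try:
        return list(range(int(port_arg), int(port_arg) + 1))
    except ValueError:
        port_min, port_max = port_arg.split(':')
        return list(range(int(port_min), int(port_max) + 1))

print(parse_port_range('8080'))        # [8080]
print(parse_port_range('8080:8082'))   # [8080, 8081, 8082]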
argp = argparse.ArgumentParser(description='Run xDS interop tests on GCP')
@ -614,7 +614,7 @@ def compare_expected_instances(stats, expected_instances):
Returns:
Returns true if the instances are expected. False if not.
"""
for rpc_type, expected_peers in expected_instances.items():
for rpc_type, expected_peers in list(expected_instances.items()):
rpcs_by_peer_for_type = stats.rpcs_by_method[rpc_type]
rpcs_by_peer = rpcs_by_peer_for_type.rpcs_by_peer if rpcs_by_peer_for_type else None
logger.debug('rpc: %s, by_peer: %s', rpc_type, rpcs_by_peer)
@ -1031,7 +1031,7 @@ def test_metadata_filter(gcp, original_backend_service, instance_group,
with open(bootstrap_path) as f:
md = json.load(f)['node']['metadata']
match_labels = []
for k, v in md.items():
for k, v in list(md.items()):
match_labels.append({'name': k, 'value': v})
not_match_labels = [{'name': 'fake', 'value': 'fail'}]
@ -2027,7 +2027,7 @@ def test_timeout(gcp, original_backend_service, instance_group):
after_stats = get_client_accumulated_stats()
success = True
for rpc, status in expected_results.items():
for rpc, status in list(expected_results.items()):
qty = (after_stats.stats_per_method[rpc].result[status] -
before_stats.stats_per_method[rpc].result[status])
want = test_runtime_secs * args.qps
@ -2209,7 +2209,7 @@ def test_fault_injection(gcp, original_backend_service, instance_group):
after_stats = get_client_accumulated_stats()
success = True
for status, pct in expected_results.items():
for status, pct in list(expected_results.items()):
rpc = 'UNARY_CALL'
qty = (after_stats.stats_per_method[rpc].result[status] -
before_stats.stats_per_method[rpc].result[status])
@ -2416,7 +2416,8 @@ def is_primary_instance_group(gcp, instance_group):
# the client's actual locality.
instance_names = get_instance_names(gcp, instance_group)
stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
return all(peer in instance_names for peer in stats.rpcs_by_peer.keys())
return all(
peer in instance_names for peer in list(stats.rpcs_by_peer.keys()))
def get_startup_script(path_to_server_binary, service_port):
@ -3006,7 +3007,7 @@ def patch_url_map_backend_service(gcp,
'weightedBackendServices': [{
'backendService': service.url,
'weight': w,
} for service, w in services_with_weights.items()]
} for service, w in list(services_with_weights.items())]
} if services_with_weights else None
config = {

@ -165,10 +165,10 @@ if len(workspace_git_hashes - git_submodule_hashes) > 0:
print(
"Found discrepancies between git submodules and Bazel WORKSPACE dependencies"
)
print("workspace_git_hashes: %s" % workspace_git_hashes)
print("git_submodule_hashes: %s" % git_submodule_hashes)
print("workspace_git_hashes - git_submodule_hashes: %s" %
(workspace_git_hashes - git_submodule_hashes))
print(("workspace_git_hashes: %s" % workspace_git_hashes))
print(("git_submodule_hashes: %s" % git_submodule_hashes))
print(("workspace_git_hashes - git_submodule_hashes: %s" %
(workspace_git_hashes - git_submodule_hashes)))
sys.exit(1)
# Also check that we can override each dependency

@ -134,9 +134,9 @@ if path_files.sort() != expected_files.sort():
diff_plus = [file for file in path_files if file not in expected_files]
diff_minus = [file for file in expected_files if file not in path_files]
for file in diff_minus:
print('- ', file)
print(('- ', file))
for file in diff_plus:
print('+ ', file)
print(('+ ', file))
errors += 1
if errors > 0:
@ -161,7 +161,7 @@ for path_file in expected_files:
fo.write(expected_content)
if 0 != os.system('diff %s %s' % (path_file_expected, path_file)):
print('Difference found in file:', path_file)
print(('Difference found in file:', path_file))
errors += 1
os.remove(path_file_expected)
@ -177,9 +177,9 @@ for root, dirs, files in os.walk('src'):
with open(path_file, "r") as fi:
content = fi.read()
if '#include <grpc++/' in content:
print(
print((
'Failed: invalid include of deprecated headers in include/grpc++ in %s'
% path_file)
% path_file))
errors += 1
except IOError:
pass

@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# Copyright 2018 gRPC authors.
#
@ -23,7 +23,7 @@ subprocess.check_call(['./json_run_localhost_scenario_gen.py'])
subprocess.check_call(['./qps_json_driver_scenario_gen.py'])
subprocess.check_call(['buildifier', '-v', '-r', '.'])
output = subprocess.check_output(['git', 'status', '--porcelain'])
output = subprocess.check_output(['git', 'status', '--porcelain']).decode()
qps_json_driver_bzl = 'test/cpp/qps/qps_json_driver_scenarios.bzl'
json_run_localhost_bzl = 'test/cpp/qps/json_run_localhost_scenarios.bzl'

@ -38,9 +38,9 @@ with open('doc/environment_variables.md') as f:
for t in tracers:
if t not in text:
print(
print((
"ERROR: tracer \"%s\" is not mentioned in doc/environment_variables.md"
% t)
% t))
errors += 1
assert errors == 0

@ -31,7 +31,7 @@ from expand_version import Version
try:
branch_name = subprocess.check_output('git rev-parse --abbrev-ref HEAD',
shell=True)
shell=True).decode()
except:
print('WARNING: not a git repository')
branch_name = None
@ -63,22 +63,22 @@ settings = build_yaml['settings']
top_version = Version(settings['version'])
if not check_version(top_version):
errors += 1
print(warning % ('version', top_version))
print((warning % ('version', top_version)))
for tag, value in settings.items():
for tag, value in list(settings.items()):
if re.match(r'^[a-z]+_version$', tag):
value = Version(value)
if tag != 'core_version':
if value.major != top_version.major:
errors += 1
print('major version mismatch on %s: %d vs %d' %
(tag, value.major, top_version.major))
print(('major version mismatch on %s: %d vs %d' %
(tag, value.major, top_version.major)))
if value.minor != top_version.minor:
errors += 1
print('minor version mismatch on %s: %d vs %d' %
(tag, value.minor, top_version.minor))
print(('minor version mismatch on %s: %d vs %d' %
(tag, value.minor, top_version.minor)))
if not check_version(value):
errors += 1
print(warning % (tag, value))
print((warning % (tag, value)))
sys.exit(errors)

@ -66,11 +66,11 @@ for root, dirs, files in os.walk('src/core'):
continue
with open(path) as f:
text = f.read()
for banned, exceptions in BANNED_EXCEPT.items():
for banned, exceptions in list(BANNED_EXCEPT.items()):
if path in exceptions:
continue
if banned in text:
print('Illegal use of "%s" in %s' % (banned, path))
print(('Illegal use of "%s" in %s' % (banned, path)))
errors += 1
assert errors == 0

@ -35,7 +35,7 @@ _TARGETS += package_targets.targets()
def _create_build_map():
"""Maps task names and labels to list of tasks to be built."""
target_build_map = dict([(target.name, [target]) for target in _TARGETS])
if len(_TARGETS) > len(target_build_map.keys()):
if len(_TARGETS) > len(list(target_build_map.keys())):
raise Exception('Target names need to be unique')
label_build_map = {}
@ -47,7 +47,7 @@ def _create_build_map():
else:
label_build_map[label] = [target]
if set(target_build_map.keys()).intersection(label_build_map.keys()):
if set(target_build_map.keys()).intersection(list(label_build_map.keys())):
raise Exception('Target names need to be distinct from label names')
return dict(list(target_build_map.items()) + list(label_build_map.items()))
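Note: the final line merges the two lookup maps; in Python 3 the .items() views have to be materialized before they can be concatenated with +, which is what the list() calls do. Dict unpacking gives the same merge more directly and would be a possible later cleanup. Sketch (map contents are stand-ins):

target_build_map = {'grpc_python': ['task_a']}
label_build_map = {'python': ['task_a', 'task_b']}

merged = dict(list(target_build_map.items()) + list(label_build_map.items()))
same = {**target_build_map, **label_build_map}      # Python 3.5+ equivalent
assert merged == same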
