Merge pull request #16461 from jtattermusch/refactor_uploading_to_bq

Refactor upload_test_results.py
Jan Tattermusch (committed via GitHub), commit d766b9a2a4
Files changed:
  1. tools/interop_matrix/run_interop_matrix_tests.py (2 changes)
  2. tools/run_tests/python_utils/upload_test_results.py (17 changes)
  3. tools/run_tests/run_interop_tests.py (2 changes)
  4. tools/run_tests/run_tests.py (12 changes)

tools/interop_matrix/run_interop_matrix_tests.py
@@ -235,7 +235,7 @@ def run_tests_for_lang(lang, runtime, images):
                 maxjobs=args.jobs)
             if args.bq_result_table and resultset:
                 upload_test_results.upload_interop_results_to_bq(
-                    resultset, args.bq_result_table, args)
+                    resultset, args.bq_result_table)
             if num_failures:
                 jobset.message('FAILED', 'Some tests failed', do_newline=True)
                 total_num_failures += num_failures

tools/run_tests/python_utils/upload_test_results.py
@@ -104,14 +104,13 @@ def _insert_rows_with_retries(bq, bq_table, bq_rows):
             sys.exit(1)
 
 
-def upload_results_to_bq(resultset, bq_table, args, platform):
+def upload_results_to_bq(resultset, bq_table, extra_fields):
     """Upload test results to a BQ table.
 
   Args:
     resultset: dictionary generated by jobset.run
     bq_table: string name of table to create/upload results to in BQ
-    args: args in run_tests.py, generated by argparse
-    platform: string name of platform tests were run on
+    extra_fields: dict with extra values that will be uploaded along with the results
   """
     bq = big_query_utils.create_big_query()
     big_query_utils.create_partitioned_table(
@@ -129,32 +128,26 @@ def upload_results_to_bq(resultset, bq_table, args, platform):
         for result in results:
             test_results = {}
             _get_build_metadata(test_results)
-            test_results['compiler'] = args.compiler
-            test_results['config'] = args.config
             test_results['cpu_estimated'] = result.cpu_estimated
             test_results['cpu_measured'] = result.cpu_measured
             test_results['elapsed_time'] = '%.2f' % result.elapsed_time
-            test_results['iomgr_platform'] = args.iomgr_platform
-            # args.language is a list, but will always have one element in the contexts
-            # this function is used.
-            test_results['language'] = args.language[0]
-            test_results['platform'] = platform
             test_results['result'] = result.state
             test_results['return_code'] = result.returncode
             test_results['test_name'] = shortname
             test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
+            for field_name, field_value in six.iteritems(extra_fields):
+                test_results[field_name] = field_value
             row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
             bq_rows.append(row)
 
     _insert_rows_with_retries(bq, bq_table, bq_rows)
 
 
-def upload_interop_results_to_bq(resultset, bq_table, args):
+def upload_interop_results_to_bq(resultset, bq_table):
     """Upload interop test results to a BQ table.
 
   Args:
     resultset: dictionary generated by jobset.run
     bq_table: string name of table to create/upload results to in BQ
-    args: args in run_interop_tests.py, generated by argparse
   """
     bq = big_query_utils.create_big_query()
     big_query_utils.create_partitioned_table(
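The net effect of the new extra_fields parameter is that every key/value pair
in the dict becomes one more column on each uploaded row. A minimal standalone
sketch of that merge (the row contents here are hypothetical, not grpc code):

    import six

    # Shaped like the test_results dict built in upload_results_to_bq.
    base_row = {'test_name': 'bins/opt/foo_test', 'result': 'PASSED'}
    extra_fields = {'compiler': 'gcc', 'platform': 'linux'}

    # Same pattern as the loop added above: each extra field is copied
    # into the row before it is handed to big_query_utils.make_row().
    for field_name, field_value in six.iteritems(extra_fields):
        base_row[field_name] = field_value

    assert base_row == {
        'test_name': 'bins/opt/foo_test',
        'result': 'PASSED',
        'compiler': 'gcc',
        'platform': 'linux',
    }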

tools/run_tests/run_interop_tests.py
@@ -1494,7 +1494,7 @@ try:
         maxjobs=args.jobs,
         skip_jobs=args.manual_run)
     if args.bq_result_table and resultset:
-        upload_interop_results_to_bq(resultset, args.bq_result_table, args)
+        upload_interop_results_to_bq(resultset, args.bq_result_table)
     if num_failures:
         jobset.message('FAILED', 'Some tests failed', do_newline=True)
     else:

tools/run_tests/run_tests.py
@@ -1821,8 +1821,16 @@ def _build_and_run(check_cancelled,
         for antagonist in antagonists:
             antagonist.kill()
         if args.bq_result_table and resultset:
-            upload_results_to_bq(resultset, args.bq_result_table, args,
-                                 platform_string())
+            upload_extra_fields = {
+                'compiler': args.compiler,
+                'config': args.config,
+                'iomgr_platform': args.iomgr_platform,
+                # args.language is a list but will always have one element when uploading to BQ is enabled.
+                'language': args.language[0],
+                'platform': platform_string()
+            }
+            upload_results_to_bq(resultset, args.bq_result_table,
+                                 upload_extra_fields)
         if xml_report and resultset:
             report_utils.render_junit_xml_report(
                 resultset, xml_report, suite_name=args.report_suite_name)
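Taken together, the refactor moves all knowledge of argparse flags out of the
uploader and into its callers: upload_results_to_bq now needs only a resultset,
a table name, and a plain dict. A sketch of a hypothetical new caller (the
import path assumes running from tools/run_tests; the table name, field values,
and empty resultset are illustrative only):

    import python_utils.upload_test_results as upload_test_results

    # Normally the dict returned by jobset.run(...); empty here so the
    # sketch stays self-contained.
    resultset = {}

    # Whatever metadata this harness cares about; adding a field no longer
    # requires touching upload_test_results.py or its argparse coupling.
    extra_fields = {
        'platform': 'linux',  # hypothetical values
        'config': 'opt',
    }

    upload_test_results.upload_results_to_bq(
        resultset, 'example_results_table', extra_fields)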
