|
|
@@ -104,14 +104,13 @@ def _insert_rows_with_retries(bq, bq_table, bq_rows):
                     sys.exit(1)
 
 
-def upload_results_to_bq(resultset, bq_table, args, platform):
+def upload_results_to_bq(resultset, bq_table, extra_fields):
     """Upload test results to a BQ table.
 
     Args:
         resultset: dictionary generated by jobset.run
         bq_table: string name of table to create/upload results to in BQ
-        args: args in run_tests.py, generated by argparse
-        platform: string name of platform tests were run on
+        extra_fields: dict with extra values that will be uploaded along with the results
     """
     bq = big_query_utils.create_big_query()
     big_query_utils.create_partitioned_table(
@@ -129,32 +128,26 @@ def upload_results_to_bq(resultset, bq_table, args, platform):
         for result in results:
             test_results = {}
             _get_build_metadata(test_results)
-            test_results['compiler'] = args.compiler
-            test_results['config'] = args.config
             test_results['cpu_estimated'] = result.cpu_estimated
             test_results['cpu_measured'] = result.cpu_measured
             test_results['elapsed_time'] = '%.2f' % result.elapsed_time
-            test_results['iomgr_platform'] = args.iomgr_platform
-            # args.language is a list, but will always have one element in the
-            # contexts this function is used.
-            test_results['language'] = args.language[0]
-            test_results['platform'] = platform
             test_results['result'] = result.state
             test_results['return_code'] = result.returncode
             test_results['test_name'] = shortname
             test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
+            for field_name, field_value in six.iteritems(extra_fields):
+                test_results[field_name] = field_value
             row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
             bq_rows.append(row)
     _insert_rows_with_retries(bq, bq_table, bq_rows)
 
 
-def upload_interop_results_to_bq(resultset, bq_table, args):
+def upload_interop_results_to_bq(resultset, bq_table):
     """Upload interop test results to a BQ table.
 
     Args:
         resultset: dictionary generated by jobset.run
         bq_table: string name of table to create/upload results to in BQ
-        args: args in run_interop_tests.py, generated by argparse
     """
     bq = big_query_utils.create_big_query()
     big_query_utils.create_partitioned_table(
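
Note: with args and platform gone from the signature, each caller now assembles
that metadata itself and passes it in as extra_fields, which the uploader merges
into every row via six.iteritems. A minimal caller-side sketch of what this
looks like (the args and platform variables below are assumptions standing in
for the caller's own state; only the field names come from the removed lines):

    # Hypothetical run_tests.py-style caller. Build the dict of extra columns
    # that upload_results_to_bq() now folds into each BigQuery row.
    extra_fields = {
        'compiler': args.compiler,
        'config': args.config,
        'iomgr_platform': args.iomgr_platform,
        # args.language is a list, but has a single element where this is used.
        'language': args.language[0],
        'platform': platform,
    }
    upload_results_to_bq(resultset, bq_table, extra_fields)

This keeps run_tests.py-specific argparse knowledge out of the uploader, which
is presumably also why upload_interop_results_to_bq can drop its args parameter.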
|
|
|