@@ -134,15 +134,42 @@ def _get_resultstore_data(api_key, invocation_id):
 
 if __name__ == "__main__":
     # Arguments are necessary if running in a non-Kokoro environment.
-    argp = argparse.ArgumentParser(description='Upload RBE results.')
-    argp.add_argument('--api_key', default='', type=str)
-    argp.add_argument('--invocation_id', default='', type=str)
+    argp = argparse.ArgumentParser(
+        description=
+        'Fetches results for given RBE invocation and uploads them to BigQuery table.'
+    )
+    argp.add_argument('--api_key',
+                      default='',
+                      type=str,
+                      help='The API key to read from ResultStore API')
+    argp.add_argument('--invocation_id',
+                      default='',
+                      type=str,
+                      help='UUID of bazel invocation to fetch.')
+    argp.add_argument('--bq_dump_file',
+                      default=None,
+                      type=str,
+                      help='Dump JSON data to file just before uploading')
+    argp.add_argument('--resultstore_dump_file',
+                      default=None,
+                      type=str,
+                      help='Dump JSON data as received from ResultStore API')
+    argp.add_argument('--skip_upload',
+                      default=False,
+                      action='store_const',
+                      const=True,
+                      help='Skip uploading to bigquery')
     args = argp.parse_args()
 
     api_key = args.api_key or _get_api_key()
     invocation_id = args.invocation_id or _get_invocation_id()
     resultstore_actions = _get_resultstore_data(api_key, invocation_id)
 
+    if args.resultstore_dump_file:
+        with open(args.resultstore_dump_file, 'w') as f:
+            json.dump(resultstore_actions, f, indent=4, sort_keys=True)
+        print('Dumped resultstore data to file %s' % args.resultstore_dump_file)
+
     # google.devtools.resultstore.v2.Action schema:
     # https://github.com/googleapis/googleapis/blob/master/google/devtools/resultstore/v2/action.proto
     bq_rows = []
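Review note on the hunk above: the rewritten `--skip_upload` flag uses `action='store_const'` with `const=True`, which behaves the same as argparse's built-in `store_true` action. A minimal standalone sketch (not part of the patch) showing the equivalence:

```python
import argparse

# 'store_const' with const=True mirrors the built-in 'store_true' action:
# flag absent -> default (False), flag present -> const (True).
argp = argparse.ArgumentParser(description='store_const demo')
argp.add_argument('--skip_upload', default=False, action='store_const', const=True)

print(argp.parse_args([]).skip_upload)                 # False
print(argp.parse_args(['--skip_upload']).skip_upload)  # True
```

Together with the two new dump flags, this allows offline runs such as `--resultstore_dump_file=rs.json --bq_dump_file=bq.json --skip_upload` (file names here are illustrative).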
@@ -189,8 +216,9 @@ if __name__ == "__main__":
         elif 'tests' not in action['testAction']['testSuite']:
             continue
         else:
-            test_cases = action['testAction']['testSuite']['tests'][0][
-                'testSuite']['tests']
+            test_cases = []
+            for tests_item in action['testAction']['testSuite']['tests']:
+                test_cases += tests_item['testSuite']['tests']
         for test_case in test_cases:
             if any(s in test_case['testCase'] for s in ['errors', 'failures']):
                 result = 'FAILED'
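Review note on the hunk above: this is a data-loss fix, not just a style change. The old code read test cases only from `tests[0]`, silently dropping every other entry (e.g. additional shards); the new loop flattens all entries. A standalone sketch with made-up data of the same shape:

```python
# Hypothetical payload shaped like the ResultStore 'testSuite' field;
# two entries, one test case each.
action = {
    'testAction': {
        'testSuite': {
            'tests': [
                {'testSuite': {'tests': [{'testCase': {'caseName': 'a'}}]}},
                {'testSuite': {'tests': [{'testCase': {'caseName': 'b'}}]}},
            ]
        }
    }
}

# Old behavior: only the first entry is consulted -> one case found.
first_only = action['testAction']['testSuite']['tests'][0]['testSuite']['tests']
assert len(first_only) == 1

# New behavior: every entry is flattened -> both cases found.
test_cases = []
for tests_item in action['testAction']['testSuite']['tests']:
    test_cases += tests_item['testSuite']['tests']
assert len(test_cases) == 2
```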
@@ -247,6 +275,14 @@ if __name__ == "__main__":
             }
         })
 
-    # BigQuery sometimes fails with large uploads, so batch 1,000 rows at a time.
-    for i in range((len(bq_rows) / 1000) + 1):
-        _upload_results_to_bq(bq_rows[i * 1000:(i + 1) * 1000])
+    if args.bq_dump_file:
+        with open(args.bq_dump_file, 'w') as f:
+            json.dump(bq_rows, f, indent=4, sort_keys=True)
+        print('Dumped BQ data to file %s' % args.bq_dump_file)
+
+    if not args.skip_upload:
+        # BigQuery sometimes fails with large uploads, so batch 1,000 rows at a time.
+        for i in range((len(bq_rows) / 1000) + 1):
+            _upload_results_to_bq(bq_rows[i * 1000:(i + 1) * 1000])
+    else:
+        print('Skipped upload to bigquery.')
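Review note on the hunk above: one portability caveat in the batching loop this hunk keeps. `len(bq_rows) / 1000` truncates under Python 2, but under Python 3 `/` yields a float and `range()` raises a TypeError, so a Python 3 port would need `//`. A standalone sketch of the same batching arithmetic (`_upload_batch` is a hypothetical stand-in for the script's `_upload_results_to_bq`):

```python
def _upload_batch(rows):
    # Stand-in for _upload_results_to_bq: just report the batch size.
    print('uploading %d rows' % len(rows))

bq_rows = [{'id': i} for i in range(2500)]

# Same 1,000-row batching as the patch, with '//' so it also runs on
# Python 3. The '+ 1' covers the final partial batch; when len(bq_rows)
# is an exact multiple of 1000, the last slice is empty and is skipped.
for i in range((len(bq_rows) // 1000) + 1):
    batch = bq_rows[i * 1000:(i + 1) * 1000]
    if batch:
        _upload_batch(batch)
```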