Merge pull request #22359 from jtattermusch/debug_upload_rbe_results

Fix a bug in upload_rbe_results.py that sometimes caused test cases to be skipped when uploading results to BigQuery; a sketch of the bug follows the changed-file summary below.
Jan Tattermusch authored 5 years ago; committed by GitHub
commit d008c07ef0
1 changed file: tools/run_tests/python_utils/upload_rbe_results.py (46 changed lines)
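
For context, a minimal sketch of the bug being fixed, using made-up ResultStore-style data (the nesting mirrors the testSuite structure handled in the diff below, where each entry of 'tests' wraps its own nested testSuite, e.g. one per shard):

# Made-up data: two 'tests' entries, each wrapping a nested testSuite.
test_suite = {
    'tests': [
        {'testSuite': {'tests': [{'testCase': {'caseName': 'a'}},
                                 {'testCase': {'caseName': 'b'}}]}},
        {'testSuite': {'tests': [{'testCase': {'caseName': 'c'}}]}},
    ]
}

# Old behavior: only the first entry was read, so case 'c' never reached BigQuery.
old_cases = test_suite['tests'][0]['testSuite']['tests']
assert [c['testCase']['caseName'] for c in old_cases] == ['a', 'b']

# Fixed behavior: collect the cases from every entry.
new_cases = []
for tests_item in test_suite['tests']:
    new_cases += tests_item['testSuite']['tests']
assert [c['testCase']['caseName'] for c in new_cases] == ['a', 'b', 'c']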

@@ -134,15 +134,42 @@ def _get_resultstore_data(api_key, invocation_id):
 if __name__ == "__main__":
     # Arguments are necessary if running in a non-Kokoro environment.
-    argp = argparse.ArgumentParser(description='Upload RBE results.')
-    argp.add_argument('--api_key', default='', type=str)
-    argp.add_argument('--invocation_id', default='', type=str)
+    argp = argparse.ArgumentParser(
+        description=
+        'Fetches results for given RBE invocation and uploads them to BigQuery table.'
+    )
+    argp.add_argument('--api_key',
+                      default='',
+                      type=str,
+                      help='The API key to read from ResultStore API')
+    argp.add_argument('--invocation_id',
+                      default='',
+                      type=str,
+                      help='UUID of bazel invocation to fetch.')
+    argp.add_argument('--bq_dump_file',
+                      default=None,
+                      type=str,
+                      help='Dump JSON data to file just before uploading')
+    argp.add_argument('--resultstore_dump_file',
+                      default=None,
+                      type=str,
+                      help='Dump JSON data as received from ResultStore API')
+    argp.add_argument('--skip_upload',
+                      default=False,
+                      action='store_const',
+                      const=True,
+                      help='Skip uploading to bigquery')
     args = argp.parse_args()
     api_key = args.api_key or _get_api_key()
     invocation_id = args.invocation_id or _get_invocation_id()
     resultstore_actions = _get_resultstore_data(api_key, invocation_id)
+    if args.resultstore_dump_file:
+        with open(args.resultstore_dump_file, 'w') as f:
+            json.dump(resultstore_actions, f, indent=4, sort_keys=True)
+        print('Dumped resultstore data to file %s' % args.resultstore_dump_file)
     # google.devtools.resultstore.v2.Action schema:
     # https://github.com/googleapis/googleapis/blob/master/google/devtools/resultstore/v2/action.proto
     bq_rows = []
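
The new flags make the transformation debuggable offline: run once against a real invocation with --resultstore_dump_file (and --skip_upload), then iterate on the captured JSON locally. A hypothetical inspection session over such a dump (the path and the checks are illustrative, not part of the PR):

import json

with open('resultstore_dump.json') as f:  # path written via --resultstore_dump_file
    actions = json.load(f)

# Count entries that look like test results, i.e. carry the 'testAction'
# field that the processing loop below drills into.
test_actions = [a for a in actions if 'testAction' in a]
print('%d of %d actions carry test results' % (len(test_actions), len(actions)))

Incidentally, --skip_upload is declared with action='store_const', const=True, which behaves the same as argparse's more idiomatic action='store_true'.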
@@ -189,8 +216,9 @@ if __name__ == "__main__":
         elif 'tests' not in action['testAction']['testSuite']:
             continue
         else:
-            test_cases = action['testAction']['testSuite']['tests'][0][
-                'testSuite']['tests']
+            test_cases = []
+            for tests_item in action['testAction']['testSuite']['tests']:
+                test_cases += tests_item['testSuite']['tests']
         for test_case in test_cases:
             if any(s in test_case['testCase'] for s in ['errors', 'failures']):
                 result = 'FAILED'
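
The loop that consumes test_cases (partially visible as context above) treats the presence of an 'errors' or 'failures' key inside 'testCase' as a failure. A hedged sketch with made-up cases; the non-failure label is assumed, since that branch sits outside this hunk:

cases = [
    {'testCase': {'caseName': 'passes'}},
    {'testCase': {'caseName': 'fails', 'failures': [{'message': 'boom'}]}},
]
for test_case in cases:
    if any(s in test_case['testCase'] for s in ['errors', 'failures']):
        result = 'FAILED'
    else:
        result = 'PASSED'  # assumed label; the real branch is outside this hunk
    print(test_case['testCase']['caseName'], result)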
@@ -247,6 +275,14 @@ if __name__ == "__main__":
             }
         })
-    # BigQuery sometimes fails with large uploads, so batch 1,000 rows at a time.
-    for i in range((len(bq_rows) / 1000) + 1):
-        _upload_results_to_bq(bq_rows[i * 1000:(i + 1) * 1000])
+    if args.bq_dump_file:
+        with open(args.bq_dump_file, 'w') as f:
+            json.dump(bq_rows, f, indent=4, sort_keys=True)
+        print('Dumped BQ data to file %s' % args.bq_dump_file)
+    if not args.skip_upload:
+        # BigQuery sometimes fails with large uploads, so batch 1,000 rows at a time.
+        for i in range((len(bq_rows) / 1000) + 1):
+            _upload_results_to_bq(bq_rows[i * 1000:(i + 1) * 1000])
+    else:
+        print('Skipped upload to bigquery.')
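
The batching pattern above is unchanged by this PR apart from the new skip/else wrapper. A standalone sketch of it with a stub uploader and made-up rows; note the // below: the plain / in the diff only yields an int under Python 2, while range() on Python 3 rejects the resulting float.

def _upload_batch(rows):  # stub standing in for _upload_results_to_bq
    print('uploading %d rows' % len(rows))

bq_rows = [{'test_id': i} for i in range(2500)]  # made-up rows
BATCH = 1000
# Integer division keeps this Python 3 safe; when len(bq_rows) is an exact
# multiple of BATCH the final slice is empty, which is harmless for the stub.
for i in range(len(bq_rows) // BATCH + 1):
    _upload_batch(bq_rows[i * BATCH:(i + 1) * BATCH])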
