Merge pull request #15918 from matt-kwong/rbe-unknown

Enable uploading UNKNOWN results for RBE
Matt Kwong committed 7 years ago (via GitHub)
commit f2cd616329
tools/run_tests/python_utils/upload_rbe_results.py (86 changes)
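For orientation, every hunk below feeds the same structure: one BigQuery row per test case, wrapped in the insertId/json envelope that BigQuery's streaming inserts expect. A sketch of that shape with invented values (only the field names come from the diff; the job and target strings are hypothetical):

import uuid

# One row as assembled below; all values here are made up for illustration.
row = {
    'insertId': str(uuid.uuid4()),
    'json': {
        'job_name': 'grpc/core/master_linux',      # hypothetical Kokoro job
        'build_id': '1234',
        'build_url': 'https://source.cloud.google.com/results/invocations/abc',
        'test_target': '//test/core/end2end:some_test',  # hypothetical target
        'test_case': 'some_case',
        'result': 'UNKNOWN',  # the value this change makes possible
        'timestamp': '2018-07-01T00:00:00Z',
    }
}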

@@ -136,7 +136,7 @@ if __name__ == "__main__":
     resultstore_actions = _get_resultstore_data(api_key, invocation_id)
     bq_rows = []
-    for action in resultstore_actions:
+    for index, action in enumerate(resultstore_actions):
         # Filter out non-test related data, such as build results.
         if 'testAction' not in action:
             continue
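The only change in this hunk is the switch to enumerate, and it exists so the new UNKNOWN branch in the next hunk can reach resultstore_actions[index - 1]. A minimal sketch of that borrow-the-previous-timestamp pattern on made-up data (the field names follow the diff; the index > 0 guard is an extra safety check not present in the script):

# Borrow the previous element's timestamp while iterating; data is invented.
actions = [
    {'timing': {'startTime': '2018-07-01T00:00:00Z'}},
    {'statusAttributes': {'status': 'UNKNOWN'}},  # no 'timing' of its own
]

for index, action in enumerate(actions):
    if 'timing' not in action and index > 0:
        # Fall back to the previous action's start time, as the PR does.
        action['timing'] = {
            'startTime': actions[index - 1]['timing']['startTime']
        }

print(actions[1]['timing']['startTime'])  # 2018-07-01T00:00:00Z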
@@ -157,6 +157,23 @@
                     'timedOut': True
                 }
             }]
+        # When RBE believes its infrastructure is failing, it will abort and
+        # mark running tests as UNKNOWN. These infrastructure failures may be
+        # related to our tests, so we should investigate if specific tests are
+        # repeatedly being marked as UNKNOWN.
+        elif action['statusAttributes']['status'] == 'UNKNOWN':
+            test_cases = [{
+                'testCase': {
+                    'caseName': str(action['id']['actionId']),
+                    'unknown': True
+                }
+            }]
+            # Take the timestamp from the previous action, which should be
+            # a close approximation.
+            action['timing'] = {
+                'startTime':
+                    resultstore_actions[index - 1]['timing']['startTime']
+            }
         else:
             test_cases = action['testAction']['testSuite']['tests'][0][
                 'testSuite']['tests']
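An aborted UNKNOWN action carries no parsed test suite, so the branch fabricates a single placeholder case named after the action ID; the per-case loop in the next hunk then maps the 'unknown' flag to result = 'UNKNOWN'. The same shape as a standalone sketch (synthesize_unknown_case is a hypothetical helper, not part of the script):

# Hypothetical helper mirroring the UNKNOWN branch above: build a one-entry
# test_cases list for an action whose tests were aborted by RBE.
def synthesize_unknown_case(action):
    return [{
        'testCase': {
            'caseName': str(action['id']['actionId']),
            'unknown': True  # later mapped to result = 'UNKNOWN'
        }
    }]

action = {'id': {'actionId': 'deadbeef'},
          'statusAttributes': {'status': 'UNKNOWN'}}
print(synthesize_unknown_case(action))
# [{'testCase': {'caseName': 'deadbeef', 'unknown': True}}]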
@@ -165,28 +182,55 @@
                 result = 'FAILED'
             elif 'timedOut' in test_case['testCase']:
                 result = 'TIMEOUT'
+            elif 'unknown' in test_case['testCase']:
+                result = 'UNKNOWN'
             else:
                 result = 'PASSED'
-            bq_rows.append({
-                'insertId': str(uuid.uuid4()),
-                'json': {
-                    'job_name':
-                        os.getenv('KOKORO_JOB_NAME'),
-                    'build_id':
-                        os.getenv('KOKORO_BUILD_NUMBER'),
-                    'build_url':
-                        'https://source.cloud.google.com/results/invocations/%s' %
-                        invocation_id,
-                    'test_target':
-                        action['id']['targetId'],
-                    'test_case':
-                        test_case['testCase']['caseName'],
-                    'result':
-                        result,
-                    'timestamp':
-                        action['timing']['startTime'],
-                }
-            })
+            try:
+                bq_rows.append({
+                    'insertId': str(uuid.uuid4()),
+                    'json': {
+                        'job_name':
+                            os.getenv('KOKORO_JOB_NAME'),
+                        'build_id':
+                            os.getenv('KOKORO_BUILD_NUMBER'),
+                        'build_url':
+                            'https://source.cloud.google.com/results/invocations/%s'
+                            % invocation_id,
+                        'test_target':
+                            action['id']['targetId'],
+                        'test_case':
+                            test_case['testCase']['caseName'],
+                        'result':
+                            result,
+                        'timestamp':
+                            action['timing']['startTime'],
+                    }
+                })
+            except Exception as e:
+                print('Failed to parse test result. Error: %s' % str(e))
+                print(json.dumps(test_case, indent=4))
+                bq_rows.append({
+                    'insertId': str(uuid.uuid4()),
+                    'json': {
+                        'job_name':
+                            os.getenv('KOKORO_JOB_NAME'),
+                        'build_id':
+                            os.getenv('KOKORO_BUILD_NUMBER'),
+                        'build_url':
+                            'https://source.cloud.google.com/results/invocations/%s'
+                            % invocation_id,
+                        'test_target':
+                            action['id']['targetId'],
+                        'test_case':
+                            'N/A',
+                        'result':
+                            'UNPARSEABLE',
+                        'timestamp':
+                            'N/A',
+                    }
+                })
     # BigQuery sometimes fails with large uploads, so batch 1,000 rows at a time.
     for i in range((len(bq_rows) / 1000) + 1):
         _upload_results_to_bq(bq_rows[i * 1000:(i + 1) * 1000])
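One caveat on the batching loop at the end: it relies on Python 2, where len(bq_rows) / 1000 is floor division; under Python 3 the division yields a float and range() raises a TypeError. A Python 3 sketch of the same 1,000-row chunking, with upload standing in for _upload_results_to_bq:

# Python 3 sketch of the same chunked upload; 'upload' stands in for the
# script's _upload_results_to_bq helper.
def upload_in_batches(rows, upload, batch_size=1000):
    for start in range(0, len(rows), batch_size):
        # Slicing past the end is safe; the final batch is simply shorter.
        upload(rows[start:start + batch_size])

upload_in_batches([{'n': i} for i in range(2500)],
                  lambda batch: print(len(batch)))  # prints 1000, 1000, 500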
