diff --git a/tools/internal_ci/helper_scripts/prepare_build_linux_rc b/tools/internal_ci/helper_scripts/prepare_build_linux_rc
index 2ade8dac51f..ea2a17f2bcf 100644
--- a/tools/internal_ci/helper_scripts/prepare_build_linux_rc
+++ b/tools/internal_ci/helper_scripts/prepare_build_linux_rc
@@ -32,11 +32,4 @@ PYTHONWARNINGS=ignore XDG_CACHE_HOME=/tmp/xdg-cache-home sudo -E pip install cov
 # Download Docker images from DockerHub
 export DOCKERHUB_ORGANIZATION=grpctesting
 
-# If this is a PR using RUN_TESTS_FLAGS var, then add flags to filter tests
-if [ -n "$KOKORO_GITHUB_PULL_REQUEST_NUMBER" ] && [ -n "$RUN_TESTS_FLAGS" ]; then
-  sudo apt-get install -y jq
-  ghprbTargetBranch=$(curl -s https://api.github.com/repos/grpc/grpc/pulls/$KOKORO_GITHUB_PULL_REQUEST_NUMBER | jq -r .base.ref)
-  export RUN_TESTS_FLAGS="$RUN_TESTS_FLAGS --filter_pr_tests --base_branch origin/$ghprbTargetBranch"
-fi
-
 git submodule update --init
diff --git a/tools/internal_ci/linux/grpc_interop_badserver_python.sh b/tools/internal_ci/linux/grpc_interop_badserver_python.sh
deleted file mode 100755
index c2bd4e79ac1..00000000000
--- a/tools/internal_ci/linux/grpc_interop_badserver_python.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env bash
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -ex
-
-export LANG=en_US.UTF-8
-
-# Enter the gRPC repo root
-cd $(dirname $0)/../../..
-
-source tools/internal_ci/helper_scripts/prepare_build_linux_rc
-source tools/internal_ci/helper_scripts/prepare_build_interop_rc
-
-tools/run_tests/run_interop_tests.py -l python --use_docker --http2_server_interop
-
diff --git a/tools/internal_ci/linux/grpc_interop_tocloud.cfg b/tools/internal_ci/linux/grpc_interop_tocloud.cfg
index 2803616007a..13aec15770b 100644
--- a/tools/internal_ci/linux/grpc_interop_tocloud.cfg
+++ b/tools/internal_ci/linux/grpc_interop_tocloud.cfg
@@ -15,8 +15,7 @@
 # Config file for the internal CI (in protobuf text format)
 
 # Location of the continuous shell script in repository.
-build_file: "grpc/tools/internal_ci/linux/grpc_interop_tocloud.sh"
-# grpc_interop tests can take 6+ hours to complete.
+build_file: "grpc/tools/internal_ci/linux/grpc_run_interop_tests.sh"
 timeout_mins: 60
 action {
   define_artifacts {
@@ -24,3 +23,8 @@ action {
     regex: "github/grpc/reports/**"
   }
 }
+
+env_vars {
+  key: "RUN_TESTS_FLAGS"
+  value: "-l all -s all --use_docker --http2_interop --internal_ci -t -j 12 --bq_result_table interop_results"
+}
diff --git a/tools/internal_ci/linux/grpc_interop_tocloud.sh b/tools/internal_ci/linux/grpc_interop_tocloud.sh
deleted file mode 100755
index e3ba25af5df..00000000000
--- a/tools/internal_ci/linux/grpc_interop_tocloud.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env bash
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -ex
-
-export LANG=en_US.UTF-8
-
-# Enter the gRPC repo root
-cd $(dirname $0)/../../..
-
-source tools/internal_ci/helper_scripts/prepare_build_linux_rc
-source tools/internal_ci/helper_scripts/prepare_build_interop_rc
-
-tools/run_tests/run_interop_tests.py -l all -s all --use_docker --http2_interop --internal_ci -t -j 12 $@
diff --git a/tools/internal_ci/linux/grpc_interop_toprod.cfg b/tools/internal_ci/linux/grpc_interop_toprod.cfg
index 903480a3d18..8d025c4f60d 100644
--- a/tools/internal_ci/linux/grpc_interop_toprod.cfg
+++ b/tools/internal_ci/linux/grpc_interop_toprod.cfg
@@ -15,8 +15,7 @@
 # Config file for the internal CI (in protobuf text format)
 
 # Location of the continuous shell script in repository.
-build_file: "grpc/tools/internal_ci/linux/grpc_interop_toprod.sh"
-# grpc_interop tests can take 6+ hours to complete.
+build_file: "grpc/tools/internal_ci/linux/grpc_run_interop_tests.sh"
 timeout_mins: 60
 action {
   define_artifacts {
@@ -25,3 +24,7 @@ action {
   }
 }
 
+env_vars {
+  key: "RUN_TESTS_FLAGS"
+  value: "-l all --cloud_to_prod --cloud_to_prod_auth --prod_servers default gateway_v4 --use_docker --internal_ci -t -j 12 --bq_result_table interop_results"
+}
diff --git a/tools/internal_ci/linux/grpc_interop_toprod.sh b/tools/internal_ci/linux/grpc_interop_toprod.sh
deleted file mode 100755
index 97a7d5d2393..00000000000
--- a/tools/internal_ci/linux/grpc_interop_toprod.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env bash
-# Copyright 2017 gRPC authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -ex
-
-export LANG=en_US.UTF-8
-
-# Enter the gRPC repo root
-cd $(dirname $0)/../../..
-
-source tools/internal_ci/helper_scripts/prepare_build_linux_rc
-source tools/internal_ci/helper_scripts/prepare_build_interop_rc
-
-tools/run_tests/run_interop_tests.py \
-  -l all \
-  --cloud_to_prod \
-  --cloud_to_prod_auth \
-  --prod_servers default gateway_v4 \
-  --use_docker --internal_ci --allow_flakes -t -j 12 $@
-
diff --git a/tools/internal_ci/linux/grpc_interop_badserver_java.sh b/tools/internal_ci/linux/grpc_run_interop_tests.sh
similarity index 88%
rename from tools/internal_ci/linux/grpc_interop_badserver_java.sh
rename to tools/internal_ci/linux/grpc_run_interop_tests.sh
index d25845ca507..1f4eda2d529 100755
--- a/tools/internal_ci/linux/grpc_interop_badserver_java.sh
+++ b/tools/internal_ci/linux/grpc_run_interop_tests.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/bin/bash
 # Copyright 2017 gRPC authors.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -23,5 +23,4 @@ cd $(dirname $0)/../../..
 source tools/internal_ci/helper_scripts/prepare_build_linux_rc
 source tools/internal_ci/helper_scripts/prepare_build_interop_rc
 
-tools/run_tests/run_interop_tests.py -l java --use_docker --http2_server_interop
-
+tools/run_tests/run_interop_tests.py $RUN_TESTS_FLAGS
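[Note: the rename above turns the per-suite scripts into one generic runner. The Kokoro cfg files now carry the flags in the RUN_TESTS_FLAGS env var, and because the script expands $RUN_TESTS_FLAGS unquoted, the shell word-splits the value into individual arguments before run_interop_tests.py sees them. A minimal sketch of that splitting, using Python's shlex as an approximation of POSIX word splitting; the flag string is the one from grpc_interop_tocloud.cfg above:

    import shlex

    # Flag string exported via env_vars in the cfg file.
    flags = ('-l all -s all --use_docker --http2_interop '
             '--internal_ci -t -j 12 --bq_result_table interop_results')
    # Unquoted $RUN_TESTS_FLAGS expansion word-splits roughly like this:
    print(shlex.split(flags))
    # ['-l', 'all', '-s', 'all', '--use_docker', '--http2_interop',
    #  '--internal_ci', '-t', '-j', '12', '--bq_result_table', 'interop_results']

A consequence of this design is that no individual flag value can contain whitespace; multi-value flags like --prod_servers work only because each value arrives as a separate argument.]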
diff --git a/tools/internal_ci/linux/grpc_run_tests_matrix.sh b/tools/internal_ci/linux/grpc_run_tests_matrix.sh
index bd1430b7415..1018708f967 100755
--- a/tools/internal_ci/linux/grpc_run_tests_matrix.sh
+++ b/tools/internal_ci/linux/grpc_run_tests_matrix.sh
@@ -20,6 +20,13 @@ cd $(dirname $0)/../../..
 
 source tools/internal_ci/helper_scripts/prepare_build_linux_rc
 
+# If this is a PR using RUN_TESTS_FLAGS var, then add flags to filter tests
+if [ -n "$KOKORO_GITHUB_PULL_REQUEST_NUMBER" ] && [ -n "$RUN_TESTS_FLAGS" ]; then
+  sudo apt-get install -y jq
+  ghprbTargetBranch=$(curl -s https://api.github.com/repos/grpc/grpc/pulls/$KOKORO_GITHUB_PULL_REQUEST_NUMBER | jq -r .base.ref)
+  export RUN_TESTS_FLAGS="$RUN_TESTS_FLAGS --filter_pr_tests --base_branch origin/$ghprbTargetBranch"
+fi
+
 tools/run_tests/run_tests_matrix.py $RUN_TESTS_FLAGS || FAILED="true"
 
 # Reveal leftover processes that might be left behind by the build
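[Note: this PR-filtering block is the same one removed from prepare_build_linux_rc at the top of this diff; relocating it here scopes it to run_tests_matrix jobs, so the interop runner above never mutates RUN_TESTS_FLAGS. For illustration, a sketch of what the curl | jq pipeline computes, written against the public GitHub API; the PR number is hypothetical:

    import json
    from urllib.request import urlopen

    # Hypothetical PR number; the CI script reads $KOKORO_GITHUB_PULL_REQUEST_NUMBER.
    pr_number = 11234
    url = 'https://api.github.com/repos/grpc/grpc/pulls/%d' % pr_number
    pr = json.load(urlopen(url))
    base_ref = pr['base']['ref']  # equivalent of `jq -r .base.ref`
    # e.g. 'master' or a release branch, so the extra flags become:
    print('--filter_pr_tests --base_branch origin/%s' % base_ref)]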
diff --git a/tools/internal_ci/linux/grpc_interop_badserver_java.cfg b/tools/internal_ci/linux/pull_request/grpc_interop_tocloud.cfg
similarity index 76%
rename from tools/internal_ci/linux/grpc_interop_badserver_java.cfg
rename to tools/internal_ci/linux/pull_request/grpc_interop_tocloud.cfg
index dc2114273ea..cb18e8e8682 100644
--- a/tools/internal_ci/linux/grpc_interop_badserver_java.cfg
+++ b/tools/internal_ci/linux/pull_request/grpc_interop_tocloud.cfg
@@ -15,12 +15,16 @@
 # Config file for the internal CI (in protobuf text format)
 
 # Location of the continuous shell script in repository.
-build_file: "grpc/tools/internal_ci/linux/grpc_interop_badserver_java.sh"
-# grpc_interop tests can take 6+ hours to complete.
-timeout_mins: 480
+build_file: "grpc/tools/internal_ci/linux/grpc_run_interop_tests.sh"
+timeout_mins: 60
 action {
   define_artifacts {
-    regex: "**/report.xml"
+    regex: "**/sponge_log.xml"
     regex: "github/grpc/reports/**"
   }
 }
+
+env_vars {
+  key: "RUN_TESTS_FLAGS"
+  value: "-l all -s all --use_docker --http2_interop --internal_ci -t -j 12"
+}
diff --git a/tools/internal_ci/linux/grpc_interop_badserver_python.cfg b/tools/internal_ci/linux/pull_request/grpc_interop_toprod.cfg
similarity index 72%
rename from tools/internal_ci/linux/grpc_interop_badserver_python.cfg
rename to tools/internal_ci/linux/pull_request/grpc_interop_toprod.cfg
index ec738fcf74a..e141d9f6486 100644
--- a/tools/internal_ci/linux/grpc_interop_badserver_python.cfg
+++ b/tools/internal_ci/linux/pull_request/grpc_interop_toprod.cfg
@@ -15,12 +15,16 @@
 # Config file for the internal CI (in protobuf text format)
 
 # Location of the continuous shell script in repository.
-build_file: "grpc/tools/internal_ci/linux/grpc_interop_badserver_python.sh"
-# grpc_interop tests can take 6+ hours to complete.
-timeout_mins: 480
+build_file: "grpc/tools/internal_ci/linux/grpc_run_interop_tests.sh"
+timeout_mins: 60
 action {
   define_artifacts {
-    regex: "**/report.xml"
+    regex: "**/sponge_log.xml"
     regex: "github/grpc/reports/**"
   }
 }
+
+env_vars {
+  key: "RUN_TESTS_FLAGS"
+  value: "-l all --allow_flakes --cloud_to_prod --cloud_to_prod_auth --prod_servers default gateway_v4 --use_docker --internal_ci -t -j 12"
+}
diff --git a/tools/run_tests/python_utils/upload_test_results.py b/tools/run_tests/python_utils/upload_test_results.py
index 15e827769e1..ea97bc0aec1 100644
--- a/tools/run_tests/python_utils/upload_test_results.py
+++ b/tools/run_tests/python_utils/upload_test_results.py
@@ -51,6 +51,19 @@ _RESULTS_SCHEMA = [
   ('cpu_measured', 'FLOAT', 'Actual CPU usage of test'),
   ('return_code', 'INTEGER', 'Exit code of test'),
 ]
+_INTEROP_RESULTS_SCHEMA = [
+  ('job_name', 'STRING', 'Name of Jenkins/Kokoro job'),
+  ('build_id', 'INTEGER', 'Build ID of Jenkins/Kokoro job'),
+  ('build_url', 'STRING', 'URL of Jenkins/Kokoro job'),
+  ('test_name', 'STRING', 'Unique test name combining suite, client, server, and test case'),
+  ('suite', 'STRING', 'Test suite: cloud_to_cloud, cloud_to_prod, or cloud_to_prod_auth'),
+  ('client', 'STRING', 'Client language'),
+  ('server', 'STRING', 'Server host name'),
+  ('test_case', 'STRING', 'Name of test case'),
+  ('result', 'STRING', 'Test result: PASSED, TIMEOUT, FAILED, or SKIPPED'),
+  ('timestamp', 'TIMESTAMP', 'Timestamp of test run'),
+  ('elapsed_time', 'FLOAT', 'How long test took to run'),
+]
 
 
 def _get_build_metadata(test_results):
@@ -114,3 +127,41 @@ def upload_results_to_bq(resultset, bq_table, args, platform):
   else:
     print('Error uploading result to bigquery, all attempts failed.')
     sys.exit(1)
+
+
+def upload_interop_results_to_bq(resultset, bq_table, args):
+  """Upload interop test results to a BQ table.
+
+  Args:
+      resultset: dictionary generated by jobset.run
+      bq_table: string name of table to create/upload results to in BQ
+      args: args in run_interop_tests.py, generated by argparse
+  """
+  bq = big_query_utils.create_big_query()
+  big_query_utils.create_partitioned_table(bq, _PROJECT_ID, _DATASET_ID, bq_table, _INTEROP_RESULTS_SCHEMA, _DESCRIPTION,
+                                           partition_type=_PARTITION_TYPE, expiration_ms=_EXPIRATION_MS)
+
+  for shortname, results in six.iteritems(resultset):
+    for result in results:
+      test_results = {}
+      _get_build_metadata(test_results)
+      test_results['elapsed_time'] = '%.2f' % result.elapsed_time
+      test_results['result'] = result.state
+      test_results['test_name'] = shortname
+      test_results['suite'] = shortname.split(':')[0]
+      test_results['client'] = shortname.split(':')[1]
+      test_results['server'] = shortname.split(':')[2]
+      test_results['test_case'] = shortname.split(':')[3]
+      test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
+      row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
+      # TODO(jtattermusch): rows are inserted one by one, very inefficient
+      max_retries = 3
+      for attempt in range(max_retries):
+        if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID, bq_table, [row]):
+          break
+        else:
+          if attempt < max_retries - 1:
+            print('Error uploading result to bigquery, will retry.')
+          else:
+            print('Error uploading result to bigquery, all attempts failed.')
+            sys.exit(1)
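[Note: the suite, client, server, and test_case columns are recovered positionally from the jobset shortname, so this upload assumes shortnames have exactly the form suite:client:server:test_case. A minimal sketch of the mapping; the shortname value is hypothetical:

    # Hypothetical shortname of the form run_interop_tests.py produces.
    shortname = 'cloud_to_prod:python:default:large_unary'
    parts = shortname.split(':')
    row = {
        'suite': parts[0],      # 'cloud_to_prod'
        'client': parts[1],     # 'python'
        'server': parts[2],     # 'default'
        'test_case': parts[3],  # 'large_unary'
    }

A shortname with fewer than four colon-separated fields would raise IndexError here, and one with more would truncate test_case, so the upload is tightly coupled to the shortname format.]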
diff --git a/tools/run_tests/run_interop_tests.py b/tools/run_tests/run_interop_tests.py
index 1537641aee9..192f8e76eb4 100755
--- a/tools/run_tests/run_interop_tests.py
+++ b/tools/run_tests/run_interop_tests.py
@@ -35,6 +35,11 @@ import traceback
 import python_utils.dockerjob as dockerjob
 import python_utils.jobset as jobset
 import python_utils.report_utils as report_utils
+# It's OK if this import fails; it is only needed to upload results to BQ.
+try:
+  from python_utils.upload_test_results import upload_interop_results_to_bq
+except ImportError as e:
+  print(e)
 
 # Docker doesn't clean up after itself, so we do it on exit.
 atexit.register(lambda: subprocess.call(['stty', 'echo']))
@@ -956,6 +961,11 @@ argp.add_argument('--internal_ci',
                   const=True,
                   help=('Put reports into subdirectories to improve '
                         'presentation of results by Internal CI.'))
+argp.add_argument('--bq_result_table',
+                  default='',
+                  type=str,
+                  nargs='?',
+                  help='Upload test results to a specified BQ table.')
 args = argp.parse_args()
 
 servers = set(s for s in itertools.chain.from_iterable(_SERVERS
@@ -1205,6 +1215,8 @@ try:
   num_failures, resultset = jobset.run(jobs, newline_on_success=True,
                                        maxjobs=args.jobs,
                                        skip_jobs=args.manual_run)
+  if args.bq_result_table and resultset:
+    upload_interop_results_to_bq(resultset, args.bq_result_table, args)
   if num_failures:
     jobset.message('FAILED', 'Some tests failed', do_newline=True)
   else:
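[Note: one subtlety in the new --bq_result_table flag: with nargs='?' and no const, argparse stores None when the flag is passed without a value, and the empty-string default when it is absent. Both are falsy, so the `if args.bq_result_table and resultset:` guard skips the upload in either case. A small sketch reusing the declaration from this patch:

    import argparse

    argp = argparse.ArgumentParser()
    argp.add_argument('--bq_result_table',
                      default='',
                      type=str,
                      nargs='?',
                      help='Upload test results to a specified BQ table.')

    print(argp.parse_args([]).bq_result_table)                     # ''
    print(argp.parse_args(['--bq_result_table']).bq_result_table)  # None
    args = argp.parse_args(['--bq_result_table', 'interop_results'])
    print(args.bq_result_table)                                    # 'interop_results'

Only the last form, which is what the RUN_TESTS_FLAGS values in the cfg files above use, actually triggers upload_interop_results_to_bq.]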