small cleanup in microbenchmark scripts (#28886)

pull/28888/head
Jan Tattermusch authored 3 years ago, committed by GitHub
parent e0a5c310cd
commit d1db000f62
Changed files:
  1. tools/internal_ci/linux/grpc_performance_profile_daily.sh (10 changed lines)
  2. tools/internal_ci/linux/grpc_performance_profile_master.sh (11 changed lines)
  3. tools/internal_ci/linux/run_performance_profile_daily.sh (34 changed lines)
  4. tools/internal_ci/linux/run_performance_profile_hourly.sh (24 changed lines)
  5. tools/profiling/microbenchmarks/bm_json.py (8 changed lines)
  6. tools/run_tests/run_microbenchmark.py (21 changed lines)
  7. tools/run_tests/run_performance_tests.py (8 changed lines)

@@ -24,12 +24,4 @@ CPUS=`python3 -c 'import multiprocessing; print(multiprocessing.cpu_count())'`
 ./tools/run_tests/start_port_server.py || true
-tools/run_tests/run_microbenchmark.py --collect summary --bigquery_upload || FAILED="true"
-# kill port_server.py to prevent the build from freezing
-ps aux | grep port_server\.py | awk '{print $2}' | xargs kill -9
-if [ "$FAILED" != "" ]
-then
-  exit 1
-fi
+tools/run_tests/run_microbenchmark.py --collect summary --bq_result_table microbenchmarks.microbenchmarks

@@ -20,13 +20,8 @@ cd $(dirname $0)/../../..
 source tools/internal_ci/helper_scripts/prepare_build_linux_perf_rc
-tools/internal_ci/linux/run_performance_profile_hourly.sh || FAILED="true"
 CPUS=`python3 -c 'import multiprocessing; print(multiprocessing.cpu_count())'`
-# kill port_server.py to prevent the build from freezing
-ps aux | grep port_server\.py | awk '{print $2}' | xargs kill -9
-if [ "$FAILED" != "" ]
-then
-  exit 1
-fi
+./tools/run_tests/start_port_server.py || true
+tools/run_tests/run_microbenchmark.py --collect summary --bq_result_table microbenchmarks.microbenchmarks
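Taken together, both profile scripts now reduce to the same short flow. A minimal sketch assembled from the context and added lines above (the license header, `set -ex`, and the prepare step are assumed unchanged):

    #!/bin/bash
    # Sketch only, assembled from the diff above; not the verbatim file contents.
    set -ex
    cd $(dirname $0)/../../..
    source tools/internal_ci/helper_scripts/prepare_build_linux_perf_rc
    CPUS=`python3 -c 'import multiprocessing; print(multiprocessing.cpu_count())'`
    # start the port server up front; "|| true" tolerates one that is already running
    ./tools/run_tests/start_port_server.py || true
    # the BigQuery destination is now passed explicitly via --bq_result_table
    tools/run_tests/run_microbenchmark.py --collect summary --bq_result_table microbenchmarks.microbenchmarks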

@@ -1,34 +0,0 @@
#!/bin/bash
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
cd $(dirname $0)/../../..
# try to use pypy for generating reports
# each trace dumps 7-8gig of text to disk, and processing this into a report is
# heavyweight - so any speed boost is worthwhile
# TODO(ctiller): consider rewriting report generation in C++ for performance
if which pypy >/dev/null; then
PYTHON=pypy
else
PYTHON=python2.7
fi
BENCHMARKS_TO_RUN="bm_fullstack_unary_ping_pong bm_fullstack_streaming_ping_pong bm_fullstack_streaming_pump bm_closure bm_cq bm_call_create bm_error bm_chttp2_hpack bm_chttp2_transport bm_pollset bm_metadata"
./tools/run_tests/start_port_server.py || true
$PYTHON tools/run_tests/run_microbenchmark.py --collect summary perf latency -b $BENCHMARKS_TO_RUN

@@ -1,24 +0,0 @@
#!/bin/bash
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
cd $(dirname $0)/../../..
./tools/run_tests/start_port_server.py || true
CPUS=`python3 -c 'import multiprocessing; print(multiprocessing.cpu_count())'`
tools/run_tests/run_microbenchmark.py --collect summary --bigquery_upload

@@ -198,9 +198,13 @@ def expand_json(js, js2=None):
             labels = dict(labels_list)
         else:
             labels = {}
+        # TODO(jtattermusch): grabbing kokoro env values shouldn't be buried
+        # deep in the JSON conversion logic.
+        # Link the data to a kokoro job run by adding
+        # well known kokoro env variables as metadata for each row
         row = {
-            'jenkins_build': os.environ.get('BUILD_NUMBER', ''),
-            'jenkins_job': os.environ.get('JOB_NAME', ''),
+            'jenkins_build': os.environ.get('KOKORO_BUILD_NUMBER', ''),
+            'jenkins_job': os.environ.get('KOKORO_JOB_NAME', ''),
         }
         row.update(context)
         row.update(bm)
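The row keys keep their legacy jenkins_* names; only the environment variables they read from change. A small illustration with made-up values (when the variables are unset, os.environ.get(..., '') simply leaves the fields blank):

    # Illustration only: the values below are placeholders.
    export KOKORO_BUILD_NUMBER=12345
    export KOKORO_JOB_NAME=grpc/core/linux/grpc_performance_profile_daily
    # Any reporting that goes through expand_json() now tags each row with
    # these values, still under the legacy 'jenkins_build'/'jenkins_job' keys.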

@@ -207,7 +207,7 @@ def collect_summary(bm_name, args):
     text(run_summary(bm_name, 'opt', bm_name))
     heading('Summary: %s [with counters]' % bm_name)
     text(run_summary(bm_name, 'counters', bm_name))
-    if args.bigquery_upload:
+    if args.bq_result_table:
         with open('%s.csv' % bm_name, 'w') as f:
             f.write(
                 subprocess.check_output([
@@ -215,10 +215,10 @@ def collect_summary(bm_name, args):
                     '%s.counters.json' % bm_name,
                     '%s.opt.json' % bm_name
                 ]).decode('UTF-8'))
-        subprocess.check_call([
-            'bq', 'load', 'microbenchmarks.microbenchmarks',
-            '%s.csv' % bm_name
-        ])
+        subprocess.check_call(
+            ['bq', 'load',
+             '%s' % args.bq_result_table,
+             '%s.csv' % bm_name])
 collectors = {
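For reference, the subprocess call above amounts to the following bq CLI invocation, assuming the table name the CI scripts now pass and bm_error as an example benchmark:

    # Equivalent command line to the subprocess.check_call above;
    # the CSV name follows the '%s.csv' % bm_name pattern.
    bq load microbenchmarks.microbenchmarks bm_error.csv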
@@ -241,11 +241,12 @@ argp.add_argument('-b',
                   nargs='+',
                   type=str,
                   help='Which microbenchmarks should be run')
-argp.add_argument('--bigquery_upload',
-                  default=False,
-                  action='store_const',
-                  const=True,
-                  help='Upload results from summary collection to bigquery')
+argp.add_argument(
+    '--bq_result_table',
+    default='',
+    type=str,
+    help='Upload results from summary collection to a specified bigquery table.'
+)
 argp.add_argument(
     '--summary_time',
     default=None,
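Since the new flag defaults to an empty string and collect_summary only uploads when it is non-empty, the two modes look roughly like this (sketch; benchmark names taken from the list in the deleted script above):

    # Local run, no BigQuery upload (flag left at its empty default):
    tools/run_tests/run_microbenchmark.py --collect summary -b bm_error bm_closure
    # CI-style run that uploads the summary CSVs to an explicit result table:
    tools/run_tests/run_microbenchmark.py --collect summary --bq_result_table microbenchmarks.microbenchmarks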

@@ -169,12 +169,12 @@ def create_netperf_jobspec(server_host='localhost',
     # If netperf is running remotely, the env variables populated by Jenkins
     # won't be available on the client, but we need them for uploading results
     # to BigQuery.
-    jenkins_job_name = os.getenv('JOB_NAME')
+    jenkins_job_name = os.getenv('KOKORO_JOB_NAME')
     if jenkins_job_name:
-        cmd += 'JOB_NAME="%s" ' % jenkins_job_name
-    jenkins_build_number = os.getenv('BUILD_NUMBER')
+        cmd += 'KOKORO_JOB_NAME="%s" ' % jenkins_job_name
+    jenkins_build_number = os.getenv('KOKORO_BUILD_NUMBER')
     if jenkins_build_number:
-        cmd += 'BUILD_NUMBER="%s" ' % jenkins_build_number
+        cmd += 'KOKORO_BUILD_NUMBER="%s" ' % jenkins_build_number
     cmd += 'tools/run_tests/performance/run_netperf.sh'
     if client_host:
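The net effect on the remotely executed netperf command is just the renamed environment prefix. A sketch of the string create_netperf_jobspec builds when both variables are set locally (placeholder values; only the variable names come from the diff):

    # Placeholder values for illustration.
    KOKORO_JOB_NAME="grpc/core/linux/performance" KOKORO_BUILD_NUMBER="12345" tools/run_tests/performance/run_netperf.sh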
