Merge pull request #8288 from jtattermusch/backport_matrix_runner

Backport matrix runner scripts into v1.0.x
Jan Tattermusch authored 8 years ago, committed by GitHub
commit 47f97d6216
Changed files (number of changed lines in parentheses):
  1. tools/jenkins/run_jenkins_matrix.sh (42)
  2. tools/run_tests/dockerize/build_docker_and_run_tests.sh (12)
  3. tools/run_tests/dockerize/docker_run_tests.sh (3)
  4. tools/run_tests/run_tests_in_workspace.sh (46)
  5. tools/run_tests/run_tests_matrix.py (282)

@@ -0,0 +1,42 @@
#!/usr/bin/env bash
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# This script is invoked by Jenkins and triggers a test run, passing
# all args to the test script.
#
# Setting up rvm environment BEFORE we set -ex.
[[ -s /etc/profile.d/rvm.sh ]] && . /etc/profile.d/rvm.sh
# To prevent cygwin bash complaining about empty lines ending with \r
# we set the igncr option. The option doesn't exist on Linux, so we fallback
# to just 'set -ex' there.
# NOTE: No empty lines should appear in this file before igncr is set!
set -ex -o igncr || set -ex
python tools/run_tests/run_tests_matrix.py "$@"
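
For orientation, a hypothetical invocation of this wrapper is sketched below; the specific labels and job count are illustrative, but the -f/--filter, -j and --dry_run flags are defined by run_tests_matrix.py later in this change and are forwarded to it verbatim.

# sketch only, not part of the committed script:
# run only the basic Linux test suites, 4 run_tests.py instances at a time
tools/jenkins/run_jenkins_matrix.sh -f basictests linux -j 4
# preview which run_tests.py invocations the matrix would expand to
tools/jenkins/run_jenkins_matrix.sh -f portability --dry_run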

@@ -44,9 +44,6 @@ mkdir -p /tmp/ccache
 # its cache location now that --download-cache is deprecated).
 mkdir -p /tmp/xdg-cache-home
 
-# Create a local branch so the child Docker script won't complain
-git branch -f jenkins-docker
-
 # Inputs
 # DOCKERFILE_DIR - Directory in which Dockerfile file is located.
 # DOCKER_RUN_SCRIPT - Script to run under docker (relative to grpc repo root)
@@ -86,9 +83,12 @@ docker run \
   $DOCKER_IMAGE_NAME \
   bash -l "/var/local/jenkins/grpc/$DOCKER_RUN_SCRIPT" || DOCKER_FAILED="true"
 
-docker cp "$CONTAINER_NAME:/var/local/git/grpc/reports.zip" $git_root || true
-unzip -o $git_root/reports.zip -d $git_root || true
-rm -f reports.zip
+# use unique name for reports.zip to prevent clash between concurrent
+# run_tests.py runs
+TEMP_REPORTS_ZIP=`mktemp`
+docker cp "$CONTAINER_NAME:/var/local/git/grpc/reports.zip" ${TEMP_REPORTS_ZIP} || true
+unzip -o ${TEMP_REPORTS_ZIP} -d $git_root || true
+rm -f ${TEMP_REPORTS_ZIP}
 
 # remove the container, possibly killing it first
 docker rm -f $CONTAINER_NAME || true
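
The "Inputs" listed above are ordinary environment variables, so a standalone invocation of this script could look like the following sketch; the concrete values are assumptions for illustration, only the variable names come from the comments in the script.

# sketch only: build the image described by DOCKERFILE_DIR and run the
# given script inside the resulting container
DOCKERFILE_DIR=tools/dockerfile/test/sanity \
DOCKER_RUN_SCRIPT=tools/run_tests/dockerize/docker_run_tests.sh \
  tools/run_tests/dockerize/build_docker_and_run_tests.sh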

@@ -63,6 +63,7 @@ echo '</body></html>' >> index.html
 cd ..
 zip -r reports.zip reports
-find . -name report.xml | xargs zip reports.zip
+find . -name report.xml | xargs -r zip reports.zip
+find . -name 'report_*.xml' | xargs -r zip reports.zip
 
 exit $exit_code
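
Two things change here: the report_*.xml pattern picks up the per-matrix-entry reports (run_tests_matrix.py passes -x report_<name>.xml to each run_tests.py instance), and -r (--no-run-if-empty, GNU xargs) keeps zip from being invoked when find matches nothing. A minimal sketch of why -r matters:

# without -r, an empty find result still runs `zip reports.zip` with no
# input files, which fails with "Nothing to do!" and a non-zero status
find . -name 'report_*.xml' | xargs zip reports.zip
# with -r, zip is simply skipped when there is nothing to add
find . -name 'report_*.xml' | xargs -r zip reports.zip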

@@ -0,0 +1,46 @@
#!/bin/bash
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Create a workspace in a subdirectory to allow running multiple builds in isolation.
# WORKSPACE_NAME env variable needs to contain name of the workspace to create.
# All cmdline args will be passed to run_tests.py script (executed in the
# newly created workspace)
set -ex
cd $(dirname $0)/../..
rm -rf "${WORKSPACE_NAME}"
# TODO(jtattermusch): clone --recursive fetches the submodules from github.
# Try avoiding that to save time and network capacity.
git clone --recursive . "${WORKSPACE_NAME}"
echo "Running run_tests.py in workspace ${WORKSPACE_NAME}"
python "${WORKSPACE_NAME}/tools/run_tests/run_tests.py" $@

@@ -0,0 +1,282 @@
#!/usr/bin/env python2.7
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Run test matrix."""
import argparse
import jobset
import multiprocessing
import os
import report_utils
import sys
_ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
os.chdir(_ROOT)
# Set the timeout high to allow enough time for sanitizers and pre-building
# clang docker.
_RUNTESTS_TIMEOUT = 2*60*60
# Number of jobs assigned to each run_tests.py instance
_INNER_JOBS = 2
def _docker_jobspec(name, runtests_args=[]):
"""Run a single instance of run_tests.py in a docker container"""
test_job = jobset.JobSpec(
cmdline=['python', 'tools/run_tests/run_tests.py',
'--use_docker',
'-t',
'-j', str(_INNER_JOBS),
'-x', 'report_%s.xml' % name] + runtests_args,
shortname='run_tests_%s' % name,
timeout_seconds=_RUNTESTS_TIMEOUT)
return test_job
def _workspace_jobspec(name, runtests_args=[], workspace_name=None):
"""Run a single instance of run_tests.py in a separate workspace"""
if not workspace_name:
workspace_name = 'workspace_%s' % name
env = {'WORKSPACE_NAME': workspace_name}
test_job = jobset.JobSpec(
cmdline=['tools/run_tests/run_tests_in_workspace.sh',
'-t',
'-j', str(_INNER_JOBS),
'-x', '../report_%s.xml' % name] + runtests_args,
environ=env,
shortname='run_tests_%s' % name,
timeout_seconds=_RUNTESTS_TIMEOUT)
return test_job
def _generate_jobs(languages, configs, platforms,
arch=None, compiler=None,
labels=[], extra_args=[]):
result = []
for language in languages:
for platform in platforms:
for config in configs:
name = '%s_%s_%s' % (language, platform, config)
runtests_args = ['-l', language,
'-c', config]
if arch or compiler:
name += '_%s_%s' % (arch, compiler)
runtests_args += ['--arch', arch,
'--compiler', compiler]
runtests_args += extra_args
if platform == 'linux':
job = _docker_jobspec(name=name, runtests_args=runtests_args)
else:
job = _workspace_jobspec(name=name, runtests_args=runtests_args)
job.labels = [platform, config, language] + labels
result.append(job)
return result
def _create_test_jobs(extra_args=[]):
test_jobs = []
# supported on linux only
test_jobs += _generate_jobs(languages=['sanity', 'php7'],
configs=['dbg', 'opt'],
platforms=['linux'],
labels=['basictests'],
extra_args=extra_args)
# supported on all platforms.
test_jobs += _generate_jobs(languages=['c', 'csharp', 'node', 'python'],
configs=['dbg', 'opt'],
platforms=['linux', 'macos', 'windows'],
labels=['basictests'],
extra_args=extra_args)
# supported on linux and mac.
test_jobs += _generate_jobs(languages=['c++', 'ruby', 'php'],
configs=['dbg', 'opt'],
platforms=['linux', 'macos'],
labels=['basictests'],
extra_args=extra_args)
# supported on mac only.
test_jobs += _generate_jobs(languages=['objc'],
configs=['dbg', 'opt'],
platforms=['macos'],
labels=['basictests'],
extra_args=extra_args)
# sanitizers
test_jobs += _generate_jobs(languages=['c'],
configs=['msan', 'asan', 'tsan'],
platforms=['linux'],
labels=['sanitizers'],
extra_args=extra_args)
test_jobs += _generate_jobs(languages=['c++'],
configs=['asan', 'tsan'],
platforms=['linux'],
labels=['sanitizers'],
extra_args=extra_args)
return test_jobs
def _create_portability_test_jobs(extra_args=[]):
test_jobs = []
# portability C x86
test_jobs += _generate_jobs(languages=['c'],
configs=['dbg'],
platforms=['linux'],
arch='x86',
compiler='default',
labels=['portability'],
extra_args=extra_args)
# portability C and C++ on x64
for compiler in ['gcc4.4', 'gcc4.6', 'gcc5.3',
'clang3.5', 'clang3.6', 'clang3.7']:
test_jobs += _generate_jobs(languages=['c', 'c++'],
configs=['dbg'],
platforms=['linux'],
arch='x64',
compiler=compiler,
labels=['portability'],
extra_args=extra_args)
# portability C on Windows
for arch in ['x86', 'x64']:
for compiler in ['vs2013', 'vs2015']:
test_jobs += _generate_jobs(languages=['c'],
configs=['dbg'],
platforms=['windows'],
arch=arch,
compiler=compiler,
labels=['portability'],
extra_args=extra_args)
test_jobs += _generate_jobs(languages=['python'],
configs=['dbg'],
platforms=['linux'],
arch='default',
compiler='python3.4',
labels=['portability'],
extra_args=extra_args)
test_jobs += _generate_jobs(languages=['csharp'],
configs=['dbg'],
platforms=['linux'],
arch='default',
compiler='coreclr',
labels=['portability'],
extra_args=extra_args)
return test_jobs
def _allowed_labels():
"""Returns a list of existing job labels."""
all_labels = set()
for job in _create_test_jobs() + _create_portability_test_jobs():
for label in job.labels:
all_labels.add(label)
return sorted(all_labels)
argp = argparse.ArgumentParser(description='Run a matrix of run_tests.py tests.')
argp.add_argument('-j', '--jobs',
default=multiprocessing.cpu_count()/_INNER_JOBS,
type=int,
help='Number of concurrent run_tests.py instances.')
argp.add_argument('-f', '--filter',
choices=_allowed_labels(),
nargs='+',
default=[],
help='Filter targets to run by label with AND semantics.')
argp.add_argument('--build_only',
default=False,
action='store_const',
const=True,
help='Pass --build_only flag to run_tests.py instances.')
argp.add_argument('--force_default_poller', default=False, action='store_const', const=True,
help='Pass --force_default_poller to run_tests.py instances.')
argp.add_argument('--dry_run',
default=False,
action='store_const',
const=True,
help='Only print what would be run.')
args = argp.parse_args()
extra_args = []
if args.build_only:
extra_args.append('--build_only')
if args.force_default_poller:
extra_args.append('--force_default_poller')
all_jobs = _create_test_jobs(extra_args=extra_args) + _create_portability_test_jobs(extra_args=extra_args)
jobs = []
for job in all_jobs:
if not args.filter or all(filter in job.labels for filter in args.filter):
jobs.append(job)
if not jobs:
jobset.message('FAILED', 'No test suites match given criteria.',
do_newline=True)
sys.exit(1)
print('IMPORTANT: The changes you are testing need to be locally committed')
print('because only the committed changes in the current branch will be')
print('copied to the docker environment or into subworkspaces.')
print
print 'Will run these tests:'
for job in jobs:
if args.dry_run:
print ' %s: "%s"' % (job.shortname, ' '.join(job.cmdline))
else:
print ' %s' % job.shortname
print
if args.dry_run:
print '--dry_run was used, exiting'
sys.exit(1)
jobset.message('START', 'Running test matrix.', do_newline=True)
num_failures, resultset = jobset.run(jobs,
newline_on_success=True,
travis=True,
maxjobs=args.jobs)
report_utils.render_junit_xml_report(resultset, 'report.xml')
if num_failures == 0:
  jobset.message('SUCCESS', 'All run_tests.py instances finished successfully.',
                 do_newline=True)
else:
  jobset.message('FAILED', 'Some run_tests.py instances have failed.',
                 do_newline=True)
  sys.exit(1)
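
To inspect what the matrix expands to without running anything, the script can be invoked with --dry_run; the sketch below follows the '  %s: "%s"' format printed by the loop above, one line per language × platform × config combination that survives the AND-semantics label filter (the exact list depends on the filter and on extra_args).

python tools/run_tests/run_tests_matrix.py --dry_run -f basictests linux
# prints one line per generated job, e.g.:
#   run_tests_c_linux_dbg: "python tools/run_tests/run_tests.py --use_docker -t -j 2 -x report_c_linux_dbg.xml -l c -c dbg"
#   run_tests_c_linux_opt: "python tools/run_tests/run_tests.py --use_docker -t -j 2 -x report_c_linux_opt.xml -l c -c opt"
#   ...

Note that, as written, the script exits with status 1 after a dry run, so the exit code alone does not distinguish a dry run from a failed matrix.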