task_runner.py improvements (foundations for future speedup) (#28480)

* add --dry_run support for task_runner.py

* don't sort task_runner targets before running

* fixup dry run

* support task_runner.py --inner_jobs

* pass inner_jobs to build_jobspec for task_runner.py

* support inner_jobs for C# artifacts

* support inner_jobs for protoc artifacts

* inner jobs support for src/csharp/experimental/build_native_ext_for_android.sh

* address review feedback
pull/28526/head
Jan Tattermusch 3 years ago committed by GitHub
parent e48845c346
commit f23e1d9e85
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 5
      src/csharp/experimental/build_native_ext_for_android.sh
  2. 52
      tools/run_tests/artifacts/artifact_targets.py
  3. 5
      tools/run_tests/artifacts/build_artifact_csharp.sh
  4. 6
      tools/run_tests/artifacts/build_artifact_protoc.sh
  5. 19
      tools/run_tests/artifacts/distribtest_targets.py
  6. 12
      tools/run_tests/artifacts/package_targets.py
  7. 24
      tools/run_tests/task_runner.py

@ -50,4 +50,7 @@ ${ANDROID_SDK_CMAKE} ../.. \
-DANDROID_NDK="${ANDROID_NDK_PATH}" \
-DgRPC_XDS_USER_AGENT_IS_CSHARP=ON
make -j4 grpc_csharp_ext
# Use externally provided env to determine build parallelism, otherwise use default.
GRPC_CSHARP_BUILD_EXT_COMPILER_JOBS=${GRPC_CSHARP_BUILD_EXT_COMPILER_JOBS:-2}
make grpc_csharp_ext "-j${GRPC_CSHARP_BUILD_EXT_COMPILER_JOBS}"

@ -118,8 +118,13 @@ class PythonArtifact:
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
environ = {}
if inner_jobs is not None:
# set number of parallel jobs when building native extension
# building the native extension is the most time-consuming part of the build
environ['GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS'] = str(inner_jobs)
if self.platform == 'linux_extra':
# Crosscompilation build for armv7 (e.g. Raspberry Pi)
environ['PYTHON'] = '/opt/python/{}/bin/python3'.format(
@ -204,7 +209,9 @@ class RubyArtifact:
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
# TODO(jtattermusch): honor inner_jobs arg for this task.
del inner_jobs
# Ruby build uses docker internally and docker cannot be nested.
# We are using a custom workspace instead.
return create_jobspec(
@ -231,26 +238,34 @@ class CSharpExtArtifact:
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
environ = {}
if inner_jobs is not None:
# set number of parallel jobs when building native extension
environ['GRPC_CSHARP_BUILD_EXT_COMPILER_JOBS'] = str(inner_jobs)
if self.arch == 'android':
environ['ANDROID_ABI'] = self.arch_abi
return create_docker_jobspec(
self.name,
'tools/dockerfile/grpc_artifact_android_ndk',
'tools/run_tests/artifacts/build_artifact_csharp_android.sh',
environ={'ANDROID_ABI': self.arch_abi})
environ=environ)
elif self.arch == 'ios':
return create_jobspec(
self.name,
['tools/run_tests/artifacts/build_artifact_csharp_ios.sh'],
timeout_seconds=60 * 60,
use_workspace=True)
use_workspace=True,
environ=environ)
elif self.platform == 'windows':
return create_jobspec(self.name, [
'tools\\run_tests\\artifacts\\build_artifact_csharp.bat',
self.arch
],
timeout_seconds=45 * 60,
use_workspace=True)
use_workspace=True,
environ=environ)
else:
if self.platform == 'linux':
dockerfile_dir = 'tools/dockerfile/grpc_artifact_centos6_{}'.format(
@ -260,14 +275,17 @@ class CSharpExtArtifact:
# give us both ready to use crosscompiler and sufficient backward compatibility
dockerfile_dir = 'tools/dockerfile/grpc_artifact_python_manylinux2014_aarch64'
return create_docker_jobspec(
self.name, dockerfile_dir,
'tools/run_tests/artifacts/build_artifact_csharp.sh')
self.name,
dockerfile_dir,
'tools/run_tests/artifacts/build_artifact_csharp.sh',
environ=environ)
else:
return create_jobspec(
self.name,
['tools/run_tests/artifacts/build_artifact_csharp.sh'],
timeout_seconds=45 * 60,
use_workspace=True)
use_workspace=True,
environ=environ)
def __str__(self):
return self.name
@ -287,7 +305,8 @@ class PHPArtifact:
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
del inner_jobs # arg unused as PHP artifact build is basically just packing an archive
if self.platform == 'linux':
return create_docker_jobspec(
self.name,
@ -313,9 +332,15 @@ class ProtocArtifact:
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
environ = {}
if inner_jobs is not None:
# set number of parallel jobs when building protoc
environ['GRPC_PROTOC_BUILD_COMPILER_JOBS'] = str(inner_jobs)
if self.platform != 'windows':
environ = {'CXXFLAGS': '', 'LDFLAGS': ''}
environ['CXXFLAGS'] = ''
environ['LDFLAGS'] = ''
if self.platform == 'linux':
dockerfile_dir = 'tools/dockerfile/grpc_artifact_centos6_{}'.format(
self.arch)
@ -340,10 +365,11 @@ class ProtocArtifact:
use_workspace=True)
else:
generator = 'Visual Studio 14 2015 Win64' if self.arch == 'x64' else 'Visual Studio 14 2015'
environ['generator'] = generator
return create_jobspec(
self.name,
['tools\\run_tests\\artifacts\\build_artifact_protoc.bat'],
environ={'generator': generator},
environ=environ,
use_workspace=True)
def __str__(self):

@ -26,7 +26,10 @@ cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo \
-DgRPC_XDS_USER_AGENT_IS_CSHARP=ON \
../..
make grpc_csharp_ext -j2
# Use externally provided env to determine build parallelism, otherwise use default.
GRPC_CSHARP_BUILD_EXT_COMPILER_JOBS=${GRPC_CSHARP_BUILD_EXT_COMPILER_JOBS:-2}
make grpc_csharp_ext "-j${GRPC_CSHARP_BUILD_EXT_COMPILER_JOBS}"
if [ -f "libgrpc_csharp_ext.so" ]
then

@ -21,7 +21,11 @@ mkdir -p cmake/build
pushd cmake/build
cmake -DgRPC_BUILD_TESTS=OFF -DCMAKE_BUILD_TYPE=Release ../..
make protoc plugins -j2
# Use externally provided env to determine build parallelism, otherwise use default.
GRPC_PROTOC_BUILD_COMPILER_JOBS=${GRPC_PROTOC_BUILD_COMPILER_JOBS:-2}
make protoc plugins "-j${GRPC_PROTOC_BUILD_COMPILER_JOBS}"
popd

@ -108,7 +108,8 @@ class CSharpDistribTest(object):
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
del inner_jobs # arg unused as there is little opportunity for parallelizing what's inside the distribtests
if self.platform == 'linux':
return create_docker_jobspec(
self.name,
@ -170,7 +171,9 @@ class PythonDistribTest(object):
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
# TODO(jtattermusch): honor inner_jobs arg for this task.
del inner_jobs
if not self.platform == 'linux':
raise Exception("Not supported yet.")
@ -220,7 +223,9 @@ class RubyDistribTest(object):
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
# TODO(jtattermusch): honor inner_jobs arg for this task.
del inner_jobs
arch_to_gem_arch = {
'x64': 'x86_64',
'x86': 'x86',
@ -260,7 +265,9 @@ class PHP7DistribTest(object):
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
# TODO(jtattermusch): honor inner_jobs arg for this task.
del inner_jobs
if self.platform == 'linux':
return create_docker_jobspec(
self.name,
@ -314,7 +321,9 @@ class CppDistribTest(object):
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
# TODO(jtattermusch): honor inner_jobs arg for this task.
del inner_jobs
if self.platform == 'linux':
return create_docker_jobspec(
self.name,

@ -87,7 +87,8 @@ class CSharpPackage:
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
del inner_jobs # arg unused as there is little opportunity for parallelizing
environ = {
'GRPC_CSHARP_BUILD_SINGLE_PLATFORM_NUGET':
os.getenv('GRPC_CSHARP_BUILD_SINGLE_PLATFORM_NUGET')
@ -119,7 +120,8 @@ class RubyPackage:
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
del inner_jobs # arg unused as this step simply collects preexisting artifacts
return create_docker_jobspec(
self.name, 'tools/dockerfile/grpc_artifact_centos6_x64',
'tools/run_tests/artifacts/build_package_ruby.sh')
@ -135,7 +137,8 @@ class PythonPackage:
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
del inner_jobs # arg unused as this step simply collects preexisting artifacts
# since the python package build does very little, we can use virtually
# any image that has new-enough python, so reusing one of the images used
# for artifact building seems natural.
@ -156,7 +159,8 @@ class PHPPackage:
def pre_build_jobspecs(self):
return []
def build_jobspec(self):
def build_jobspec(self, inner_jobs=None):
del inner_jobs # arg unused as this step simply collects preexisting artifacts
return create_docker_jobspec(
self.name, 'tools/dockerfile/grpc_artifact_centos6_x64',
'tools/run_tests/artifacts/build_package_php.sh')

@ -73,6 +73,18 @@ argp.add_argument('-x',
default='report_taskrunner_sponge_log.xml',
type=str,
help='Filename for the JUnit-compatible XML report')
argp.add_argument('--dry_run',
default=False,
action='store_const',
const=True,
help='Only print what would be run.')
argp.add_argument(
'--inner_jobs',
default=None,
type=int,
help=
'Number of parallel jobs to use by each target. Passed as build_jobspec(inner_jobs=N) to each target.'
)
args = argp.parse_args()
@ -83,7 +95,15 @@ for label in args.build:
# Among targets selected by -b, filter out those that don't match the filter
targets = [t for t in targets if all(f in t.labels for f in args.filter)]
targets = sorted(set(targets), key=lambda target: target.name)
print('Will build %d targets:' % len(targets))
for target in targets:
print(' %s, labels %s' % (target.name, target.labels))
print()
if args.dry_run:
print('--dry_run was used, exiting')
sys.exit(1)
# Execute pre-build phase
prebuild_jobs = []
@ -99,7 +119,7 @@ if prebuild_jobs:
build_jobs = []
for target in targets:
build_jobs.append(target.build_jobspec())
build_jobs.append(target.build_jobspec(inner_jobs=args.inner_jobs))
if not build_jobs:
print('Nothing to build.')
sys.exit(1)

Loading…
Cancel
Save