Crosscompile python aarch64 wheels with dockcross (#25418)

* build aarch64 python wheels via crosscompilation

* yapf format code

* fix shellcheck complaints

* fix python37 aarch64 wheel build

* build python wheels on linux aarch64 with static libstdc++

* yapf format code
Jan Tattermusch committed
parent 0e3a02e903
commit fcd43e9030
6 changed files:
  1. setup.py (13)
  2. src/python/grpcio/commands.py (15)
  3. tools/distrib/python/grpcio_tools/setup.py (70)
  4. tools/dockerfile/grpc_artifact_python_manylinux2014_aarch64/Dockerfile (31)
  5. tools/run_tests/artifacts/artifact_targets.py (23)
  6. tools/run_tests/artifacts/build_artifact_python.sh (41)

@@ -138,6 +138,16 @@ BUILD_WITH_SYSTEM_CARES = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_CARES',
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_RE2 = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_RE2', False)
# Export this variable to force building the python extension with a statically linked libstdc++.
# At least on linux this is normally not needed, as manylinux-compatible wheels can be built just fine
# without statically linking libstdc++ (static linking slightly increases the wheel size).
# This option is useful when crosscompiling wheels for aarch64, where
# it's difficult to ensure that the crosscompilation toolchain has a high-enough version
# of GCC (we require >4.9) while still using old-enough libstdc++ symbols.
# TODO(jtattermusch): remove this workaround once the issues with the crosscompiler version are resolved.
BUILD_WITH_STATIC_LIBSTDCXX = os.environ.get(
'GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX', False)
# For local development use only: This skips building gRPC Core and its
# dependencies, including protobuf and boringssl. This allows "incremental"
# compilation by first building gRPC Core using make, then building only the
@@ -239,6 +249,9 @@ if EXTRA_ENV_LINK_ARGS is None:
EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)
if BUILD_WITH_STATIC_LIBSTDCXX:
EXTRA_LINK_ARGS.append('-static-libstdc++')
CYTHON_EXTENSION_PACKAGE_NAMES = ()
CYTHON_EXTENSION_MODULE_NAMES = ('grpc._cython.cygrpc',)

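For context on how these flags behave: boolean-style options in setup.py are read with os.environ.get(name, False), so the value seen is the raw string and any non-empty value (even "0" or "false") turns the option on. A minimal, self-contained sketch of the pattern used above (not the actual setup.py):

import os

# Sketch: os.environ.get(name, False) returns the raw string when the
# variable is set, so any non-empty value enables the option; leave the
# variable unset to keep the default behavior.
BUILD_WITH_STATIC_LIBSTDCXX = os.environ.get(
    'GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX', False)

EXTRA_LINK_ARGS = []
if BUILD_WITH_STATIC_LIBSTDCXX:
    # Statically link libstdc++ so the extension does not require newer
    # libstdc++ symbols than the manylinux2014 baseline allows.
    EXTRA_LINK_ARGS.append('-static-libstdc++')

print(EXTRA_LINK_ARGS)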
@@ -24,6 +24,7 @@ import re
import shutil
import subprocess
import sys
import sysconfig
import traceback
import setuptools
@@ -211,6 +212,20 @@ class BuildExt(build_ext.build_ext):
}
LINK_OPTIONS = {}
def get_ext_filename(self, ext_name):
# since python3.5, python extensions' shared libraries use a suffix that corresponds to the value
# of sysconfig.get_config_var('EXT_SUFFIX') and contains info about the architecture the library targets.
# E.g. on x64 linux the suffix is ".cpython-XYZ-x86_64-linux-gnu.so"
# When crosscompiling python wheels, we need to be able to override this suffix
# so that the resulting file name matches the target architecture and we end up with a well-formed
# wheel.
filename = build_ext.build_ext.get_ext_filename(self, ext_name)
orig_ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
new_ext_suffix = os.getenv('GRPC_PYTHON_OVERRIDE_EXT_SUFFIX')
if new_ext_suffix and filename.endswith(orig_ext_suffix):
filename = filename[:-len(orig_ext_suffix)] + new_ext_suffix
return filename
def build_extensions(self):
def compiler_ok_with_extra_std():

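A standalone sketch of the suffix swap performed by the overridden get_ext_filename above (file names are illustrative):

import sysconfig

def apply_ext_suffix_override(filename, new_suffix):
    # Replace the build host's EXT_SUFFIX with an explicitly provided one so
    # the shared object inside the wheel is named for the target architecture.
    orig_suffix = sysconfig.get_config_var('EXT_SUFFIX')
    if new_suffix and filename.endswith(orig_suffix):
        return filename[:-len(orig_suffix)] + new_suffix
    return filename

# Illustrative effect on an x86_64 build host targeting aarch64:
#   grpc/_cython/cygrpc.cpython-37m-x86_64-linux-gnu.so
#     -> grpc/_cython/cygrpc.cpython-37m-aarch64-linux-gnu.so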
@@ -71,6 +71,16 @@ PY3 = sys.version_info.major == 3
# to have been generated by building first *with* Cython support.
BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False)
# Export this variable to force building the python extension with a statically linked libstdc++.
# At least on linux this is normally not needed, as manylinux-compatible wheels can be built just fine
# without statically linking libstdc++ (static linking slightly increases the wheel size).
# This option is useful when crosscompiling wheels for aarch64, where
# it's difficult to ensure that the crosscompilation toolchain has a high-enough version
# of GCC (we require >4.9) while still using old-enough libstdc++ symbols.
# TODO(jtattermusch): remove this workaround once the issues with the crosscompiler version are resolved.
BUILD_WITH_STATIC_LIBSTDCXX = os.environ.get(
'GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX', False)
def check_linker_need_libatomic():
"""Test if linker on system needs libatomic."""
@@ -95,6 +105,24 @@ def check_linker_need_libatomic():
return cpp_test.returncode == 0
class BuildExt(build_ext.build_ext):
"""Custom build_ext command."""
def get_ext_filename(self, ext_name):
# since python3.5, python extensions' shared libraries use a suffix that corresponds to the value
# of sysconfig.get_config_var('EXT_SUFFIX') and contains info about the architecture the library targets.
# E.g. on x64 linux the suffix is ".cpython-XYZ-x86_64-linux-gnu.so"
# When crosscompiling python wheels, we need to be able to override this suffix
# so that the resulting file name matches the target architecture and we end up with a well-formed
# wheel.
filename = build_ext.build_ext.get_ext_filename(self, ext_name)
orig_ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
new_ext_suffix = os.getenv('GRPC_PYTHON_OVERRIDE_EXT_SUFFIX')
if new_ext_suffix and filename.endswith(orig_ext_suffix):
filename = filename[:-len(orig_ext_suffix)] + new_ext_suffix
return filename
# There are some situations (like on Windows) where CC, CFLAGS, and LDFLAGS are
# entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support.
# We use these environment variables to thus get around that without locking
@@ -159,6 +187,9 @@ if EXTRA_ENV_LINK_ARGS is None:
EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)
if BUILD_WITH_STATIC_LIBSTDCXX:
EXTRA_LINK_ARGS.append('-static-libstdc++')
CC_FILES = [os.path.normpath(cc_file) for cc_file in protoc_lib_deps.CC_FILES]
PROTO_FILES = [
os.path.normpath(proto_file) for proto_file in protoc_lib_deps.PROTO_FILES
@@ -245,22 +276,23 @@ def extension_modules():
return extensions
setuptools.setup(
name='grpcio-tools',
version=grpc_version.VERSION,
description='Protobuf code generator for gRPC',
long_description=open(_README_PATH, 'r').read(),
author='The gRPC Authors',
author_email='grpc-io@googlegroups.com',
url='https://grpc.io',
license='Apache License 2.0',
classifiers=CLASSIFIERS,
ext_modules=extension_modules(),
packages=setuptools.find_packages('.'),
install_requires=[
'protobuf>=3.5.0.post1, < 4.0dev',
'grpcio>={version}'.format(version=grpc_version.VERSION),
'setuptools',
],
package_data=package_data(),
)
setuptools.setup(name='grpcio-tools',
version=grpc_version.VERSION,
description='Protobuf code generator for gRPC',
long_description=open(_README_PATH, 'r').read(),
author='The gRPC Authors',
author_email='grpc-io@googlegroups.com',
url='https://grpc.io',
license='Apache License 2.0',
classifiers=CLASSIFIERS,
ext_modules=extension_modules(),
packages=setuptools.find_packages('.'),
install_requires=[
'protobuf>=3.5.0.post1, < 4.0dev',
'grpcio>={version}'.format(version=grpc_version.VERSION),
'setuptools',
],
package_data=package_data(),
cmdclass={
'build_ext': BuildExt,
})

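Apart from the yapf reflow, the functional change in this setup() call is the new cmdclass entry: without it setuptools runs the stock build_ext and the get_ext_filename override above is never consulted. A minimal sketch of the registration pattern, with hypothetical package and extension names:

import setuptools
from setuptools.command import build_ext

class BuildExt(build_ext.build_ext):
    # Custom build_ext; the override only takes effect because the class is
    # registered via cmdclass in setup() below.
    def get_ext_filename(self, ext_name):
        return super().get_ext_filename(ext_name)

setuptools.setup(
    name='example-ext-package',  # hypothetical
    ext_modules=[setuptools.Extension('example_ext', ['example_ext.c'])],
    cmdclass={'build_ext': BuildExt},
)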
@@ -0,0 +1,31 @@
# Copyright 2020 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# The aarch64 wheels are being crosscompiled to allow running the build
# on an x64 machine. The dockcross/manylinux2014-aarch64 image is an x86_64
# image with a crosscompilation toolchain installed.
# Use an older version of the dockcross image that still has gcc 4.9.4 because it was built
# before https://github.com/dockcross/dockcross/pull/449
FROM dockcross/manylinux2014-aarch64:20200929-608e6ac
# Update system packages and install dependencies needed by the build
RUN yum update -y && yum install -y curl-devel expat-devel gettext-devel openssl-devel zlib-devel
###################################
# Install Python build requirements
RUN /opt/python/cp35-cp35m/bin/pip install --upgrade cython
RUN /opt/python/cp36-cp36m/bin/pip install --upgrade cython
RUN /opt/python/cp37-cp37m/bin/pip install --upgrade cython
RUN /opt/python/cp38-cp38/bin/pip install --upgrade cython
RUN /opt/python/cp39-cp39/bin/pip install --upgrade cython

@@ -144,8 +144,24 @@ class PythonArtifact:
environ['PYTHON'] = '/opt/python/{}/bin/python'.format(
self.py_version)
environ['PIP'] = '/opt/python/{}/bin/pip'.format(self.py_version)
environ['GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS'] = 'TRUE'
environ['GRPC_BUILD_MANYLINUX_WHEEL'] = 'TRUE'
if self.arch == 'aarch64':
environ['GRPC_SKIP_TWINE_CHECK'] = 'TRUE'
# when crosscompiling, we need to force static linking of libstdc++,
# otherwise the libstdc++ symbols would be too new and the resulting
# wheel wouldn't pass the auditwheel check.
# This is needed because C core won't build with the GCC 4.8 that's
# included in the default dockcross toolchain, so we had to opt
# into using a slightly newer version of GCC.
environ['GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX'] = 'TRUE'
else:
# only run auditwheel if we're not crosscompiling
environ['GRPC_RUN_AUDITWHEEL_REPAIR'] = 'TRUE'
# only build the packages that depend on grpcio-tools
# if we're not crosscompiling.
# - they require protoc to run on current architecture
# - they only have sdist packages anyway, so it's useless to build them again
environ['GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS'] = 'TRUE'
return create_docker_jobspec(
self.name,
# NOTE(rbellevi): Do *not* update this without also ensuring the
@@ -375,6 +391,9 @@ def targets():
PythonArtifact('manylinux2010', 'x86', 'cp37-cp37m'),
PythonArtifact('manylinux2010', 'x86', 'cp38-cp38'),
PythonArtifact('manylinux2010', 'x86', 'cp39-cp39'),
PythonArtifact('manylinux2014', 'aarch64', 'cp37-cp37m'),
PythonArtifact('manylinux2014', 'aarch64', 'cp38-cp38'),
PythonArtifact('manylinux2014', 'aarch64', 'cp39-cp39'),
PythonArtifact('linux_extra', 'armv7', '2.7'),
PythonArtifact('linux_extra', 'armv7', '3.5'),
PythonArtifact('linux_extra', 'armv7', '3.6'),

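To summarize the branching added to PythonArtifact above, this is roughly the set of extra environment variables passed to build_artifact_python.sh for crosscompiled versus native wheels (a simplified sketch, not the actual artifact_targets.py code):

def wheel_build_environ(arch):
    # Simplified sketch of the env-var selection in PythonArtifact.
    environ = {}
    if arch == 'aarch64':
        # Crosscompiling: skip the twine check and statically link libstdc++
        # so the wheel passes the auditwheel check (see comments in the diff).
        environ['GRPC_SKIP_TWINE_CHECK'] = 'TRUE'
        environ['GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX'] = 'TRUE'
    else:
        # Native build: run auditwheel repair and also build the packages that
        # depend on grpcio-tools (protoc has to run on the current arch).
        environ['GRPC_RUN_AUDITWHEEL_REPAIR'] = 'TRUE'
        environ['GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS'] = 'TRUE'
    return environ

print(wheel_build_environ('aarch64'))
print(wheel_build_environ('x64'))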
@@ -33,6 +33,24 @@ export GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS=${GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS
mkdir -p "${ARTIFACTS_OUT}"
ARTIFACT_DIR="$PWD/${ARTIFACTS_OUT}"
# check whether we are crosscompiling. AUDITWHEEL_ARCH is set by the dockcross docker image.
if [ "$AUDITWHEEL_ARCH" == "aarch64" ]
then
# when crosscompiling for aarch64, --plat-name needs to be set explicitly
# to end up with a correctly named wheel file
# the value should be manylinuxABC_ARCH and the dockcross docker image
# conveniently provides the value in the AUDITWHEEL_PLAT env variable
WHEEL_PLAT_NAME_FLAG="--plat-name=$AUDITWHEEL_PLAT"
# override the value of EXT_SUFFIX to make sure the crosscompiled .so files in the wheel have the correct filename suffix
GRPC_PYTHON_OVERRIDE_EXT_SUFFIX="$(${PYTHON} -c 'import sysconfig; print(sysconfig.get_config_var("EXT_SUFFIX").replace("-x86_64-linux-gnu.so", "-aarch64-linux-gnu.so"))')"
export GRPC_PYTHON_OVERRIDE_EXT_SUFFIX
# Set to empty string to disable the option (see https://github.com/grpc/grpc/issues/24498)
# TODO: enable ASM optimizations for crosscompiled wheels
export GRPC_BUILD_WITH_BORING_SSL_ASM=""
fi
# Build the source distribution first because MANIFEST.in cannot override
# exclusion of built shared objects among package resources (for some
# inexplicable reason).
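For readability, the inline python -c command a few lines above expands to the following (same logic; run with the build host's interpreter it prints the suffix the crosscompiled extensions should carry):

import sysconfig

# On an x86_64 manylinux build host EXT_SUFFIX looks like
# ".cpython-37m-x86_64-linux-gnu.so"; rewrite the platform part so the
# crosscompiled .so files are named for the aarch64 target.
host_suffix = sysconfig.get_config_var('EXT_SUFFIX')
target_suffix = host_suffix.replace('-x86_64-linux-gnu.so',
                                    '-aarch64-linux-gnu.so')
print(target_suffix)  # exported as GRPC_PYTHON_OVERRIDE_EXT_SUFFIX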
@@ -40,7 +58,8 @@ ${SETARCH_CMD} "${PYTHON}" setup.py sdist
# Wheel has a bug where directories don't get excluded.
# https://bitbucket.org/pypa/wheel/issues/99/cannot-exclude-directory
${SETARCH_CMD} "${PYTHON}" setup.py bdist_wheel
# shellcheck disable=SC2086
${SETARCH_CMD} "${PYTHON}" setup.py bdist_wheel $WHEEL_PLAT_NAME_FLAG
GRPCIO_STRIP_TEMPDIR=$(mktemp -d)
GRPCIO_TAR_GZ_LIST=( dist/grpcio-*.tar.gz )
@@ -78,9 +97,10 @@ mv "${GRPCIO_STRIPPED_TAR_GZ}" "${GRPCIO_TAR_GZ}"
${SETARCH_CMD} "${PYTHON}" tools/distrib/python/grpcio_tools/setup.py sdist
# Build gRPC tools package binary distribution
${SETARCH_CMD} "${PYTHON}" tools/distrib/python/grpcio_tools/setup.py bdist_wheel
# shellcheck disable=SC2086
${SETARCH_CMD} "${PYTHON}" tools/distrib/python/grpcio_tools/setup.py bdist_wheel $WHEEL_PLAT_NAME_FLAG
if [ "$GRPC_BUILD_MANYLINUX_WHEEL" != "" ]
if [ "$GRPC_RUN_AUDITWHEEL_REPAIR" != "" ]
then
for wheel in dist/*.whl; do
"${AUDITWHEEL}" show "$wheel" | tee /dev/stderr | grep -E -w "$AUDITWHEEL_PLAT"
@@ -136,12 +156,15 @@ then
cp -r src/python/grpcio_status/dist/* "$ARTIFACT_DIR"
fi
# Ensure the generated artifacts are valid.
"${PYTHON}" -m pip install virtualenv
"${PYTHON}" -m virtualenv venv || { "${PYTHON}" -m pip install virtualenv==16.7.9 && "${PYTHON}" -m virtualenv venv; }
venv/bin/python -m pip install "twine<=2.0"
venv/bin/python -m twine check dist/* tools/distrib/python/grpcio_tools/dist/*
rm -rf venv/
if [ "$GRPC_SKIP_TWINE_CHECK" == "" ]
then
# Ensure the generated artifacts are valid.
"${PYTHON}" -m pip install virtualenv
"${PYTHON}" -m virtualenv venv || { "${PYTHON}" -m pip install virtualenv==16.7.9 && "${PYTHON}" -m virtualenv venv; }
venv/bin/python -m pip install "twine<=2.0"
venv/bin/python -m twine check dist/* tools/distrib/python/grpcio_tools/dist/*
rm -rf venv/
fi
cp -r dist/* "$ARTIFACT_DIR"
cp -r tools/distrib/python/grpcio_tools/dist/* "$ARTIFACT_DIR"

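Not part of this change, but a handy local sanity check: list the shared objects inside the produced wheels to confirm the aarch64 EXT_SUFFIX and plat-name overrides took effect (a hypothetical helper; paths assume the dist/ layout used above):

import glob
import zipfile

# Hypothetical post-build check: print each wheel and the shared objects it
# contains so the target-architecture file names can be verified.
for wheel in sorted(glob.glob('dist/*.whl')):
    with zipfile.ZipFile(wheel) as wf:
        so_files = [name for name in wf.namelist() if name.endswith('.so')]
    print(wheel)
    for so in so_files:
        print('  ', so)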