Add Python3.5 artifact targets

pull/7732/head
Ken Payson 8 years ago
parent 10776b99cf
commit 5998cd760c
Changed files:
  1. PYTHON-MANIFEST.in (2)
  2. setup.py (62)
  3. src/python/grpcio/_spawn_patch.py (62)
  4. tools/distrib/python/grpcio_tools/setup.py (48)
  5. tools/distrib/python/make_grpcio_tools.py (79)
  6. tools/run_tests/artifact_targets.py (70)
  7. tools/run_tests/build_artifact_python.bat (51)
  8. tools/run_tests/build_artifact_python.sh (36)

@@ -7,7 +7,7 @@ graft include/grpc
graft third_party/boringssl
graft third_party/nanopb
graft third_party/zlib
include src/python/grpcio/_unixccompiler_patch.py
include src/python/grpcio/_spawn_patch.py
include src/python/grpcio/commands.py
include src/python/grpcio/grpc_version.py
include src/python/grpcio/grpc_core_dependencies.py

@@ -28,7 +28,7 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A setup module for the GRPC Python package."""
from distutils import cygwinccompiler
from distutils import extension as _extension
from distutils import util
import os
@@ -58,14 +58,12 @@ os.chdir(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.abspath(PYTHON_STEM))
# Break import-style to ensure we can actually find our in-repo dependencies.
import _unixccompiler_patch
import _spawn_patch
import commands
import grpc_core_dependencies
import grpc_version
if 'win32' in sys.platform:
_unixccompiler_patch.monkeypatch_unix_compiler()
_spawn_patch.monkeypatch_spawn()
LICENSE = '3-clause BSD'
@@ -83,9 +81,38 @@ ENABLE_CYTHON_TRACING = os.environ.get(
# entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support.
# We use these environment variables to thus get around that without locking
# ourselves in w.r.t. the multitude of operating systems this ought to build on.
# By default we assume a GCC-like compiler.
EXTRA_COMPILE_ARGS = shlex.split(os.environ.get('GRPC_PYTHON_CFLAGS', ''))
EXTRA_LINK_ARGS = shlex.split(os.environ.get('GRPC_PYTHON_LDFLAGS', ''))
# We can also use these variables as a way to inject environment-specific
# compiler/linker flags. We assume GCC-like compilers and/or MinGW as a
# reasonable default.
EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None)
EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None)
if EXTRA_ENV_COMPILE_ARGS is None:
EXTRA_ENV_COMPILE_ARGS = ''
if 'win32' in sys.platform and sys.version_info < (3, 5):
# We use define flags here and don't directly add to DEFINE_MACROS below to
# ensure that the expert user/builder has a way of turning it off (via the
# envvars) without adding yet more GRPC-specific envvars.
# See https://sourceforge.net/p/mingw-w64/bugs/363/
if '32' in platform.architecture()[0]:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s'
else:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64'
elif "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_COMPILE_ARGS += ' -fno-wrapv'
if EXTRA_ENV_LINK_ARGS is None:
EXTRA_ENV_LINK_ARGS = ''
if "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_LINK_ARGS += ' -lpthread'
elif "win32" in sys.platform and sys.version_info < (3, 5):
msvcr = cygwinccompiler.get_msvcr()[0]
# TODO(atash) sift through the GCC specs to see if libstdc++ can have any
# influence on the linkage outcome on MinGW for non-C++ programs.
EXTRA_ENV_LINK_ARGS += (
' -static-libgcc -static-libstdc++ -mcrtdll={msvcr} '
'-static'.format(msvcr=msvcr))
EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)
CYTHON_EXTENSION_PACKAGE_NAMES = ()
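Note on the flag handling above: when GRPC_PYTHON_CFLAGS or GRPC_PYTHON_LDFLAGS is set, it fully replaces the computed defaults, because the platform-specific additions only run when the environment variable is absent. A minimal sketch of that override path, with illustrative flag values:

    # Sketch of the override path: setting the env vars replaces the defaults
    # computed above (the flag values here are illustrative, not required).
    import os
    import shlex

    os.environ['GRPC_PYTHON_CFLAGS'] = '-fno-wrapv -O2'
    os.environ['GRPC_PYTHON_LDFLAGS'] = '-lpthread'

    # setup.py reads the variables and shell-splits them:
    extra_compile_args = shlex.split(os.environ.get('GRPC_PYTHON_CFLAGS', ''))
    extra_link_args = shlex.split(os.environ.get('GRPC_PYTHON_LDFLAGS', ''))
    print(extra_compile_args)  # ['-fno-wrapv', '-O2']
    print(extra_link_args)     # ['-lpthread']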
@@ -104,28 +131,29 @@ if "linux" in sys.platform:
if not "win32" in sys.platform:
EXTENSION_LIBRARIES += ('m',)
if "win32" in sys.platform:
EXTENSION_LIBRARIES += ('ws2_32',)
EXTENSION_LIBRARIES += ('advapi32', 'ws2_32',)
DEFINE_MACROS = (
('OPENSSL_NO_ASM', 1), ('_WIN32_WINNT', 0x600),
('GPR_BACKWARDS_COMPATIBILITY_MODE', 1),)
if "win32" in sys.platform:
DEFINE_MACROS += (('OPENSSL_WINDOWS', 1), ('WIN32_LEAN_AND_MEAN', 1),)
DEFINE_MACROS += (('WIN32_LEAN_AND_MEAN', 1),)
if '64bit' in platform.architecture()[0]:
DEFINE_MACROS += (('MS_WIN64', 1),)
elif sys.version_info >= (3, 5):
# For some reason, this is needed to get access to inet_pton/inet_ntop
# on msvc, but only for 32 bits
DEFINE_MACROS += (('NTDDI_VERSION', 0x06000000),)
LDFLAGS = tuple(EXTRA_LINK_ARGS)
CFLAGS = tuple(EXTRA_COMPILE_ARGS)
if "linux" in sys.platform:
LDFLAGS += ('-Wl,-wrap,memcpy',)
if "linux" in sys.platform or "darwin" in sys.platform:
CFLAGS += ('-fvisibility=hidden',)
pymodinit_type = 'PyObject*' if PY3 else 'void'
pymodinit = '__attribute__((visibility ("default"))) {}'.format(pymodinit_type)
DEFINE_MACROS += (('PyMODINIT_FUNC', pymodinit),)
CFLAGS += ('-fvisibility=hidden',)
pymodinit_type = 'PyObject*' if PY3 else 'void'
pymodinit = '__attribute__((visibility ("default"))) {}'.format(pymodinit_type)
DEFINE_MACROS += (('PyMODINIT_FUNC', pymodinit),)
# By default, Python3 distutils enforces compatibility of
# c plugins (.so files) with the OSX version Python3 was built with.

@@ -27,51 +27,47 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Covers inadequacies in distutils."""
"""Patches the spawn() command for windows compilers.
Windows has an 8191 character command line limit, but some compilers
support an @command_file directive where command_file is a file
containing the full command line.
"""
from distutils import ccompiler
from distutils import errors
from distutils import unixccompiler
import os
import os.path
import shlex
import shutil
import sys
import tempfile
def _unix_commandfile_spawn(self, command):
"""Wrapper around distutils.util.spawn that attempts to use command files.
MAX_COMMAND_LENGTH = 8191
Meant to replace the CCompiler method `spawn` on UnixCCompiler and its
derivatives (e.g. the MinGW32 compiler).
_classic_spawn = ccompiler.CCompiler.spawn
Some commands like `gcc` (and friends like `clang`) support command files to
work around shell command length limits.
"""
# Sometimes distutils embeds the executables as full strings including some
# hard-coded flags rather than as lists.
command = list(shlex.split(command[0])) + list(command[1:])
command_base = os.path.basename(command[0].strip())
if command_base == 'ccache':
command_base = command[:2]
command_args = command[2:]
elif command_base.startswith('ccache') or command_base in ['gcc', 'clang', 'clang++', 'g++']:
command_base = command[:1]
command_args = command[1:]
def _commandfile_spawn(self, command):
command_length = sum([len(arg) for arg in command])
if os.name == 'nt' and command_length > MAX_COMMAND_LENGTH:
# Even if this command doesn't support the @command_file, it will
# fail as is so we try blindly
print('Command line length exceeded, using command file')
print(' '.join(command))
temporary_directory = tempfile.mkdtemp()
command_filename = os.path.abspath(
os.path.join(temporary_directory, 'command'))
with open(command_filename, 'w') as command_file:
escaped_args = ['"' + arg.replace('\\', '\\\\') + '"' for arg in command[1:]]
command_file.write(' '.join(escaped_args))
modified_command = command[:1] + ['@{}'.format(command_filename)]
try:
_classic_spawn(self, modified_command)
finally:
shutil.rmtree(temporary_directory)
else:
return ccompiler.CCompiler.spawn(self, command)
temporary_directory = tempfile.mkdtemp()
command_filename = os.path.abspath(os.path.join(temporary_directory, 'command'))
with open(command_filename, 'w') as command_file:
escaped_args = [arg.replace('\\', '\\\\') for arg in command_args]
command_file.write(' '.join(escaped_args))
modified_command = command_base + ['@{}'.format(command_filename)]
result = ccompiler.CCompiler.spawn(self, modified_command)
shutil.rmtree(temporary_directory)
return result
_classic_spawn(self, command)
def monkeypatch_unix_compiler():
def monkeypatch_spawn():
"""Monkeypatching is dumb, but it's either that or we become maintainers of
something much, much bigger."""
unixccompiler.UnixCCompiler.spawn = _unix_commandfile_spawn
ccompiler.CCompiler.spawn = _commandfile_spawn
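The patched spawn only rewrites a command on Windows (os.name == 'nt') and only when the joined arguments exceed MAX_COMMAND_LENGTH; setup.py calls monkeypatch_spawn() once at import time so every compiler invocation goes through it. A standalone sketch of the @command_file rewrite, with an illustrative command line:

    # Standalone sketch of the @command_file rewrite performed above; the
    # compiler name and source files are illustrative.
    import os
    import tempfile

    MAX_COMMAND_LENGTH = 8191
    command = ['gcc', '-c'] + ['src/file_{}.c'.format(i) for i in range(2000)]

    if sum(len(arg) for arg in command) > MAX_COMMAND_LENGTH:
        temporary_directory = tempfile.mkdtemp()
        command_filename = os.path.join(temporary_directory, 'command')
        with open(command_filename, 'w') as command_file:
            # each argument is quoted and backslashes are escaped, as above
            escaped_args = ['"' + arg.replace('\\', '\\\\') + '"'
                            for arg in command[1:]]
            command_file.write(' '.join(escaped_args))
        command = command[:1] + ['@{}'.format(command_filename)]

    print(command)  # ['gcc', '@/tmp/.../command']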

@@ -27,6 +27,7 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from distutils import cygwinccompiler
from distutils import extension
from distutils import util
import errno
@@ -57,11 +58,38 @@ PY3 = sys.version_info.major == 3
# entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support.
# We use these environment variables to thus get around that without locking
# ourselves in w.r.t. the multitude of operating systems this ought to build on.
# By default we assume a GCC-like compiler.
EXTRA_COMPILE_ARGS = shlex.split(os.environ.get('GRPC_PYTHON_CFLAGS',
'-fno-wrapv -frtti -std=c++11'))
EXTRA_LINK_ARGS = shlex.split(os.environ.get('GRPC_PYTHON_LDFLAGS',
'-lpthread'))
# We can also use these variables as a way to inject environment-specific
# compiler/linker flags. We assume GCC-like compilers and/or MinGW as a
# reasonable default.
EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None)
EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None)
if EXTRA_ENV_COMPILE_ARGS is None:
EXTRA_ENV_COMPILE_ARGS = '-std=c++11'
if 'win32' in sys.platform and sys.version_info < (3, 5):
# We use define flags here and don't directly add to DEFINE_MACROS below to
# ensure that the expert user/builder has a way of turning it off (via the
# envvars) without adding yet more GRPC-specific envvars.
# See https://sourceforge.net/p/mingw-w64/bugs/363/
if '32' in platform.architecture()[0]:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s'
else:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64'
elif "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_COMPILE_ARGS += ' -fno-wrapv -frtti'
if EXTRA_ENV_LINK_ARGS is None:
EXTRA_ENV_LINK_ARGS = ''
if "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_LINK_ARGS += ' -lpthread'
elif "win32" in sys.platform and sys.version_info < (3, 5):
msvcr = cygwinccompiler.get_msvcr()[0]
# TODO(atash) sift through the GCC specs to see if libstdc++ can have any
# influence on the linkage outcome on MinGW for non-C++ programs.
EXTRA_ENV_LINK_ARGS += (
' -static-libgcc -static-libstdc++ -mcrtdll={msvcr} '
'-static'.format(msvcr=msvcr))
EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)
CC_FILES = [
os.path.normpath(cc_file) for cc_file in protoc_lib_deps.CC_FILES]
@@ -73,9 +101,13 @@ PROTO_INCLUDE = os.path.normpath(protoc_lib_deps.PROTO_INCLUDE)
GRPC_PYTHON_TOOLS_PACKAGE = 'grpc.tools'
GRPC_PYTHON_PROTO_RESOURCES_NAME = '_proto'
DEFINE_MACROS = (('HAVE_PTHREAD', 1),)
if "win32" in sys.platform and '64bit' in platform.architecture()[0]:
DEFINE_MACROS += (('MS_WIN64', 1),)
DEFINE_MACROS = ()
if "win32" in sys.platform:
DEFINE_MACROS += (('WIN32_LEAN_AND_MEAN', 1),)
if '64bit' in platform.architecture()[0]:
DEFINE_MACROS += (('MS_WIN64', 1),)
elif "linux" in sys.platform or "darwin" in sys.platform:
DEFINE_MACROS += (('HAVE_PTHREAD', 1),)
# By default, Python3 distutils enforces compatibility of
# c plugins (.so files) with the OSX version Python3 was built with.

@@ -88,22 +88,23 @@ GRPC_ROOT = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)),
'..', '..', '..'))
GRPC_PYTHON_ROOT = os.path.join(GRPC_ROOT, 'tools/distrib/python/grpcio_tools')
GRPC_PYTHON_ROOT = os.path.join(GRPC_ROOT, 'tools', 'distrib',
'python', 'grpcio_tools')
GRPC_PYTHON_PROTOBUF_RELATIVE_ROOT = 'third_party/protobuf/src'
GRPC_PYTHON_PROTOBUF_RELATIVE_ROOT = os.path.join('third_party', 'protobuf', 'src')
GRPC_PROTOBUF = os.path.join(GRPC_ROOT, GRPC_PYTHON_PROTOBUF_RELATIVE_ROOT)
GRPC_PROTOC_PLUGINS = os.path.join(GRPC_ROOT, 'src/compiler')
GRPC_PYTHON_PROTOBUF = os.path.join(GRPC_PYTHON_ROOT,
'third_party/protobuf/src')
GRPC_PYTHON_PROTOC_PLUGINS = os.path.join(GRPC_PYTHON_ROOT,
'grpc_root/src/compiler')
GRPC_PROTOC_PLUGINS = os.path.join(GRPC_ROOT, 'src', 'compiler')
GRPC_PYTHON_PROTOBUF = os.path.join(GRPC_PYTHON_ROOT, 'third_party', 'protobuf',
'src')
GRPC_PYTHON_PROTOC_PLUGINS = os.path.join(GRPC_PYTHON_ROOT, 'grpc_root', 'src',
'compiler')
GRPC_PYTHON_PROTOC_LIB_DEPS = os.path.join(GRPC_PYTHON_ROOT,
'protoc_lib_deps.py')
GRPC_INCLUDE = os.path.join(GRPC_ROOT, 'include')
GRPC_PYTHON_INCLUDE = os.path.join(GRPC_PYTHON_ROOT, 'grpc_root/include')
GRPC_PYTHON_INCLUDE = os.path.join(GRPC_PYTHON_ROOT, 'grpc_root', 'include')
BAZEL_DEPS = os.path.join(GRPC_ROOT, 'tools/distrib/python/bazel_deps.sh')
BAZEL_DEPS = os.path.join(GRPC_ROOT, 'tools', 'distrib', 'python', 'bazel_deps.sh')
BAZEL_DEPS_PROTOC_LIB_QUERY = '//:protoc_lib'
BAZEL_DEPS_COMMON_PROTOS_QUERY = '//:well_known_protos'
@@ -136,64 +137,6 @@ def long_path(path):
else:
return path
def atomic_file_copy(src, dst):
"""Based on the lock-free-whack-a-mole algorithm, depending on filesystem
renaming being atomic. Described at http://stackoverflow.com/a/28090883.
"""
try:
if filecmp.cmp(src, dst):
return
except:
pass
dst_dir = os.path.abspath(os.path.dirname(dst))
dst_base = os.path.basename(dst)
this_id = str(uuid.uuid4()).replace('.', '-')
temporary_file = os.path.join(dst_dir, '{}.{}.tmp'.format(dst_base, this_id))
mole_file = os.path.join(dst_dir, '{}.{}.mole.tmp'.format(dst_base, this_id))
mole_pattern = os.path.join(dst_dir, '{}.*.mole.tmp'.format(dst_base))
src = long_path(src)
dst = long_path(dst)
temporary_file = long_path(temporary_file)
mole_file = long_path(mole_file)
mole_pattern = long_path(mole_pattern)
shutil.copy2(src, temporary_file)
try:
os.rename(temporary_file, mole_file)
except:
print('Error moving temporary file {} to {}'.format(temporary_file, mole_file), file=sys.stderr)
print('while trying to copy file {} to {}'.format(src, dst), file=sys.stderr)
raise
for other_file in glob.glob(mole_pattern):
other_id = other_file.split('.')[-3]
if this_id == other_id:
pass
elif this_id < other_id:
try:
os.remove(other_file)
except:
pass
else:
try:
os.remove(mole_file)
except:
pass
this_id = other_id
mole_file = other_file
try:
if filecmp.cmp(src, dst):
try:
os.remove(mole_file)
except:
pass
return
except:
pass
try:
os.rename(mole_file, dst)
except:
pass
def main():
os.chdir(GRPC_ROOT)
@@ -211,7 +154,7 @@ def main():
for relative_file in files:
source_file = os.path.abspath(os.path.join(source_dir, relative_file))
target_file = os.path.abspath(os.path.join(target_dir, relative_file))
atomic_file_copy(source_file, target_file)
shutil.copyfile(source_file, target_file)
try:
protoc_lib_deps_content = get_deps()
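With every artifact build now running in its own temporary directory (see the build scripts below), concurrent copies of the same file no longer race, so the lock-free atomic_file_copy helper is dropped in favour of a plain shutil.copyfile. A minimal sketch of the resulting copy loop, assuming a source/target layout like the one this script mirrors:

    # Minimal sketch of the simplified copy step; directory names are
    # illustrative, the real ones come from the constants defined above.
    import os
    import shutil

    source_dir = 'third_party/protobuf/src'
    target_dir = 'tools/distrib/python/grpcio_tools/third_party/protobuf/src'

    for root, _, files in os.walk(source_dir):
        for name in files:
            source_file = os.path.join(root, name)
            relative_file = os.path.relpath(source_file, source_dir)
            target_file = os.path.join(target_dir, relative_file)
            target_parent = os.path.dirname(target_file)
            if not os.path.isdir(target_parent):
                os.makedirs(target_parent)
            shutil.copyfile(source_file, target_file)  # plain copy, no atomic rename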

@@ -31,6 +31,8 @@
"""Definition of targets to build artifacts."""
import os.path
import random
import string
import sys
import jobset
@@ -79,27 +81,16 @@ _ARCH_FLAG_MAP = {
'x64': '-m64'
}
python_windows_version_arch_map = {
('x86', '2.7'): 'Python27_32bits',
('x64', '2.7'): 'Python27',
('x86', '3.4'): 'Python34_32bits',
('x64', '3.4'): 'Python34',
}
class PythonArtifact:
"""Builds Python artifacts."""
def __init__(self, platform, arch, python_version, manylinux_build=None):
if manylinux_build:
self.name = 'python%s_%s_%s_%s' % (python_version, platform, arch, manylinux_build)
else:
self.name = 'python%s_%s_%s' % (python_version, platform, arch)
def __init__(self, platform, arch, py_version):
self.name = 'python_%s_%s_%s' % (platform, arch, py_version)
self.platform = platform
self.arch = arch
self.labels = ['artifact', 'python', python_version, platform, arch]
self.python_version = python_version
self.python_windows_prefix = python_windows_version_arch_map[arch, python_version]
self.manylinux_build = manylinux_build
self.labels = ['artifact', 'python', platform, arch, py_version]
self.py_version = py_version
def pre_build_jobspecs(self):
return []
@@ -111,8 +102,8 @@ class PythonArtifact:
environ['SETARCH_CMD'] = 'linux32'
# Inside the manylinux container, the python installations are located in
# special places...
environ['PYTHON'] = '/opt/python/{}/bin/python'.format(self.manylinux_build)
environ['PIP'] = '/opt/python/{}/bin/pip'.format(self.manylinux_build)
environ['PYTHON'] = '/opt/python/{}/bin/python'.format(self.py_version)
environ['PIP'] = '/opt/python/{}/bin/pip'.format(self.py_version)
# Platform autodetection for the manylinux1 image breaks so we set the
# defines ourselves.
# TODO(atash) get better platform-detection support in core so we don't
@@ -126,14 +117,24 @@ class PythonArtifact:
environ=environ,
timeout_seconds=60*60)
elif self.platform == 'windows':
if 'Python27' in self.py_version or 'Python34' in self.py_version:
environ['EXT_COMPILER'] = 'mingw32'
else:
environ['EXT_COMPILER'] = 'msvc'
# For some reason, the batch script %random% always runs with the same
# seed. We create a random temp-dir here
dir = ''.join(random.choice(string.ascii_uppercase) for _ in range(10))
return create_jobspec(self.name,
['tools\\run_tests\\build_artifact_python.bat',
self.python_windows_prefix,
'32' if self.arch == 'x86' else '64'
self.py_version,
'32' if self.arch == 'x86' else '64',
dir
],
environ=environ,
shell=True)
else:
environ['PYTHON'] = 'python{}'.format(self.python_version)
environ['PYTHON'] = self.py_version
environ['SKIP_PIP_INSTALL'] = 'TRUE'
return create_jobspec(self.name,
['tools/run_tests/build_artifact_python.sh'],
environ=environ)
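With this change the third constructor argument is the interpreter identifier itself (a manylinux cpXY-cpXYm tag on Linux, a pythonX.Y executable name on macOS, a Python3x install directory name on Windows) instead of a bare version string plus a per-platform lookup table. A sketch of how that single identifier is reused, mirroring the logic above:

    # Sketch of how the single py_version identifier is reused (mirrors the
    # class above; not the real PythonArtifact, which lives in this file).
    class PythonArtifactSketch(object):
        def __init__(self, platform, arch, py_version):
            self.name = 'python_%s_%s_%s' % (platform, arch, py_version)
            self.platform = platform
            self.arch = arch
            self.py_version = py_version

        def interpreter(self):
            if self.platform == 'linux':
                # manylinux containers keep each interpreter under /opt/python
                return '/opt/python/{}/bin/python'.format(self.py_version)
            # macOS uses the executable name, Windows the install dir name
            return self.py_version

    print(PythonArtifactSketch('linux', 'x64', 'cp35-cp35m').interpreter())
    # /opt/python/cp35-cp35m/bin/python
    print(PythonArtifactSketch('windows', 'x86', 'Python35_32bits').name)
    # python_windows_x86_Python35_32bits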
@@ -330,18 +331,23 @@ def targets():
for Cls in (CSharpExtArtifact, NodeExtArtifact, ProtocArtifact)
for platform in ('linux', 'macos', 'windows')
for arch in ('x86', 'x64')] +
[PythonArtifact('linux', 'x86', '2.7', 'cp27-cp27m'),
PythonArtifact('linux', 'x86', '2.7', 'cp27-cp27mu'),
PythonArtifact('linux', 'x64', '2.7', 'cp27-cp27m'),
PythonArtifact('linux', 'x64', '2.7', 'cp27-cp27mu'),
PythonArtifact('macos', 'x64', '2.7'),
PythonArtifact('windows', 'x86', '2.7'),
PythonArtifact('windows', 'x64', '2.7'),
PythonArtifact('linux', 'x86', '3.4', 'cp34-cp34m'),
PythonArtifact('linux', 'x64', '3.4', 'cp34-cp34m'),
PythonArtifact('macos', 'x64', '3.4'),
PythonArtifact('windows', 'x86', '3.4'),
PythonArtifact('windows', 'x64', '3.4'),
[PythonArtifact('linux', 'x86', 'cp27-cp27m'),
PythonArtifact('linux', 'x86', 'cp27-cp27mu'),
PythonArtifact('linux', 'x86', 'cp34-cp34m'),
PythonArtifact('linux', 'x86', 'cp35-cp35m'),
PythonArtifact('linux', 'x64', 'cp27-cp27m'),
PythonArtifact('linux', 'x64', 'cp27-cp27mu'),
PythonArtifact('linux', 'x64', 'cp34-cp34m'),
PythonArtifact('linux', 'x64', 'cp35-cp35m'),
PythonArtifact('macos', 'x64', 'python2.7'),
PythonArtifact('macos', 'x64', 'python3.4'),
PythonArtifact('macos', 'x64', 'python3.5'),
PythonArtifact('windows', 'x86', 'Python27_32bits'),
PythonArtifact('windows', 'x86', 'Python34_32bits'),
PythonArtifact('windows', 'x86', 'Python35_32bits'),
PythonArtifact('windows', 'x64', 'Python27'),
PythonArtifact('windows', 'x64', 'Python34'),
PythonArtifact('windows', 'x64', 'Python35'),
RubyArtifact('linux', 'x86'),
RubyArtifact('linux', 'x64'),
RubyArtifact('macos', 'x64'),

@@ -34,56 +34,45 @@ pip install --upgrade six
pip install --upgrade setuptools
pip install -rrequirements.txt
@rem Because this is windows and *everything seems to hate Windows* we have to
@rem set all of these flags ourselves because Python won't help us (see the
@rem setup.py of the grpcio_tools project).
set GRPC_PYTHON_CFLAGS=-fno-wrapv -frtti -std=c++11
@rem See https://sourceforge.net/p/mingw-w64/bugs/363/
if %2 == 32 (
set GRPC_PYTHON_CFLAGS=%GRPC_PYTHON_CFLAGS% -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s
) else (
set GRPC_PYTHON_CFLAGS=%GRPC_PYTHON_CFLAGS% -D_ftime=_ftime64 -D_timeb=__timeb64
)
@rem Further confusing things, MSYS2's mingw64 tries to dynamically link
@rem libgcc, libstdc++, and winpthreads. We have to override this or our
@rem extensions end up linking to MSYS2 DLLs, which the normal Python on
@rem Windows user won't have... and ON TOP OF THIS, there's MinGW's GCC default
@rem behavior of linking msvcrt.dll as the C runtime library, which we need to
@rem override so that Python's distutils doesn't link us against multiple C
@rem runtimes.
python -c "from distutils.cygwinccompiler import get_msvcr; print(get_msvcr()[0])" > temp.txt
set /p PYTHON_MSVCR=<temp.txt
set GRPC_PYTHON_LDFLAGS=-static-libgcc -static-libstdc++ -mcrtdll=%PYTHON_MSVCR% -static -lpthread
set GRPC_PYTHON_BUILD_WITH_CYTHON=1
@rem Multiple builds are running simultaneously, so to avoid distutils
@rem file collisions, we build everything in a tmp directory
if not exist "artifacts" mkdir "artifacts"
set ARTIFACT_DIR=%cd%\artifacts
set BUILD_DIR=C:\Windows\Temp\pygrpc-%3\
mkdir %BUILD_DIR%
xcopy /s/e/q %cd%\* %BUILD_DIR%
pushd %BUILD_DIR%
@rem Set up gRPC Python tools
python tools\distrib\python\make_grpcio_tools.py
@rem Build gRPC Python extensions
python setup.py build_ext -c mingw32
python setup.py build_ext -c %EXT_COMPILER% || goto :error
pushd tools\distrib\python\grpcio_tools
python setup.py build_ext -c mingw32
python setup.py build_ext -c %EXT_COMPILER% || goto :error
popd
@rem Build gRPC Python distributions
python setup.py bdist_wheel
python setup.py bdist_wheel || goto :error
pushd tools\distrib\python\grpcio_tools
python setup.py bdist_wheel
python setup.py bdist_wheel || goto :error
popd
mkdir artifacts
xcopy /Y /I /S dist\* artifacts\ || goto :error
xcopy /Y /I /S tools\distrib\python\grpcio_tools\dist\* artifacts\ || goto :error
xcopy /Y /I /S dist\* %ARTIFACT_DIR% || goto :error
xcopy /Y /I /S tools\distrib\python\grpcio_tools\dist\* %ARTIFACT_DIR% || goto :error
popd
rmdir /s /q %BUILD_DIR%
goto :EOF
:error
popd
rmdir /s /q %BUILD_DIR%
exit /b 1
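The batch script now mirrors the whole tree into a uniquely named directory under C:\Windows\Temp, builds there, and copies the wheels back, so parallel artifact jobs cannot collide on distutils' build\ and dist\ output. A rough Python equivalent of the same isolation pattern (paths and the build command are illustrative):

    # Rough Python sketch of the build-in-a-temp-dir isolation used above;
    # the copied tree, build command, and paths are illustrative.
    import os
    import shutil
    import subprocess
    import sys
    import tempfile

    artifact_dir = os.path.join(os.getcwd(), 'artifacts')
    if not os.path.isdir(artifact_dir):
        os.makedirs(artifact_dir)

    build_dir = tempfile.mkdtemp(prefix='pygrpc-')
    try:
        tree = os.path.join(build_dir, 'src')
        shutil.copytree(os.getcwd(), tree, ignore=shutil.ignore_patterns('artifacts'))
        subprocess.check_call([sys.executable, 'setup.py', 'bdist_wheel'], cwd=tree)
        for wheel in os.listdir(os.path.join(tree, 'dist')):
            shutil.copy(os.path.join(tree, 'dist', wheel), artifact_dir)
    finally:
        shutil.rmtree(build_dir, ignore_errors=True)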

@@ -38,38 +38,42 @@ export PYTHON=${PYTHON:-python}
export PIP=${PIP:-pip}
export AUDITWHEEL=${AUDITWHEEL:-auditwheel}
# Because multiple builds run in parallel, some distutils file
# operations may collide. To avoid this, each build is run in
# a temp directory
mkdir -p artifacts
ARTIFACT_DIR="$PWD/artifacts"
BUILD_DIR=`mktemp -d "${TMPDIR:-/tmp}/pygrpc.XXXXXX"`
trap "rm -rf $BUILD_DIR" EXIT
cp -r * "$BUILD_DIR"
cd "$BUILD_DIR"
# Build the source distribution first because MANIFEST.in cannot override
# exclusion of built shared objects among package resources (for some
# inexplicable reason).
${SETARCH_CMD} ${PYTHON} setup.py \
sdist
${SETARCH_CMD} ${PYTHON} setup.py sdist
# Wheel has a bug where directories don't get excluded.
# https://bitbucket.org/pypa/wheel/issues/99/cannot-exclude-directory
${SETARCH_CMD} ${PYTHON} setup.py \
bdist_wheel
${SETARCH_CMD} ${PYTHON} setup.py bdist_wheel
# Build gRPC tools package distribution
${PYTHON} tools/distrib/python/make_grpcio_tools.py
# Build gRPC tools package source distribution
${SETARCH_CMD} ${PYTHON} tools/distrib/python/grpcio_tools/setup.py \
sdist
${SETARCH_CMD} ${PYTHON} tools/distrib/python/grpcio_tools/setup.py sdist
# Build gRPC tools package binary distribution
CFLAGS="$CFLAGS -fno-wrapv" ${SETARCH_CMD} \
${PYTHON} tools/distrib/python/grpcio_tools/setup.py bdist_wheel
${SETARCH_CMD} ${PYTHON} tools/distrib/python/grpcio_tools/setup.py bdist_wheel
mkdir -p artifacts
if [ "$BUILD_MANYLINUX_WHEEL" != "" ]
then
for wheel in dist/*.whl; do
${AUDITWHEEL} repair $wheel -w artifacts/
${AUDITWHEEL} repair $wheel -w "$ARTIFACT_DIR"
rm $wheel
done
for wheel in tools/distrib/python/grpcio_tools/dist/*.whl; do
${AUDITWHEEL} repair $wheel -w artifacts/
${AUDITWHEEL} repair $wheel -w "$ARTIFACT_DIR"
rm $wheel
done
fi
@@ -81,14 +85,14 @@ fi
if [ "$BUILD_HEALTH_CHECKING" != "" ]
then
${PIP} install -rrequirements.txt
${PIP} install grpcio --no-index --find-links "file://${PWD}/artifacts/"
${PIP} install grpcio-tools --no-index --find-links "file://${PWD}/artifacts/"
${PIP} install grpcio --no-index --find-links "file://$ARTIFACT_DIR/"
${PIP} install grpcio-tools --no-index --find-links "file://$ARTIFACT_DIR/"
# Build gRPC health check source distribution
${SETARCH_CMD} ${PYTHON} src/python/grpcio_health_checking/setup.py \
preprocess build_package_protos sdist
cp -r src/python/grpcio_health_checking/dist/* artifacts
cp -r src/python/grpcio_health_checking/dist/* "$ARTIFACT_DIR"
fi
cp -r dist/* artifacts
cp -r tools/distrib/python/grpcio_tools/dist/* artifacts
cp -r dist/* "$ARTIFACT_DIR"
cp -r tools/distrib/python/grpcio_tools/dist/* "$ARTIFACT_DIR"
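On manylinux builds, every wheel is additionally passed through auditwheel repair, which bundles the native dependencies and retags the wheel before it lands in $ARTIFACT_DIR; only the repaired wheel is kept. The same loop, sketched in Python (the auditwheel command line matches the script above):

    # Python sketch of the auditwheel repair loop above; `auditwheel repair
    # WHEEL -w DEST` rewrites the wheel with a manylinux platform tag.
    import glob
    import os
    import subprocess

    artifact_dir = os.path.join(os.getcwd(), 'artifacts')
    wheels = glob.glob('dist/*.whl') + glob.glob(
        'tools/distrib/python/grpcio_tools/dist/*.whl')
    for wheel in wheels:
        subprocess.check_call(['auditwheel', 'repair', wheel, '-w', artifact_dir])
        os.remove(wheel)  # ship only the repaired manylinux wheel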
