Merge remote-tracking branch 'upstream/master' into alarm_openloop

pull/5201/head
Vijay Pai 9 years ago
parent 8ca0faaf52
commit b48f4c830a
15 changed files (line-change counts in parentheses):

  1. PYTHON-MANIFEST.in (1)
  2. package.json (3)
  3. requirements.txt (1)
  4. setup.py (43)
  5. src/python/grpcio/README.rst (38)
  6. src/python/grpcio/commands.py (133)
  7. src/python/grpcio/precompiled.py (102)
  8. templates/package.json.template (3)
  9. test/distrib/node/run_distrib_test.sh (3)
  10. tools/dockerfile/distribtest/python_fedora21_x64/Dockerfile (5)
  11. tools/dockerfile/distribtest/ruby_fedora21_x64/Dockerfile (5)
  12. tools/run_tests/build_artifact_node.sh (4)
  13. tools/run_tests/build_artifact_python.sh (12)
  14. tools/run_tests/build_package_node.sh (1)
  15. tools/run_tests/distribtest_targets.py (10)

PYTHON-MANIFEST.in
@@ -7,6 +7,7 @@ graft third_party/zlib
include src/python/grpcio/commands.py
include src/python/grpcio/grpc_version.py
include src/python/grpcio/grpc_core_dependencies.py
include src/python/grpcio/precompiled.py
include src/python/grpcio/support.py
include src/python/grpcio/README.rst
include requirements.txt

package.json
@@ -23,13 +23,12 @@
"test": "./node_modules/.bin/mocha src/node/test && npm run-script lint",
"gen_docs": "./node_modules/.bin/jsdoc -c src/node/jsdoc_conf.json",
"coverage": "./node_modules/.bin/istanbul cover ./node_modules/.bin/_mocha src/node/test",
"preinstall": "npm install node-pre-gyp",
"install": "./node_modules/.bin/node-pre-gyp install --fallback-to-build"
},
"bundledDependencies": ["node-pre-gyp"],
"dependencies": {
"lodash": "^3.9.3",
"nan": "^2.0.0",
"node-pre-gyp": "^0.6.19",
"protobufjs": "^4.0.0"
},
"devDependencies": {

requirements.txt
@@ -4,3 +4,4 @@ futures>=2.2.0
cython>=0.23
coverage>=4.0
six>=1.10
wheel>=0.29

setup.py
@@ -53,6 +53,7 @@ sys.path.insert(0, os.path.abspath(PYTHON_STEM))
# Break import-style to ensure we can actually find our in-repo dependencies.
import commands
import precompiled
import grpc_core_dependencies
import grpc_version
@@ -156,15 +157,14 @@ SETUP_REQUIRES = (
) + INSTALL_REQUIRES
COMMAND_CLASS = {
'install': commands.Install,
'doc': commands.SphinxDocumentation,
'build_proto_modules': commands.BuildProtoModules,
'build_project_metadata': commands.BuildProjectMetadata,
'build_py': commands.BuildPy,
'build_ext': commands.BuildExt,
'build_tagged_ext': precompiled.BuildTaggedExt,
'gather': commands.Gather,
'run_interop': commands.RunInterop,
'bdist_egg_grpc_custom': commands.BdistEggCustomName,
}
# Ensure that package data is copied over before any commands have been run:
@@ -202,10 +202,13 @@ TEST_LOADER = 'tests:Loader'
TEST_RUNNER = 'tests:Runner'
PACKAGE_DATA = {
# Binaries that may or may not be present in the final installation, but are
# mentioned here for completeness.
'grpc._cython': [
'_credentials/roots.pem',
'_windows/grpc_c.32.python',
'_windows/grpc_c.64.python',
'cygrpc.so',
],
}
if INSTALL_TESTS:
@@ -215,19 +218,23 @@ else:
PACKAGES = setuptools.find_packages(
PYTHON_STEM, exclude=['tests', 'tests.*'])
setuptools.setup(
name='grpcio',
version=grpc_version.VERSION,
license=LICENSE,
ext_modules=CYTHON_EXTENSION_MODULES,
packages=list(PACKAGES),
package_dir=PACKAGE_DIRECTORIES,
package_data=PACKAGE_DATA,
install_requires=INSTALL_REQUIRES,
setup_requires=SETUP_REQUIRES,
cmdclass=COMMAND_CLASS,
tests_require=TESTS_REQUIRE,
test_suite=TEST_SUITE,
test_loader=TEST_LOADER,
test_runner=TEST_RUNNER,
)
setup_arguments = {
'name': 'grpcio',
'version': grpc_version.VERSION,
'license': LICENSE,
'ext_modules': CYTHON_EXTENSION_MODULES,
'packages': list(PACKAGES),
'package_dir': PACKAGE_DIRECTORIES,
'package_data': PACKAGE_DATA,
'install_requires': INSTALL_REQUIRES,
'setup_requires': SETUP_REQUIRES,
'cmdclass': COMMAND_CLASS,
'tests_require': TESTS_REQUIRE,
'test_suite': TEST_SUITE,
'test_loader': TEST_LOADER,
'test_runner': TEST_RUNNER,
}
precompiled.update_setup_arguments(setup_arguments)
setuptools.setup(**setup_arguments)
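
A minimal sketch of the pattern introduced here (names other than setuptools.setup are illustrative stand-ins, not the repo's code): the setup() keyword arguments are collected in a plain dict so that a hook can rewrite them, e.g. emptying ext_modules when a prebuilt extension is available, before the single setuptools.setup(**...) call::

    # Sketch only; 'update_setup_arguments' stands in for
    # precompiled.update_setup_arguments.
    import setuptools

    def update_setup_arguments(setup_arguments):
        # A real hook would try to fetch a prebuilt extension and, on
        # success, clear 'ext_modules' so setuptools skips the Cython/C build.
        prebuilt_extension_found = False
        if prebuilt_extension_found:
            setup_arguments['ext_modules'] = []

    setup_arguments = {
        'name': 'example-package',
        'version': '0.0.1',
        'ext_modules': [],  # extension modules would normally be listed here
    }
    update_setup_arguments(setup_arguments)
    setuptools.setup(**setup_arguments)

Keeping a single setup() call means every distribution command sees the same, possibly rewritten, argument set.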

src/python/grpcio/README.rst
@@ -1,22 +1,40 @@
gRPC Python
===========
Package for GRPC Python.
Package for gRPC Python.
Dependencies
Installation
------------
Ensure you have installed the gRPC core. On Mac OS X, install homebrew_.
Run the following command to install gRPC Python.
gRPC Python is available for Linux and Mac OS X running Python 2.7.
From PyPI
~~~~~~~~~
If you are installing locally...
::
$ curl -fsSL https://goo.gl/getgrpc | bash -s python
$ pip install grpcio
Else system wide (on Ubuntu)...
::
This will download and run the [gRPC install script][] to install grpc core. The script then uses pip to install this package. It also installs the Protocol Buffers compiler (_protoc_) and the gRPC _protoc_ plugin for python.
$ sudo pip install grpcio
From Source
~~~~~~~~~~~
Building from source requires that you have the Python headers (usually a
package named `python-dev`).
::
Otherwise, `install from source`_
$ export REPO_ROOT=grpc
$ git clone https://github.com/grpc/grpc.git $REPO_ROOT
$ cd $REPO_ROOT
$ pip install .
.. _`install from source`: https://github.com/grpc/grpc/blob/master/src/python/README.md#building-from-source
.. _homebrew: http://brew.sh
.. _`gRPC install script`: https://raw.githubusercontent.com/grpc/homebrew-grpc/master/scripts/install
Note that `$REPO_ROOT` can be assigned to whatever directory name floats your
fancy.
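
A quick sanity check after either install path in the rewritten README (an illustration added here, not part of the commit): grpcio installs an importable grpc package, so a bare import exercises both the Python code and the cygrpc extension::

    # Not part of the commit: an ImportError here usually means the install is
    # broken or the cygrpc extension is missing/incompatible.
    import grpc
    print(grpc.__file__)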

src/python/grpcio/commands.py
@@ -41,7 +41,6 @@ import sys
import traceback
import setuptools
from setuptools.command import bdist_egg
from setuptools.command import build_ext
from setuptools.command import build_py
from setuptools.command import easy_install
@@ -52,13 +51,6 @@ import support
PYTHON_STEM = os.path.dirname(os.path.abspath(__file__))
BINARIES_REPOSITORY = os.environ.get(
'GRPC_PYTHON_BINARIES_REPOSITORY',
'https://storage.googleapis.com/grpc-precompiled-binaries/python')
USE_GRPC_CUSTOM_BDIST = bool(int(os.environ.get(
'GRPC_PYTHON_USE_CUSTOM_BDIST', '1')))
CONF_PY_ADDENDUM = """
extensions.append('sphinx.ext.napoleon')
napoleon_google_docstring = True
@@ -74,126 +66,39 @@ class CommandError(Exception):
# TODO(atash): Remove this once PyPI has better Linux bdist support. See
# https://bitbucket.org/pypa/pypi/issues/120/binary-wheels-for-linux-are-not-supported
def _get_grpc_custom_bdist_egg(decorated_basename, target_egg_basename):
"""Returns a string path to a .egg file for Linux to install.
def _get_grpc_custom_bdist(decorated_basename, target_bdist_basename):
"""Returns a string path to a bdist file for Linux to install.
If we can retrieve a pre-compiled egg from online, uses it. Else, emits a
If we can retrieve a pre-compiled bdist from online, uses it. Else, emits a
warning and builds from source.
"""
# TODO(atash): somehow the name that's returned from `wheel` is different
# between different versions of 'wheel' (but from a compatibility standpoint,
# the names are compatible); we should have some way of determining name
# compatibility in the same way `wheel` does to avoid having to rename all of
# the custom wheels that we build/upload to GCS.
# Break import style to ensure that setup.py has had a chance to install the
# relevant package eggs.
# relevant package.
from six.moves.urllib import request
decorated_path = decorated_basename + '.egg'
decorated_path = decorated_basename + GRPC_CUSTOM_BDIST_EXT
try:
url = BINARIES_REPOSITORY + '/{target}'.format(target=decorated_path)
egg_data = request.urlopen(url).read()
bdist_data = request.urlopen(url).read()
except IOError as error:
raise CommandError(
'{}\n\nCould not find the bdist egg {}: {}'
'{}\n\nCould not find the bdist {}: {}'
.format(traceback.format_exc(), decorated_path, error.message))
# Our chosen local egg path.
egg_path = target_egg_basename + '.egg'
# Our chosen local bdist path.
bdist_path = target_bdist_basename + GRPC_CUSTOM_BDIST_EXT
try:
with open(egg_path, 'w') as egg_file:
egg_file.write(egg_data)
with open(bdist_path, 'w') as bdist_file:
bdist_file.write(bdist_data)
except IOError as error:
raise CommandError(
'{}\n\nCould not write grpcio egg: {}'
'{}\n\nCould not write grpcio bdist: {}'
.format(traceback.format_exc(), error.message))
return egg_path
class EggNameMixin(object):
"""Mixin for setuptools.Command classes to enable acquiring the egg name."""
def egg_name(self, with_custom):
"""
Args:
with_custom: Boolean describing whether or not to decorate the egg name
with custom gRPC-specific target information.
"""
egg_command = self.get_finalized_command('bdist_egg')
base = os.path.splitext(os.path.basename(egg_command.egg_output))[0]
if with_custom:
flavor = 'ucs2' if sys.maxunicode == 65535 else 'ucs4'
return '{base}-{flavor}'.format(base=base, flavor=flavor)
else:
return base
class Install(install.install, EggNameMixin):
"""Custom Install command for gRPC Python.
This is for bdist shims and whatever else we might need a custom install
command for.
"""
user_options = install.install.user_options + [
# TODO(atash): remove this once PyPI has better Linux bdist support. See
# https://bitbucket.org/pypa/pypi/issues/120/binary-wheels-for-linux-are-not-supported
('use-grpc-custom-bdist', None,
'Whether to retrieve a binary from the gRPC binary repository instead '
'of building from source.'),
]
def initialize_options(self):
install.install.initialize_options(self)
self.use_grpc_custom_bdist = USE_GRPC_CUSTOM_BDIST
def finalize_options(self):
install.install.finalize_options(self)
def run(self):
if self.use_grpc_custom_bdist:
try:
try:
egg_path = _get_grpc_custom_bdist_egg(self.egg_name(True),
self.egg_name(False))
except CommandError as error:
sys.stderr.write(
'\nWARNING: Failed to acquire grpcio prebuilt binary:\n'
'{}.\n\n'.format(error.message))
raise
try:
self._run_bdist_retrieval_install(egg_path)
except Exception as error:
# if anything else happens (and given how there's no way to really know
# what's happening in setuptools here, I mean *anything*), warn the user
# and fall back to building from source.
sys.stderr.write(
'{}\nWARNING: Failed to install grpcio prebuilt binary.\n\n'
.format(traceback.format_exc()))
raise
except Exception:
install.install.run(self)
else:
install.install.run(self)
# TODO(atash): Remove this once PyPI has better Linux bdist support. See
# https://bitbucket.org/pypa/pypi/issues/120/binary-wheels-for-linux-are-not-supported
def _run_bdist_retrieval_install(self, bdist_egg):
easy_install = self.distribution.get_command_class('easy_install')
easy_install_command = easy_install(
self.distribution, args='x', root=self.root, record=self.record,
)
easy_install_command.ensure_finalized()
easy_install_command.always_copy_from = '.'
easy_install_command.package_index.scan(glob.glob('*.egg'))
arguments = [bdist_egg]
if setuptools.bootstrap_install_from:
args.insert(0, setuptools.bootstrap_install_from)
easy_install_command.args = arguments
easy_install_command.run()
setuptools.bootstrap_install_from = None
class BdistEggCustomName(bdist_egg.bdist_egg, EggNameMixin):
"""Thin wrapper around the bdist_egg command to build with our custom name."""
def run(self):
bdist_egg.bdist_egg.run(self)
target = os.path.join(self.dist_dir, '{}.egg'.format(self.egg_name(True)))
shutil.move(self.get_outputs()[0], target)
return bdist_path
class SphinxDocumentation(setuptools.Command):

src/python/grpcio/precompiled.py (new file)
@@ -0,0 +1,102 @@
# Copyright 2015-2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import platform
import shutil
import sys
import setuptools
import commands
import grpc_version
try:
from urllib2 import urlopen
except ImportError:
from urllib.request import urlopen
PYTHON_STEM = os.path.dirname(os.path.abspath(__file__))
BINARIES_REPOSITORY = os.environ.get(
'GRPC_PYTHON_BINARIES_REPOSITORY',
'https://storage.googleapis.com/grpc-precompiled-binaries/python')
USE_PRECOMPILED_BINARIES = bool(int(os.environ.get(
'GRPC_PYTHON_USE_PRECOMPILED_BINARIES', '1')))
def _tagged_ext_name(base):
uname = platform.uname()
tags = '-'.join((grpc_version.VERSION, uname[0], uname[4]))
flavor = 'ucs2' if sys.maxunicode == 65535 else 'ucs4'
return '{base}-{tags}-{flavor}'.format(base=base, tags=tags, flavor=flavor)
class BuildTaggedExt(setuptools.Command):
description = 'build the gRPC tagged extensions'
user_options = []
def initialize_options(self):
# distutils requires this override.
pass
def finalize_options(self):
# distutils requires this override.
pass
def run(self):
if 'linux' in sys.platform:
self.run_command('build_ext')
try:
os.makedirs('dist/')
except OSError:
pass
shutil.copyfile(
os.path.join(PYTHON_STEM, 'grpc/_cython/cygrpc.so'),
'dist/{}.so'.format(_tagged_ext_name('cygrpc')))
else:
sys.stderr.write('nothing to do for build_tagged_ext\n')
def update_setup_arguments(setup_arguments):
url = '{}/{}.so'.format(BINARIES_REPOSITORY, _tagged_ext_name('cygrpc'))
target_path = os.path.join(PYTHON_STEM, 'grpc/_cython/cygrpc.so')
try:
extension = urlopen(url).read()
except:
sys.stderr.write(
'could not download precompiled extension: {}\n'.format(url))
return
try:
with open(target_path, 'w') as target:
target.write(extension)
setup_arguments['ext_modules'] = []
except:
sys.stderr.write(
'could not write precompiled extension to directory: {} -> {}\n'
.format(url, target_path))
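
To make the tagged naming scheme above concrete, here is a standalone sketch (the version string is a placeholder; precompiled.py reads grpc_version.VERSION)::

    import platform
    import sys

    VERSION = '0.13.0'  # placeholder for grpc_version.VERSION

    def tagged_ext_name(base):
        uname = platform.uname()
        tags = '-'.join((VERSION, uname[0], uname[4]))  # e.g. 0.13.0-Linux-x86_64
        flavor = 'ucs2' if sys.maxunicode == 65535 else 'ucs4'
        return '{base}-{tags}-{flavor}'.format(base=base, tags=tags, flavor=flavor)

    # On a 64-bit Linux UCS-4 CPython this prints something like
    #   cygrpc-0.13.0-Linux-x86_64-ucs4
    # update_setup_arguments() then looks for '<that name>.so' under
    # GRPC_PYTHON_BINARIES_REPOSITORY and, on success, clears ext_modules so
    # the source build is skipped.
    print(tagged_ext_name('cygrpc'))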

templates/package.json.template
@@ -25,13 +25,12 @@
"test": "./node_modules/.bin/mocha src/node/test && npm run-script lint",
"gen_docs": "./node_modules/.bin/jsdoc -c src/node/jsdoc_conf.json",
"coverage": "./node_modules/.bin/istanbul cover ./node_modules/.bin/_mocha src/node/test",
"preinstall": "npm install node-pre-gyp",
"install": "./node_modules/.bin/node-pre-gyp install --fallback-to-build"
},
"bundledDependencies": ["node-pre-gyp"],
"dependencies": {
"lodash": "^3.9.3",
"nan": "^2.0.0",
"node-pre-gyp": "^0.6.19",
"protobufjs": "^4.0.0"
},
"devDependencies": {

test/distrib/node/run_distrib_test.sh
@@ -38,6 +38,9 @@ nvm install $NODE_VERSION
npm install -g node-static
# Kill off existing static servers
kill -9 $(ps aux | grep '[n]ode .*static' | awk '{print $2}') || true
STATIC_SERVER=127.0.0.1
STATIC_PORT=8080

tools/dockerfile/distribtest/python_fedora21_x64/Dockerfile
@@ -29,4 +29,9 @@
FROM fedora:21
# Make yum work properly under docker when using overlay storage driver.
# https://bugzilla.redhat.com/show_bug.cgi?id=1213602#c9
# https://github.com/docker/docker/issues/10180
RUN yum install -y yum-plugin-ovl
RUN yum clean all && yum update -y && yum install -y python python-pip

tools/dockerfile/distribtest/ruby_fedora21_x64/Dockerfile
@@ -29,6 +29,11 @@
FROM fedora:21
# Make yum work properly under docker when using overlay storage driver.
# https://bugzilla.redhat.com/show_bug.cgi?id=1213602#c9
# https://github.com/docker/docker/issues/10180
RUN yum install -y yum-plugin-ovl
RUN yum clean all && yum update -y && yum install -y ruby
RUN gem install bundler

tools/run_tests/build_artifact_node.sh
@@ -36,7 +36,7 @@ nvm use 4
cd $(dirname $0)/../..
rm -rf build
rm -rf build || true
mkdir -p artifacts
@@ -46,6 +46,6 @@ node_versions=( 0.12.0 1.0.0 1.1.0 2.0.0 3.0.0 4.0.0 5.0.0 )
for version in ${node_versions[@]}
do
node-pre-gyp configure rebuild package testpackage --target=$version --target_arch=$NODE_TARGET_ARCH
./node_modules/.bin/node-pre-gyp configure rebuild package testpackage --target=$version --target_arch=$NODE_TARGET_ARCH
cp -r build/stage/* artifacts/
done

tools/run_tests/build_artifact_python.sh
@@ -39,12 +39,20 @@ then
pip install -rrequirements.txt
fi
# The bdist_wheel_grpc_custom command is finicky about command output ordering
# and thus ought to be run in a shell command separate of others. Further, it
# trashes the actual bdist_wheel output, so it should be run first so that
# bdist_wheel may be run unmolested.
GRPC_PYTHON_USE_CUSTOM_BDIST=0 \
GRPC_PYTHON_BUILD_WITH_CYTHON=1 \
${SETARCH_CMD} python setup.py \
build_tagged_ext
GRPC_PYTHON_USE_CUSTOM_BDIST=0 \
GRPC_PYTHON_BUILD_WITH_CYTHON=1 \
${SETARCH_CMD} python setup.py \
bdist_wheel \
sdist \
bdist_egg_grpc_custom
sdist
mkdir -p artifacts

tools/run_tests/build_package_node.sh
@@ -38,6 +38,7 @@ cd $(dirname $0)/../..
mkdir -p artifacts/
cp -r $EXTERNAL_GIT_ROOT/architecture={x86,x64},language=node,platform={windows,linux,macos}/artifacts/* artifacts/ || true
npm update
npm pack
cp grpc-*.tgz artifacts/grpc.tgz

tools/run_tests/distribtest_targets.py
@@ -122,11 +122,15 @@ class NodeDistribTest(object):
def build_jobspec(self):
if self.platform == 'linux':
linux32 = ''
if self.arch == 'x86':
linux32 = 'linux32'
return create_docker_jobspec(self.name,
'tools/dockerfile/distribtest/node_%s_%s' % (
self.docker_suffix,
self.arch),
'test/distrib/node/run_distrib_test.sh %s' % (
'%s test/distrib/node/run_distrib_test.sh %s' % (
linux32,
self.node_version))
elif self.platform == 'macos':
return create_jobspec(self.name,
@@ -236,11 +240,7 @@ def targets():
RubyDistribTest('linux', 'x64', 'ubuntu1504'),
RubyDistribTest('linux', 'x64', 'ubuntu1510'),
RubyDistribTest('linux', 'x64', 'ubuntu1604'),
NodeDistribTest('macos', 'x64', None, '0.10'),
NodeDistribTest('macos', 'x64', None, '0.12'),
NodeDistribTest('macos', 'x64', None, '3'),
NodeDistribTest('macos', 'x64', None, '4'),
NodeDistribTest('macos', 'x64', None, '5'),
NodeDistribTest('linux', 'x86', 'jessie', '4')
] + [
NodeDistribTest('linux', 'x64', os, version)
