#!/bin/bash
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
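# -e aborts on the first failing command and -x echoes each command as it runs,
# which makes CI logs easier to follow.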
cd "$(dirname "$0")/../../.."
export GRPC_PYTHON_BUILD_WITH_CYTHON=1
export PYTHON=${PYTHON:-python}
export AUDITWHEEL=${AUDITWHEEL:-auditwheel}
# activate ccache if desired
# shellcheck disable=SC1091
source tools/internal_ci/helper_scripts/prepare_ccache_symlinks_rc
# Needed for building binary distribution wheels -- bdist_wheel
"${PYTHON}" -m pip install --upgrade pip
# Pin to a single version to make sure we're building the same artifacts
"${PYTHON}" -m pip install setuptools==69.5.1 wheel==0.43.0
if [ "$GRPC_SKIP_PIP_CYTHON_UPGRADE" == "" ]
then
# Install Cython to avoid source wheel build failure.
# This only needs to be done when not running under docker (=on MacOS)
# since the docker images used for building python wheels
# already have a new-enough version of cython pre-installed.
# Any installation step is a potential source of breakages,
# so we are trying to perform as few download-and-install operations
# as possible.
"${PYTHON}" -m pip install --upgrade 'cython<3.0.0rc1'
fi
# Allow build_ext to build C/C++ files in parallel
# by enabling a monkeypatch. It speeds up the build a lot.
# Use externally provided GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS value if set.
export GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS=${GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS:-2}
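# For example, exporting GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS=8 before invoking
# this script would use 8 parallel compiler processes instead of the default of 2.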
mkdir -p "${ARTIFACTS_OUT}"
ARTIFACT_DIR="$PWD/${ARTIFACTS_OUT}"
# check whether we are crosscompiling. AUDITWHEEL_ARCH is set by the dockcross docker image.
if [ "$AUDITWHEEL_ARCH" == "aarch64" ]
then
# when crosscompiling for aarch64, --plat-name needs to be set explicitly
# to end up with correctly named wheel file
# the value should be manylinuxABC_ARCH and dockcross docker image
# conveniently provides the value in the AUDITWHEEL_PLAT env
WHEEL_PLAT_NAME_FLAG="--plat-name=$AUDITWHEEL_PLAT"
# override the value of EXT_SUFFIX to make sure the crosscompiled .so files in the wheel have the correct filename suffix
GRPC_PYTHON_OVERRIDE_EXT_SUFFIX="$(${PYTHON} -c 'import sysconfig; print(sysconfig.get_config_var("EXT_SUFFIX").replace("-x86_64-linux-gnu.so", "-aarch64-linux-gnu.so"))')"
export GRPC_PYTHON_OVERRIDE_EXT_SUFFIX
# since we're crosscompiling, we need to explicitly choose the right platform for boringssl assembly optimizations
export GRPC_BUILD_OVERRIDE_BORING_SSL_ASM_PLATFORM="linux-aarch64"
fi
# check whether we are crosscompiling. AUDITWHEEL_ARCH is set by the dockcross docker image.
if [ "$AUDITWHEEL_ARCH" == "armv7l" ]
then
# when crosscompiling for arm, --plat-name needs to be set explicitly
# to end up with correctly named wheel file
# our dockcross-based docker image onveniently provides the value in the AUDITWHEEL_PLAT env
WHEEL_PLAT_NAME_FLAG="--plat-name=$AUDITWHEEL_PLAT"
# override the value of EXT_SUFFIX to make sure the crosscompiled .so files in the wheel have the correct filename suffix
GRPC_PYTHON_OVERRIDE_EXT_SUFFIX="$(${PYTHON} -c 'import sysconfig; print(sysconfig.get_config_var("EXT_SUFFIX").replace("-x86_64-linux-gnu.so", "-arm-linux-gnueabihf.so"))')"
export GRPC_PYTHON_OVERRIDE_EXT_SUFFIX
# since we're crosscompiling, we need to explicitly choose the right platform for boringssl assembly optimizations
export GRPC_BUILD_OVERRIDE_BORING_SSL_ASM_PLATFORM="linux-arm"
fi
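# Illustration (exact values depend on the Python build): on a CPython 3.11
# linux-x86_64 build, sysconfig.get_config_var("EXT_SUFFIX") is typically
# ".cpython-311-x86_64-linux-gnu.so", so the overrides above produce
# ".cpython-311-aarch64-linux-gnu.so" or ".cpython-311-arm-linux-gnueabihf.so"
# for the crosscompiled extension modules.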
ancillary_package_dir=(
"src/python/grpcio_admin/"
"src/python/grpcio_channelz/"
"src/python/grpcio_csds/"
"src/python/grpcio_health_checking/"
"src/python/grpcio_reflection/"
"src/python/grpcio_status/"
"src/python/grpcio_testing/"
"src/python/grpcio_observability/"
)
# Copy license to ancillary package directories so it will be distributed.
for directory in "${ancillary_package_dir[@]}"; do
cp "LICENSE" "${directory}"
done
# Build the source distribution first because MANIFEST.in cannot override
# exclusion of built shared objects among package resources (for some
# inexplicable reason).
${SETARCH_CMD} "${PYTHON}" setup.py sdist
# Wheel has a bug where directories don't get excluded.
# https://bitbucket.org/pypa/wheel/issues/99/cannot-exclude-directory
# shellcheck disable=SC2086
${SETARCH_CMD} "${PYTHON}" setup.py bdist_wheel $WHEEL_PLAT_NAME_FLAG
GRPCIO_STRIP_TEMPDIR=$(mktemp -d)
GRPCIO_TAR_GZ_LIST=( dist/grpcio-*.tar.gz )
GRPCIO_TAR_GZ=${GRPCIO_TAR_GZ_LIST[0]}
GRPCIO_STRIPPED_TAR_GZ=$(mktemp -t "TAR_GZ_XXXXXXXXXX")
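# Remove everything except C/C++/assembly/Python source files from a directory
# tree; used below to shrink the vendored third_party code shipped in the sdist.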
clean_non_source_files() {
  ( cd "$1"
    find . -type f \
      | grep -v '\.c$' | grep -v '\.cc$' | grep -v '\.cpp$' \
      | grep -v '\.h$' | grep -v '\.hh$' | grep -v '\.inc$' \
      | grep -v '\.s$' | grep -v '\.py$' | grep -v '\.hpp$' \
      | grep -v '\.S$' | grep -v '\.asm$' \
      | while read -r file; do
          rm -f "$file" || true
        done
    find . -type d -empty -delete
  )
}
tar xzf "${GRPCIO_TAR_GZ}" -C "${GRPCIO_STRIP_TEMPDIR}"
( cd "${GRPCIO_STRIP_TEMPDIR}"
find . -type d -name .git -exec rm -fr {} \; || true
for dir in */third_party/*; do
clean_non_source_files "${dir}" || true
done
tar czf "${GRPCIO_STRIPPED_TAR_GZ}" -- *
chmod ugo+r "${GRPCIO_STRIPPED_TAR_GZ}"
)
mv "${GRPCIO_STRIPPED_TAR_GZ}" "${GRPCIO_TAR_GZ}"
# Build gRPC tools package distribution
"${PYTHON}" tools/distrib/python/make_grpcio_tools.py
# Build gRPC tools package source distribution
${SETARCH_CMD} "${PYTHON}" tools/distrib/python/grpcio_tools/setup.py sdist
# Build gRPC tools package binary distribution
# shellcheck disable=SC2086
${SETARCH_CMD} "${PYTHON}" tools/distrib/python/grpcio_tools/setup.py bdist_wheel $WHEEL_PLAT_NAME_FLAG
if [ "$GRPC_BUILD_MAC" == "" ]; then
"${PYTHON}" src/python/grpcio_observability/make_grpcio_observability.py
${SETARCH_CMD} "${PYTHON}" src/python/grpcio_observability/setup.py sdist
# shellcheck disable=SC2086
${SETARCH_CMD} "${PYTHON}" src/python/grpcio_observability/setup.py bdist_wheel $WHEEL_PLAT_NAME_FLAG
fi
# run twine check before auditwheel, because auditwheel puts the repaired wheels into
# the artifacts output dir.
if [ "$GRPC_SKIP_TWINE_CHECK" == "" ]
then
  # Install virtualenv if it isn't already available.
  # TODO(jtattermusch): cleanup the virtualenv version fallback logic.
  "${PYTHON}" -m pip install virtualenv
  "${PYTHON}" -m virtualenv venv || { "${PYTHON}" -m pip install virtualenv==20.0.23 && "${PYTHON}" -m virtualenv venv; }
  # Ensure the generated artifacts are valid using "twine check"
  venv/bin/python -m pip install "twine<=2.0" "readme_renderer<40.0"
  venv/bin/python -m twine check dist/* tools/distrib/python/grpcio_tools/dist/*
if [ "$GRPC_BUILD_MAC" == "" ]; then
venv/bin/python -m twine check src/python/grpcio_observability/dist/*
fi
rm -rf venv/
fi
assert_is_universal_wheel() {
  WHL="$1"
  TMPDIR=$(mktemp -d)
  unzip "$WHL" -d "$TMPDIR"
  SO=$(find "$TMPDIR" -name '*.so' | head -n1)
  if ! file "$SO" | grep "Mach-O universal binary with 2 architectures"; then
    echo "$WHL is not universal2. Found the following:" >/dev/stderr
    file "$SO" >/dev/stderr
    exit 1
  fi
}
fix_faulty_universal2_wheel() {
  WHL="$1"
  assert_is_universal_wheel "$WHL"
  if echo "$WHL" | grep "x86_64"; then
    UPDATED_NAME="${WHL//x86_64/universal2}"
    mv "$WHL" "$UPDATED_NAME"
  fi
}
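# Illustration (hypothetical filename): a wheel named
# grpcio-1.63.0-cp311-cp311-macosx_10_10_x86_64.whl whose .so is verified to be
# a two-architecture Mach-O binary gets renamed to
# grpcio-1.63.0-cp311-cp311-macosx_10_10_universal2.whl.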
# This is necessary due to https://github.com/pypa/wheel/issues/406.
# wheel incorrectly generates a universal2 artifact that only contains
# x86_64 libraries.
if [ "$GRPC_BUILD_MAC" != "" ]; then
  for WHEEL in dist/*.whl tools/distrib/python/grpcio_tools/dist/*.whl; do
    fix_faulty_universal2_wheel "$WHEEL"
  done
fi
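# Roughly speaking, "auditwheel repair" vendors external shared-library
# dependencies into the wheel, retags it with the matching manylinux platform
# (AUDITWHEEL_PLAT), and writes the result to ARTIFACT_DIR; --strip additionally
# strips symbols from the bundled libraries.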
if [ "$GRPC_RUN_AUDITWHEEL_REPAIR" != "" ]
then
  for wheel in dist/*.whl; do
    "${AUDITWHEEL}" show "$wheel" | tee /dev/stderr | grep -E -w "$AUDITWHEEL_PLAT"
    "${AUDITWHEEL}" repair "$wheel" --strip --wheel-dir "$ARTIFACT_DIR"
    rm "$wheel"
  done
  for wheel in tools/distrib/python/grpcio_tools/dist/*.whl; do
    "${AUDITWHEEL}" show "$wheel" | tee /dev/stderr | grep -E -w "$AUDITWHEEL_PLAT"
    "${AUDITWHEEL}" repair "$wheel" --strip --wheel-dir "$ARTIFACT_DIR"
    rm "$wheel"
  done
else
  cp -r dist/*.whl "$ARTIFACT_DIR"
  cp -r tools/distrib/python/grpcio_tools/dist/*.whl "$ARTIFACT_DIR"
fi
# grpcio and grpcio-tools have already been copied to artifact_dir
# by "auditwheel repair"; now copy the .tar.gz source archives as well.
cp -r dist/*.tar.gz "$ARTIFACT_DIR"
cp -r tools/distrib/python/grpcio_tools/dist/*.tar.gz "$ARTIFACT_DIR"
if [ "$GRPC_BUILD_MAC" == "" ]; then
if [ "$GRPC_RUN_AUDITWHEEL_REPAIR" != "" ]
then
for wheel in src/python/grpcio_observability/dist/*.whl; do
"${AUDITWHEEL}" show "$wheel" | tee /dev/stderr | grep -E -w "$AUDITWHEEL_PLAT"
"${AUDITWHEEL}" repair "$wheel" --strip --wheel-dir "$ARTIFACT_DIR"
rm "$wheel"
done
else
cp -r src/python/grpcio_observability/dist/*.whl "$ARTIFACT_DIR"
fi
cp -r src/python/grpcio_observability/dist/*.tar.gz "$ARTIFACT_DIR"
fi
# We need to use the built grpcio-tools/grpcio to compile the health proto
# Wheels are not supported by setup_requires/dependency_links, so we
# manually install the dependency. Note we should only do this if we
# are in a docker image or in a virtualenv.
if [ "$GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS" != "" ]
then
"${PYTHON}" -m pip install -rrequirements.txt
if [ "$("$PYTHON" -c "import sys; print(sys.version_info[0])")" == "2" ]
then
"${PYTHON}" -m pip install futures>=2.2.0 enum34>=1.0.4
fi
"${PYTHON}" -m pip install grpcio --no-index --find-links "file://$ARTIFACT_DIR/"
"${PYTHON}" -m pip install grpcio-tools --no-index --find-links "file://$ARTIFACT_DIR/"
  # Note(lidiz) setuptools's "sdist" command creates a source tarball, which
  # demands an extra step of building the wheel at install time. The building
  # step is merely run through setup.py, but we can optimize it with the
  # "bdist_wheel" command, which lets installation skip the wheel-building step.
  # Build xds_protos source distribution
  # build.py is invoked as part of generate_projects.
  ${SETARCH_CMD} "${PYTHON}" tools/distrib/python/xds_protos/setup.py \
    sdist bdist_wheel install
  cp -r tools/distrib/python/xds_protos/dist/* "$ARTIFACT_DIR"
  # Build grpcio_testing source distribution
  ${SETARCH_CMD} "${PYTHON}" src/python/grpcio_testing/setup.py preprocess \
    sdist bdist_wheel
  cp -r src/python/grpcio_testing/dist/* "$ARTIFACT_DIR"
  # Build grpcio_channelz source distribution
  ${SETARCH_CMD} "${PYTHON}" src/python/grpcio_channelz/setup.py \
    preprocess build_package_protos sdist bdist_wheel
  cp -r src/python/grpcio_channelz/dist/* "$ARTIFACT_DIR"
  # Build grpcio_health_checking source distribution
  ${SETARCH_CMD} "${PYTHON}" src/python/grpcio_health_checking/setup.py \
    preprocess build_package_protos sdist bdist_wheel
  cp -r src/python/grpcio_health_checking/dist/* "$ARTIFACT_DIR"
  # Build grpcio_reflection source distribution
  ${SETARCH_CMD} "${PYTHON}" src/python/grpcio_reflection/setup.py \
    preprocess build_package_protos sdist bdist_wheel
  cp -r src/python/grpcio_reflection/dist/* "$ARTIFACT_DIR"
  # Build grpcio_status source distribution
  ${SETARCH_CMD} "${PYTHON}" src/python/grpcio_status/setup.py \
    preprocess sdist bdist_wheel
  cp -r src/python/grpcio_status/dist/* "$ARTIFACT_DIR"
  # Install xds-protos as a dependency of grpcio-csds
  "${PYTHON}" -m pip install xds-protos --no-index --find-links "file://$ARTIFACT_DIR/"
  # Build grpcio_csds source distribution
  ${SETARCH_CMD} "${PYTHON}" src/python/grpcio_csds/setup.py \
    sdist bdist_wheel
  cp -r src/python/grpcio_csds/dist/* "$ARTIFACT_DIR"
  # Build grpcio_admin source distribution; it needs the cutting-edge versions
  # of Channelz and CSDS to be installed.
  "${PYTHON}" -m pip install grpcio-channelz --no-index --find-links "file://$ARTIFACT_DIR/"
  "${PYTHON}" -m pip install grpcio-csds --no-index --find-links "file://$ARTIFACT_DIR/"
  ${SETARCH_CMD} "${PYTHON}" src/python/grpcio_admin/setup.py \
    sdist bdist_wheel
  cp -r src/python/grpcio_admin/dist/* "$ARTIFACT_DIR"
fi