[bazelified tests] First attempt at bazelified rules for building artifacts/packages (#34462)

Foundation for being able to bazelify the build artifact -> build
package -> distribtest workflow tests.

Main ideas:
- "build artifact" and "build package" tasks will be represented by a custom
genrule (that runs the build on RBE under a docker container).
- since genrule doesn't support displaying logs for each target as a
separate "target log" (in the same way that bazel tests do), and we
generally want readable per-target logs for the bazelified tests, a pair
of targets will be created for each "build artifact task" (see the
sketch below):
- a genrule that actually performs the build, creates an archive with
the artifacts, and stores the exit code and build log as rule outputs
- a corresponding "build_test" sh_test that simply looks at the result
of the genrule and presents the build log and build result as the
"target log" for this test.
Jan Tattermusch committed 3b40af376e (parent b581a24a4c)
Changed files (number of changed lines in parentheses):
1. tools/bazelify_tests/BUILD (3)
2. tools/bazelify_tests/build_defs.bzl (151)
3. tools/bazelify_tests/grpc_build_artifact_task.sh (65)
4. tools/bazelify_tests/grpc_build_artifact_task_build_test.sh (55)
5. tools/bazelify_tests/grpc_run_distribtest_test.sh (49)
6. tools/bazelify_tests/test/BUILD (193)
7. tools/bazelify_tests/test/build_artifact_php_linux.sh (20)
8. tools/bazelify_tests/test/build_artifact_protoc_linux.sh (25)
9. tools/bazelify_tests/test/build_artifact_python_linux_x64_cp311.sh (36)
10. tools/bazelify_tests/test/build_artifact_python_linux_x64_cp37.sh (36)
11. tools/bazelify_tests/test/build_package_csharp_linux.sh (43)
12. tools/bazelify_tests/test/build_package_python_linux.sh (27)
13. tools/bazelify_tests/test/run_distribtest_csharp_linux.sh (28)
14. tools/bazelify_tests/test/run_distribtest_php_linux.sh (28)
15. tools/bazelify_tests/test/run_distribtest_python_linux.sh (28)

tools/bazelify_tests/BUILD
@ -26,7 +26,10 @@ exports_files([
"grpc_run_tests_harness_test.sh",
"grpc_run_bazel_distribtest_test.sh",
"grpc_run_cpp_distribtest_test.sh",
"grpc_run_distribtest_test.sh",
"grpc_run_simple_command_test.sh",
"grpc_build_artifact_task.sh",
"grpc_build_artifact_task_build_test.sh",
])

genrule(

tools/bazelify_tests/build_defs.bzl
@ -63,6 +63,48 @@ def _dockerized_sh_test(name, srcs = [], args = [], data = [], size = "medium",
        **test_args
    )

def _dockerized_genrule(name, cmd, outs, srcs = [], timeout = None, tags = [], exec_compatible_with = [], flaky = None, docker_image_version = None, docker_run_as_root = False):
    """Runs genrule under docker either via RBE or via docker sandbox."""
    if docker_image_version:
        image_spec = DOCKERIMAGE_CURRENT_VERSIONS.get(docker_image_version, None)
        if not image_spec:
            fail("Version info for docker image '%s' not found in dockerimage_current_versions.bzl" % docker_image_version)
    else:
        fail("docker_image_version attribute not set for dockerized test '%s'" % name)

    exec_properties = create_rbe_exec_properties_dict(
        labels = {
            "workload": "misc",
            "machine_size": "misc_large",
        },
        docker_network = "standard",
        container_image = image_spec,
        # TODO(jtattermusch): note that docker sandbox doesn't currently support "docker_run_as_root"
        docker_run_as_root = docker_run_as_root,
    )

    # since the tests require special bazel args, only run them when explicitly requested
    tags = ["manual"] + tags

    # TODO(jtattermusch): find a way to ensure that the action can only run under docker sandbox or remotely
    # to avoid running it outside of a docker container by accident.
    genrule_args = {
        "name": name,
        "cmd": cmd,
        "srcs": srcs,
        "tags": tags,
        "flaky": flaky,
        "timeout": timeout,
        "exec_compatible_with": exec_compatible_with,
        "exec_properties": exec_properties,
        "outs": outs,
    }
    native.genrule(
        **genrule_args
    )

def grpc_run_tests_harness_test(name, args = [], data = [], size = "medium", timeout = None, tags = [], exec_compatible_with = [], flaky = None, docker_image_version = None, use_login_shell = None, prepare_script = None):
    """Execute a run_tests.py-harness style test under bazel.
@ -204,3 +246,112 @@ def grpc_run_simple_command_test(name, args = [], data = [], size = "medium", ti
    env = {}
    _dockerized_sh_test(name = name, srcs = srcs, args = args, data = data, size = size, timeout = timeout, tags = tags, exec_compatible_with = exec_compatible_with, flaky = flaky, docker_image_version = docker_image_version, env = env, docker_run_as_root = False)

def grpc_build_artifact_task(name, timeout = None, artifact_deps = [], tags = [], exec_compatible_with = [], flaky = None, docker_image_version = None, build_script = None):
    """Execute a build artifact task and a corresponding 'build test'.

    The artifact is built by a genrule that always succeeds (even if the underlying build fails),
    and a corresponding sh_test (with the "_build_test" suffix) presents the result of the artifact
    build in the result UI (by displaying the build status, the log, and the artifacts produced).
    Such a layout makes it easy to build artifacts and run distribtests that depend on other artifacts,
    while keeping the test results well structured and easy to interpret.

    Args:
        name: The name of the target.
        timeout: The test timeout for the build.
        artifact_deps: List of dependencies on artifacts built by another grpc_build_artifact_task.
        tags: The tags for the target.
        exec_compatible_with: A list of constraint values that must be
            satisfied for the platform.
        flaky: Whether this artifact build is flaky.
        docker_image_version: The docker .current_version file to use for docker containerization.
        build_script: The script that builds the artifacts.
    """

    out_exitcode_file = str(name + "_exit_code")
    out_build_log = str(name + "_build_log.txt")
    out_archive_name = str(name + ".tar.gz")

    genrule_outs = [
        out_exitcode_file,
        out_build_log,
        out_archive_name,
    ]

    genrule_srcs = [
        "//tools/bazelify_tests:grpc_build_artifact_task.sh",
        "//tools/bazelify_tests:grpc_repo_archive_with_submodules.tar.gz",
        build_script,
    ]

    cmd = "$(location //tools/bazelify_tests:grpc_build_artifact_task.sh) $(location //tools/bazelify_tests:grpc_repo_archive_with_submodules.tar.gz) $(location " + build_script + ") $(location " + out_exitcode_file + ") $(location " + out_build_log + ") $(location " + out_archive_name + ")"

    # For each artifact task we depend on, use the corresponding tar.gz as an extra src and pass its location as an extra cmdline arg.
    for dep in artifact_deps:
        dep_archive_name = str(dep + ".tar.gz")
        cmd = cmd + " $(location " + dep_archive_name + ")"
        genrule_srcs.append(dep_archive_name)

    _dockerized_genrule(name = name, cmd = cmd, outs = genrule_outs, srcs = genrule_srcs, timeout = timeout, tags = tags, exec_compatible_with = exec_compatible_with, flaky = flaky, docker_image_version = docker_image_version, docker_run_as_root = False)

    # The genrule above always succeeds (even if the underlying build fails), so that we can create rules that depend
    # on multiple artifact builds (of which some can fail). The actual build status (exitcode) and the log of the build
    # will be reported by an associated sh_test (that gets displayed in the UI in a much nicer way than a genrule).
    # Note that in bazel you cannot declare a test that has declared outputs and you also cannot make other rules
    # depend on a test - which is the reason why we need a separate genrule to represent the build itself.
    test_name = str(name + "_build_test")
    test_srcs = [
        "//tools/bazelify_tests:grpc_build_artifact_task_build_test.sh",
    ]
    test_data = [
        out_exitcode_file,
        out_build_log,
        out_archive_name,
    ]
    test_env = {}
    test_args = [
        "$(location " + out_exitcode_file + ")",
        "$(location " + out_build_log + ")",
        "$(location " + out_archive_name + ")",
    ]
    _dockerized_sh_test(name = test_name, srcs = test_srcs, args = test_args, data = test_data, size = "small", tags = tags, exec_compatible_with = exec_compatible_with, flaky = flaky, docker_image_version = docker_image_version, env = test_env, docker_run_as_root = False)

def grpc_run_distribtest_test(name, artifact_deps = [], size = "medium", timeout = None, tags = [], exec_compatible_with = [], flaky = None, docker_image_version = None, build_script = None, docker_run_as_root = False):
    """Run a distribtest for a previously built artifact/package.

    Args:
        name: The name of the test.
        artifact_deps: List of dependencies on artifacts built by another grpc_build_artifact_task.
        size: The size of the test.
        timeout: The test timeout.
        tags: The tags for the test.
        exec_compatible_with: A list of constraint values that must be
            satisfied for the platform.
        flaky: Whether this test is flaky.
        docker_image_version: The docker .current_version file to use for docker containerization.
        build_script: The script that runs the test.
        docker_run_as_root: If True, the test will run under docker as root.
    """

    data = [
        "//tools/bazelify_tests:grpc_repo_archive_with_submodules.tar.gz",
        build_script,
    ]

    args = [
        "$(location //tools/bazelify_tests:grpc_repo_archive_with_submodules.tar.gz)",
        "$(location " + build_script + ")",
    ]

    # For each artifact task we depend on, use the corresponding tar.gz as an extra data item and pass its location as an extra arg.
    for dep in artifact_deps:
        dep_archive_name = str(dep + ".tar.gz")
        args.append("$(location " + dep_archive_name + ")")
        data.append(dep_archive_name)

    srcs = [
        "//tools/bazelify_tests:grpc_run_distribtest_test.sh",
    ]

    env = {}
    _dockerized_sh_test(name = name, srcs = srcs, args = args, data = data, size = size, timeout = timeout, tags = tags, exec_compatible_with = exec_compatible_with, flaky = flaky, docker_image_version = docker_image_version, env = env, docker_run_as_root = docker_run_as_root)
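
To illustrate how the two macros chain together via artifact_deps, here is a rough BUILD-file sketch; the "foo" target names and scripts are hypothetical (the real targets added by this change live in tools/bazelify_tests/test/BUILD below). A dependent target only names the upstream grpc_build_artifact_task; the macro then wires in the upstream's "<name>.tar.gz" output, which the test harness extracts under input_artifacts/<name>/ before running the build script.

# Illustrative sketch with hypothetical "foo" names and scripts.
load("//tools/bazelify_tests:build_defs.bzl", "grpc_build_artifact_task", "grpc_run_distribtest_test")

# Produces artifact_foo.tar.gz (plus the exit code and build log outputs,
# and the artifact_foo_build_test reporting test).
grpc_build_artifact_task(
    name = "artifact_foo",
    build_script = "build_artifact_foo.sh",  # hypothetical build script
    docker_image_version = "tools/dockerfile/grpc_artifact_centos6_x64.current_version",  # example image from this change
)

# Depends on the artifact task above: artifact_foo.tar.gz is passed as an extra
# arg and gets extracted into input_artifacts/artifact_foo/ before the
# distribtest script runs.
grpc_run_distribtest_test(
    name = "distribtest_foo_linux_x64",
    artifact_deps = ["artifact_foo"],
    build_script = "run_distribtest_foo.sh",  # hypothetical distribtest script
    docker_image_version = "tools/dockerfile/distribtest/csharp_debian10_x64.current_version",  # example image from this change
)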

tools/bazelify_tests/grpc_build_artifact_task.sh
@ -0,0 +1,65 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
ARCHIVE_WITH_SUBMODULES="$1"
BUILD_SCRIPT="$2"
EXIT_CODE_FILE="$3"
SCRIPT_LOG_FILE="$4"
ARTIFACTS_OUT_FILE="$5"
shift 5
# Extract grpc repo archive
tar -xopf ${ARCHIVE_WITH_SUBMODULES}
cd grpc
# Extract all input archives with artifacts into input_artifacts directory
# TODO(jtattermusch): Deduplicate the snippet below (it appears in multiple files).
mkdir -p input_artifacts
pushd input_artifacts >/dev/null
# all remaining args are .tar.gz archives with input artifacts
for input_artifact_archive in "$@"
do
# extract the .tar.gz with artifacts into a directory named after a basename
# of the archive itself (and strip the "artifact/" prefix)
# Note that input artifacts from different dependencies can have files
# with the same name, so disambiguating through the name of the archive
# is important.
archive_extract_dir="$(basename ${input_artifact_archive} .tar.gz)"
mkdir -p "${archive_extract_dir}"
pushd "${archive_extract_dir}" >/dev/null
tar --strip-components=1 -xopf ../../../${input_artifact_archive}
popd >/dev/null
done
popd >/dev/null
mkdir -p artifacts
# Run the build script with args, storing its stdout and stderr
# in a log file.
SCRIPT_EXIT_CODE=0
../"${BUILD_SCRIPT}" >"../${SCRIPT_LOG_FILE}" 2>&1 || SCRIPT_EXIT_CODE="$?"
# Store build script's exitcode in a file.
# Note that the build artifacts task will terminate with success even when
# there was an error building the artifacts.
# The error status (and the associated log) will be reported by an associated
# bazel test.
echo "${SCRIPT_EXIT_CODE}" >"../${EXIT_CODE_FILE}"
# collect the artifacts
# TODO(jtattermusch): add tar flags to create deterministic tar archive
tar -czvf ../"${ARTIFACTS_OUT_FILE}" artifacts

tools/bazelify_tests/grpc_build_artifact_task_build_test.sh
@ -0,0 +1,55 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
EXIT_CODE_FILE="$1"
SCRIPT_LOG_FILE="$2"
ARTIFACTS_ARCHIVE="$3"
shift 3
BUILD_ARTIFACT_EXITCODE="$(cat ${EXIT_CODE_FILE})"
echo "Build artifact/package task for '${ARTIFACTS_ARCHIVE}' has finished with exitcode ${BUILD_ARTIFACT_EXITCODE}."
echo "BUILD LOG"
echo "--------------"
cat "${SCRIPT_LOG_FILE}"
echo "--------------"
echo
# Try extracting the archive with artifacts (and list the files)
mkdir -p input_artifacts
pushd input_artifacts >/dev/null
echo "Artifacts that were built by the build artifact/package task:"
echo "--------------"
# TODO(jtattermusch): strip top level artifacts/ directory from the archive?
tar -xopvf ../${ARTIFACTS_ARCHIVE}
echo "--------------"
popd >/dev/null
# Add artifact archive to the "undeclared test outputs" directory
# to make it readily available in the resultstore UI.
# See bazel docs for TEST_UNDECLARED_OUTPUTS_DIR.
mkdir -p "${TEST_UNDECLARED_OUTPUTS_DIR}"
cp "${ARTIFACTS_ARCHIVE}" "${TEST_UNDECLARED_OUTPUTS_DIR}" || true
if [ "${BUILD_ARTIFACT_EXITCODE}" -eq "0" ]
then
echo "SUCCESS: Build artifact/package task for '${ARTIFACTS_ARCHIVE}' ran successfully."
else
echo "FAIL: Build artifact/package task for '${ARTIFACTS_ARCHIVE}' failed with exitcode ${BUILD_ARTIFACT_EXITCODE}."
exit 1
fi

tools/bazelify_tests/grpc_run_distribtest_test.sh
@ -0,0 +1,49 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
ARCHIVE_WITH_SUBMODULES="$1"
BUILD_SCRIPT="$2"
shift 2
# Extract grpc repo archive
tar -xopf ${ARCHIVE_WITH_SUBMODULES}
cd grpc
# Extract all input archives with artifacts into input_artifacts directory
# TODO(jtattermusch): Deduplicate the snippet below (it appears in multiple files).
mkdir -p input_artifacts
pushd input_artifacts >/dev/null
# all remaining args are .tar.gz archives with input artifacts
for input_artifact_archive in "$@"
do
# extract the .tar.gz with artifacts into a directory named after a basename
# of the archive itself (and strip the "artifact/" prefix)
# Note that input artifacts from different dependencies can have files
# with the same name, so disambiguating through the name of the archive
# is important.
archive_extract_dir="$(basename ${input_artifact_archive} .tar.gz)"
mkdir -p "${archive_extract_dir}"
pushd "${archive_extract_dir}" >/dev/null
tar --strip-components=1 -xopf ../../../${input_artifact_archive}
popd >/dev/null
done
popd >/dev/null
ls -lR input_artifacts
# Run build script passed as arg.
"${BUILD_SCRIPT}"

tools/bazelify_tests/test/BUILD
@ -13,7 +13,7 @@
# limitations under the License.

load("//bazel:grpc_build_system.bzl", "grpc_package")
load("//tools/bazelify_tests:build_defs.bzl", "grpc_build_artifact_task", "grpc_run_cpp_distribtest_test", "grpc_run_distribtest_test", "grpc_run_simple_command_test", "grpc_run_tests_harness_test")
load(":portability_tests.bzl", "generate_run_tests_portability_tests")
load(":bazel_distribtests.bzl", "generate_bazel_distribtests")

@ -243,13 +243,204 @@ test_suite(
],
)
# protoc artifact build tasks
grpc_build_artifact_task(
name = "artifact_protoc_linux_x64",
build_script = "build_artifact_protoc_linux.sh",
docker_image_version = "tools/dockerfile/grpc_artifact_centos6_x64.current_version",
)
grpc_build_artifact_task(
name = "artifact_protoc_linux_x86",
build_script = "build_artifact_protoc_linux.sh",
docker_image_version = "tools/dockerfile/grpc_artifact_centos6_x86.current_version",
)
grpc_build_artifact_task(
name = "artifact_protoc_linux_aarch64",
build_script = "build_artifact_protoc_linux.sh",
docker_image_version = "tools/dockerfile/grpc_artifact_protoc_aarch64.current_version",
)
# PHP artifact build tasks
grpc_build_artifact_task(
name = "artifact_php_linux_x64",
build_script = "build_artifact_php_linux.sh",
docker_image_version = "tools/dockerfile/test/php73_zts_debian11_x64.current_version",
)
# Python artifact build tasks
grpc_build_artifact_task(
name = "artifact_python_linux_x64_manylinux2014_cp311",
build_script = "build_artifact_python_linux_x64_cp311.sh",
docker_image_version = "tools/dockerfile/grpc_artifact_python_manylinux2014_x64.current_version",
)
grpc_build_artifact_task(
name = "artifact_python_linux_x64_manylinux2014_cp37",
build_script = "build_artifact_python_linux_x64_cp37.sh",
docker_image_version = "tools/dockerfile/grpc_artifact_python_manylinux2014_x64.current_version",
)
# TODO(jtattermusch): add more grpc_build_artifact_task targets for existing python artifacts from artifact_targets.py
grpc_build_artifact_task(
name = "package_python_linux",
# TODO(jtattermusch): add more python artifacts once they are migrated from artifact_targets.py
artifact_deps = [
"artifact_python_linux_x64_manylinux2014_cp311",
"artifact_python_linux_x64_manylinux2014_cp37",
],
build_script = "build_package_python_linux.sh",
docker_image_version = "tools/dockerfile/grpc_artifact_python_manylinux2014_x64.current_version",
)
# C# package build tasks
grpc_build_artifact_task(
name = "package_csharp_linux",
# csharp package needs pre-built protoc and protoc plugin binaries
artifact_deps = [
"artifact_protoc_linux_x64",
"artifact_protoc_linux_x86",
"artifact_protoc_linux_aarch64",
],
build_script = "build_package_csharp_linux.sh",
docker_image_version = "tools/dockerfile/test/csharp_debian11_x64.current_version",
)
# C# distribtests
grpc_run_distribtest_test(
name = "distribtest_csharp_linux_x64_debian10",
# depend on the C# packages
artifact_deps = [
"package_csharp_linux",
],
build_script = "run_distribtest_csharp_linux.sh",
docker_image_version = "tools/dockerfile/distribtest/csharp_debian10_x64.current_version",
)
grpc_run_distribtest_test(
name = "distribtest_csharp_linux_x64_ubuntu2204",
# depend on the C# packages
artifact_deps = [
"package_csharp_linux",
],
build_script = "run_distribtest_csharp_linux.sh",
docker_image_version = "tools/dockerfile/distribtest/csharp_ubuntu2204_x64.current_version",
)
grpc_run_distribtest_test(
name = "distribtest_csharp_linux_x64_alpine",
# depend on the C# packages
artifact_deps = [
"package_csharp_linux",
],
build_script = "run_distribtest_csharp_linux.sh",
docker_image_version = "tools/dockerfile/distribtest/csharp_alpine_x64.current_version",
)
grpc_run_distribtest_test(
name = "distribtest_csharp_linux_x64_dotnet31",
# depend on the C# packages
artifact_deps = [
"package_csharp_linux",
],
build_script = "run_distribtest_csharp_linux.sh",
docker_image_version = "tools/dockerfile/distribtest/csharp_dotnet31_x64.current_version",
)
grpc_run_distribtest_test(
name = "distribtest_csharp_linux_x64_dotnet5",
# depend on the C# packages
artifact_deps = [
"package_csharp_linux",
],
build_script = "run_distribtest_csharp_linux.sh",
docker_image_version = "tools/dockerfile/distribtest/csharp_dotnet5_x64.current_version",
)
test_suite(
name = "csharp_distribtests_linux",
tests = [
":distribtest_csharp_linux_x64_alpine",
":distribtest_csharp_linux_x64_debian10",
":distribtest_csharp_linux_x64_dotnet31",
":distribtest_csharp_linux_x64_dotnet5",
":distribtest_csharp_linux_x64_ubuntu2204",
],
)
# PHP distribtests
grpc_run_distribtest_test(
name = "distribtest_php_linux_x64_debian10",
artifact_deps = [
"artifact_php_linux_x64",
],
build_script = "run_distribtest_php_linux.sh",
docker_image_version = "tools/dockerfile/distribtest/php7_debian10_x64.current_version",
docker_run_as_root = True,
)
test_suite(
name = "php_distribtests_linux",
tests = [
":distribtest_php_linux_x64_debian10",
],
)
# Python distribtests
grpc_run_distribtest_test(
name = "distribtest_python_linux_x64_buster",
artifact_deps = [
"package_python_linux",
],
build_script = "run_distribtest_python_linux.sh",
docker_image_version = "tools/dockerfile/distribtest/python_buster_x64.current_version",
)
test_suite(
name = "python_distribtests_linux",
tests = [
":distribtest_python_linux_x64_buster",
],
)
# TODO(jtattermusch): add more grpc_run_distribtest_test targets for existing python distribtests from distribtest_targets.py
# Note that there are two flavors of python distribtests - "binary" (which uses pre-built wheels) and "source" (which compiles the python extension from source)
# TODO(jtattermusch): add grpc_build_artifact_task targets for ruby artifacts (which is tricky, since ruby artifact builds do not run under docker since they invoke docker themselves)
test_suite(
name = "artifact_build_tests_linux",
tests = [
":artifact_php_linux_x64_build_test",
":artifact_protoc_linux_aarch64_build_test",
":artifact_protoc_linux_x64_build_test",
":artifact_protoc_linux_x86_build_test",
":artifact_python_linux_x64_manylinux2014_cp311_build_test",
":artifact_python_linux_x64_manylinux2014_cp37_build_test",
":package_csharp_linux_build_test",
":package_python_linux_build_test",
],
)
test_suite(
name = "all_tests_linux",
tests = [
":artifact_build_tests_linux",
":basic_tests_linux",
":bazel_build_tests_linux",
":bazel_distribtests_linux",
":cpp_distribtests_linux",
":csharp_distribtests_linux",
":php_distribtests_linux",
":portability_tests_linux",
":python_distribtests_linux",
],
)

tools/bazelify_tests/test/build_artifact_php_linux.sh
@ -0,0 +1,20 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
mkdir -p artifacts
ARTIFACTS_OUT=artifacts tools/run_tests/artifacts/build_artifact_php.sh

tools/bazelify_tests/test/build_artifact_protoc_linux.sh
@ -0,0 +1,25 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
# compile/link options extracted from ProtocArtifact in tools/run_tests/artifacts/artifact_targets.py
export LDFLAGS="${LDFLAGS} -static-libgcc -static-libstdc++ -s"
# set build parallelism to fit the machine configuration of bazelified tests RBE pool.
export GRPC_PROTOC_BUILD_COMPILER_JOBS=8
mkdir -p artifacts
ARTIFACTS_OUT=artifacts tools/run_tests/artifacts/build_artifact_protoc.sh

tools/bazelify_tests/test/build_artifact_python_linux_x64_cp311.sh
@ -0,0 +1,36 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
# env variable values extracted from PythonArtifact in tools/run_tests/artifacts/artifact_targets.py
# TODO(jtattermusch): find a better way of configuring the python artifact build (the current approach mostly serves as a demonstration)
export PYTHON=/opt/python/cp311-cp311/bin/python
export PIP=/opt/python/cp311-cp311/bin/pip
export GRPC_SKIP_PIP_CYTHON_UPGRADE=TRUE
export GRPC_RUN_AUDITWHEEL_REPAIR=TRUE
export GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS=TRUE
# Without this python cannot find the c++ compiler
# TODO(jtattermusch): find better solution to prevent bazel from
# restricting path contents
export PATH="/opt/rh/devtoolset-10/root/usr/bin:$PATH"
# set build parallelism to fit the machine configuration of bazelified tests RBE pool.
export GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS=8
mkdir -p artifacts
ARTIFACTS_OUT=artifacts tools/run_tests/artifacts/build_artifact_python.sh

tools/bazelify_tests/test/build_artifact_python_linux_x64_cp37.sh
@ -0,0 +1,36 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
# env variable values extracted from PythonArtifact in tools/run_tests/artifacts/artifact_targets.py
# TODO(jtattermusch): find a better way of configuring the python artifact build (the current approach mostly serves as a demonstration)
export PYTHON=/opt/python/cp37-cp37m/bin/python
export PIP=/opt/python/cp37-cp37m/bin/pip
export GRPC_SKIP_PIP_CYTHON_UPGRADE=TRUE
export GRPC_RUN_AUDITWHEEL_REPAIR=TRUE
export GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS=TRUE
# Without this python cannot find the c++ compiler
# TODO(jtattermusch): find better solution to prevent bazel from
# restricting path contents
export PATH="/opt/rh/devtoolset-10/root/usr/bin:$PATH"
# set build parallelism to fit the machine configuration of bazelified tests RBE pool.
export GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS=8
mkdir -p artifacts
ARTIFACTS_OUT=artifacts tools/run_tests/artifacts/build_artifact_python.sh

tools/bazelify_tests/test/build_package_csharp_linux.sh
@ -0,0 +1,43 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
mkdir -p artifacts
# List all input artifacts we obtained for easier troubleshooting.
ls -lR input_artifacts
# Put the input artifacts where the legacy logic for building
# C# package expects to find them.
# See artifact_targets.py and package_targets.py for details.
# TODO(jtattermusch): get rid of the manual renames of artifact directories.
export EXTERNAL_GIT_ROOT="$(pwd)"
mv input_artifacts/artifact_protoc_linux_aarch64 input_artifacts/protoc_linux_aarch64 || true
mv input_artifacts/artifact_protoc_linux_x64 input_artifacts/protoc_linux_x64 || true
mv input_artifacts/artifact_protoc_linux_x86 input_artifacts/protoc_linux_x86 || true
# In the bazel workflow, we only have the linux protoc artifacts at hand,
# so we can only build a "singleplatform" version of the C# package.
export GRPC_CSHARP_BUILD_SINGLE_PLATFORM_NUGET=1
# TODO(jtattermusch): when building the C# nugets, the current git commit SHA
# is retrieved and stored as package metadata. But when running
# as a bazelified test, this is not possible since we're not in a git
# workspace when running the build. This is ok for testing purposes
# but would be a problem if building a production package
# for the end users.
src/csharp/build_nuget.sh

tools/bazelify_tests/test/build_package_python_linux.sh
@ -0,0 +1,27 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
mkdir -p artifacts
# List all input artifacts we obtained for easier troubleshooting.
ls -lR input_artifacts
# All the python packages have been built in the artifact phase already
# and we only collect them here to deliver them to the distribtest phase.
# This is the same logic as in "tools/run_tests/artifacts/build_package_python.sh",
# but expects different layout under input_artifacts.
cp -r input_artifacts/artifact_python_*/* artifacts/ || true

tools/bazelify_tests/test/run_distribtest_csharp_linux.sh
@ -0,0 +1,28 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
# List all input artifacts we obtained for easier troubleshooting.
ls -lR input_artifacts
# Put the input packages where the legacy logic for running
# C# distribtest expects to find them.
# See distribtest_targets.py for details.
# TODO(jtattermusch): get rid of the manual renames of artifact files.
export EXTERNAL_GIT_ROOT="$(pwd)"
mv input_artifacts/package_csharp_linux/* input_artifacts/ || true
test/distrib/csharp/run_distrib_test_dotnetcli.sh

tools/bazelify_tests/test/run_distribtest_php_linux.sh
@ -0,0 +1,28 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
# List all input artifacts we obtained for easier troubleshooting.
ls -lR input_artifacts
# Put the input packages where the legacy logic for running
# PHP distribtest expects to find them.
# See distribtest_targets.py for details.
# TODO(jtattermusch): get rid of the manual renames of artifact files.
export EXTERNAL_GIT_ROOT="$(pwd)"
mv input_artifacts/artifact_php_linux_x64/* input_artifacts/ || true
test/distrib/php/run_distrib_test.sh

tools/bazelify_tests/test/run_distribtest_python_linux.sh
@ -0,0 +1,28 @@
#!/bin/bash
# Copyright 2023 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -ex
# List all input artifacts we obtained for easier troubleshooting.
ls -lR input_artifacts
# Put the input packages where the legacy logic for running
# Python distribtest expects to find them.
# See distribtest_targets.py for details.
# TODO(jtattermusch): get rid of the manual renames of artifact files.
export EXTERNAL_GIT_ROOT="$(pwd)"
mv input_artifacts/package_python_linux/* input_artifacts/ || true
test/distrib/python/run_binary_distrib_test.sh