Add basic benchmark test for Python

pull/17793/head
Lidi Zheng 6 years ago
parent c4ef85588d
commit 4f451c78a4
  1. src/proto/grpc/core/BUILD (18 changes)
  2. src/proto/grpc/testing/BUILD (94 changes)
  3. src/python/grpcio_tests/tests/qps/BUILD (89 changes)
  4. src/python/grpcio_tests/tests/qps/README.md (100 changes)
  5. src/python/grpcio_tests/tests/qps/basic_benchmark_test.sh (45 changes)
  6. src/python/grpcio_tests/tests/qps/scenarios.json (96 changes)
  7. test/cpp/qps/BUILD (6 changes)

@@ -14,11 +14,25 @@
licenses(["notice"]) # Apache v2
load("//bazel:grpc_build_system.bzl", "grpc_proto_library", "grpc_package")
load("//bazel:grpc_build_system.bzl", "grpc_package", "grpc_proto_library")
load("@grpc_python_dependencies//:requirements.bzl", "requirement")
load("@org_pubref_rules_protobuf//python:rules.bzl", "py_proto_library")
grpc_package(name = "core", visibility = "public")
grpc_package(
name = "core",
visibility = "public",
)
grpc_proto_library(
name = "stats_proto",
srcs = ["stats.proto"],
)
py_proto_library(
name = "py_stats_proto",
protos = ["stats.proto"],
with_grpc = True,
deps = [
requirement("protobuf"),
],
)

@@ -14,11 +14,14 @@
licenses(["notice"]) # Apache v2
load("//bazel:grpc_build_system.bzl", "grpc_proto_library", "grpc_package")
load("//bazel:grpc_build_system.bzl", "grpc_package", "grpc_proto_library")
load("@grpc_python_dependencies//:requirements.bzl", "requirement")
load("@org_pubref_rules_protobuf//python:rules.bzl", "py_proto_library")
grpc_package(name = "testing", visibility = "public")
grpc_package(
name = "testing",
visibility = "public",
)
exports_files([
"echo.proto",
@@ -50,9 +53,11 @@ grpc_proto_library(
grpc_proto_library(
name = "echo_proto",
srcs = ["echo.proto"],
deps = ["echo_messages_proto",
"simple_messages_proto"],
generate_mocks = True,
deps = [
"echo_messages_proto",
"simple_messages_proto",
],
)
grpc_proto_library(
@@ -63,10 +68,10 @@ grpc_proto_library(
py_proto_library(
name = "py_empty_proto",
protos = ["empty.proto",],
protos = ["empty.proto"],
with_grpc = True,
deps = [
requirement('protobuf'),
requirement("protobuf"),
],
)
@@ -78,10 +83,10 @@ grpc_proto_library(
py_proto_library(
name = "py_messages_proto",
protos = ["messages.proto",],
protos = ["messages.proto"],
with_grpc = True,
deps = [
requirement('protobuf'),
requirement("protobuf"),
],
)
@@ -100,7 +105,7 @@ grpc_proto_library(
name = "benchmark_service_proto",
srcs = ["benchmark_service.proto"],
deps = [
"messages_proto",
"messages_proto",
],
)
@@ -108,7 +113,7 @@ grpc_proto_library(
name = "report_qps_scenario_service_proto",
srcs = ["report_qps_scenario_service.proto"],
deps = [
"control_proto",
"control_proto",
],
)
@@ -116,7 +121,7 @@ grpc_proto_library(
name = "worker_service_proto",
srcs = ["worker_service.proto"],
deps = [
"control_proto",
"control_proto",
],
)
@@ -132,7 +137,7 @@ grpc_proto_library(
has_services = False,
deps = [
"//src/proto/grpc/core:stats_proto",
]
],
)
grpc_proto_library(
@@ -146,14 +151,71 @@ grpc_proto_library(
py_proto_library(
name = "py_test_proto",
protos = ["test.proto",],
proto_deps = [
":py_empty_proto",
":py_messages_proto",
],
protos = ["test.proto"],
with_grpc = True,
deps = [
requirement('protobuf'),
requirement("protobuf"),
],
)
py_proto_library(
name = "py_benchmark_service_proto",
proto_deps = [
":py_empty_proto",
":py_messages_proto",
]
],
protos = ["benchmark_service.proto"],
with_grpc = True,
deps = [
requirement("protobuf"),
],
)
py_proto_library(
name = "py_payloads_proto",
protos = ["payloads.proto"],
with_grpc = True,
deps = [
requirement("protobuf"),
],
)
py_proto_library(
name = "py_stats_proto",
proto_deps = [
"//src/proto/grpc/core:py_stats_proto",
],
protos = ["stats.proto"],
with_grpc = True,
deps = [
requirement("protobuf"),
],
)
py_proto_library(
name = "py_control_proto",
proto_deps = [
":py_payloads_proto",
":py_stats_proto",
],
protos = ["control.proto"],
with_grpc = True,
deps = [
requirement("protobuf"),
],
)
py_proto_library(
name = "py_worker_service_proto",
proto_deps = [
":py_control_proto",
],
protos = ["worker_service.proto"],
with_grpc = True,
deps = [
requirement("protobuf"),
],
)

@@ -0,0 +1,89 @@
package(default_visibility = ["//visibility:public"])
load("@grpc_python_dependencies//:requirements.bzl", "requirement")
py_library(
name = "benchmark_client",
srcs = ["benchmark_client.py"],
imports = ["../../"],
deps = [
requirement("six"),
"//src/proto/grpc/testing:py_benchmark_service_proto",
"//src/proto/grpc/testing:py_messages_proto",
"//src/python/grpcio/grpc:grpcio",
"//src/python/grpcio_tests/tests/unit:resources",
"//src/python/grpcio_tests/tests/unit:test_common",
],
)
py_library(
name = "benchmark_server",
srcs = ["benchmark_server.py"],
imports = ["../../"],
deps = [
"//src/proto/grpc/testing:py_benchmark_service_proto",
"//src/proto/grpc/testing:py_messages_proto",
],
)
py_library(
name = "client_runner",
srcs = ["client_runner.py"],
imports = ["../../"],
)
py_library(
name = "histogram",
srcs = ["histogram.py"],
imports = ["../../"],
deps = [
"//src/proto/grpc/testing:py_stats_proto",
],
)
py_library(
name = "worker_server",
srcs = ["worker_server.py"],
imports = ["../../"],
deps = [
":benchmark_client",
":benchmark_server",
":client_runner",
":histogram",
"//src/proto/grpc/testing:py_benchmark_service_proto",
"//src/proto/grpc/testing:py_control_proto",
"//src/proto/grpc/testing:py_stats_proto",
"//src/proto/grpc/testing:py_worker_service_proto",
"//src/python/grpcio/grpc:grpcio",
"//src/python/grpcio_tests/tests/unit:resources",
"//src/python/grpcio_tests/tests/unit:test_common",
],
)
py_binary(
name = "qps_worker",
srcs = ["qps_worker.py"],
imports = ["../../"],
main = "qps_worker.py",
deps = [
":worker_server",
"//src/proto/grpc/testing:py_worker_service_proto",
"//src/python/grpcio/grpc:grpcio",
"//src/python/grpcio_tests/tests/unit:test_common",
],
)
filegroup(
name = "scenarios",
srcs = ["scenarios.json"],
)
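# Wraps basic_benchmark_test.sh, which starts a Python qps worker on each
# driver port and then runs the C++ qps_json_driver against scenarios.json.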
sh_test(
name = "basic_benchmark_test",
srcs = ["basic_benchmark_test.sh"],
data = [
":qps_worker",
":scenarios",
"//test/cpp/qps:qps_json_driver",
],
)

@@ -0,0 +1,100 @@
# Python Benchmark Tools
## Scenarios
The fields of a scenario are defined in `src/proto/grpc/testing/control.proto`.
The script `tools/run_tests/performance/scenario_config.py` generates the actual scenario content, usually as JSON or piped into another script.
All Python-related benchmark scenarios are:
* netperf
* python_generic_sync_streaming_ping_pong
* python_protobuf_sync_streaming_ping_pong
* python_protobuf_async_unary_ping_pong
* python_protobuf_sync_unary_ping_pong
* python_protobuf_sync_unary_qps_unconstrained
* python_protobuf_sync_streaming_qps_unconstrained
* python_protobuf_sync_unary_ping_pong_1MB
I picked the two most representative scenarios and reduced their benchmark duration from 30 seconds to 10 seconds (see the sketch after this list):
* python_protobuf_async_unary_ping_pong
* python_protobuf_sync_streaming_ping_pong
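If the checked-in file ever needs to be regenerated, the filtering step might look like the minimal sketch below. The input path `all_scenarios.json` is a placeholder for a full dump of the generated scenario content; it is assumed to use the same `{"scenarios": [...]}` shape as the checked-in `scenarios.json`. Only the two scenario names and the 10-second duration come from this change.
```python
# Hypothetical helper: trim a full scenario dump down to the two checked-in
# Python scenarios and shorten their benchmark duration to 10 seconds.
import json

PICKED = {
    "python_protobuf_async_unary_ping_pong",
    "python_protobuf_sync_streaming_ping_pong",
}

with open("all_scenarios.json") as f:  # placeholder path for the full dump
    scenarios = json.load(f)["scenarios"]

picked = []
for scenario in scenarios:
    if scenario["name"] in PICKED:
        scenario["benchmarkSeconds"] = 10  # reduced from 30 seconds
        picked.append(scenario)

with open("scenarios.json", "w") as f:
    json.dump({"scenarios": picked}, f, indent=2)
```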
## Why keep the scenario file if it can be generated?
Well... `tools/run_tests/performance/scenario_config.py` is 1274 lines long. The intention of building these benchmark tools is to reduce the complexity of the existing infrastructure code, and depending on something that large would work against that goal, so the two scenarios are checked in directly.
## How to run it?
```shell
bazel test --test_output=streamed src/python/grpcio_tests/tests/qps:basic_benchmark_test
```
## What does the output look like?
```
RUNNING SCENARIO: python_protobuf_async_unary_ping_pong
I0123 00:26:04.746195000 140736237159296 driver.cc:288] Starting server on localhost:10086 (worker #0)
D0123 00:26:04.747190000 140736237159296 ev_posix.cc:170] Using polling engine: poll
D0123 00:26:04.747264000 140736237159296 dns_resolver_ares.cc:488] Using ares dns resolver
I0123 00:26:04.748445000 140736237159296 subchannel.cc:869] Connect failed: {"created":"@1548203164.748403000","description":"Failed to connect to remote host: Connection refused","errno":61,"file":"src/core/lib/iomgr/tcp_client_posix.cc","file_line":207,"os_error":"Connection refused","syscall":"connect","target_address":"ipv6:[::1]:10086"}
I0123 00:26:04.748585000 140736237159296 subchannel.cc:869] Connect failed: {"created":"@1548203164.748564000","description":"Failed to connect to remote host: Connection refused","errno":61,"file":"src/core/lib/iomgr/tcp_client_posix.cc","file_line":207,"os_error":"Connection refused","syscall":"connect","target_address":"ipv4:127.0.0.1:10086"}
I0123 00:26:04.748596000 140736237159296 subchannel.cc:751] Subchannel 0x7fca43c19360: Retry in 999 milliseconds
I0123 00:26:05.751251000 123145571299328 subchannel.cc:710] Failed to connect to channel, retrying
I0123 00:26:05.752209000 140736237159296 subchannel.cc:832] New connected subchannel at 0x7fca45000060 for subchannel 0x7fca43c19360
I0123 00:26:05.772291000 140736237159296 driver.cc:349] Starting client on localhost:10087 (worker #1)
D0123 00:26:05.772384000 140736237159296 driver.cc:373] Client 0 gets 1 channels
I0123 00:26:05.773286000 140736237159296 subchannel.cc:832] New connected subchannel at 0x7fca45004a80 for subchannel 0x7fca451034b0
I0123 00:26:05.789797000 140736237159296 driver.cc:394] Initiating
I0123 00:26:05.790858000 140736237159296 driver.cc:415] Warming up
I0123 00:26:07.791078000 140736237159296 driver.cc:421] Starting
I0123 00:26:07.791860000 140736237159296 driver.cc:448] Running
I0123 00:26:17.790915000 140736237159296 driver.cc:462] Finishing clients
I0123 00:26:17.791821000 140736237159296 driver.cc:476] Received final status from client 0
I0123 00:26:17.792148000 140736237159296 driver.cc:508] Finishing servers
I0123 00:26:17.792493000 140736237159296 driver.cc:522] Received final status from server 0
I0123 00:26:17.795786000 140736237159296 report.cc:82] QPS: 2066.6
I0123 00:26:17.795799000 140736237159296 report.cc:122] QPS: 2066.6 (258.3/server core)
I0123 00:26:17.795805000 140736237159296 report.cc:127] Latencies (50/90/95/99/99.9%-ile): 467.9/504.8/539.0/653.3/890.4 us
I0123 00:26:17.795811000 140736237159296 report.cc:137] Server system time: 100.00%
I0123 00:26:17.795815000 140736237159296 report.cc:139] Server user time: 100.00%
I0123 00:26:17.795818000 140736237159296 report.cc:141] Client system time: 100.00%
I0123 00:26:17.795821000 140736237159296 report.cc:143] Client user time: 100.00%
I0123 00:26:17.795825000 140736237159296 report.cc:148] Server CPU usage: 0.00%
I0123 00:26:17.795828000 140736237159296 report.cc:153] Client Polls per Request: 0.00
I0123 00:26:17.795831000 140736237159296 report.cc:155] Server Polls per Request: 0.00
I0123 00:26:17.795834000 140736237159296 report.cc:160] Server Queries/CPU-sec: 1033.19
I0123 00:26:17.795837000 140736237159296 report.cc:162] Client Queries/CPU-sec: 1033.32
RUNNING SCENARIO: python_protobuf_sync_streaming_ping_pong
I0123 00:26:17.795888000 140736237159296 driver.cc:288] Starting server on localhost:10086 (worker #0)
D0123 00:26:17.795964000 140736237159296 ev_posix.cc:170] Using polling engine: poll
D0123 00:26:17.795978000 140736237159296 dns_resolver_ares.cc:488] Using ares dns resolver
I0123 00:26:17.796613000 140736237159296 subchannel.cc:832] New connected subchannel at 0x7fca43c15820 for subchannel 0x7fca43d12140
I0123 00:26:17.810911000 140736237159296 driver.cc:349] Starting client on localhost:10087 (worker #1)
D0123 00:26:17.811037000 140736237159296 driver.cc:373] Client 0 gets 1 channels
I0123 00:26:17.811892000 140736237159296 subchannel.cc:832] New connected subchannel at 0x7fca43d18f40 for subchannel 0x7fca43d16b80
I0123 00:26:17.818902000 140736237159296 driver.cc:394] Initiating
I0123 00:26:17.820776000 140736237159296 driver.cc:415] Warming up
I0123 00:26:19.824685000 140736237159296 driver.cc:421] Starting
I0123 00:26:19.825970000 140736237159296 driver.cc:448] Running
I0123 00:26:29.821866000 140736237159296 driver.cc:462] Finishing clients
I0123 00:26:29.823259000 140736237159296 driver.cc:476] Received final status from client 0
I0123 00:26:29.827195000 140736237159296 driver.cc:508] Finishing servers
I0123 00:26:29.827599000 140736237159296 driver.cc:522] Received final status from server 0
I0123 00:26:29.828739000 140736237159296 report.cc:82] QPS: 619.5
I0123 00:26:29.828752000 140736237159296 report.cc:122] QPS: 619.5 (77.4/server core)
I0123 00:26:29.828760000 140736237159296 report.cc:127] Latencies (50/90/95/99/99.9%-ile): 1589.8/1854.3/1920.4/2015.8/2204.8 us
I0123 00:26:29.828765000 140736237159296 report.cc:137] Server system time: 100.00%
I0123 00:26:29.828769000 140736237159296 report.cc:139] Server user time: 100.00%
I0123 00:26:29.828772000 140736237159296 report.cc:141] Client system time: 100.00%
I0123 00:26:29.828776000 140736237159296 report.cc:143] Client user time: 100.00%
I0123 00:26:29.828780000 140736237159296 report.cc:148] Server CPU usage: 0.00%
I0123 00:26:29.828784000 140736237159296 report.cc:153] Client Polls per Request: 0.00
I0123 00:26:29.828788000 140736237159296 report.cc:155] Server Polls per Request: 0.00
I0123 00:26:29.828792000 140736237159296 report.cc:160] Server Queries/CPU-sec: 309.58
I0123 00:26:29.828795000 140736237159296 report.cc:162] Client Queries/CPU-sec: 309.75
```
## Future Work (TODOs)
1. Generate a target for each scenario.
2. Simplify the main entry point of our benchmark-related code, or make it depend on Bazel.

@@ -0,0 +1,45 @@
#! /bin/bash
# Copyright 2019 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This test benchmarks Python client/server.
set -ex
declare -a DRIVER_PORTS=("10086" "10087")
SCENARIOS_FILE=src/python/grpcio_tests/tests/qps/scenarios.json
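# join SEP ARGS...: print ARGS joined by SEP, e.g. `join , a b` prints "a,b".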
function join { local IFS="$1"; shift; echo "$*"; }
if [[ -e "${SCENARIOS_FILE}" ]]; then
echo "Running against scenarios.json:"
cat "${SCENARIOS_FILE}"
else
echo "Failed to find scenarios.json!"
exit 1
fi
echo "Starting Python qps workers..."
qps_workers=()
for DRIVER_PORT in "${DRIVER_PORTS[@]}"
do
echo -e "\tRunning Python qps worker listening at localhost:${DRIVER_PORT}..."
src/python/grpcio_tests/tests/qps/qps_worker \
--driver_port="${DRIVER_PORT}" &
qps_workers+=("localhost:${DRIVER_PORT}")
done
echo "Running qps json driver..."
QPS_WORKERS=$(join , "${qps_workers[@]}")
export QPS_WORKERS
test/cpp/qps/qps_json_driver --scenarios_file="${SCENARIOS_FILE}"

@@ -0,0 +1,96 @@
{
"scenarios": [
{
"name": "python_protobuf_async_unary_ping_pong",
"clientConfig": {
"clientType": "ASYNC_CLIENT",
"securityParams": {
"useTestCa": true,
"serverHostOverride": "foo.test.google.fr"
},
"outstandingRpcsPerChannel": 1,
"clientChannels": 1,
"asyncClientThreads": 1,
"loadParams": {
"closedLoop": {}
},
"payloadConfig": {
"simpleParams": {}
},
"histogramParams": {
"resolution": 0.01,
"maxPossible": 60000000000
},
"channelArgs": [
{
"name": "grpc.optimization_target",
"strValue": "latency"
}
]
},
"numClients": 1,
"serverConfig": {
"serverType": "ASYNC_SERVER",
"securityParams": {
"useTestCa": true,
"serverHostOverride": "foo.test.google.fr"
},
"channelArgs": [
{
"name": "grpc.optimization_target",
"strValue": "latency"
}
]
},
"numServers": 1,
"warmupSeconds": 2,
"benchmarkSeconds": 10
},
{
"name": "python_protobuf_sync_streaming_ping_pong",
"clientConfig": {
"securityParams": {
"useTestCa": true,
"serverHostOverride": "foo.test.google.fr"
},
"outstandingRpcsPerChannel": 1,
"clientChannels": 1,
"asyncClientThreads": 1,
"rpcType": "STREAMING",
"loadParams": {
"closedLoop": {}
},
"payloadConfig": {
"simpleParams": {}
},
"histogramParams": {
"resolution": 0.01,
"maxPossible": 60000000000
},
"channelArgs": [
{
"name": "grpc.optimization_target",
"strValue": "latency"
}
]
},
"numClients": 1,
"serverConfig": {
"serverType": "ASYNC_SERVER",
"securityParams": {
"useTestCa": true,
"serverHostOverride": "foo.test.google.fr"
},
"channelArgs": [
{
"name": "grpc.optimization_target",
"strValue": "latency"
}
]
},
"numServers": 1,
"warmupSeconds": 2,
"benchmarkSeconds": 10
}
]
}

@@ -14,8 +14,10 @@
licenses(["notice"]) # Apache v2
load("//bazel:grpc_build_system.bzl", "grpc_cc_test", "grpc_cc_library", "grpc_cc_binary", "grpc_package")
load("//test/cpp/qps:qps_benchmark_script.bzl", "qps_json_driver_batch", "json_run_localhost_batch")
package(default_visibility = ["//visibility:public"])
load("//bazel:grpc_build_system.bzl", "grpc_cc_binary", "grpc_cc_library", "grpc_cc_test", "grpc_package")
load("//test/cpp/qps:qps_benchmark_script.bzl", "json_run_localhost_batch", "qps_json_driver_batch")
grpc_package(name = "test/cpp/qps")
