Add protobuf to benchmark

pull/20753/head
Richard Belleville 5 years ago
parent 90cded9c44
commit 8ba4105972
  1. src/python/grpcio_tests/tests/stress/BUILD.bazel (21 changed lines)
  2. src/python/grpcio_tests/tests/stress/unary_stream_benchmark.py (30 changed lines)

@@ -1,3 +1,22 @@
+load("@com_github_grpc_grpc//bazel:python_rules.bzl", "py_proto_library", "py_grpc_library")
+
+proto_library(
+    name = "unary_stream_benchmark_proto",
+    srcs = ["unary_stream_benchmark.proto"],
+    deps = [],
+)
+
+py_proto_library(
+    name = "unary_stream_benchmark_py_pb2",
+    deps = [":unary_stream_benchmark_proto"],
+)
+
+py_grpc_library(
+    name = "unary_stream_benchmark_py_pb2_grpc",
+    srcs = [":unary_stream_benchmark_proto"],
+    deps = [":unary_stream_benchmark_py_pb2"],
+)
+
 py_binary(
     name = "unary_stream_benchmark",
     srcs_version = "PY3",
@@ -5,5 +24,7 @@ py_binary(
     srcs = ["unary_stream_benchmark.py"],
     deps = [
         "//src/python/grpcio/grpc:grpcio",
+        ":unary_stream_benchmark_py_pb2",
+        ":unary_stream_benchmark_py_pb2_grpc",
     ]
 )
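
The new rules form a chain: proto_library wraps unary_stream_benchmark.proto (the .proto file itself is not part of this diff), py_proto_library generates the message module, py_grpc_library generates the stub/servicer module, and both generated targets are added to the py_binary deps. As a rough illustration, this is the surface those deps make importable in the benchmark; the message fields are inferred from how the Python file below uses them, not from the .proto:

# Illustrative only: generated-module surface inferred from the benchmark's
# call sites below, not from the (unshown) unary_stream_benchmark.proto.
from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2
from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2_grpc

# Messages generated by py_proto_library:
request = unary_stream_benchmark_pb2.BenchmarkRequest(message_size=4, response_count=1024)
response = unary_stream_benchmark_pb2.BenchmarkResponse(response=b'\x00\x01' * 2)

# Stub and servicer base class generated by py_grpc_library:
stub_class = unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceStub
servicer_base = unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceServicer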

@@ -8,6 +8,9 @@ import contextlib
 import datetime
 import sys
 
+from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2
+from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2_grpc
+
 _PORT = 5741
 _MESSAGE_SIZE = 4
 _RESPONSE_COUNT = 32 * 1024
@@ -18,21 +21,20 @@ import datetime
 import threading
 import grpc
 from concurrent import futures
+from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2
+from src.python.grpcio_tests.tests.stress import unary_stream_benchmark_pb2_grpc
 
-def _handler_behavior(request, context):
-    message_size, response_count = request.decode('ascii').split(',')
-    for _ in range(int(response_count)):
-        yield b'\\x00\\x01' * int(int(message_size) / 2)
-
-class _Handler(grpc.GenericRpcHandler):
-
-    def service(self, handler_call_details):
-        return grpc.unary_stream_rpc_method_handler(_handler_behavior)
+class Handler(unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceServicer):
+
+    def Benchmark(self, request, context):
+        payload = b'\\x00\\x01' * int(request.message_size / 2)
+        for _ in range(request.response_count):
+            yield unary_stream_benchmark_pb2.BenchmarkResponse(response=payload)
 
 server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
 server.add_insecure_port('[::]:%d')
-server.add_generic_rpc_handlers((_Handler(),))
+unary_stream_benchmark_pb2_grpc.add_UnaryStreamBenchmarkServiceServicer_to_server(Handler(), server)
 server.start()
 server.wait_for_termination()
 """ % _PORT
@@ -49,13 +51,17 @@ def _running_server():
         yield
     finally:
         server_process.terminate()
+        server_process.wait()
+        sys.stdout.write("stdout: {}".format(server_process.stdout.read())); sys.stdout.flush()
+        sys.stdout.write("stderr: {}".format(server_process.stderr.read())); sys.stdout.flush()
 
 def profile(message_size, response_count):
+    request = unary_stream_benchmark_pb2.BenchmarkRequest(message_size=message_size, response_count=response_count)
     with grpc.insecure_channel('[::]:{}'.format(_PORT), options=_GRPC_CHANNEL_OPTIONS) as channel:
-        call = channel.unary_stream('foo')
+        stub = unary_stream_benchmark_pb2_grpc.UnaryStreamBenchmarkServiceStub(channel)
         start = datetime.datetime.now()
-        request = '{},{}'.format(message_size, response_count).encode('ascii')
-        for message in call(request, wait_for_ready=True):
+        call = stub.Benchmark(request, wait_for_ready=True)
+        for message in call:
             pass
         end = datetime.datetime.now()
         return end - start
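
Not shown in the diff is the module's entry point; assuming _running_server() is the contextlib-based context manager its try/yield/finally structure suggests, a minimal driver for these helpers might look like:

# Illustrative driver only; the real module's __main__ handling is not part
# of this diff. _MESSAGE_SIZE and _RESPONSE_COUNT are the constants defined
# near the top of the file.
def main():
    with _running_server():
        duration = profile(_MESSAGE_SIZE, _RESPONSE_COUNT)
        print('{} responses of {} bytes took {}'.format(
            _RESPONSE_COUNT, _MESSAGE_SIZE, duration))


if __name__ == '__main__':
    main()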
