[Python] Migrate from yapf to black (#33138)

- Switched from yapf to black
- Reconfigure isort for black
- Resolve black/pylint idiosyncrasies 

Note: I used `--experimental-string-processing` because black was
producing "implicit string concatenation", similar to what is described
here: https://github.com/psf/black/issues/1837. While currently this
feature is experimental, it will be enabled by default:
https://github.com/psf/black/issues/2188. I first ran black with the
new string processing enabled so that the generated code merges these
`"hello" " world"` string concatenations; then I removed
`--experimental-string-processing` for stability and regenerated the
code again.

To the reviewer: don't even try to open "Files Changed" tab 😄 It's
better to review commit-by-commit, and ignore `run black and isort`.
pull/33394/head
Sergii Tkachenko 1 year ago committed by GitHub
parent e9e5dc0cf6
commit de6ed9ba9f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 2
      .gitignore
  2. 51
      black.toml
  3. 40
      examples/python/async_streaming/client.py
  4. 15
      examples/python/async_streaming/server.py
  5. 8
      examples/python/auth/_credentials.py
  6. 43
      examples/python/auth/async_customized_auth_client.py
  7. 58
      examples/python/auth/async_customized_auth_server.py
  8. 36
      examples/python/auth/customized_auth_client.py
  9. 51
      examples/python/auth/customized_auth_server.py
  10. 20
      examples/python/auth/test/_auth_example_test.py
  11. 66
      examples/python/cancellation/client.py
  12. 35
      examples/python/cancellation/search.py
  13. 45
      examples/python/cancellation/server.py
  14. 53
      examples/python/cancellation/test/_cancellation_example_test.py
  15. 48
      examples/python/compression/client.py
  16. 68
      examples/python/compression/server.py
  17. 34
      examples/python/compression/test/compression_example_test.py
  18. 6
      examples/python/data_transmission/alts_client.py
  19. 9
      examples/python/data_transmission/alts_server.py
  20. 51
      examples/python/data_transmission/client.py
  21. 43
      examples/python/data_transmission/server.py
  22. 39
      examples/python/debug/asyncio_debug_server.py
  23. 19
      examples/python/debug/asyncio_get_stats.py
  24. 40
      examples/python/debug/asyncio_send_message.py
  25. 33
      examples/python/debug/debug_server.py
  26. 16
      examples/python/debug/get_stats.py
  27. 35
      examples/python/debug/send_message.py
  28. 18
      examples/python/debug/test/_debug_example_test.py
  29. 14
      examples/python/errors/client.py
  30. 27
      examples/python/errors/server.py
  31. 8
      examples/python/errors/test/_error_handling_example_test.py
  32. 11
      examples/python/health_checking/greeter_client.py
  33. 19
      examples/python/health_checking/greeter_server.py
  34. 17
      examples/python/hellostreamingworld/async_greeter_client.py
  35. 6
      examples/python/hellostreamingworld/async_greeter_server.py
  36. 6
      examples/python/helloworld/async_greeter_client.py
  37. 20
      examples/python/helloworld/async_greeter_client_with_options.py
  38. 13
      examples/python/helloworld/async_greeter_server.py
  39. 17
      examples/python/helloworld/async_greeter_server_with_graceful_shutdown.py
  40. 15
      examples/python/helloworld/async_greeter_server_with_reflection.py
  41. 6
      examples/python/helloworld/greeter_client.py
  42. 12
      examples/python/helloworld/greeter_client_reflection.py
  43. 20
      examples/python/helloworld/greeter_client_with_options.py
  44. 9
      examples/python/helloworld/greeter_server.py
  45. 9
      examples/python/helloworld/greeter_server_with_reflection.py
  46. 17
      examples/python/interceptors/async/async_greeter_client.py
  47. 37
      examples/python/interceptors/async/async_greeter_server_with_interceptor.py
  48. 22
      examples/python/interceptors/default_value/default_value_client_interceptor.py
  49. 21
      examples/python/interceptors/default_value/greeter_client.py
  50. 38
      examples/python/interceptors/headers/generic_client_interceptor.py
  51. 18
      examples/python/interceptors/headers/greeter_client.py
  52. 25
      examples/python/interceptors/headers/greeter_server.py
  53. 35
      examples/python/interceptors/headers/header_manipulator_client_interceptor.py
  54. 8
      examples/python/interceptors/headers/request_header_validator_interceptor.py
  55. 26
      examples/python/keep_alive/greeter_client.py
  56. 31
      examples/python/keep_alive/greeter_server.py
  57. 6
      examples/python/lb_policies/greeter_client.py
  58. 9
      examples/python/lb_policies/greeter_server.py
  59. 25
      examples/python/metadata/metadata_client.py
  60. 19
      examples/python/metadata/metadata_server.py
  61. 26
      examples/python/multiplex/multiplex_client.py
  62. 44
      examples/python/multiplex/multiplex_server.py
  63. 12
      examples/python/multiplex/route_guide_resources.py
  64. 32
      examples/python/multiplex/run_codegen.py
  65. 35
      examples/python/multiprocessing/client.py
  66. 31
      examples/python/multiprocessing/server.py
  67. 30
      examples/python/multiprocessing/test/_multiprocessing_example_test.py
  68. 6
      examples/python/no_codegen/greeter_client.py
  69. 7
      examples/python/no_codegen/greeter_server.py
  70. 47
      examples/python/retry/async_retry_client.py
  71. 30
      examples/python/retry/flaky_server.py
  72. 44
      examples/python/retry/retry_client.py
  73. 36
      examples/python/route_guide/asyncio_route_guide_client.py
  74. 67
      examples/python/route_guide/asyncio_route_guide_server.py
  75. 18
      examples/python/route_guide/route_guide_client.py
  76. 12
      examples/python/route_guide/route_guide_resources.py
  77. 35
      examples/python/route_guide/route_guide_server.py
  78. 16
      examples/python/route_guide/run_codegen.py
  79. 18
      examples/python/timeout/greeter_client.py
  80. 7
      examples/python/timeout/greeter_server.py
  81. 9
      examples/python/uds/async_greeter_client.py
  82. 15
      examples/python/uds/async_greeter_server.py
  83. 8
      examples/python/uds/greeter_client.py
  84. 9
      examples/python/uds/greeter_server.py
  85. 41
      examples/python/wait_for_ready/asyncio_wait_for_ready_example.py
  86. 6
      examples/python/wait_for_ready/test/_wait_for_ready_example_test.py
  87. 41
      examples/python/wait_for_ready/wait_for_ready_example.py
  88. 25
      examples/python/wait_for_ready/wait_for_ready_with_client_timeout_example_client.py
  89. 15
      examples/python/wait_for_ready/wait_for_ready_with_client_timeout_example_server.py
  90. 13
      examples/python/xds/client.py
  91. 74
      examples/python/xds/server.py
  92. 4
      setup.cfg
  93. 489
      setup.py
  94. 15
      src/abseil-cpp/gen_build_yaml.py
  95. 290
      src/abseil-cpp/preprocessed_builds.yaml.gen.py
  96. 34
      src/benchmark/gen_build_yaml.py
  97. 160
      src/boringssl/gen_build_yaml.py
  98. 231
      src/c-ares/gen_build_yaml.py
  99. 104
      src/csharp/Grpc.Tools.Tests/scripts/fakeprotoc.py
  100. 19
      src/objective-c/change-comments.py
  101. Some files were not shown because too many files have changed in this diff Show More

2
.gitignore vendored

@ -23,7 +23,7 @@ src/python/grpcio_*/=*
src/python/grpcio_*/build/
src/python/grpcio_*/LICENSE
src/python/grpcio_status/grpc_status/google/rpc/status.proto
yapf_virtual_environment/
black_virtual_environment/
isort_virtual_environment/
# Node installation output

@ -0,0 +1,51 @@
[tool.black]
line-length = 80
target-version = [
"py37",
"py38",
"py39",
"py310",
"py311",
]
extend-exclude = '''
# A regex preceded with ^/ will apply only to files and directories
# in the root of the project.
(
site-packages
| test/cpp/naming/resolver_component_tests_runner.py # AUTO-GENERATED
# AUTO-GENERATED from a template:
| grpc_version.py
| src/python/grpcio/grpc_core_dependencies.py
| src/python/grpcio/grpc/_grpcio_metadata.py
# AUTO-GENERATED BY make_grpcio_tools.py
| tools/distrib/python/grpcio_tools/protoc_lib_deps.py
| .*_pb2.py # autogenerated Protocol Buffer files
| .*_pb2_grpc.py # autogenerated Protocol Buffer gRPC files
)
'''
[tool.isort]
profile = "black"
line_length = 80
src_paths = [
"examples/python/data_transmission",
"examples/python/async_streaming",
"tools/run_tests/xds_k8s_test_driver",
"src/python/grpcio_tests",
"tools/run_tests",
]
known_first_party = [
"examples",
"src",
]
known_third_party = ["grpc"]
skip_glob = [
"third_party/*",
"*/env/*",
"*pb2*.py",
"*pb2*.pyi",
"**/site-packages/**/*",
]
single_line_exclusions = ["typing"]
force_single_line = true
force_sort_within_sections = true

@ -24,9 +24,12 @@ import phone_pb2_grpc
class CallMaker:
def __init__(self, executor: ThreadPoolExecutor, channel: grpc.Channel,
phone_number: str) -> None:
def __init__(
self,
executor: ThreadPoolExecutor,
channel: grpc.Channel,
phone_number: str,
) -> None:
self._executor = executor
self._channel = channel
self._stub = phone_pb2_grpc.PhoneStub(self._channel)
@ -39,8 +42,8 @@ class CallMaker:
self._consumer_future = None
def _response_watcher(
self,
response_iterator: Iterator[phone_pb2.StreamCallResponse]) -> None:
self, response_iterator: Iterator[phone_pb2.StreamCallResponse]
) -> None:
try:
for response in response_iterator:
# NOTE: All fields in Proto3 are optional. This is the recommended way
@ -52,7 +55,8 @@ class CallMaker:
self._on_call_state(response.call_state.state)
else:
raise RuntimeError(
"Received StreamCallResponse without call_info and call_state"
"Received StreamCallResponse without call_info and"
" call_state"
)
except Exception as e:
self._peer_responded.set()
@ -63,8 +67,11 @@ class CallMaker:
self._audio_session_link = call_info.media
def _on_call_state(self, call_state: phone_pb2.CallState.State) -> None:
logging.info("Call toward [%s] enters [%s] state", self._phone_number,
phone_pb2.CallState.State.Name(call_state))
logging.info(
"Call toward [%s] enters [%s] state",
self._phone_number,
phone_pb2.CallState.State.Name(call_state),
)
self._call_state = call_state
if call_state == phone_pb2.CallState.State.ACTIVE:
self._peer_responded.set()
@ -77,8 +84,9 @@ class CallMaker:
request.phone_number = self._phone_number
response_iterator = self._stub.StreamCall(iter((request,)))
# Instead of consuming the response on current thread, spawn a consumption thread.
self._consumer_future = self._executor.submit(self._response_watcher,
response_iterator)
self._consumer_future = self._executor.submit(
self._response_watcher, response_iterator
)
def wait_peer(self) -> bool:
logging.info("Waiting for peer to connect [%s]...", self._phone_number)
@ -95,8 +103,9 @@ class CallMaker:
logging.info("Audio session finished [%s]", self._audio_session_link)
def process_call(executor: ThreadPoolExecutor, channel: grpc.Channel,
phone_number: str) -> None:
def process_call(
executor: ThreadPoolExecutor, channel: grpc.Channel, phone_number: str
) -> None:
call_maker = CallMaker(executor, channel, phone_number)
call_maker.call()
if call_maker.wait_peer():
@ -109,11 +118,12 @@ def process_call(executor: ThreadPoolExecutor, channel: grpc.Channel,
def run():
executor = ThreadPoolExecutor()
with grpc.insecure_channel("localhost:50051") as channel:
future = executor.submit(process_call, executor, channel,
"555-0100-XXXX")
future = executor.submit(
process_call, executor, channel, "555-0100-XXXX"
)
future.result()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
run()

@ -26,14 +26,14 @@ import phone_pb2_grpc
def create_state_response(
call_state: phone_pb2.CallState.State) -> phone_pb2.StreamCallResponse:
call_state: phone_pb2.CallState.State,
) -> phone_pb2.StreamCallResponse:
response = phone_pb2.StreamCallResponse()
response.call_state.state = call_state
return response
class Phone(phone_pb2_grpc.PhoneServicer):
def __init__(self):
self._id_counter = 0
self._lock = threading.RLock()
@ -51,13 +51,16 @@ class Phone(phone_pb2_grpc.PhoneServicer):
logging.info("Call session cleaned [%s]", MessageToJson(call_info))
def StreamCall(
self, request_iterator: Iterable[phone_pb2.StreamCallRequest],
context: grpc.ServicerContext
self,
request_iterator: Iterable[phone_pb2.StreamCallRequest],
context: grpc.ServicerContext,
) -> Iterable[phone_pb2.StreamCallResponse]:
try:
request = next(request_iterator)
logging.info("Received a phone call request for number [%s]",
request.phone_number)
logging.info(
"Received a phone call request for number [%s]",
request.phone_number,
)
except StopIteration:
raise RuntimeError("Failed to receive call request")
# Simulate the acceptance of call request

@ -18,10 +18,10 @@ import os
def _load_credential_from_file(filepath):
real_path = os.path.join(os.path.dirname(__file__), filepath)
with open(real_path, 'rb') as f:
with open(real_path, "rb") as f:
return f.read()
SERVER_CERTIFICATE = _load_credential_from_file('credentials/localhost.crt')
SERVER_CERTIFICATE_KEY = _load_credential_from_file('credentials/localhost.key')
ROOT_CERTIFICATE = _load_credential_from_file('credentials/root.crt')
SERVER_CERTIFICATE = _load_credential_from_file("credentials/localhost.crt")
SERVER_CERTIFICATE_KEY = _load_credential_from_file("credentials/localhost.key")
ROOT_CERTIFICATE = _load_credential_from_file("credentials/root.crt")

@ -21,19 +21,22 @@ import _credentials
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
_LOGGER = logging.getLogger(__name__)
_LOGGER.setLevel(logging.INFO)
_SERVER_ADDR_TEMPLATE = 'localhost:%d'
_SIGNATURE_HEADER_KEY = 'x-signature'
_SERVER_ADDR_TEMPLATE = "localhost:%d"
_SIGNATURE_HEADER_KEY = "x-signature"
class AuthGateway(grpc.AuthMetadataPlugin):
def __call__(self, context: grpc.AuthMetadataContext,
callback: grpc.AuthMetadataPluginCallback) -> None:
def __call__(
self,
context: grpc.AuthMetadataContext,
callback: grpc.AuthMetadataPluginCallback,
) -> None:
"""Implements authentication by passing metadata to a callback.
Implementations of this method must not block.
@ -54,11 +57,13 @@ class AuthGateway(grpc.AuthMetadataPlugin):
def create_client_channel(addr: str) -> grpc.aio.Channel:
# Call credential object will be invoked for every single RPC
call_credentials = grpc.metadata_call_credentials(AuthGateway(),
name='auth gateway')
call_credentials = grpc.metadata_call_credentials(
AuthGateway(), name="auth gateway"
)
# Channel credential will be valid for the entire channel
channel_credential = grpc.ssl_channel_credentials(
_credentials.ROOT_CERTIFICATE)
_credentials.ROOT_CERTIFICATE
)
# Combining channel credentials and call credentials together
composite_credentials = grpc.composite_channel_credentials(
channel_credential,
@ -70,24 +75,26 @@ def create_client_channel(addr: str) -> grpc.aio.Channel:
async def send_rpc(channel: grpc.aio.Channel) -> helloworld_pb2.HelloReply:
stub = helloworld_pb2_grpc.GreeterStub(channel)
request = helloworld_pb2.HelloRequest(name='you')
request = helloworld_pb2.HelloRequest(name="you")
try:
response = await stub.SayHello(request)
except grpc.RpcError as rpc_error:
_LOGGER.error('Received error: %s', rpc_error)
_LOGGER.error("Received error: %s", rpc_error)
return rpc_error
else:
_LOGGER.info('Received message: %s', response)
_LOGGER.info("Received message: %s", response)
return response
async def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument('--port',
nargs='?',
type=int,
default=50051,
help='the address of server')
parser.add_argument(
"--port",
nargs="?",
type=int,
default=50051,
help="the address of server",
)
args = parser.parse_args()
channel = create_client_channel(_SERVER_ADDR_TEMPLATE % args.port)
@ -95,6 +102,6 @@ async def main() -> None:
await channel.close()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.run(main())

@ -22,34 +22,35 @@ import _credentials
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
_LOGGER = logging.getLogger(__name__)
_LOGGER.setLevel(logging.INFO)
_LISTEN_ADDRESS_TEMPLATE = 'localhost:%d'
_SIGNATURE_HEADER_KEY = 'x-signature'
_LISTEN_ADDRESS_TEMPLATE = "localhost:%d"
_SIGNATURE_HEADER_KEY = "x-signature"
class SignatureValidationInterceptor(grpc.aio.ServerInterceptor):
def __init__(self):
def abort(ignored_request, context: grpc.aio.ServicerContext) -> None:
context.abort(grpc.StatusCode.UNAUTHENTICATED, 'Invalid signature')
context.abort(grpc.StatusCode.UNAUTHENTICATED, "Invalid signature")
self._abort_handler = grpc.unary_unary_rpc_method_handler(abort)
async def intercept_service(
self, continuation: Callable[[grpc.HandlerCallDetails],
Awaitable[grpc.RpcMethodHandler]],
handler_call_details: grpc.HandlerCallDetails
self,
continuation: Callable[
[grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]
],
handler_call_details: grpc.HandlerCallDetails,
) -> grpc.RpcMethodHandler:
# Example HandlerCallDetails object:
# _HandlerCallDetails(
# method=u'/helloworld.Greeter/SayHello',
# invocation_metadata=...)
method_name = handler_call_details.method.split('/')[-1]
method_name = handler_call_details.method.split("/")[-1]
expected_metadata = (_SIGNATURE_HEADER_KEY, method_name[::-1])
if expected_metadata in handler_call_details.invocation_metadata:
return await continuation(handler_call_details)
@ -58,10 +59,10 @@ class SignatureValidationInterceptor(grpc.aio.ServerInterceptor):
class SimpleGreeter(helloworld_pb2_grpc.GreeterServicer):
async def SayHello(self, request: helloworld_pb2.HelloRequest,
unused_context) -> helloworld_pb2.HelloReply:
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
async def SayHello(
self, request: helloworld_pb2.HelloRequest, unused_context
) -> helloworld_pb2.HelloReply:
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
async def run_server(port: int) -> Tuple[grpc.aio.Server, int]:
@ -70,14 +71,19 @@ async def run_server(port: int) -> Tuple[grpc.aio.Server, int]:
helloworld_pb2_grpc.add_GreeterServicer_to_server(SimpleGreeter(), server)
# Loading credentials
server_credentials = grpc.ssl_server_credentials(((
_credentials.SERVER_CERTIFICATE_KEY,
_credentials.SERVER_CERTIFICATE,
),))
server_credentials = grpc.ssl_server_credentials(
(
(
_credentials.SERVER_CERTIFICATE_KEY,
_credentials.SERVER_CERTIFICATE,
),
)
)
# Pass down credentials
port = server.add_secure_port(_LISTEN_ADDRESS_TEMPLATE % port,
server_credentials)
port = server.add_secure_port(
_LISTEN_ADDRESS_TEMPLATE % port, server_credentials
)
await server.start()
return server, port
@ -85,18 +91,16 @@ async def run_server(port: int) -> Tuple[grpc.aio.Server, int]:
async def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument('--port',
nargs='?',
type=int,
default=50051,
help='the listening port')
parser.add_argument(
"--port", nargs="?", type=int, default=50051, help="the listening port"
)
args = parser.parse_args()
server, port = await run_server(args.port)
logging.info('Server is listening at port :%d', port)
logging.info("Server is listening at port :%d", port)
await server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.run(main())

@ -21,17 +21,17 @@ import _credentials
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
_LOGGER = logging.getLogger(__name__)
_LOGGER.setLevel(logging.INFO)
_SERVER_ADDR_TEMPLATE = 'localhost:%d'
_SIGNATURE_HEADER_KEY = 'x-signature'
_SERVER_ADDR_TEMPLATE = "localhost:%d"
_SIGNATURE_HEADER_KEY = "x-signature"
class AuthGateway(grpc.AuthMetadataPlugin):
def __call__(self, context, callback):
"""Implements authentication by passing metadata to a callback.
@ -55,11 +55,13 @@ class AuthGateway(grpc.AuthMetadataPlugin):
@contextlib.contextmanager
def create_client_channel(addr):
# Call credential object will be invoked for every single RPC
call_credentials = grpc.metadata_call_credentials(AuthGateway(),
name='auth gateway')
call_credentials = grpc.metadata_call_credentials(
AuthGateway(), name="auth gateway"
)
# Channel credential will be valid for the entire channel
channel_credential = grpc.ssl_channel_credentials(
_credentials.ROOT_CERTIFICATE)
_credentials.ROOT_CERTIFICATE
)
# Combining channel credentials and call credentials together
composite_credentials = grpc.composite_channel_credentials(
channel_credential,
@ -71,30 +73,32 @@ def create_client_channel(addr):
def send_rpc(channel):
stub = helloworld_pb2_grpc.GreeterStub(channel)
request = helloworld_pb2.HelloRequest(name='you')
request = helloworld_pb2.HelloRequest(name="you")
try:
response = stub.SayHello(request)
except grpc.RpcError as rpc_error:
_LOGGER.error('Received error: %s', rpc_error)
_LOGGER.error("Received error: %s", rpc_error)
return rpc_error
else:
_LOGGER.info('Received message: %s', response)
_LOGGER.info("Received message: %s", response)
return response
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--port',
nargs='?',
type=int,
default=50051,
help='the address of server')
parser.add_argument(
"--port",
nargs="?",
type=int,
default=50051,
help="the address of server",
)
args = parser.parse_args()
with create_client_channel(_SERVER_ADDR_TEMPLATE % args.port) as channel:
send_rpc(channel)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
main()

@ -22,21 +22,20 @@ import _credentials
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
_LOGGER = logging.getLogger(__name__)
_LOGGER.setLevel(logging.INFO)
_LISTEN_ADDRESS_TEMPLATE = 'localhost:%d'
_SIGNATURE_HEADER_KEY = 'x-signature'
_LISTEN_ADDRESS_TEMPLATE = "localhost:%d"
_SIGNATURE_HEADER_KEY = "x-signature"
class SignatureValidationInterceptor(grpc.ServerInterceptor):
def __init__(self):
def abort(ignored_request, context):
context.abort(grpc.StatusCode.UNAUTHENTICATED, 'Invalid signature')
context.abort(grpc.StatusCode.UNAUTHENTICATED, "Invalid signature")
self._abortion = grpc.unary_unary_rpc_method_handler(abort)
@ -45,7 +44,7 @@ class SignatureValidationInterceptor(grpc.ServerInterceptor):
# _HandlerCallDetails(
# method=u'/helloworld.Greeter/SayHello',
# invocation_metadata=...)
method_name = handler_call_details.method.split('/')[-1]
method_name = handler_call_details.method.split("/")[-1]
expected_metadata = (_SIGNATURE_HEADER_KEY, method_name[::-1])
if expected_metadata in handler_call_details.invocation_metadata:
return continuation(handler_call_details)
@ -54,27 +53,33 @@ class SignatureValidationInterceptor(grpc.ServerInterceptor):
class SimpleGreeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, unused_context):
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
@contextlib.contextmanager
def run_server(port):
# Bind interceptor to server
server = grpc.server(futures.ThreadPoolExecutor(),
interceptors=(SignatureValidationInterceptor(),))
server = grpc.server(
futures.ThreadPoolExecutor(),
interceptors=(SignatureValidationInterceptor(),),
)
helloworld_pb2_grpc.add_GreeterServicer_to_server(SimpleGreeter(), server)
# Loading credentials
server_credentials = grpc.ssl_server_credentials(((
_credentials.SERVER_CERTIFICATE_KEY,
_credentials.SERVER_CERTIFICATE,
),))
server_credentials = grpc.ssl_server_credentials(
(
(
_credentials.SERVER_CERTIFICATE_KEY,
_credentials.SERVER_CERTIFICATE,
),
)
)
# Pass down credentials
port = server.add_secure_port(_LISTEN_ADDRESS_TEMPLATE % port,
server_credentials)
port = server.add_secure_port(
_LISTEN_ADDRESS_TEMPLATE % port, server_credentials
)
server.start()
try:
@ -85,18 +90,16 @@ def run_server(port):
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--port',
nargs='?',
type=int,
default=50051,
help='the listening port')
parser.add_argument(
"--port", nargs="?", type=int, default=50051, help="the listening port"
)
args = parser.parse_args()
with run_server(args.port) as (server, port):
logging.info('Server is listening at port :%d', port)
logging.info("Server is listening at port :%d", port)
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
main()

@ -24,15 +24,15 @@ from examples.python.auth import async_customized_auth_server
from examples.python.auth import customized_auth_client
from examples.python.auth import customized_auth_server
_SERVER_ADDR_TEMPLATE = 'localhost:%d'
_SERVER_ADDR_TEMPLATE = "localhost:%d"
class AuthExampleTest(unittest.TestCase):
def test_successful_call(self):
with customized_auth_server.run_server(0) as (_, port):
with customized_auth_client.create_client_channel(
_SERVER_ADDR_TEMPLATE % port) as channel:
_SERVER_ADDR_TEMPLATE % port
) as channel:
customized_auth_client.send_rpc(channel)
# No unhandled exception raised, test passed!
@ -45,18 +45,20 @@ class AuthExampleTest(unittest.TestCase):
def test_no_call_credential(self):
with customized_auth_server.run_server(0) as (_, port):
channel_credential = grpc.ssl_channel_credentials(
_credentials.ROOT_CERTIFICATE)
with grpc.secure_channel(_SERVER_ADDR_TEMPLATE % port,
channel_credential) as channel:
_credentials.ROOT_CERTIFICATE
)
with grpc.secure_channel(
_SERVER_ADDR_TEMPLATE % port, channel_credential
) as channel:
resp = customized_auth_client.send_rpc(channel)
self.assertEqual(resp.code(), grpc.StatusCode.UNAUTHENTICATED)
def test_successful_call_asyncio(self):
async def test_body():
server, port = await async_customized_auth_server.run_server(0)
channel = async_customized_auth_client.create_client_channel(
_SERVER_ADDR_TEMPLATE % port)
_SERVER_ADDR_TEMPLATE % port
)
await async_customized_auth_client.send_rpc(channel)
await channel.close()
await server.stop(0)
@ -65,5 +67,5 @@ class AuthExampleTest(unittest.TestCase):
asyncio.get_event_loop().run_until_complete(test_body())
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(verbosity=2)

@ -34,9 +34,12 @@ _LOGGER = logging.getLogger(__name__)
def run_unary_client(server_target, name, ideal_distance):
with grpc.insecure_channel(server_target) as channel:
stub = hash_name_pb2_grpc.HashFinderStub(channel)
future = stub.Find.future(hash_name_pb2.HashNameRequest(
desired_name=name, ideal_hamming_distance=ideal_distance),
wait_for_ready=True)
future = stub.Find.future(
hash_name_pb2.HashNameRequest(
desired_name=name, ideal_hamming_distance=ideal_distance
),
wait_for_ready=True,
)
def cancel_request(unused_signum, unused_frame):
future.cancel()
@ -47,15 +50,19 @@ def run_unary_client(server_target, name, ideal_distance):
print(result)
def run_streaming_client(server_target, name, ideal_distance,
interesting_distance):
def run_streaming_client(
server_target, name, ideal_distance, interesting_distance
):
with grpc.insecure_channel(server_target) as channel:
stub = hash_name_pb2_grpc.HashFinderStub(channel)
result_generator = stub.FindRange(hash_name_pb2.HashNameRequest(
desired_name=name,
ideal_hamming_distance=ideal_distance,
interesting_hamming_distance=interesting_distance),
wait_for_ready=True)
result_generator = stub.FindRange(
hash_name_pb2.HashNameRequest(
desired_name=name,
ideal_hamming_distance=ideal_distance,
interesting_hamming_distance=interesting_distance,
),
wait_for_ready=True,
)
def cancel_request(unused_signum, unused_frame):
result_generator.cancel()
@ -68,29 +75,36 @@ def run_streaming_client(server_target, name, ideal_distance,
def main():
parser = argparse.ArgumentParser(description=_DESCRIPTION)
parser.add_argument("name", type=str, help='The desired name.')
parser.add_argument("--ideal-distance",
default=0,
nargs='?',
type=int,
help="The desired Hamming distance.")
parser.add_argument('--server',
default='localhost:50051',
type=str,
nargs='?',
help='The host-port pair at which to reach the server.')
parser.add_argument("name", type=str, help="The desired name.")
parser.add_argument(
'--show-inferior',
"--ideal-distance",
default=0,
nargs="?",
type=int,
help="The desired Hamming distance.",
)
parser.add_argument(
"--server",
default="localhost:50051",
type=str,
nargs="?",
help="The host-port pair at which to reach the server.",
)
parser.add_argument(
"--show-inferior",
default=None,
type=int,
nargs='?',
help='Also show candidates with a Hamming distance less than this value.'
nargs="?",
help=(
"Also show candidates with a Hamming distance less than this value."
),
)
args = parser.parse_args()
if args.show_inferior is not None:
run_streaming_client(args.server, args.name, args.ideal_distance,
args.show_inferior)
run_streaming_client(
args.server, args.name, args.ideal_distance, args.show_inferior
)
else:
run_unary_client(args.server, args.name, args.ideal_distance)

@ -53,8 +53,9 @@ def _get_substring_hamming_distance(candidate, target):
if len(target) > len(candidate):
raise ValueError("Candidate must be at least as long as target.")
for i in range(len(candidate) - len(target) + 1):
distance = _get_hamming_distance(candidate[i:i + len(target)].lower(),
target.lower())
distance = _get_hamming_distance(
candidate[i : i + len(target)].lower(), target.lower()
)
if min_distance is None or distance < min_distance:
min_distance = distance
return min_distance
@ -63,7 +64,7 @@ def _get_substring_hamming_distance(candidate, target):
def _get_hash(secret):
hasher = hashlib.sha1()
hasher.update(secret)
return base64.b64encode(hasher.digest()).decode('ascii')
return base64.b64encode(hasher.digest()).decode("ascii")
class ResourceLimitExceededError(Exception):
@ -80,7 +81,7 @@ def _bytestrings_of_length(length):
All bytestrings of length `length`.
"""
for digits in itertools.product(range(_BYTE_MAX), repeat=length):
yield b''.join(struct.pack('B', i) for i in digits)
yield b"".join(struct.pack("B", i) for i in digits)
def _all_bytestrings():
@ -92,15 +93,18 @@ def _all_bytestrings():
All bytestrings in ascending order of length.
"""
for bytestring in itertools.chain.from_iterable(
_bytestrings_of_length(length) for length in itertools.count()):
_bytestrings_of_length(length) for length in itertools.count()
):
yield bytestring
def search(target,
ideal_distance,
stop_event,
maximum_hashes,
interesting_hamming_distance=None):
def search(
target,
ideal_distance,
stop_event,
maximum_hashes,
interesting_hamming_distance=None,
):
"""Find candidate strings.
Search through the space of all bytestrings, in order of increasing length,
@ -130,18 +134,23 @@ def search(target,
return
candidate_hash = _get_hash(secret)
distance = _get_substring_hamming_distance(candidate_hash, target)
if interesting_hamming_distance is not None and distance <= interesting_hamming_distance:
if (
interesting_hamming_distance is not None
and distance <= interesting_hamming_distance
):
# Surface interesting candidates, but don't stop.
yield hash_name_pb2.HashNameResponse(
secret=base64.b64encode(secret),
hashed_name=candidate_hash,
hamming_distance=distance)
hamming_distance=distance,
)
elif distance <= ideal_distance:
# Yield ideal candidate and end the stream.
yield hash_name_pb2.HashNameResponse(
secret=base64.b64encode(secret),
hashed_name=candidate_hash,
hamming_distance=distance)
hamming_distance=distance,
)
return
hashes_computed += 1
if hashes_computed == maximum_hashes:

@ -29,13 +29,12 @@ from examples.python.cancellation import hash_name_pb2
from examples.python.cancellation import hash_name_pb2_grpc
_LOGGER = logging.getLogger(__name__)
_SERVER_HOST = 'localhost'
_SERVER_HOST = "localhost"
_DESCRIPTION = "A server for finding hashes similar to names."
class HashFinder(hash_name_pb2_grpc.HashFinderServicer):
def __init__(self, maximum_hashes):
super(HashFinder, self).__init__()
self._maximum_hashes = maximum_hashes
@ -51,9 +50,13 @@ class HashFinder(hash_name_pb2_grpc.HashFinderServicer):
candidates = []
try:
candidates = list(
search.search(request.desired_name,
request.ideal_hamming_distance, stop_event,
self._maximum_hashes))
search.search(
request.desired_name,
request.ideal_hamming_distance,
stop_event,
self._maximum_hashes,
)
)
except search.ResourceLimitExceededError:
_LOGGER.info("Cancelling RPC due to exhausted resources.")
context.cancel()
@ -75,7 +78,8 @@ class HashFinder(hash_name_pb2_grpc.HashFinderServicer):
request.ideal_hamming_distance,
stop_event,
self._maximum_hashes,
interesting_hamming_distance=request.interesting_hamming_distance)
interesting_hamming_distance=request.interesting_hamming_distance,
)
try:
for candidate in secret_generator:
yield candidate
@ -89,11 +93,13 @@ def _running_server(port, maximum_hashes):
# We use only a single servicer thread here to demonstrate that, if managed
# carefully, cancelled RPCs can need not continue occupying servicers
# threads.
server = grpc.server(futures.ThreadPoolExecutor(max_workers=1),
maximum_concurrent_rpcs=1)
server = grpc.server(
futures.ThreadPoolExecutor(max_workers=1), maximum_concurrent_rpcs=1
)
hash_name_pb2_grpc.add_HashFinderServicer_to_server(
HashFinder(maximum_hashes), server)
address = '{}:{}'.format(_SERVER_HOST, port)
HashFinder(maximum_hashes), server
)
address = "{}:{}".format(_SERVER_HOST, port)
actual_port = server.add_insecure_port(address)
server.start()
print("Server listening at '{}'".format(address))
@ -102,17 +108,20 @@ def _running_server(port, maximum_hashes):
def main():
parser = argparse.ArgumentParser(description=_DESCRIPTION)
parser.add_argument('--port',
type=int,
default=50051,
nargs='?',
help='The port on which the server will listen.')
parser.add_argument(
'--maximum-hashes',
"--port",
type=int,
default=50051,
nargs="?",
help="The port on which the server will listen.",
)
parser.add_argument(
"--maximum-hashes",
type=int,
default=1000000,
nargs='?',
help='The maximum number of hashes to search before cancelling.')
nargs="?",
help="The maximum number of hashes to search before cancelling.",
)
args = parser.parse_args()
server = _running_server(args.port, args.maximum_hashes)
server.wait_for_termination()

@ -21,9 +21,10 @@ import subprocess
import unittest
_BINARY_DIR = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
_SERVER_PATH = os.path.join(_BINARY_DIR, 'server')
_CLIENT_PATH = os.path.join(_BINARY_DIR, 'client')
os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
)
_SERVER_PATH = os.path.join(_BINARY_DIR, "server")
_CLIENT_PATH = os.path.join(_BINARY_DIR, "client")
@contextlib.contextmanager
@ -32,33 +33,42 @@ def _get_port():
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0:
raise RuntimeError("Failed to set SO_REUSEPORT.")
sock.bind(('', 0))
sock.bind(("", 0))
try:
yield sock.getsockname()[1]
finally:
sock.close()
def _start_client(server_port,
desired_string,
ideal_distance,
interesting_distance=None):
interesting_distance_args = () if interesting_distance is None else (
'--show-inferior', interesting_distance)
return subprocess.Popen((_CLIENT_PATH, desired_string, '--server',
'localhost:{}'.format(server_port),
'--ideal-distance', str(ideal_distance)) +
interesting_distance_args)
def _start_client(
server_port, desired_string, ideal_distance, interesting_distance=None
):
interesting_distance_args = (
()
if interesting_distance is None
else ("--show-inferior", interesting_distance)
)
return subprocess.Popen(
(
_CLIENT_PATH,
desired_string,
"--server",
"localhost:{}".format(server_port),
"--ideal-distance",
str(ideal_distance),
)
+ interesting_distance_args
)
class CancellationExampleTest(unittest.TestCase):
def test_successful_run(self):
with _get_port() as test_port:
server_process = subprocess.Popen(
(_SERVER_PATH, '--port', str(test_port)))
(_SERVER_PATH, "--port", str(test_port))
)
try:
client_process = _start_client(test_port, 'aa', 0)
client_process = _start_client(test_port, "aa", 0)
client_return_code = client_process.wait()
self.assertEqual(0, client_return_code)
self.assertIsNone(server_process.poll())
@ -69,12 +79,13 @@ class CancellationExampleTest(unittest.TestCase):
def test_graceful_sigint(self):
with _get_port() as test_port:
server_process = subprocess.Popen(
(_SERVER_PATH, '--port', str(test_port)))
(_SERVER_PATH, "--port", str(test_port))
)
try:
client_process1 = _start_client(test_port, 'aaaaaaaaaa', 0)
client_process1 = _start_client(test_port, "aaaaaaaaaa", 0)
client_process1.send_signal(signal.SIGINT)
client_process1.wait()
client_process2 = _start_client(test_port, 'aa', 0)
client_process2 = _start_client(test_port, "aa", 0)
client_return_code = client_process2.wait()
self.assertEqual(0, client_return_code)
self.assertIsNone(server_process.poll())
@ -83,5 +94,5 @@ class CancellationExampleTest(unittest.TestCase):
server_process.wait()
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(verbosity=2)

@ -25,7 +25,7 @@ import grpc
from examples.protos import helloworld_pb2
from examples.protos import helloworld_pb2_grpc
_DESCRIPTION = 'A client capable of compression.'
_DESCRIPTION = "A client capable of compression."
_COMPRESSION_OPTIONS = {
"none": grpc.Compression.NoCompression,
"deflate": grpc.Compression.Deflate,
@ -36,33 +36,41 @@ _LOGGER = logging.getLogger(__name__)
def run_client(channel_compression, call_compression, target):
with grpc.insecure_channel(target,
compression=channel_compression) as channel:
with grpc.insecure_channel(
target, compression=channel_compression
) as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'),
compression=call_compression,
wait_for_ready=True)
response = stub.SayHello(
helloworld_pb2.HelloRequest(name="you"),
compression=call_compression,
wait_for_ready=True,
)
print("Response: {}".format(response))
def main():
parser = argparse.ArgumentParser(description=_DESCRIPTION)
parser.add_argument('--channel_compression',
default='none',
nargs='?',
choices=_COMPRESSION_OPTIONS.keys(),
help='The compression method to use for the channel.')
parser.add_argument(
'--call_compression',
default='none',
nargs='?',
"--channel_compression",
default="none",
nargs="?",
choices=_COMPRESSION_OPTIONS.keys(),
help='The compression method to use for an individual call.')
parser.add_argument('--server',
default='localhost:50051',
type=str,
nargs='?',
help='The host-port pair at which to reach the server.')
help="The compression method to use for the channel.",
)
parser.add_argument(
"--call_compression",
default="none",
nargs="?",
choices=_COMPRESSION_OPTIONS.keys(),
help="The compression method to use for an individual call.",
)
parser.add_argument(
"--server",
default="localhost:50051",
type=str,
nargs="?",
help="The host-port pair at which to reach the server.",
)
args = parser.parse_args()
channel_compression = _COMPRESSION_OPTIONS[args.channel_compression]
call_compression = _COMPRESSION_OPTIONS[args.call_compression]

@ -27,7 +27,7 @@ import grpc
from examples.protos import helloworld_pb2
from examples.protos import helloworld_pb2_grpc
_DESCRIPTION = 'A server capable of compression.'
_DESCRIPTION = "A server capable of compression."
_COMPRESSION_OPTIONS = {
"none": grpc.Compression.NoCompression,
"deflate": grpc.Compression.Deflate,
@ -35,11 +35,10 @@ _COMPRESSION_OPTIONS = {
}
_LOGGER = logging.getLogger(__name__)
_SERVER_HOST = 'localhost'
_SERVER_HOST = "localhost"
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def __init__(self, no_compress_every_n):
super(Greeter, self).__init__()
self._no_compress_every_n = 0
@ -49,7 +48,10 @@ class Greeter(helloworld_pb2_grpc.GreeterServicer):
def _should_suppress_compression(self):
suppress_compression = False
with self._counter_lock:
if self._no_compress_every_n and self._request_counter % self._no_compress_every_n == 0:
if (
self._no_compress_every_n
and self._request_counter % self._no_compress_every_n == 0
):
suppress_compression = True
self._request_counter += 1
return suppress_compression
@ -57,16 +59,19 @@ class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, context):
if self._should_suppress_compression():
context.set_response_compression(grpc.Compression.NoCompression)
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
def run_server(server_compression, no_compress_every_n, port):
server = grpc.server(futures.ThreadPoolExecutor(),
compression=server_compression,
options=(('grpc.so_reuseport', 1),))
server = grpc.server(
futures.ThreadPoolExecutor(),
compression=server_compression,
options=(("grpc.so_reuseport", 1),),
)
helloworld_pb2_grpc.add_GreeterServicer_to_server(
Greeter(no_compress_every_n), server)
address = '{}:{}'.format(_SERVER_HOST, port)
Greeter(no_compress_every_n), server
)
address = "{}:{}".format(_SERVER_HOST, port)
server.add_insecure_port(address)
server.start()
print("Server listening at '{}'".format(address))
@ -75,24 +80,33 @@ def run_server(server_compression, no_compress_every_n, port):
def main():
parser = argparse.ArgumentParser(description=_DESCRIPTION)
parser.add_argument('--server_compression',
default='none',
nargs='?',
choices=_COMPRESSION_OPTIONS.keys(),
help='The default compression method for the server.')
parser.add_argument('--no_compress_every_n',
type=int,
default=0,
nargs='?',
help='If set, every nth reply will be uncompressed.')
parser.add_argument('--port',
type=int,
default=50051,
nargs='?',
help='The port on which the server will listen.')
parser.add_argument(
"--server_compression",
default="none",
nargs="?",
choices=_COMPRESSION_OPTIONS.keys(),
help="The default compression method for the server.",
)
parser.add_argument(
"--no_compress_every_n",
type=int,
default=0,
nargs="?",
help="If set, every nth reply will be uncompressed.",
)
parser.add_argument(
"--port",
type=int,
default=50051,
nargs="?",
help="The port on which the server will listen.",
)
args = parser.parse_args()
run_server(_COMPRESSION_OPTIONS[args.server_compression],
args.no_compress_every_n, args.port)
run_server(
_COMPRESSION_OPTIONS[args.server_compression],
args.no_compress_every_n,
args.port,
)
if __name__ == "__main__":

@ -20,9 +20,10 @@ import subprocess
import unittest
_BINARY_DIR = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
_SERVER_PATH = os.path.join(_BINARY_DIR, 'server')
_CLIENT_PATH = os.path.join(_BINARY_DIR, 'client')
os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
)
_SERVER_PATH = os.path.join(_BINARY_DIR, "server")
_CLIENT_PATH = os.path.join(_BINARY_DIR, "client")
@contextlib.contextmanager
@ -31,7 +32,7 @@ def _get_port():
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0:
raise RuntimeError("Failed to set SO_REUSEPORT.")
sock.bind(('', 0))
sock.bind(("", 0))
try:
yield sock.getsockname()[1]
finally:
@ -39,17 +40,28 @@ def _get_port():
class CompressionExampleTest(unittest.TestCase):
def test_compression_example(self):
with _get_port() as test_port:
server_process = subprocess.Popen(
(_SERVER_PATH, '--port', str(test_port), '--server_compression',
'gzip'))
(
_SERVER_PATH,
"--port",
str(test_port),
"--server_compression",
"gzip",
)
)
try:
server_target = 'localhost:{}'.format(test_port)
server_target = "localhost:{}".format(test_port)
client_process = subprocess.Popen(
(_CLIENT_PATH, '--server', server_target,
'--channel_compression', 'gzip'))
(
_CLIENT_PATH,
"--server",
server_target,
"--channel_compression",
"gzip",
)
)
client_return_code = client_process.wait()
self.assertEqual(0, client_return_code)
self.assertIsNone(server_process.poll())
@ -58,5 +70,5 @@ class CompressionExampleTest(unittest.TestCase):
server_process.wait()
if __name__ == '__main__':
if __name__ == "__main__":
unittest.main(verbosity=2)

@ -28,8 +28,8 @@ SERVER_ADDRESS = "localhost:23333"
def main():
with grpc.secure_channel(
SERVER_ADDRESS,
credentials=grpc.alts_channel_credentials()) as channel:
SERVER_ADDRESS, credentials=grpc.alts_channel_credentials()
) as channel:
stub = demo_pb2_grpc.GRPCDemoStub(channel)
simple_method(stub)
client_streaming_method(stub)
@ -37,5 +37,5 @@ def main():
bidirectional_streaming_method(stub)
if __name__ == '__main__':
if __name__ == "__main__":
main()

@ -22,18 +22,19 @@ import grpc
import demo_pb2_grpc
from server import DemoServer
SERVER_ADDRESS = 'localhost:23333'
SERVER_ADDRESS = "localhost:23333"
def main():
svr = grpc.server(futures.ThreadPoolExecutor())
demo_pb2_grpc.add_GRPCDemoServicer_to_server(DemoServer(), svr)
svr.add_secure_port(SERVER_ADDRESS,
server_credentials=grpc.alts_server_credentials())
svr.add_secure_port(
SERVER_ADDRESS, server_credentials=grpc.alts_server_credentials()
)
print("------------------start Python GRPC server with ALTS encryption")
svr.start()
svr.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
main()

@ -21,8 +21,10 @@ import demo_pb2
import demo_pb2_grpc
__all__ = [
'simple_method', 'client_streaming_method', 'server_streaming_method',
'bidirectional_streaming_method'
"simple_method",
"client_streaming_method",
"server_streaming_method",
"bidirectional_streaming_method",
]
SERVER_ADDRESS = "localhost:23333"
@ -38,11 +40,14 @@ CLIENT_ID = 1
# only respond once.)
def simple_method(stub):
print("--------------Call SimpleMethod Begin--------------")
request = demo_pb2.Request(client_id=CLIENT_ID,
request_data="called by Python client")
request = demo_pb2.Request(
client_id=CLIENT_ID, request_data="called by Python client"
)
response = stub.SimpleMethod(request)
print("resp from server(%d), the message=%s" %
(response.server_id, response.response_data))
print(
"resp from server(%d), the message=%s"
% (response.server_id, response.response_data)
)
print("--------------Call SimpleMethod Over---------------")
@ -58,12 +63,15 @@ def client_streaming_method(stub):
for i in range(5):
request = demo_pb2.Request(
client_id=CLIENT_ID,
request_data=("called by Python client, message:%d" % i))
request_data="called by Python client, message:%d" % i,
)
yield request
response = stub.ClientStreamingMethod(request_messages())
print("resp from server(%d), the message=%s" %
(response.server_id, response.response_data))
print(
"resp from server(%d), the message=%s"
% (response.server_id, response.response_data)
)
print("--------------Call ClientStreamingMethod Over---------------")
@ -72,12 +80,15 @@ def client_streaming_method(stub):
# but the server can return the response many times.)
def server_streaming_method(stub):
print("--------------Call ServerStreamingMethod Begin--------------")
request = demo_pb2.Request(client_id=CLIENT_ID,
request_data="called by Python client")
request = demo_pb2.Request(
client_id=CLIENT_ID, request_data="called by Python client"
)
response_iterator = stub.ServerStreamingMethod(request)
for response in response_iterator:
print("recv from server(%d), message=%s" %
(response.server_id, response.response_data))
print(
"recv from server(%d), message=%s"
% (response.server_id, response.response_data)
)
print("--------------Call ServerStreamingMethod Over---------------")
@ -87,7 +98,8 @@ def server_streaming_method(stub):
# to each other multiple times.)
def bidirectional_streaming_method(stub):
print(
"--------------Call BidirectionalStreamingMethod Begin---------------")
"--------------Call BidirectionalStreamingMethod Begin---------------"
)
# 创建一个生成器
# create a generator
@ -95,14 +107,17 @@ def bidirectional_streaming_method(stub):
for i in range(5):
request = demo_pb2.Request(
client_id=CLIENT_ID,
request_data=("called by Python client, message: %d" % i))
request_data="called by Python client, message: %d" % i,
)
yield request
time.sleep(1)
response_iterator = stub.BidirectionalStreamingMethod(request_messages())
for response in response_iterator:
print("recv from server(%d), message=%s" %
(response.server_id, response.response_data))
print(
"recv from server(%d), message=%s"
% (response.server_id, response.response_data)
)
print("--------------Call BidirectionalStreamingMethod Over---------------")
@ -120,5 +135,5 @@ def main():
bidirectional_streaming_method(stub)
if __name__ == '__main__':
if __name__ == "__main__":
main()

@ -21,22 +21,24 @@ import grpc
import demo_pb2
import demo_pb2_grpc
__all__ = 'DemoServer'
SERVER_ADDRESS = 'localhost:23333'
__all__ = "DemoServer"
SERVER_ADDRESS = "localhost:23333"
SERVER_ID = 1
class DemoServer(demo_pb2_grpc.GRPCDemoServicer):
# 一元模式(在一次调用中, 客户端只能向服务器传输一次请求数据, 服务器也只能返回一次响应)
# unary-unary(In a single call, the client can only send request once, and the server can
# only respond once.)
def SimpleMethod(self, request, context):
print("SimpleMethod called by client(%d) the message: %s" %
(request.client_id, request.request_data))
print(
"SimpleMethod called by client(%d) the message: %s"
% (request.client_id, request.request_data)
)
response = demo_pb2.Response(
server_id=SERVER_ID,
response_data="Python server SimpleMethod Ok!!!!")
response_data="Python server SimpleMethod Ok!!!!",
)
return response
# 客户端流模式(在一次调用中, 客户端可以多次向服务器传输数据, 但是服务器只能返回一次响应)
@ -45,19 +47,24 @@ class DemoServer(demo_pb2_grpc.GRPCDemoServicer):
def ClientStreamingMethod(self, request_iterator, context):
print("ClientStreamingMethod called by client...")
for request in request_iterator:
print("recv from client(%d), message= %s" %
(request.client_id, request.request_data))
print(
"recv from client(%d), message= %s"
% (request.client_id, request.request_data)
)
response = demo_pb2.Response(
server_id=SERVER_ID,
response_data="Python server ClientStreamingMethod ok")
response_data="Python server ClientStreamingMethod ok",
)
return response
# 服务端流模式(在一次调用中, 客户端只能一次向服务器传输数据, 但是服务器可以多次返回响应)
# unary-stream (In a single call, the client can only transmit data to the server at one time,
# but the server can return the response many times.)
def ServerStreamingMethod(self, request, context):
print("ServerStreamingMethod called by client(%d), message= %s" %
(request.client_id, request.request_data))
print(
"ServerStreamingMethod called by client(%d), message= %s"
% (request.client_id, request.request_data)
)
# 创建一个生成器
# create a generator
@ -65,7 +72,8 @@ class DemoServer(demo_pb2_grpc.GRPCDemoServicer):
for i in range(5):
response = demo_pb2.Response(
server_id=SERVER_ID,
response_data=("send by Python server, message=%d" % i))
response_data="send by Python server, message=%d" % i,
)
yield response
return response_messages()
@ -80,8 +88,10 @@ class DemoServer(demo_pb2_grpc.GRPCDemoServicer):
# Open a sub thread to receive data
def parse_request():
for request in request_iterator:
print("recv from client(%d), message= %s" %
(request.client_id, request.request_data))
print(
"recv from client(%d), message= %s"
% (request.client_id, request.request_data)
)
t = Thread(target=parse_request)
t.start()
@ -89,7 +99,8 @@ class DemoServer(demo_pb2_grpc.GRPCDemoServicer):
for i in range(5):
yield demo_pb2.Response(
server_id=SERVER_ID,
response_data=("send by Python server, message= %d" % i))
response_data="send by Python server, message= %d" % i,
)
t.join()
@ -112,5 +123,5 @@ def main():
# time.sleep(10)
if __name__ == '__main__':
if __name__ == "__main__":
main()

@ -21,7 +21,8 @@ import random
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
# TODO: Suppress until the macOS segfault fix rolled out
from grpc_channelz.v1 import channelz # pylint: disable=wrong-import-position
@ -33,23 +34,26 @@ _RANDOM_FAILURE_RATE = 0.3
class FaultInjectGreeter(helloworld_pb2_grpc.GreeterServicer):
def __init__(self, failure_rate):
self._failure_rate = failure_rate
async def SayHello(
self, request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext) -> helloworld_pb2.HelloReply:
self,
request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext,
) -> helloworld_pb2.HelloReply:
if random.random() < self._failure_rate:
context.abort(grpc.StatusCode.UNAVAILABLE,
'Randomly injected failure.')
return helloworld_pb2.HelloReply(message=f'Hello, {request.name}!')
context.abort(
grpc.StatusCode.UNAVAILABLE, "Randomly injected failure."
)
return helloworld_pb2.HelloReply(message=f"Hello, {request.name}!")
def create_server(addr: str, failure_rate: float) -> grpc.aio.Server:
server = grpc.aio.server()
helloworld_pb2_grpc.add_GreeterServicer_to_server(
FaultInjectGreeter(failure_rate), server)
FaultInjectGreeter(failure_rate), server
)
# Add Channelz Servicer to the gRPC server
channelz.add_channelz_servicer(server)
@ -60,17 +64,20 @@ def create_server(addr: str, failure_rate: float) -> grpc.aio.Server:
async def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument('--addr',
nargs=1,
type=str,
default='[::]:50051',
help='the address to listen on')
parser.add_argument(
'--failure_rate',
"--addr",
nargs=1,
type=str,
default="[::]:50051",
help="the address to listen on",
)
parser.add_argument(
"--failure_rate",
nargs=1,
type=float,
default=0.3,
help='a float indicates the percentage of failed message injections')
help="a float indicates the percentage of failed message injections",
)
args = parser.parse_args()
server = create_server(addr=args.addr, failure_rate=args.failure_rate)
@ -78,6 +85,6 @@ async def main() -> None:
await server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.get_event_loop().run_until_complete(main())

@ -26,21 +26,24 @@ async def run(addr: str) -> None:
async with grpc.aio.insecure_channel(addr) as channel:
channelz_stub = channelz_pb2_grpc.ChannelzStub(channel)
response = await channelz_stub.GetServers(
channelz_pb2.GetServersRequest(start_server_id=0))
print('Info for all servers: %s' % response)
channelz_pb2.GetServersRequest(start_server_id=0)
)
print("Info for all servers: %s" % response)
async def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument('--addr',
nargs=1,
type=str,
default='[::]:50051',
help='the address to request')
parser.add_argument(
"--addr",
nargs=1,
type=str,
default="[::]:50051",
help="the address to request",
)
args = parser.parse_args()
run(addr=args.addr)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
asyncio.get_event_loop().run_until_complete(main())

@ -20,43 +20,49 @@ import logging
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
async def process(stub: helloworld_pb2_grpc.GreeterStub,
request: helloworld_pb2.HelloRequest) -> None:
async def process(
stub: helloworld_pb2_grpc.GreeterStub, request: helloworld_pb2.HelloRequest
) -> None:
try:
response = await stub.SayHello(request)
except grpc.aio.AioRpcError as rpc_error:
print(f'Received error: {rpc_error}')
print(f"Received error: {rpc_error}")
else:
print(f'Received message: {response}')
print(f"Received message: {response}")
async def run(addr: str, n: int) -> None:
async with grpc.aio.insecure_channel(addr) as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
request = helloworld_pb2.HelloRequest(name='you')
request = helloworld_pb2.HelloRequest(name="you")
for _ in range(n):
await process(stub, request)
async def main() -> None:
parser = argparse.ArgumentParser()
parser.add_argument('--addr',
nargs=1,
type=str,
default='[::]:50051',
help='the address to request')
parser.add_argument('-n',
nargs=1,
type=int,
default=10,
help='an integer for number of messages to sent')
parser.add_argument(
"--addr",
nargs=1,
type=str,
default="[::]:50051",
help="the address to request",
)
parser.add_argument(
"-n",
nargs=1,
type=int,
default=10,
help="an integer for number of messages to sent",
)
args = parser.parse_args()
await run(addr=args.addr, n=args.n)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.get_event_loop().run_until_complete(main())

@ -25,7 +25,8 @@ import random
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
# TODO: Suppress until the macOS segfault fix rolled out
from grpc_channelz.v1 import channelz # pylint: disable=wrong-import-position
@ -37,21 +38,22 @@ _RANDOM_FAILURE_RATE = 0.3
class FaultInjectGreeter(helloworld_pb2_grpc.GreeterServicer):
def __init__(self, failure_rate):
self._failure_rate = failure_rate
def SayHello(self, request, context):
if random.random() < self._failure_rate:
context.abort(grpc.StatusCode.UNAVAILABLE,
'Randomly injected failure.')
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
context.abort(
grpc.StatusCode.UNAVAILABLE, "Randomly injected failure."
)
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
def create_server(addr, failure_rate):
server = grpc.server(futures.ThreadPoolExecutor())
helloworld_pb2_grpc.add_GreeterServicer_to_server(
FaultInjectGreeter(failure_rate), server)
FaultInjectGreeter(failure_rate), server
)
# Add Channelz Servicer to the gRPC server
channelz.add_channelz_servicer(server)
@ -62,17 +64,20 @@ def create_server(addr, failure_rate):
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--addr',
nargs=1,
type=str,
default='[::]:50051',
help='the address to listen on')
parser.add_argument(
'--failure_rate',
"--addr",
nargs=1,
type=str,
default="[::]:50051",
help="the address to listen on",
)
parser.add_argument(
"--failure_rate",
nargs=1,
type=float,
default=0.3,
help='a float indicates the percentage of failed message injections')
help="a float indicates the percentage of failed message injections",
)
args = parser.parse_args()
server = create_server(addr=args.addr, failure_rate=args.failure_rate)
@ -80,6 +85,6 @@ def main():
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
main()

@ -32,20 +32,22 @@ def run(addr):
# succeeded/failed RPCs. For more info see:
# https://github.com/grpc/grpc/blob/master/src/proto/grpc/channelz/channelz.proto
response = channelz_stub.GetServers(channelz_pb2.GetServersRequest())
print(f'Info for all servers: {response}')
print(f"Info for all servers: {response}")
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--addr',
nargs=1,
type=str,
default='[::]:50051',
help='the address to request')
parser.add_argument(
"--addr",
nargs=1,
type=str,
default="[::]:50051",
help="the address to request",
)
args = parser.parse_args()
run(addr=args.addr)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
main()

@ -23,42 +23,47 @@ import logging
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
def process(stub, request):
try:
response = stub.SayHello(request)
except grpc.RpcError as rpc_error:
print('Received error: %s' % rpc_error)
print("Received error: %s" % rpc_error)
else:
print('Received message: %s' % response)
print("Received message: %s" % response)
def run(addr, n):
with grpc.insecure_channel(addr) as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
request = helloworld_pb2.HelloRequest(name='you')
request = helloworld_pb2.HelloRequest(name="you")
for _ in range(n):
process(stub, request)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--addr',
nargs=1,
type=str,
default='[::]:50051',
help='the address to request')
parser.add_argument('-n',
nargs=1,
type=int,
default=10,
help='an integer for number of messages to sent')
parser.add_argument(
"--addr",
nargs=1,
type=str,
default="[::]:50051",
help="the address to request",
)
parser.add_argument(
"-n",
nargs=1,
type=int,
default=10,
help="an integer for number of messages to sent",
)
args = parser.parse_args()
run(addr=args.addr, n=args.n)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
main()

@ -30,15 +30,15 @@ _LOGGER.setLevel(logging.INFO)
_FAILURE_RATE = 0.5
_NUMBER_OF_MESSAGES = 100
_ADDR_TEMPLATE = 'localhost:%d'
_ADDR_TEMPLATE = "localhost:%d"
class DebugExampleTest(unittest.TestCase):
def test_channelz_example(self):
server = debug_server.create_server(addr='[::]:0',
failure_rate=_FAILURE_RATE)
port = server.add_insecure_port('[::]:0')
server = debug_server.create_server(
addr="[::]:0", failure_rate=_FAILURE_RATE
)
port = server.add_insecure_port("[::]:0")
server.start()
address = _ADDR_TEMPLATE % port
@ -48,11 +48,11 @@ class DebugExampleTest(unittest.TestCase):
# No unhandled exception raised, test passed!
def test_asyncio_channelz_example(self):
async def body():
server = asyncio_debug_server.create_server(
addr='[::]:0', failure_rate=_FAILURE_RATE)
port = server.add_insecure_port('[::]:0')
addr="[::]:0", failure_rate=_FAILURE_RATE
)
port = server.add_insecure_port("[::]:0")
await server.start()
address = _ADDR_TEMPLATE % port
@ -64,6 +64,6 @@ class DebugExampleTest(unittest.TestCase):
asyncio.get_event_loop().run_until_complete(body())
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)

@ -29,29 +29,29 @@ _LOGGER = logging.getLogger(__name__)
def process(stub):
try:
response = stub.SayHello(helloworld_pb2.HelloRequest(name='Alice'))
_LOGGER.info('Call success: %s', response.message)
response = stub.SayHello(helloworld_pb2.HelloRequest(name="Alice"))
_LOGGER.info("Call success: %s", response.message)
except grpc.RpcError as rpc_error:
_LOGGER.error('Call failure: %s', rpc_error)
_LOGGER.error("Call failure: %s", rpc_error)
status = rpc_status.from_call(rpc_error)
for detail in status.details:
if detail.Is(error_details_pb2.QuotaFailure.DESCRIPTOR):
info = error_details_pb2.QuotaFailure()
detail.Unpack(info)
_LOGGER.error('Quota failure: %s', info)
_LOGGER.error("Quota failure: %s", info)
else:
raise RuntimeError('Unexpected failure: %s' % detail)
raise RuntimeError("Unexpected failure: %s" % detail)
def main():
# NOTE(gRPC Python Team): .close() is possible on a channel and should be
# used in circumstances in which the with statement does not fit the needs
# of the code.
with grpc.insecure_channel('localhost:50051') as channel:
with grpc.insecure_channel("localhost:50051") as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
process(stub)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
main()

@ -31,21 +31,23 @@ from examples.protos import helloworld_pb2_grpc
def create_greet_limit_exceed_error_status(name):
detail = any_pb2.Any()
detail.Pack(
error_details_pb2.QuotaFailure(violations=[
error_details_pb2.QuotaFailure.Violation(
subject="name: %s" % name,
description="Limit one greeting per person",
)
],))
error_details_pb2.QuotaFailure(
violations=[
error_details_pb2.QuotaFailure.Violation(
subject="name: %s" % name,
description="Limit one greeting per person",
)
],
)
)
return status_pb2.Status(
code=code_pb2.RESOURCE_EXHAUSTED,
message='Request limit exceeded.',
message="Request limit exceeded.",
details=[detail],
)
class LimitedGreeter(helloworld_pb2_grpc.GreeterServicer):
def __init__(self):
self._lock = threading.RLock()
self._greeted = set()
@ -54,11 +56,12 @@ class LimitedGreeter(helloworld_pb2_grpc.GreeterServicer):
with self._lock:
if request.name in self._greeted:
rich_status = create_greet_limit_exceed_error_status(
request.name)
request.name
)
context.abort_with_status(rpc_status.to_status(rich_status))
else:
self._greeted.add(request.name)
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
def create_server(server_address):
@ -74,10 +77,10 @@ def serve(server):
def main():
server, unused_port = create_server('[::]:50051')
server, unused_port = create_server("[::]:50051")
serve(server)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
main()

@ -17,6 +17,7 @@
# please refer to comments in the "bazel_namespace_package_hack" module.
try:
from tests import bazel_namespace_package_hack
bazel_namespace_package_hack.sys_path_to_site_dir_hack()
except ImportError:
pass
@ -32,11 +33,10 @@ from examples.python.errors import server as error_handling_server
class ErrorHandlingExampleTest(unittest.TestCase):
def setUp(self):
self._server, port = error_handling_server.create_server('[::]:0')
self._server, port = error_handling_server.create_server("[::]:0")
self._server.start()
self._channel = grpc.insecure_channel('localhost:%d' % port)
self._channel = grpc.insecure_channel("localhost:%d" % port)
def tearDown(self):
self._channel.close()
@ -49,6 +49,6 @@ class ErrorHandlingExampleTest(unittest.TestCase):
# No unhandled exception raised, test passed!
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)

@ -24,8 +24,9 @@ import helloworld_pb2_grpc
def unary_call(stub: helloworld_pb2_grpc.GreeterStub, message: str):
response = stub.SayHello(helloworld_pb2.HelloRequest(name=message),
timeout=3)
response = stub.SayHello(
helloworld_pb2.HelloRequest(name=message), timeout=3
)
print(f"Greeter client received: {response.message}")
@ -39,11 +40,11 @@ def health_check_call(stub: health_pb2_grpc.HealthStub):
def run():
with grpc.insecure_channel('localhost:50051') as channel:
with grpc.insecure_channel("localhost:50051") as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
health_stub = health_pb2_grpc.HealthStub(channel)
# Should succeed
unary_call(stub, 'you')
unary_call(stub, "you")
# Check health status every 1 second for 30 seconds
for _ in range(30):
@ -51,6 +52,6 @@ def run():
sleep(1)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -27,7 +27,6 @@ import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, context):
return helloworld_pb2.HelloReply(message=request.name)
@ -47,28 +46,30 @@ def _toggle_health(health_servicer: health.HealthServicer, service: str):
def _configure_health_server(server: grpc.Server):
health_servicer = health.HealthServicer(
experimental_non_blocking=True,
experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=10))
experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=10),
)
health_pb2_grpc.add_HealthServicer_to_server(health_servicer, server)
# Use a daemon thread to toggle health status
toggle_health_status_thread = threading.Thread(target=_toggle_health,
args=(health_servicer,
"helloworld.Greeter"),
daemon=True)
toggle_health_status_thread = threading.Thread(
target=_toggle_health,
args=(health_servicer, "helloworld.Greeter"),
daemon=True,
)
toggle_health_status_thread.start()
def serve():
port = '50051'
port = "50051"
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
server.add_insecure_port('[::]:' + port)
server.add_insecure_port("[::]:" + port)
_configure_health_server(server)
server.start()
print("Server started, listening on " + port)
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -27,19 +27,24 @@ async def run() -> None:
# Read from an async generator
async for response in stub.sayHello(
hellostreamingworld_pb2.HelloRequest(name="you")):
print("Greeter client received from async generator: " +
response.message)
hellostreamingworld_pb2.HelloRequest(name="you")
):
print(
"Greeter client received from async generator: "
+ response.message
)
# Direct read from the stub
hello_stream = stub.sayHello(
hellostreamingworld_pb2.HelloRequest(name="you"))
hellostreamingworld_pb2.HelloRequest(name="you")
)
while True:
response = await hello_stream.read()
if response == grpc.aio.EOF:
break
print("Greeter client received from direct read: " +
response.message)
print(
"Greeter client received from direct read: " + response.message
)
if __name__ == "__main__":

@ -26,9 +26,9 @@ NUMBER_OF_REPLY = 10
class Greeter(MultiGreeterServicer):
async def sayHello(self, request: HelloRequest,
context: grpc.aio.ServicerContext) -> HelloReply:
async def sayHello(
self, request: HelloRequest, context: grpc.aio.ServicerContext
) -> HelloReply:
logging.info("Serving sayHello request %s", request)
for i in range(NUMBER_OF_REPLY):
yield HelloReply(message=f"Hello number {i}, {request.name}!")

@ -22,12 +22,12 @@ import helloworld_pb2_grpc
async def run() -> None:
async with grpc.aio.insecure_channel('localhost:50051') as channel:
async with grpc.aio.insecure_channel("localhost:50051") as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
response = await stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
response = await stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
print("Greeter client received: " + response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
asyncio.run(run())

@ -21,22 +21,26 @@ import helloworld_pb2
import helloworld_pb2_grpc
# For more channel options, please see https://grpc.io/grpc/core/group__grpc__arg__keys.html
CHANNEL_OPTIONS = [('grpc.lb_policy_name', 'pick_first'),
('grpc.enable_retries', 0),
('grpc.keepalive_timeout_ms', 10000)]
CHANNEL_OPTIONS = [
("grpc.lb_policy_name", "pick_first"),
("grpc.enable_retries", 0),
("grpc.keepalive_timeout_ms", 10000),
]
async def run() -> None:
async with grpc.aio.insecure_channel(target='localhost:50051',
options=CHANNEL_OPTIONS) as channel:
async with grpc.aio.insecure_channel(
target="localhost:50051", options=CHANNEL_OPTIONS
) as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
# Timeout in seconds.
# Please refer gRPC Python documents for more detail. https://grpc.io/grpc/python/grpc.html
response = await stub.SayHello(helloworld_pb2.HelloRequest(name='you'),
timeout=10)
response = await stub.SayHello(
helloworld_pb2.HelloRequest(name="you"), timeout=10
)
print("Greeter client received: " + response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
asyncio.run(run())

@ -22,23 +22,24 @@ import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
async def SayHello(
self, request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext) -> helloworld_pb2.HelloReply:
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
self,
request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext,
) -> helloworld_pb2.HelloReply:
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
async def serve() -> None:
server = grpc.aio.server()
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
listen_addr = '[::]:50051'
listen_addr = "[::]:50051"
server.add_insecure_port(listen_addr)
logging.info("Starting server on %s", listen_addr)
await server.start()
await server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.run(serve())

@ -25,20 +25,21 @@ _cleanup_coroutines = []
class Greeter(helloworld_pb2_grpc.GreeterServicer):
async def SayHello(
self, request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext) -> helloworld_pb2.HelloReply:
logging.info('Received request, sleeping for 4 seconds...')
self,
request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext,
) -> helloworld_pb2.HelloReply:
logging.info("Received request, sleeping for 4 seconds...")
await asyncio.sleep(4)
logging.info('Sleep completed, responding')
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
logging.info("Sleep completed, responding")
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
async def serve() -> None:
server = grpc.aio.server()
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
listen_addr = '[::]:50051'
listen_addr = "[::]:50051"
server.add_insecure_port(listen_addr)
logging.info("Starting server on %s", listen_addr)
await server.start()
@ -54,7 +55,7 @@ async def serve() -> None:
await server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
loop = asyncio.get_event_loop()
try:

@ -23,26 +23,27 @@ import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
async def SayHello(
self, request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext) -> helloworld_pb2.HelloReply:
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
self,
request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext,
) -> helloworld_pb2.HelloReply:
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
async def serve() -> None:
server = grpc.aio.server()
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
SERVICE_NAMES = (
helloworld_pb2.DESCRIPTOR.services_by_name['Greeter'].full_name,
helloworld_pb2.DESCRIPTOR.services_by_name["Greeter"].full_name,
reflection.SERVICE_NAME,
)
reflection.enable_server_reflection(SERVICE_NAMES, server)
server.add_insecure_port('[::]:50051')
server.add_insecure_port("[::]:50051")
await server.start()
await server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
asyncio.run(serve())

@ -27,12 +27,12 @@ def run():
# used in circumstances in which the with statement does not fit the needs
# of the code.
print("Will try to greet world ...")
with grpc.insecure_channel('localhost:50051') as channel:
with grpc.insecure_channel("localhost:50051") as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
response = stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
print("Greeter client received: " + response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -17,13 +17,14 @@ import logging
from google.protobuf.descriptor_pool import DescriptorPool
import grpc
from grpc_reflection.v1alpha.proto_reflection_descriptor_database import \
ProtoReflectionDescriptorDatabase
from grpc_reflection.v1alpha.proto_reflection_descriptor_database import (
ProtoReflectionDescriptorDatabase,
)
def run():
print("Will try to greet world ...")
with grpc.insecure_channel('localhost:50051') as channel:
with grpc.insecure_channel("localhost:50051") as channel:
reflection_db = ProtoReflectionDescriptorDatabase(channel)
services = reflection_db.get_services()
print(f"found services: {services}")
@ -37,10 +38,11 @@ def run():
print(f"input type for this method: {input_type.full_name}")
request_desc = desc_pool.FindMessageTypeByName(
"helloworld.HelloRequest")
"helloworld.HelloRequest"
)
print(f"found request name: {request_desc.full_name}")
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -28,19 +28,23 @@ def run():
# of the code.
#
# For more channel options, please see https://grpc.io/grpc/core/group__grpc__arg__keys.html
with grpc.insecure_channel(target='localhost:50051',
options=[('grpc.lb_policy_name', 'pick_first'),
('grpc.enable_retries', 0),
('grpc.keepalive_timeout_ms', 10000)
]) as channel:
with grpc.insecure_channel(
target="localhost:50051",
options=[
("grpc.lb_policy_name", "pick_first"),
("grpc.enable_retries", 0),
("grpc.keepalive_timeout_ms", 10000),
],
) as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
# Timeout in seconds.
# Please refer gRPC Python documents for more detail. https://grpc.io/grpc/python/grpc.html
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'),
timeout=10)
response = stub.SayHello(
helloworld_pb2.HelloRequest(name="you"), timeout=10
)
print("Greeter client received: " + response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -22,21 +22,20 @@ import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, context):
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
def serve():
port = '50051'
port = "50051"
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
server.add_insecure_port('[::]:' + port)
server.add_insecure_port("[::]:" + port)
server.start()
print("Server started, listening on " + port)
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -23,24 +23,23 @@ import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, context):
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
SERVICE_NAMES = (
helloworld_pb2.DESCRIPTOR.services_by_name['Greeter'].full_name,
helloworld_pb2.DESCRIPTOR.services_by_name["Greeter"].full_name,
reflection.SERVICE_NAME,
)
reflection.enable_server_reflection(SERVICE_NAMES, server)
server.add_insecure_port('[::]:50051')
server.add_insecure_port("[::]:50051")
server.start()
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -22,20 +22,23 @@ import grpc
import helloworld_pb2
import helloworld_pb2_grpc
test_var = contextvars.ContextVar('test', default='test')
test_var = contextvars.ContextVar("test", default="test")
async def run() -> None:
async with grpc.aio.insecure_channel('localhost:50051') as channel:
async with grpc.aio.insecure_channel("localhost:50051") as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
rpc_id = '{:032x}'.format(random.getrandbits(128))
metadata = grpc.aio.Metadata(('client-rpc-id', rpc_id),)
rpc_id = "{:032x}".format(random.getrandbits(128))
metadata = grpc.aio.Metadata(
("client-rpc-id", rpc_id),
)
print(f"Sending request with rpc id: {rpc_id}")
response = await stub.SayHello(helloworld_pb2.HelloRequest(name='you'),
metadata=metadata)
response = await stub.SayHello(
helloworld_pb2.HelloRequest(name="you"), metadata=metadata
)
print("Greeter client received: " + response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
asyncio.run(run())

@ -22,19 +22,20 @@ import grpc
import helloworld_pb2
import helloworld_pb2_grpc
rpc_id_var = contextvars.ContextVar('rpc_id', default='default')
rpc_id_var = contextvars.ContextVar("rpc_id", default="default")
class RPCIdInterceptor(grpc.aio.ServerInterceptor):
def __init__(self, tag: str, rpc_id: Optional[str] = None) -> None:
self.tag = tag
self.rpc_id = rpc_id
async def intercept_service(
self, continuation: Callable[[grpc.HandlerCallDetails],
Awaitable[grpc.RpcMethodHandler]],
handler_call_details: grpc.HandlerCallDetails
self,
continuation: Callable[
[grpc.HandlerCallDetails], Awaitable[grpc.RpcMethodHandler]
],
handler_call_details: grpc.HandlerCallDetails,
) -> grpc.RpcMethodHandler:
"""
This interceptor prepends its tag to the rpc_id.
@ -42,9 +43,9 @@ class RPCIdInterceptor(grpc.aio.ServerInterceptor):
will be something like this: Interceptor2-Interceptor1-RPC_ID.
"""
logging.info("%s called with rpc_id: %s", self.tag, rpc_id_var.get())
if rpc_id_var.get() == 'default':
if rpc_id_var.get() == "default":
_metadata = dict(handler_call_details.invocation_metadata)
rpc_id_var.set(self.decorate(_metadata['client-rpc-id']))
rpc_id_var.set(self.decorate(_metadata["client-rpc-id"]))
else:
rpc_id_var.set(self.decorate(rpc_id_var.get()))
return await continuation(handler_call_details)
@ -54,30 +55,32 @@ class RPCIdInterceptor(grpc.aio.ServerInterceptor):
class Greeter(helloworld_pb2_grpc.GreeterServicer):
async def SayHello(
self, request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext) -> helloworld_pb2.HelloReply:
logging.info("Handle rpc with id %s in server handler.",
rpc_id_var.get())
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
self,
request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext,
) -> helloworld_pb2.HelloReply:
logging.info(
"Handle rpc with id %s in server handler.", rpc_id_var.get()
)
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
async def serve() -> None:
interceptors = [
RPCIdInterceptor('Interceptor1'),
RPCIdInterceptor('Interceptor2')
RPCIdInterceptor("Interceptor1"),
RPCIdInterceptor("Interceptor2"),
]
server = grpc.aio.server(interceptors=interceptors)
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
listen_addr = '[::]:50051'
listen_addr = "[::]:50051"
server.add_insecure_port(listen_addr)
logging.info("Starting server on %s", listen_addr)
await server.start()
await server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.run(serve())

@ -17,7 +17,6 @@ import grpc
class _ConcreteValue(grpc.Future):
def __init__(self, result):
self._result = result
@ -46,21 +45,24 @@ class _ConcreteValue(grpc.Future):
fn(self._result)
class DefaultValueClientInterceptor(grpc.UnaryUnaryClientInterceptor,
grpc.StreamUnaryClientInterceptor):
class DefaultValueClientInterceptor(
grpc.UnaryUnaryClientInterceptor, grpc.StreamUnaryClientInterceptor
):
def __init__(self, value):
self._default = _ConcreteValue(value)
def _intercept_call(self, continuation, client_call_details,
request_or_iterator):
def _intercept_call(
self, continuation, client_call_details, request_or_iterator
):
response = continuation(client_call_details, request_or_iterator)
return self._default if response.exception() else response
def intercept_unary_unary(self, continuation, client_call_details, request):
return self._intercept_call(continuation, client_call_details, request)
def intercept_stream_unary(self, continuation, client_call_details,
request_iterator):
return self._intercept_call(continuation, client_call_details,
request_iterator)
def intercept_stream_unary(
self, continuation, client_call_details, request_iterator
):
return self._intercept_call(
continuation, client_call_details, request_iterator
)

@ -25,20 +25,25 @@ import helloworld_pb2_grpc
def run():
default_value = helloworld_pb2.HelloReply(
message='Hello from your local interceptor!')
default_value_interceptor = default_value_client_interceptor.DefaultValueClientInterceptor(
default_value)
message="Hello from your local interceptor!"
)
default_value_interceptor = (
default_value_client_interceptor.DefaultValueClientInterceptor(
default_value
)
)
# NOTE(gRPC Python Team): .close() is possible on a channel and should be
# used in circumstances in which the with statement does not fit the needs
# of the code.
with grpc.insecure_channel('localhost:50051') as channel:
intercept_channel = grpc.intercept_channel(channel,
default_value_interceptor)
with grpc.insecure_channel("localhost:50051") as channel:
intercept_channel = grpc.intercept_channel(
channel, default_value_interceptor
)
stub = helloworld_pb2_grpc.GreeterStub(intercept_channel)
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
response = stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
print("Greeter client received: " + response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -16,38 +16,46 @@
import grpc
class _GenericClientInterceptor(grpc.UnaryUnaryClientInterceptor,
grpc.UnaryStreamClientInterceptor,
grpc.StreamUnaryClientInterceptor,
grpc.StreamStreamClientInterceptor):
class _GenericClientInterceptor(
grpc.UnaryUnaryClientInterceptor,
grpc.UnaryStreamClientInterceptor,
grpc.StreamUnaryClientInterceptor,
grpc.StreamStreamClientInterceptor,
):
def __init__(self, interceptor_function):
self._fn = interceptor_function
def intercept_unary_unary(self, continuation, client_call_details, request):
new_details, new_request_iterator, postprocess = self._fn(
client_call_details, iter((request,)), False, False)
client_call_details, iter((request,)), False, False
)
response = continuation(new_details, next(new_request_iterator))
return postprocess(response) if postprocess else response
def intercept_unary_stream(self, continuation, client_call_details,
request):
def intercept_unary_stream(
self, continuation, client_call_details, request
):
new_details, new_request_iterator, postprocess = self._fn(
client_call_details, iter((request,)), False, True)
client_call_details, iter((request,)), False, True
)
response_it = continuation(new_details, next(new_request_iterator))
return postprocess(response_it) if postprocess else response_it
def intercept_stream_unary(self, continuation, client_call_details,
request_iterator):
def intercept_stream_unary(
self, continuation, client_call_details, request_iterator
):
new_details, new_request_iterator, postprocess = self._fn(
client_call_details, request_iterator, True, False)
client_call_details, request_iterator, True, False
)
response = continuation(new_details, new_request_iterator)
return postprocess(response) if postprocess else response
def intercept_stream_stream(self, continuation, client_call_details,
request_iterator):
def intercept_stream_stream(
self, continuation, client_call_details, request_iterator
):
new_details, new_request_iterator, postprocess = self._fn(
client_call_details, request_iterator, True, True)
client_call_details, request_iterator, True, True
)
response_it = continuation(new_details, new_request_iterator)
return postprocess(response_it) if postprocess else response_it

@ -24,19 +24,23 @@ import helloworld_pb2_grpc
def run():
header_adder_interceptor = header_manipulator_client_interceptor.header_adder_interceptor(
'one-time-password', '42')
header_adder_interceptor = (
header_manipulator_client_interceptor.header_adder_interceptor(
"one-time-password", "42"
)
)
# NOTE(gRPC Python Team): .close() is possible on a channel and should be
# used in circumstances in which the with statement does not fit the needs
# of the code.
with grpc.insecure_channel('localhost:50051') as channel:
intercept_channel = grpc.intercept_channel(channel,
header_adder_interceptor)
with grpc.insecure_channel("localhost:50051") as channel:
intercept_channel = grpc.intercept_channel(
channel, header_adder_interceptor
)
stub = helloworld_pb2_grpc.GreeterStub(intercept_channel)
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
response = stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
print("Greeter client received: " + response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -19,28 +19,33 @@ import logging
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
from request_header_validator_interceptor import \
RequestHeaderValidatorInterceptor
from request_header_validator_interceptor import (
RequestHeaderValidatorInterceptor,
)
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, context):
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
def serve():
header_validator = RequestHeaderValidatorInterceptor(
'one-time-password', '42', grpc.StatusCode.UNAUTHENTICATED,
'Access denied!')
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10),
interceptors=(header_validator,))
"one-time-password",
"42",
grpc.StatusCode.UNAUTHENTICATED,
"Access denied!",
)
server = grpc.server(
futures.ThreadPoolExecutor(max_workers=10),
interceptors=(header_validator,),
)
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
server.add_insecure_port('[::]:50051')
server.add_insecure_port("[::]:50051")
server.start()
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -20,27 +20,36 @@ import grpc
class _ClientCallDetails(
collections.namedtuple(
'_ClientCallDetails',
('method', 'timeout', 'metadata', 'credentials')),
grpc.ClientCallDetails):
collections.namedtuple(
"_ClientCallDetails", ("method", "timeout", "metadata", "credentials")
),
grpc.ClientCallDetails,
):
pass
def header_adder_interceptor(header, value):
def intercept_call(client_call_details, request_iterator, request_streaming,
response_streaming):
def intercept_call(
client_call_details,
request_iterator,
request_streaming,
response_streaming,
):
metadata = []
if client_call_details.metadata is not None:
metadata = list(client_call_details.metadata)
metadata.append((
header,
value,
))
metadata.append(
(
header,
value,
)
)
client_call_details = _ClientCallDetails(
client_call_details.method, client_call_details.timeout, metadata,
client_call_details.credentials)
client_call_details.method,
client_call_details.timeout,
metadata,
client_call_details.credentials,
)
return client_call_details, request_iterator, None
return generic_client_interceptor.create(intercept_call)

@ -17,7 +17,6 @@ import grpc
def _unary_unary_rpc_terminator(code, details):
def terminate(ignored_request, context):
context.abort(code, details)
@ -25,15 +24,16 @@ def _unary_unary_rpc_terminator(code, details):
class RequestHeaderValidatorInterceptor(grpc.ServerInterceptor):
def __init__(self, header, value, code, details):
self._header = header
self._value = value
self._terminator = _unary_unary_rpc_terminator(code, details)
def intercept_service(self, continuation, handler_call_details):
if (self._header,
self._value) in handler_call_details.invocation_metadata:
if (
self._header,
self._value,
) in handler_call_details.invocation_metadata:
return continuation(handler_call_details)
else:
return self._terminator

@ -21,14 +21,15 @@ import helloworld_pb2
import helloworld_pb2_grpc
def unary_call(stub: helloworld_pb2_grpc.GreeterStub, request_id: int,
message: str):
def unary_call(
stub: helloworld_pb2_grpc.GreeterStub, request_id: int, message: str
):
print("call:", request_id)
try:
response = stub.SayHello(helloworld_pb2.HelloRequest(name=message))
print(f"Greeter client received: {response.message}")
except grpc.RpcError as rpc_error:
print('Call failed with code: ', rpc_error.code())
print("Call failed with code: ", rpc_error.code())
def run():
@ -44,16 +45,19 @@ def run():
send a data/header frame.
For more details, check: https://github.com/grpc/grpc/blob/master/doc/keepalive.md
"""
channel_options = [('grpc.keepalive_time_ms', 8000),
('grpc.keepalive_timeout_ms', 5000),
('grpc.http2.max_pings_without_data', 5),
('grpc.keepalive_permit_without_calls', 1)]
channel_options = [
("grpc.keepalive_time_ms", 8000),
("grpc.keepalive_timeout_ms", 5000),
("grpc.http2.max_pings_without_data", 5),
("grpc.keepalive_permit_without_calls", 1),
]
with grpc.insecure_channel(target='localhost:50051',
options=channel_options) as channel:
with grpc.insecure_channel(
target="localhost:50051", options=channel_options
) as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
# Should succeed
unary_call(stub, 1, 'you')
unary_call(stub, 1, "you")
# Run 30s, run this with GRPC_VERBOSITY=DEBUG GRPC_TRACE=http_keepalive to observe logs.
# Client will be closed after receveing GOAWAY from server.
@ -62,6 +66,6 @@ def run():
sleep(1)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -23,7 +23,6 @@ import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, context):
message = request.name
if message.startswith("[delay]"):
@ -51,24 +50,28 @@ def serve():
pings to be sent even if there are no calls in flight.
For more details, check: https://github.com/grpc/grpc/blob/master/doc/keepalive.md
"""
server_options = [('grpc.keepalive_time_ms', 20000),
('grpc.keepalive_timeout_ms', 10000),
('grpc.http2.min_ping_interval_without_data_ms', 5000),
('grpc.max_connection_idle_ms', 10000),
('grpc.max_connection_age_ms', 30000),
('grpc.max_connection_age_grace_ms', 5000),
('grpc.http2.max_pings_without_data', 5),
('grpc.keepalive_permit_without_calls', 1)]
port = '50051'
server = grpc.server(thread_pool=futures.ThreadPoolExecutor(max_workers=10),
options=server_options)
server_options = [
("grpc.keepalive_time_ms", 20000),
("grpc.keepalive_timeout_ms", 10000),
("grpc.http2.min_ping_interval_without_data_ms", 5000),
("grpc.max_connection_idle_ms", 10000),
("grpc.max_connection_age_ms", 30000),
("grpc.max_connection_age_grace_ms", 5000),
("grpc.http2.max_pings_without_data", 5),
("grpc.keepalive_permit_without_calls", 1),
]
port = "50051"
server = grpc.server(
thread_pool=futures.ThreadPoolExecutor(max_workers=10),
options=server_options,
)
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
server.add_insecure_port('[::]:' + port)
server.add_insecure_port("[::]:" + port)
server.start()
print("Server started, listening on " + port)
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -25,12 +25,12 @@ def run():
options = (("grpc.lb_policy_name", "round_robin"),)
# Load balancing takes effect when the DNS server returns multiple IPs for the DNS hostname.
# Replace "localhost" with such hostname to see the round robin LB policy take effect.
with grpc.insecure_channel('localhost:50051', options=options) as channel:
with grpc.insecure_channel("localhost:50051", options=options) as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
response = stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
print("Greeter client received: " + response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -22,21 +22,20 @@ import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, context):
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
def serve():
port = '50051'
port = "50051"
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
server.add_insecure_port('[::]:' + port)
server.add_insecure_port("[::]:" + port)
server.start()
print("Server started, listening on " + port)
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -26,23 +26,28 @@ def run():
# NOTE(gRPC Python Team): .close() is possible on a channel and should be
# used in circumstances in which the with statement does not fit the needs
# of the code.
with grpc.insecure_channel('localhost:50051') as channel:
with grpc.insecure_channel("localhost:50051") as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
response, call = stub.SayHello.with_call(
helloworld_pb2.HelloRequest(name='you'),
helloworld_pb2.HelloRequest(name="you"),
metadata=(
('initial-metadata-1', 'The value should be str'),
('binary-metadata-bin',
b'With -bin surffix, the value can be bytes'),
('accesstoken', 'gRPC Python is great'),
))
("initial-metadata-1", "The value should be str"),
(
"binary-metadata-bin",
b"With -bin surffix, the value can be bytes",
),
("accesstoken", "gRPC Python is great"),
),
)
print("Greeter client received: " + response.message)
for key, value in call.trailing_metadata():
print('Greeter client received trailing metadata: key=%s value=%s' %
(key, value))
print(
"Greeter client received trailing metadata: key=%s value=%s"
% (key, value)
)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -24,26 +24,27 @@ import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, context):
for key, value in context.invocation_metadata():
print('Received initial metadata: key=%s value=%s' % (key, value))
print("Received initial metadata: key=%s value=%s" % (key, value))
context.set_trailing_metadata((
('checksum-bin', b'I agree'),
('retry', 'false'),
))
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
context.set_trailing_metadata(
(
("checksum-bin", b"I agree"),
("retry", "false"),
)
)
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
server.add_insecure_port('[::]:50051')
server.add_insecure_port("[::]:50051")
server.start()
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -30,7 +30,8 @@ import route_guide_resources
def make_route_note(message, latitude, longitude):
return route_guide_pb2.RouteNote(
message=message,
location=route_guide_pb2.Point(latitude=latitude, longitude=longitude))
location=route_guide_pb2.Point(latitude=latitude, longitude=longitude),
)
def guide_get_one_feature(route_guide_stub, point):
@ -48,15 +49,18 @@ def guide_get_one_feature(route_guide_stub, point):
def guide_get_feature(route_guide_stub):
guide_get_one_feature(
route_guide_stub,
route_guide_pb2.Point(latitude=409146138, longitude=-746188906))
guide_get_one_feature(route_guide_stub,
route_guide_pb2.Point(latitude=0, longitude=0))
route_guide_pb2.Point(latitude=409146138, longitude=-746188906),
)
guide_get_one_feature(
route_guide_stub, route_guide_pb2.Point(latitude=0, longitude=0)
)
def guide_list_features(route_guide_stub):
rectangle = route_guide_pb2.Rectangle(
lo=route_guide_pb2.Point(latitude=400000000, longitude=-750000000),
hi=route_guide_pb2.Point(latitude=420000000, longitude=-730000000))
hi=route_guide_pb2.Point(latitude=420000000, longitude=-730000000),
)
print("Looking for features between 40, -75 and 42, -73")
features = route_guide_stub.ListFeatures(rectangle)
@ -101,19 +105,21 @@ def generate_messages():
def guide_route_chat(route_guide_stub):
responses = route_guide_stub.RouteChat(generate_messages())
for response in responses:
print("Received message %s at %s" %
(response.message, response.location))
print(
"Received message %s at %s" % (response.message, response.location)
)
def run():
# NOTE(gRPC Python Team): .close() is possible on a channel and should be
# used in circumstances in which the with statement does not fit the needs
# of the code.
with grpc.insecure_channel('localhost:50051') as channel:
with grpc.insecure_channel("localhost:50051") as channel:
greeter_stub = helloworld_pb2_grpc.GreeterStub(channel)
route_guide_stub = route_guide_pb2_grpc.RouteGuideStub(channel)
greeter_response = greeter_stub.SayHello(
helloworld_pb2.HelloRequest(name='you'))
helloworld_pb2.HelloRequest(name="you")
)
print("Greeter client received: " + greeter_response.message)
print("-------------- GetFeature --------------")
guide_get_feature(route_guide_stub)
@ -125,6 +131,6 @@ def run():
guide_route_chat(route_guide_stub)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -46,9 +46,11 @@ def _get_distance(start, end):
delta_lat_rad = math.radians(lat_2 - lat_1)
delta_lon_rad = math.radians(lon_2 - lon_1)
a = (pow(math.sin(delta_lat_rad / 2), 2) +
(math.cos(lat_rad_1) * math.cos(lat_rad_2) *
pow(math.sin(delta_lon_rad / 2), 2)))
a = pow(math.sin(delta_lat_rad / 2), 2) + (
math.cos(lat_rad_1)
* math.cos(lat_rad_2)
* pow(math.sin(delta_lon_rad / 2), 2)
)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
R = 6371000
# metres
@ -56,10 +58,10 @@ def _get_distance(start, end):
class _GreeterServicer(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, context):
return helloworld_pb2.HelloReply(
message='Hello, {}!'.format(request.name))
message="Hello, {}!".format(request.name)
)
class _RouteGuideServicer(route_guide_pb2_grpc.RouteGuideServicer):
@ -81,10 +83,12 @@ class _RouteGuideServicer(route_guide_pb2_grpc.RouteGuideServicer):
top = max(request.lo.latitude, request.hi.latitude)
bottom = min(request.lo.latitude, request.hi.latitude)
for feature in self.db:
if (feature.location.longitude >= left and
feature.location.longitude <= right and
feature.location.latitude >= bottom and
feature.location.latitude <= top):
if (
feature.location.longitude >= left
and feature.location.longitude <= right
and feature.location.latitude >= bottom
and feature.location.latitude <= top
):
yield feature
def RecordRoute(self, request_iterator, context):
@ -103,10 +107,12 @@ class _RouteGuideServicer(route_guide_pb2_grpc.RouteGuideServicer):
prev_point = point
elapsed_time = time.time() - start_time
return route_guide_pb2.RouteSummary(point_count=point_count,
feature_count=feature_count,
distance=int(distance),
elapsed_time=int(elapsed_time))
return route_guide_pb2.RouteSummary(
point_count=point_count,
feature_count=feature_count,
distance=int(distance),
elapsed_time=int(elapsed_time),
)
def RouteChat(self, request_iterator, context):
prev_notes = []
@ -119,15 +125,17 @@ class _RouteGuideServicer(route_guide_pb2_grpc.RouteGuideServicer):
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
helloworld_pb2_grpc.add_GreeterServicer_to_server(_GreeterServicer(),
server)
helloworld_pb2_grpc.add_GreeterServicer_to_server(
_GreeterServicer(), server
)
route_guide_pb2_grpc.add_RouteGuideServicer_to_server(
_RouteGuideServicer(), server)
server.add_insecure_port('[::]:50051')
_RouteGuideServicer(), server
)
server.add_insecure_port("[::]:50051")
server.start()
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -21,10 +21,10 @@ import route_guide_pb2
def read_route_guide_database():
"""Reads the route guide database.
Returns:
The full contents of the route guide database as a sequence of
route_guide_pb2.Features.
"""
Returns:
The full contents of the route guide database as a sequence of
route_guide_pb2.Features.
"""
feature_list = []
with open("route_guide_db.json") as route_guide_db_file:
for item in json.load(route_guide_db_file):
@ -32,6 +32,8 @@ def read_route_guide_database():
name=item["name"],
location=route_guide_pb2.Point(
latitude=item["location"]["latitude"],
longitude=item["location"]["longitude"]))
longitude=item["location"]["longitude"],
),
)
feature_list.append(feature)
return feature_list

@ -15,17 +15,21 @@
from grpc_tools import protoc
protoc.main((
'',
'-I../../protos',
'--python_out=.',
'--grpc_python_out=.',
'../../protos/helloworld.proto',
))
protoc.main((
'',
'-I../../protos',
'--python_out=.',
'--grpc_python_out=.',
'../../protos/route_guide.proto',
))
protoc.main(
(
"",
"-I../../protos",
"--python_out=.",
"--grpc_python_out=.",
"../../protos/helloworld.proto",
)
)
protoc.main(
(
"",
"-I../../protos",
"--python_out=.",
"--grpc_python_out=.",
"../../protos/route_guide.proto",
)
)

@ -49,43 +49,50 @@ def _shutdown_worker():
def _initialize_worker(server_address):
global _worker_channel_singleton # pylint: disable=global-statement
global _worker_stub_singleton # pylint: disable=global-statement
_LOGGER.info('Initializing worker process.')
_LOGGER.info("Initializing worker process.")
_worker_channel_singleton = grpc.insecure_channel(server_address)
_worker_stub_singleton = prime_pb2_grpc.PrimeCheckerStub(
_worker_channel_singleton)
_worker_channel_singleton
)
atexit.register(_shutdown_worker)
def _run_worker_query(primality_candidate):
_LOGGER.info('Checking primality of %s.', primality_candidate)
_LOGGER.info("Checking primality of %s.", primality_candidate)
return _worker_stub_singleton.check(
prime_pb2.PrimeCandidate(candidate=primality_candidate))
prime_pb2.PrimeCandidate(candidate=primality_candidate)
)
def _calculate_primes(server_address):
worker_pool = multiprocessing.Pool(processes=_PROCESS_COUNT,
initializer=_initialize_worker,
initargs=(server_address,))
worker_pool = multiprocessing.Pool(
processes=_PROCESS_COUNT,
initializer=_initialize_worker,
initargs=(server_address,),
)
check_range = range(2, _MAXIMUM_CANDIDATE)
primality = worker_pool.map(_run_worker_query, check_range)
primes = zip(check_range, map(operator.attrgetter('isPrime'), primality))
primes = zip(check_range, map(operator.attrgetter("isPrime"), primality))
return tuple(primes)
def main():
msg = 'Determine the primality of the first {} integers.'.format(
_MAXIMUM_CANDIDATE)
msg = "Determine the primality of the first {} integers.".format(
_MAXIMUM_CANDIDATE
)
parser = argparse.ArgumentParser(description=msg)
parser.add_argument('server_address',
help='The address of the server (e.g. localhost:50051)')
parser.add_argument(
"server_address",
help="The address of the server (e.g. localhost:50051)",
)
args = parser.parse_args()
primes = _calculate_primes(args.server_address)
print(primes)
if __name__ == '__main__':
if __name__ == "__main__":
handler = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[PID %(process)d] %(message)s')
formatter = logging.Formatter("[PID %(process)d] %(message)s")
handler.setFormatter(formatter)
_LOGGER.addHandler(handler)
_LOGGER.setLevel(logging.INFO)

@ -47,9 +47,8 @@ def is_prime(n):
class PrimeChecker(prime_pb2_grpc.PrimeCheckerServicer):
def check(self, request, context):
_LOGGER.info('Determining primality of %s', request.candidate)
_LOGGER.info("Determining primality of %s", request.candidate)
return prime_pb2.Primality(isPrime=is_prime(request.candidate))
@ -63,12 +62,15 @@ def _wait_forever(server):
def _run_server(bind_address):
"""Start a server in a subprocess."""
_LOGGER.info('Starting new server.')
options = (('grpc.so_reuseport', 1),)
server = grpc.server(futures.ThreadPoolExecutor(
max_workers=_THREAD_CONCURRENCY,),
options=options)
_LOGGER.info("Starting new server.")
options = (("grpc.so_reuseport", 1),)
server = grpc.server(
futures.ThreadPoolExecutor(
max_workers=_THREAD_CONCURRENCY,
),
options=options,
)
prime_pb2_grpc.add_PrimeCheckerServicer_to_server(PrimeChecker(), server)
server.add_insecure_port(bind_address)
server.start()
@ -82,7 +84,7 @@ def _reserve_port():
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0:
raise RuntimeError("Failed to set SO_REUSEPORT.")
sock.bind(('', 0))
sock.bind(("", 0))
try:
yield sock.getsockname()[1]
finally:
@ -91,7 +93,7 @@ def _reserve_port():
def main():
with _reserve_port() as port:
bind_address = 'localhost:{}'.format(port)
bind_address = "localhost:{}".format(port)
_LOGGER.info("Binding to '%s'", bind_address)
sys.stdout.flush()
workers = []
@ -99,17 +101,18 @@ def main():
# NOTE: It is imperative that the worker subprocesses be forked before
# any gRPC servers start up. See
# https://github.com/grpc/grpc/issues/16001 for more details.
worker = multiprocessing.Process(target=_run_server,
args=(bind_address,))
worker = multiprocessing.Process(
target=_run_server, args=(bind_address,)
)
worker.start()
workers.append(worker)
for worker in workers:
worker.join()
if __name__ == '__main__':
if __name__ == "__main__":
handler = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[PID %(process)d] %(message)s')
formatter = logging.Formatter("[PID %(process)d] %(message)s")
handler.setFormatter(formatter)
_LOGGER.addHandler(handler)
_LOGGER.setLevel(logging.INFO)

@ -23,9 +23,10 @@ import tempfile
import unittest
_BINARY_DIR = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
_SERVER_PATH = os.path.join(_BINARY_DIR, 'server')
_CLIENT_PATH = os.path.join(_BINARY_DIR, 'client')
os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
)
_SERVER_PATH = os.path.join(_BINARY_DIR, "server")
_CLIENT_PATH = os.path.join(_BINARY_DIR, "client")
def is_prime(n):
@ -41,34 +42,35 @@ def _get_server_address(server_stream):
server_stream.seek(0)
line = server_stream.readline()
while line:
matches = re.search('Binding to \'(.+)\'', line)
matches = re.search("Binding to '(.+)'", line)
if matches is not None:
return matches.groups()[0]
line = server_stream.readline()
class MultiprocessingExampleTest(unittest.TestCase):
def test_multiprocessing_example(self):
server_stdout = tempfile.TemporaryFile(mode='r')
server_stdout = tempfile.TemporaryFile(mode="r")
server_process = subprocess.Popen((_SERVER_PATH,), stdout=server_stdout)
server_address = _get_server_address(server_stdout)
client_stdout = tempfile.TemporaryFile(mode='r')
client_process = subprocess.Popen((
_CLIENT_PATH,
server_address,
),
stdout=client_stdout)
client_stdout = tempfile.TemporaryFile(mode="r")
client_process = subprocess.Popen(
(
_CLIENT_PATH,
server_address,
),
stdout=client_stdout,
)
client_process.wait()
server_process.terminate()
client_stdout.seek(0)
results = ast.literal_eval(client_stdout.read().strip().split('\n')[-1])
results = ast.literal_eval(client_stdout.read().strip().split("\n")[-1])
values = tuple(result[0] for result in results)
self.assertSequenceEqual(range(2, 10000), values)
for result in results:
self.assertEqual(is_prime(result[0]), result[1])
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
unittest.main(verbosity=2)

@ -35,7 +35,7 @@ services = grpc.services("helloworld.proto")
logging.basicConfig()
response = services.Greeter.SayHello(protos.HelloRequest(name='you'),
'localhost:50051',
insecure=True)
response = services.Greeter.SayHello(
protos.HelloRequest(name="you"), "localhost:50051", insecure=True
)
print("Greeter client received: " + response.message)

@ -22,19 +22,18 @@ protos, services = grpc.protos_and_services("helloworld.proto")
class Greeter(services.GreeterServicer):
def SayHello(self, request, context):
return protos.HelloReply(message='Hello, %s!' % request.name)
return protos.HelloReply(message="Hello, %s!" % request.name)
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
services.add_GreeterServicer_to_server(Greeter(), server)
server.add_insecure_port('[::]:50051')
server.add_insecure_port("[::]:50051")
server.start()
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -20,39 +20,44 @@ import logging
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
async def run() -> None:
# The ServiceConfig proto definition can be found:
# https://github.com/grpc/grpc-proto/blob/ec886024c2f7b7f597ba89d5b7d60c3f94627b17/grpc/service_config/service_config.proto#L377
service_config_json = json.dumps({
"methodConfig": [{
# To apply retry to all methods, put [{}] in the "name" field
"name": [{
"service": "helloworld.Greeter",
"method": "SayHello"
}],
"retryPolicy": {
"maxAttempts": 5,
"initialBackoff": "0.1s",
"maxBackoff": "1s",
"backoffMultiplier": 2,
"retryableStatusCodes": ["UNAVAILABLE"],
},
}]
})
service_config_json = json.dumps(
{
"methodConfig": [
{
# To apply retry to all methods, put [{}] in the "name" field
"name": [
{"service": "helloworld.Greeter", "method": "SayHello"}
],
"retryPolicy": {
"maxAttempts": 5,
"initialBackoff": "0.1s",
"maxBackoff": "1s",
"backoffMultiplier": 2,
"retryableStatusCodes": ["UNAVAILABLE"],
},
}
]
}
)
options = []
# NOTE: the retry feature will be enabled by default >=v1.40.0
options.append(("grpc.enable_retries", 1))
options.append(("grpc.service_config", service_config_json))
async with grpc.aio.insecure_channel('localhost:50051',
options=options) as channel:
async with grpc.aio.insecure_channel(
"localhost:50051", options=options
) as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
response = await stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
response = await stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
print("Greeter client received: " + response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
asyncio.run(run())

@ -21,38 +21,42 @@ import random
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
class ErrorInjectingGreeter(helloworld_pb2_grpc.GreeterServicer):
def __init__(self):
self._counter = collections.defaultdict(int)
async def SayHello(
self, request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext) -> helloworld_pb2.HelloReply:
self,
request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext,
) -> helloworld_pb2.HelloReply:
self._counter[context.peer()] += 1
if self._counter[context.peer()] < 5:
if random.random() < 0.75:
logging.info('Injecting error to RPC from %s', context.peer())
await context.abort(grpc.StatusCode.UNAVAILABLE,
'injected error')
logging.info('Successfully responding to RPC from %s', context.peer())
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
logging.info("Injecting error to RPC from %s", context.peer())
await context.abort(
grpc.StatusCode.UNAVAILABLE, "injected error"
)
logging.info("Successfully responding to RPC from %s", context.peer())
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
async def serve() -> None:
server = grpc.aio.server()
helloworld_pb2_grpc.add_GreeterServicer_to_server(ErrorInjectingGreeter(),
server)
listen_addr = '[::]:50051'
helloworld_pb2_grpc.add_GreeterServicer_to_server(
ErrorInjectingGreeter(), server
)
listen_addr = "[::]:50051"
server.add_insecure_port(listen_addr)
logging.info("Starting flaky server on %s", listen_addr)
await server.start()
await server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.run(serve())

@ -19,38 +19,42 @@ import logging
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
def run():
# The ServiceConfig proto definition can be found:
# https://github.com/grpc/grpc-proto/blob/ec886024c2f7b7f597ba89d5b7d60c3f94627b17/grpc/service_config/service_config.proto#L377
service_config_json = json.dumps({
"methodConfig": [{
# To apply retry to all methods, put [{}] in the "name" field
"name": [{
"service": "helloworld.Greeter",
"method": "SayHello"
}],
"retryPolicy": {
"maxAttempts": 5,
"initialBackoff": "0.1s",
"maxBackoff": "1s",
"backoffMultiplier": 2,
"retryableStatusCodes": ["UNAVAILABLE"],
},
}]
})
service_config_json = json.dumps(
{
"methodConfig": [
{
# To apply retry to all methods, put [{}] in the "name" field
"name": [
{"service": "helloworld.Greeter", "method": "SayHello"}
],
"retryPolicy": {
"maxAttempts": 5,
"initialBackoff": "0.1s",
"maxBackoff": "1s",
"backoffMultiplier": 2,
"retryableStatusCodes": ["UNAVAILABLE"],
},
}
]
}
)
options = []
# NOTE: the retry feature will be enabled by default >=v1.40.0
options.append(("grpc.enable_retries", 1))
options.append(("grpc.service_config", service_config_json))
with grpc.insecure_channel('localhost:50051', options=options) as channel:
with grpc.insecure_channel("localhost:50051", options=options) as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
response = stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
print("Greeter client received: " + response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -24,16 +24,19 @@ import route_guide_pb2_grpc
import route_guide_resources
def make_route_note(message: str, latitude: int,
longitude: int) -> route_guide_pb2.RouteNote:
def make_route_note(
message: str, latitude: int, longitude: int
) -> route_guide_pb2.RouteNote:
return route_guide_pb2.RouteNote(
message=message,
location=route_guide_pb2.Point(latitude=latitude, longitude=longitude))
location=route_guide_pb2.Point(latitude=latitude, longitude=longitude),
)
# Performs an unary call
async def guide_get_one_feature(stub: route_guide_pb2_grpc.RouteGuideStub,
point: route_guide_pb2.Point) -> None:
async def guide_get_one_feature(
stub: route_guide_pb2_grpc.RouteGuideStub, point: route_guide_pb2.Point
) -> None:
feature = await stub.GetFeature(point)
if not feature.location:
print("Server returned incomplete feature")
@ -50,20 +53,25 @@ async def guide_get_feature(stub: route_guide_pb2_grpc.RouteGuideStub) -> None:
# and scheduled in the event loop so that they can run concurrently
task_group = asyncio.gather(
guide_get_one_feature(
stub, route_guide_pb2.Point(latitude=409146138,
longitude=-746188906)),
guide_get_one_feature(stub,
route_guide_pb2.Point(latitude=0, longitude=0)))
stub,
route_guide_pb2.Point(latitude=409146138, longitude=-746188906),
),
guide_get_one_feature(
stub, route_guide_pb2.Point(latitude=0, longitude=0)
),
)
# Wait until the Future is resolved
await task_group
# Performs a server-streaming call
async def guide_list_features(
stub: route_guide_pb2_grpc.RouteGuideStub) -> None:
stub: route_guide_pb2_grpc.RouteGuideStub,
) -> None:
rectangle = route_guide_pb2.Rectangle(
lo=route_guide_pb2.Point(latitude=400000000, longitude=-750000000),
hi=route_guide_pb2.Point(latitude=420000000, longitude=-730000000))
hi=route_guide_pb2.Point(latitude=420000000, longitude=-730000000),
)
print("Looking for features between 40, -75 and 42, -73")
features = stub.ListFeatures(rectangle)
@ -73,7 +81,7 @@ async def guide_list_features(
def generate_route(
feature_list: List[route_guide_pb2.Feature]
feature_list: List[route_guide_pb2.Feature],
) -> Iterable[route_guide_pb2.Point]:
for _ in range(0, 10):
random_feature = random.choice(feature_list)
@ -118,7 +126,7 @@ async def guide_route_chat(stub: route_guide_pb2_grpc.RouteGuideStub) -> None:
async def main() -> None:
async with grpc.aio.insecure_channel('localhost:50051') as channel:
async with grpc.aio.insecure_channel("localhost:50051") as channel:
stub = route_guide_pb2_grpc.RouteGuideStub(channel)
print("-------------- GetFeature --------------")
await guide_get_feature(stub)
@ -130,6 +138,6 @@ async def main() -> None:
await guide_route_chat(stub)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.get_event_loop().run_until_complete(main())

@ -25,8 +25,9 @@ import route_guide_pb2_grpc
import route_guide_resources
def get_feature(feature_db: Iterable[route_guide_pb2.Feature],
point: route_guide_pb2.Point) -> route_guide_pb2.Feature:
def get_feature(
feature_db: Iterable[route_guide_pb2.Feature], point: route_guide_pb2.Point
) -> route_guide_pb2.Feature:
"""Returns Feature at given location or None."""
for feature in feature_db:
if feature.location == point:
@ -34,8 +35,9 @@ def get_feature(feature_db: Iterable[route_guide_pb2.Feature],
return None
def get_distance(start: route_guide_pb2.Point,
end: route_guide_pb2.Point) -> float:
def get_distance(
start: route_guide_pb2.Point, end: route_guide_pb2.Point
) -> float:
"""Distance between two points."""
coord_factor = 10000000.0
lat_1 = start.latitude / coord_factor
@ -48,9 +50,11 @@ def get_distance(start: route_guide_pb2.Point,
delta_lon_rad = math.radians(lon_2 - lon_1)
# Formula is based on http://mathforum.org/library/drmath/view/51879.html
a = (pow(math.sin(delta_lat_rad / 2), 2) +
(math.cos(lat_rad_1) * math.cos(lat_rad_2) *
pow(math.sin(delta_lon_rad / 2), 2)))
a = pow(math.sin(delta_lat_rad / 2), 2) + (
math.cos(lat_rad_1)
* math.cos(lat_rad_2)
* pow(math.sin(delta_lon_rad / 2), 2)
)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
R = 6371000
# metres
@ -63,8 +67,9 @@ class RouteGuideServicer(route_guide_pb2_grpc.RouteGuideServicer):
def __init__(self) -> None:
self.db = route_guide_resources.read_route_guide_database()
def GetFeature(self, request: route_guide_pb2.Point,
unused_context) -> route_guide_pb2.Feature:
def GetFeature(
self, request: route_guide_pb2.Point, unused_context
) -> route_guide_pb2.Feature:
feature = get_feature(self.db, request)
if feature is None:
return route_guide_pb2.Feature(name="", location=request)
@ -72,21 +77,26 @@ class RouteGuideServicer(route_guide_pb2_grpc.RouteGuideServicer):
return feature
async def ListFeatures(
self, request: route_guide_pb2.Rectangle,
unused_context) -> AsyncIterable[route_guide_pb2.Feature]:
self, request: route_guide_pb2.Rectangle, unused_context
) -> AsyncIterable[route_guide_pb2.Feature]:
left = min(request.lo.longitude, request.hi.longitude)
right = max(request.lo.longitude, request.hi.longitude)
top = max(request.lo.latitude, request.hi.latitude)
bottom = min(request.lo.latitude, request.hi.latitude)
for feature in self.db:
if (feature.location.longitude >= left and
feature.location.longitude <= right and
feature.location.latitude >= bottom and
feature.location.latitude <= top):
if (
feature.location.longitude >= left
and feature.location.longitude <= right
and feature.location.latitude >= bottom
and feature.location.latitude <= top
):
yield feature
async def RecordRoute(self, request_iterator: AsyncIterable[
route_guide_pb2.Point], unused_context) -> route_guide_pb2.RouteSummary:
async def RecordRoute(
self,
request_iterator: AsyncIterable[route_guide_pb2.Point],
unused_context,
) -> route_guide_pb2.RouteSummary:
point_count = 0
feature_count = 0
distance = 0.0
@ -102,14 +112,18 @@ class RouteGuideServicer(route_guide_pb2_grpc.RouteGuideServicer):
prev_point = point
elapsed_time = time.time() - start_time
return route_guide_pb2.RouteSummary(point_count=point_count,
feature_count=feature_count,
distance=int(distance),
elapsed_time=int(elapsed_time))
return route_guide_pb2.RouteSummary(
point_count=point_count,
feature_count=feature_count,
distance=int(distance),
elapsed_time=int(elapsed_time),
)
async def RouteChat(
self, request_iterator: AsyncIterable[route_guide_pb2.RouteNote],
unused_context) -> AsyncIterable[route_guide_pb2.RouteNote]:
self,
request_iterator: AsyncIterable[route_guide_pb2.RouteNote],
unused_context,
) -> AsyncIterable[route_guide_pb2.RouteNote]:
prev_notes = []
async for new_note in request_iterator:
for prev_note in prev_notes:
@ -121,12 +135,13 @@ class RouteGuideServicer(route_guide_pb2_grpc.RouteGuideServicer):
async def serve() -> None:
server = grpc.aio.server()
route_guide_pb2_grpc.add_RouteGuideServicer_to_server(
RouteGuideServicer(), server)
server.add_insecure_port('[::]:50051')
RouteGuideServicer(), server
)
server.add_insecure_port("[::]:50051")
await server.start()
await server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.get_event_loop().run_until_complete(serve())

@ -27,7 +27,8 @@ import route_guide_resources
def make_route_note(message, latitude, longitude):
return route_guide_pb2.RouteNote(
message=message,
location=route_guide_pb2.Point(latitude=latitude, longitude=longitude))
location=route_guide_pb2.Point(latitude=latitude, longitude=longitude),
)
def guide_get_one_feature(stub, point):
@ -44,14 +45,16 @@ def guide_get_one_feature(stub, point):
def guide_get_feature(stub):
guide_get_one_feature(
stub, route_guide_pb2.Point(latitude=409146138, longitude=-746188906))
stub, route_guide_pb2.Point(latitude=409146138, longitude=-746188906)
)
guide_get_one_feature(stub, route_guide_pb2.Point(latitude=0, longitude=0))
def guide_list_features(stub):
rectangle = route_guide_pb2.Rectangle(
lo=route_guide_pb2.Point(latitude=400000000, longitude=-750000000),
hi=route_guide_pb2.Point(latitude=420000000, longitude=-730000000))
hi=route_guide_pb2.Point(latitude=420000000, longitude=-730000000),
)
print("Looking for features between 40, -75 and 42, -73")
features = stub.ListFeatures(rectangle)
@ -94,15 +97,16 @@ def generate_messages():
def guide_route_chat(stub):
responses = stub.RouteChat(generate_messages())
for response in responses:
print("Received message %s at %s" %
(response.message, response.location))
print(
"Received message %s at %s" % (response.message, response.location)
)
def run():
# NOTE(gRPC Python Team): .close() is possible on a channel and should be
# used in circumstances in which the with statement does not fit the needs
# of the code.
with grpc.insecure_channel('localhost:50051') as channel:
with grpc.insecure_channel("localhost:50051") as channel:
stub = route_guide_pb2_grpc.RouteGuideStub(channel)
print("-------------- GetFeature --------------")
guide_get_feature(stub)
@ -114,6 +118,6 @@ def run():
guide_route_chat(stub)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -21,10 +21,10 @@ import route_guide_pb2
def read_route_guide_database():
"""Reads the route guide database.
Returns:
The full contents of the route guide database as a sequence of
route_guide_pb2.Features.
"""
Returns:
The full contents of the route guide database as a sequence of
route_guide_pb2.Features.
"""
feature_list = []
with open("route_guide_db.json") as route_guide_db_file:
for item in json.load(route_guide_db_file):
@ -32,6 +32,8 @@ def read_route_guide_database():
name=item["name"],
location=route_guide_pb2.Point(
latitude=item["location"]["latitude"],
longitude=item["location"]["longitude"]))
longitude=item["location"]["longitude"],
),
)
feature_list.append(feature)
return feature_list

@ -45,9 +45,11 @@ def get_distance(start, end):
delta_lon_rad = math.radians(lon_2 - lon_1)
# Formula is based on http://mathforum.org/library/drmath/view/51879.html
a = (pow(math.sin(delta_lat_rad / 2), 2) +
(math.cos(lat_rad_1) * math.cos(lat_rad_2) *
pow(math.sin(delta_lon_rad / 2), 2)))
a = pow(math.sin(delta_lat_rad / 2), 2) + (
math.cos(lat_rad_1)
* math.cos(lat_rad_2)
* pow(math.sin(delta_lon_rad / 2), 2)
)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
R = 6371000
# metres
@ -73,10 +75,12 @@ class RouteGuideServicer(route_guide_pb2_grpc.RouteGuideServicer):
top = max(request.lo.latitude, request.hi.latitude)
bottom = min(request.lo.latitude, request.hi.latitude)
for feature in self.db:
if (feature.location.longitude >= left and
feature.location.longitude <= right and
feature.location.latitude >= bottom and
feature.location.latitude <= top):
if (
feature.location.longitude >= left
and feature.location.longitude <= right
and feature.location.latitude >= bottom
and feature.location.latitude <= top
):
yield feature
def RecordRoute(self, request_iterator, context):
@ -95,10 +99,12 @@ class RouteGuideServicer(route_guide_pb2_grpc.RouteGuideServicer):
prev_point = point
elapsed_time = time.time() - start_time
return route_guide_pb2.RouteSummary(point_count=point_count,
feature_count=feature_count,
distance=int(distance),
elapsed_time=int(elapsed_time))
return route_guide_pb2.RouteSummary(
point_count=point_count,
feature_count=feature_count,
distance=int(distance),
elapsed_time=int(elapsed_time),
)
def RouteChat(self, request_iterator, context):
prev_notes = []
@ -112,12 +118,13 @@ class RouteGuideServicer(route_guide_pb2_grpc.RouteGuideServicer):
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
route_guide_pb2_grpc.add_RouteGuideServicer_to_server(
RouteGuideServicer(), server)
server.add_insecure_port('[::]:50051')
RouteGuideServicer(), server
)
server.add_insecure_port("[::]:50051")
server.start()
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -15,10 +15,12 @@
from grpc_tools import protoc
protoc.main((
'',
'-I../../protos',
'--python_out=.',
'--grpc_python_out=.',
'../../protos/route_guide.proto',
))
protoc.main(
(
"",
"-I../../protos",
"--python_out=.",
"--grpc_python_out=.",
"../../protos/route_guide.proto",
)
)

@ -20,26 +20,28 @@ import helloworld_pb2
import helloworld_pb2_grpc
def unary_call(stub: helloworld_pb2_grpc.GreeterStub, request_id: int,
message: str):
def unary_call(
stub: helloworld_pb2_grpc.GreeterStub, request_id: int, message: str
):
print("call:", request_id)
try:
response = stub.SayHello(helloworld_pb2.HelloRequest(name=message),
timeout=3)
response = stub.SayHello(
helloworld_pb2.HelloRequest(name=message), timeout=3
)
print(f"Greeter client received: {response.message}")
except grpc.RpcError as rpc_error:
print(f"Call failed with code: {rpc_error.code()}")
def run():
with grpc.insecure_channel('localhost:50051') as channel:
with grpc.insecure_channel("localhost:50051") as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
# Should success
unary_call(stub, 1, 'you')
unary_call(stub, 1, "you")
# Should fail with DEADLINE_EXCEEDED
unary_call(stub, 2, '[delay] you')
unary_call(stub, 2, "[delay] you")
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
run()

@ -23,7 +23,6 @@ import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, context):
message = request.name
if message.startswith("[delay]"):
@ -32,15 +31,15 @@ class Greeter(helloworld_pb2_grpc.GreeterServicer):
def serve():
port = '50051'
port = "50051"
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
server.add_insecure_port('[::]:' + port)
server.add_insecure_port("[::]:" + port)
server.start()
print("Server started, listening on " + port)
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -22,15 +22,16 @@ import helloworld_pb2_grpc
async def run() -> None:
uds_addresses = ['unix:helloworld.sock', 'unix:///tmp/helloworld.sock']
uds_addresses = ["unix:helloworld.sock", "unix:///tmp/helloworld.sock"]
for uds_address in uds_addresses:
async with grpc.aio.insecure_channel(uds_address) as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
response = await stub.SayHello(
helloworld_pb2.HelloRequest(name='you'))
logging.info('Received: %s', response.message)
helloworld_pb2.HelloRequest(name="you")
)
logging.info("Received: %s", response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.run(run())

@ -22,25 +22,26 @@ import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
async def SayHello(
self, request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext) -> helloworld_pb2.HelloReply:
self,
request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext,
) -> helloworld_pb2.HelloReply:
del request
return helloworld_pb2.HelloReply(message=f'Hello to {context.peer()}!')
return helloworld_pb2.HelloReply(message=f"Hello to {context.peer()}!")
async def serve() -> None:
uds_addresses = ['unix:helloworld.sock', 'unix:///tmp/helloworld.sock']
uds_addresses = ["unix:helloworld.sock", "unix:///tmp/helloworld.sock"]
server = grpc.aio.server()
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
for uds_address in uds_addresses:
server.add_insecure_port(uds_address)
logging.info('Server listening on: %s', uds_address)
logging.info("Server listening on: %s", uds_address)
await server.start()
await server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.run(serve())

@ -23,14 +23,14 @@ import helloworld_pb2_grpc
def run():
uds_addresses = ['unix:helloworld.sock', 'unix:///tmp/helloworld.sock']
uds_addresses = ["unix:helloworld.sock", "unix:///tmp/helloworld.sock"]
for uds_address in uds_addresses:
with grpc.insecure_channel(uds_address) as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
logging.info('Received: %s', response.message)
response = stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
logging.info("Received: %s", response.message)
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
run()

@ -22,23 +22,22 @@ import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, context):
del request
return helloworld_pb2.HelloReply(message=f'Hello to {context.peer()}!')
return helloworld_pb2.HelloReply(message=f"Hello to {context.peer()}!")
def serve():
uds_addresses = ['unix:helloworld.sock', 'unix:///tmp/helloworld.sock']
uds_addresses = ["unix:helloworld.sock", "unix:///tmp/helloworld.sock"]
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
for uds_address in uds_addresses:
server.add_insecure_port(uds_address)
logging.info('Server listening on: %s', uds_address)
logging.info("Server listening on: %s", uds_address)
server.start()
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
serve()

@ -22,7 +22,8 @@ from typing import Iterable
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
_LOGGER = logging.getLogger(__name__)
_LOGGER.setLevel(logging.INFO)
@ -34,32 +35,35 @@ def get_free_loopback_tcp_port() -> Iterable[str]:
tcp_socket = socket.socket(socket.AF_INET6)
else:
tcp_socket = socket.socket(socket.AF_INET)
tcp_socket.bind(('', 0))
tcp_socket.bind(("", 0))
address_tuple = tcp_socket.getsockname()
yield f"localhost:{address_tuple[1]}"
tcp_socket.close()
class Greeter(helloworld_pb2_grpc.GreeterServicer):
async def SayHello(self, request: helloworld_pb2.HelloRequest,
unused_context) -> helloworld_pb2.HelloReply:
return helloworld_pb2.HelloReply(message=f'Hello, {request.name}!')
async def SayHello(
self, request: helloworld_pb2.HelloRequest, unused_context
) -> helloworld_pb2.HelloReply:
return helloworld_pb2.HelloReply(message=f"Hello, {request.name}!")
def create_server(server_address: str):
server = grpc.aio.server()
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
bound_port = server.add_insecure_port(server_address)
assert bound_port == int(server_address.split(':')[-1])
assert bound_port == int(server_address.split(":")[-1])
return server
async def process(stub: helloworld_pb2_grpc.GreeterStub,
wait_for_ready: bool = None) -> None:
async def process(
stub: helloworld_pb2_grpc.GreeterStub, wait_for_ready: bool = None
) -> None:
try:
response = await stub.SayHello(helloworld_pb2.HelloRequest(name='you'),
wait_for_ready=wait_for_ready)
response = await stub.SayHello(
helloworld_pb2.HelloRequest(name="you"),
wait_for_ready=wait_for_ready,
)
message = response.message
except grpc.aio.AioRpcError as rpc_error:
assert rpc_error.code() == grpc.StatusCode.UNAVAILABLE
@ -67,8 +71,11 @@ async def process(stub: helloworld_pb2_grpc.GreeterStub,
message = rpc_error
else:
assert wait_for_ready
_LOGGER.info("Wait-for-ready %s, client received: %s",
"enabled" if wait_for_ready else "disabled", message)
_LOGGER.info(
"Wait-for-ready %s, client received: %s",
"enabled" if wait_for_ready else "disabled",
message,
)
async def main() -> None:
@ -80,10 +87,12 @@ async def main() -> None:
# Fire an RPC without wait_for_ready
fail_fast_task = asyncio.get_event_loop().create_task(
process(stub, wait_for_ready=False))
process(stub, wait_for_ready=False)
)
# Fire an RPC with wait_for_ready
wait_for_ready_task = asyncio.get_event_loop().create_task(
process(stub, wait_for_ready=True))
process(stub, wait_for_ready=True)
)
# Wait for the channel entering TRANSIENT FAILURE state.
state = channel.get_state()
@ -104,6 +113,6 @@ async def main() -> None:
await channel.close()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
asyncio.get_event_loop().run_until_complete(main())

@ -22,17 +22,17 @@ from examples.python.wait_for_ready import wait_for_ready_example
class WaitForReadyExampleTest(unittest.TestCase):
def test_wait_for_ready_example(self):
wait_for_ready_example.main()
# No unhandled exception raised, no deadlock, test passed!
def test_asyncio_wait_for_ready_example(self):
asyncio.get_event_loop().run_until_complete(
asyncio_wait_for_ready_example.main())
asyncio_wait_for_ready_example.main()
)
# No unhandled exception raised, no deadlock, test passed!
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
unittest.main(verbosity=2)

@ -22,7 +22,8 @@ import threading
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
_LOGGER = logging.getLogger(__name__)
_LOGGER.setLevel(logging.INFO)
@ -34,30 +35,31 @@ def get_free_loopback_tcp_port():
tcp_socket = socket.socket(socket.AF_INET6)
else:
tcp_socket = socket.socket(socket.AF_INET)
tcp_socket.bind(('', 0))
tcp_socket.bind(("", 0))
address_tuple = tcp_socket.getsockname()
yield "localhost:%s" % (address_tuple[1])
tcp_socket.close()
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHello(self, request, unused_context):
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
return helloworld_pb2.HelloReply(message="Hello, %s!" % request.name)
def create_server(server_address):
server = grpc.server(futures.ThreadPoolExecutor())
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
bound_port = server.add_insecure_port(server_address)
assert bound_port == int(server_address.split(':')[-1])
assert bound_port == int(server_address.split(":")[-1])
return server
def process(stub, wait_for_ready=None):
try:
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'),
wait_for_ready=wait_for_ready)
response = stub.SayHello(
helloworld_pb2.HelloRequest(name="you"),
wait_for_ready=wait_for_ready,
)
message = response.message
except grpc.RpcError as rpc_error:
assert rpc_error.code() == grpc.StatusCode.UNAVAILABLE
@ -65,19 +67,24 @@ def process(stub, wait_for_ready=None):
message = rpc_error
else:
assert wait_for_ready
_LOGGER.info("Wait-for-ready %s, client received: %s",
"enabled" if wait_for_ready else "disabled", message)
_LOGGER.info(
"Wait-for-ready %s, client received: %s",
"enabled" if wait_for_ready else "disabled",
message,
)
def main():
# Pick a random free port
with get_free_loopback_tcp_port() as server_address:
# Register connectivity event to notify main thread
transient_failure_event = threading.Event()
def wait_for_transient_failure(channel_connectivity):
if channel_connectivity == grpc.ChannelConnectivity.TRANSIENT_FAILURE:
if (
channel_connectivity
== grpc.ChannelConnectivity.TRANSIENT_FAILURE
):
transient_failure_event.set()
# Create gRPC channel
@ -86,12 +93,14 @@ def main():
stub = helloworld_pb2_grpc.GreeterStub(channel)
# Fire an RPC without wait_for_ready
thread_disabled_wait_for_ready = threading.Thread(target=process,
args=(stub, False))
thread_disabled_wait_for_ready = threading.Thread(
target=process, args=(stub, False)
)
thread_disabled_wait_for_ready.start()
# Fire an RPC with wait_for_ready
thread_enabled_wait_for_ready = threading.Thread(target=process,
args=(stub, True))
thread_enabled_wait_for_ready = threading.Thread(
target=process, args=(stub, True)
)
thread_enabled_wait_for_ready.start()
# Wait for the channel entering TRANSIENT FAILURE state.
@ -108,6 +117,6 @@ def main():
channel.close()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
main()

@ -28,7 +28,8 @@ from typing import Sequence, Tuple
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
_LOGGER = logging.getLogger(__name__)
_LOGGER.setLevel(logging.INFO)
@ -37,8 +38,10 @@ _LOGGER.setLevel(logging.INFO)
def wait_for_metadata(response_future, event):
metadata: Sequence[Tuple[str, str]] = response_future.initial_metadata()
for key, value in metadata:
print('Greeter client received initial metadata: key=%s value=%s' %
(key, value))
print(
"Greeter client received initial metadata: key=%s value=%s"
% (key, value)
)
event.set()
@ -55,20 +58,22 @@ def check_status(response_future, wait_success):
def main():
# Create gRPC channel
with grpc.insecure_channel('localhost:50051') as channel:
with grpc.insecure_channel("localhost:50051") as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
event_for_delay = threading.Event()
# Server will delay send initial metadata back for this RPC
response_future_delay = stub.SayHelloStreamReply(
helloworld_pb2.HelloRequest(name='you'), wait_for_ready=True)
helloworld_pb2.HelloRequest(name="you"), wait_for_ready=True
)
# Fire RPC and wait for metadata
thread_with_delay = threading.Thread(target=wait_for_metadata,
args=(response_future_delay,
event_for_delay),
daemon=True)
thread_with_delay = threading.Thread(
target=wait_for_metadata,
args=(response_future_delay, event_for_delay),
daemon=True,
)
thread_with_delay.start()
# Wait on client side with 7 seconds timeout
@ -76,6 +81,6 @@ def main():
check_status(response_future_delay, event_for_delay.wait(timeout))
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
main()

@ -29,9 +29,10 @@ from time import sleep
import grpc
helloworld_pb2, helloworld_pb2_grpc = grpc.protos_and_services(
"helloworld.proto")
"helloworld.proto"
)
_INITIAL_METADATA = ((b'initial-md', 'initial-md-value'),)
_INITIAL_METADATA = ((b"initial-md", "initial-md-value"),)
def starting_up_server():
@ -45,7 +46,6 @@ def do_work():
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def SayHelloStreamReply(self, request, servicer_context):
# Suppose server will take some time to setup, client can set the time it willing to wait
# for server to up and running.
@ -60,19 +60,20 @@ class Greeter(helloworld_pb2_grpc.GreeterServicer):
# Sending actual response.
for i in range(3):
yield helloworld_pb2.HelloReply(message='Hello %s times %s' %
(request.name, i))
yield helloworld_pb2.HelloReply(
message="Hello %s times %s" % (request.name, i)
)
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
server.add_insecure_port('[::]:50051')
server.add_insecure_port("[::]:50051")
print("starting server")
server.start()
server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
logging.basicConfig()
serve()

@ -35,19 +35,20 @@ def run(server_address, secure):
channel = grpc.insecure_channel(server_address)
with channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
response = stub.SayHello(helloworld_pb2.HelloRequest(name="you"))
print("Greeter client received: " + response.message)
if __name__ == '__main__':
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=_DESCRIPTION)
parser.add_argument("server",
default=None,
help="The address of the server.")
parser.add_argument(
"server", default=None, help="The address of the server."
)
parser.add_argument(
"--xds-creds",
action="store_true",
help="If specified, uses xDS credentials to connect to the server.")
help="If specified, uses xDS credentials to connect to the server.",
)
args = parser.parse_args()
logging.basicConfig()
run(args.server, args.xds_creds)

@ -34,24 +34,28 @@ _THREAD_POOL_SIZE = 256
logger = logging.getLogger()
console_handler = logging.StreamHandler()
formatter = logging.Formatter(fmt='%(asctime)s: %(levelname)-8s %(message)s')
formatter = logging.Formatter(fmt="%(asctime)s: %(levelname)-8s %(message)s")
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
class Greeter(helloworld_pb2_grpc.GreeterServicer):
def __init__(self, hostname: str):
self._hostname = hostname if hostname else socket.gethostname()
def SayHello(self, request: helloworld_pb2.HelloRequest,
context: grpc.ServicerContext) -> helloworld_pb2.HelloReply:
def SayHello(
self,
request: helloworld_pb2.HelloRequest,
context: grpc.ServicerContext,
) -> helloworld_pb2.HelloReply:
return helloworld_pb2.HelloReply(
message=f"Hello {request.name} from {self._hostname}!")
message=f"Hello {request.name} from {self._hostname}!"
)
def _configure_maintenance_server(server: grpc.Server,
maintenance_port: int) -> None:
def _configure_maintenance_server(
server: grpc.Server, maintenance_port: int
) -> None:
listen_address = f"{_LISTEN_HOST}:{maintenance_port}"
server.add_insecure_port(listen_address)
@ -60,13 +64,15 @@ def _configure_maintenance_server(server: grpc.Server,
health_servicer = health.HealthServicer(
experimental_non_blocking=True,
experimental_thread_pool=futures.ThreadPoolExecutor(
max_workers=_THREAD_POOL_SIZE))
max_workers=_THREAD_POOL_SIZE
),
)
# Create a tuple of all of the services we want to export via reflection.
services = tuple(
service.full_name
for service in helloworld_pb2.DESCRIPTOR.services_by_name.values()) + (
reflection.SERVICE_NAME, health.SERVICE_NAME)
for service in helloworld_pb2.DESCRIPTOR.services_by_name.values()
) + (reflection.SERVICE_NAME, health.SERVICE_NAME)
# Mark all services as healthy.
health_pb2_grpc.add_HealthServicer_to_server(health_servicer, server)
@ -75,8 +81,9 @@ def _configure_maintenance_server(server: grpc.Server,
reflection.enable_server_reflection(services, server)
def _configure_greeter_server(server: grpc.Server, port: int, secure_mode: bool,
hostname) -> None:
def _configure_greeter_server(
server: grpc.Server, port: int, secure_mode: bool, hostname
) -> None:
# Add the application servicer to the server.
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(hostname), server)
listen_address = f"{_LISTEN_HOST}:{port}"
@ -92,12 +99,14 @@ def _configure_greeter_server(server: grpc.Server, port: int, secure_mode: bool,
server.add_secure_port(listen_address, server_creds)
def serve(port: int, hostname: str, maintenance_port: int,
secure_mode: bool) -> None:
def serve(
port: int, hostname: str, maintenance_port: int, secure_mode: bool
) -> None:
if port == maintenance_port:
# If maintenance port and port are the same, start a single server.
server = grpc.server(
futures.ThreadPoolExecutor(max_workers=_THREAD_POOL_SIZE))
futures.ThreadPoolExecutor(max_workers=_THREAD_POOL_SIZE)
)
_configure_greeter_server(server, port, secure_mode, hostname)
_configure_maintenance_server(server, maintenance_port)
server.start()
@ -108,12 +117,14 @@ def serve(port: int, hostname: str, maintenance_port: int,
# Otherwise, start two different servers.
greeter_server = grpc.server(
futures.ThreadPoolExecutor(max_workers=_THREAD_POOL_SIZE),
xds=secure_mode)
xds=secure_mode,
)
_configure_greeter_server(greeter_server, port, secure_mode, hostname)
greeter_server.start()
logger.info("Greeter server listening on port %d", port)
maintenance_server = grpc.server(
futures.ThreadPoolExecutor(max_workers=_THREAD_POOL_SIZE))
futures.ThreadPoolExecutor(max_workers=_THREAD_POOL_SIZE)
)
_configure_maintenance_server(maintenance_server, maintenance_port)
maintenance_server.start()
logger.info("Maintenance server listening on port %d", maintenance_port)
@ -121,22 +132,27 @@ def serve(port: int, hostname: str, maintenance_port: int,
maintenance_server.wait_for_termination()
if __name__ == '__main__':
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=_DESCRIPTION)
parser.add_argument("port",
default=50051,
type=int,
nargs="?",
help="The port on which to listen.")
parser.add_argument("hostname",
type=str,
default=None,
nargs="?",
help="The name clients will see in responses.")
parser.add_argument(
"port",
default=50051,
type=int,
nargs="?",
help="The port on which to listen.",
)
parser.add_argument(
"hostname",
type=str,
default=None,
nargs="?",
help="The name clients will see in responses.",
)
parser.add_argument(
"--xds-creds",
action="store_true",
help="If specified, uses xDS credentials to connect to the server.")
help="If specified, uses xDS credentials to connect to the server.",
)
args = parser.parse_args()
logging.basicConfig()
logger.setLevel(logging.INFO)

@ -12,10 +12,6 @@ inplace=1
[build_package_protos]
exclude=.*protoc_plugin/protoc_plugin_test\.proto$
# Style settings
[yapf]
based_on_style = google
[metadata]
license_files = LICENSE

@ -21,7 +21,7 @@ import setuptools # isort:skip
# files used by boring SSL.
from distutils.unixccompiler import UnixCCompiler
UnixCCompiler.src_extensions.append('.S')
UnixCCompiler.src_extensions.append(".S")
del UnixCCompiler
from distutils import cygwinccompiler
@ -44,42 +44,46 @@ import pkg_resources
from setuptools.command import egg_info
# Redirect the manifest template from MANIFEST.in to PYTHON-MANIFEST.in.
egg_info.manifest_maker.template = 'PYTHON-MANIFEST.in'
egg_info.manifest_maker.template = "PYTHON-MANIFEST.in"
PY3 = sys.version_info.major == 3
PYTHON_STEM = os.path.join('src', 'python', 'grpcio')
PYTHON_STEM = os.path.join("src", "python", "grpcio")
CORE_INCLUDE = (
'include',
'.',
"include",
".",
)
ABSL_INCLUDE = (os.path.join("third_party", "abseil-cpp"),)
ADDRESS_SORTING_INCLUDE = (
os.path.join("third_party", "address_sorting", "include"),
)
ABSL_INCLUDE = (os.path.join('third_party', 'abseil-cpp'),)
ADDRESS_SORTING_INCLUDE = (os.path.join('third_party', 'address_sorting',
'include'),)
CARES_INCLUDE = (
os.path.join('third_party', 'cares', 'cares', 'include'),
os.path.join('third_party', 'cares'),
os.path.join('third_party', 'cares', 'cares'),
os.path.join("third_party", "cares", "cares", "include"),
os.path.join("third_party", "cares"),
os.path.join("third_party", "cares", "cares"),
)
if "darwin" in sys.platform:
CARES_INCLUDE += (os.path.join("third_party", "cares", "config_darwin"),)
if "freebsd" in sys.platform:
CARES_INCLUDE += (os.path.join("third_party", "cares", "config_freebsd"),)
if "linux" in sys.platform:
CARES_INCLUDE += (os.path.join("third_party", "cares", "config_linux"),)
if "openbsd" in sys.platform:
CARES_INCLUDE += (os.path.join("third_party", "cares", "config_openbsd"),)
RE2_INCLUDE = (os.path.join("third_party", "re2"),)
SSL_INCLUDE = (
os.path.join("third_party", "boringssl-with-bazel", "src", "include"),
)
UPB_INCLUDE = (os.path.join("third_party", "upb"),)
UPB_GRPC_GENERATED_INCLUDE = (
os.path.join("src", "core", "ext", "upb-generated"),
)
if 'darwin' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_darwin'),)
if 'freebsd' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_freebsd'),)
if 'linux' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_linux'),)
if 'openbsd' in sys.platform:
CARES_INCLUDE += (os.path.join('third_party', 'cares', 'config_openbsd'),)
RE2_INCLUDE = (os.path.join('third_party', 're2'),)
SSL_INCLUDE = (os.path.join('third_party', 'boringssl-with-bazel', 'src',
'include'),)
UPB_INCLUDE = (os.path.join('third_party', 'upb'),)
UPB_GRPC_GENERATED_INCLUDE = (os.path.join('src', 'core', 'ext',
'upb-generated'),)
UPBDEFS_GRPC_GENERATED_INCLUDE = (os.path.join('src', 'core', 'ext',
'upbdefs-generated'),)
UTF8_RANGE_INCLUDE = (os.path.join('third_party', 'utf8_range'),)
XXHASH_INCLUDE = (os.path.join('third_party', 'xxhash'),)
ZLIB_INCLUDE = (os.path.join('third_party', 'zlib'),)
README = os.path.join(PYTHON_STEM, 'README.rst')
UPBDEFS_GRPC_GENERATED_INCLUDE = (
os.path.join("src", "core", "ext", "upbdefs-generated"),
)
UTF8_RANGE_INCLUDE = (os.path.join("third_party", "utf8_range"),)
XXHASH_INCLUDE = (os.path.join("third_party", "xxhash"),)
ZLIB_INCLUDE = (os.path.join("third_party", "zlib"),)
README = os.path.join(PYTHON_STEM, "README.rst")
# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
@ -96,28 +100,29 @@ import grpc_version
_parallel_compile_patch.monkeypatch_compile_maybe()
_spawn_patch.monkeypatch_spawn()
LICENSE = 'Apache License 2.0'
LICENSE = "Apache License 2.0"
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'License :: OSI Approved :: Apache Software License',
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"License :: OSI Approved :: Apache Software License",
]
def _env_bool_value(env_name, default):
"""Parses a bool option from an environment variable"""
return os.environ.get(env_name, default).upper() not in ['FALSE', '0', '']
return os.environ.get(env_name, default).upper() not in ["FALSE", "0", ""]
BUILD_WITH_BORING_SSL_ASM = _env_bool_value('GRPC_BUILD_WITH_BORING_SSL_ASM',
'True')
BUILD_WITH_BORING_SSL_ASM = _env_bool_value(
"GRPC_BUILD_WITH_BORING_SSL_ASM", "True"
)
# Export this environment variable to override the platform variant that will
# be chosen for boringssl assembly optimizations. This option is useful when
@ -125,42 +130,46 @@ BUILD_WITH_BORING_SSL_ASM = _env_bool_value('GRPC_BUILD_WITH_BORING_SSL_ASM',
# doesn't match the platform we are targetting.
# Example value: "linux-aarch64"
BUILD_OVERRIDE_BORING_SSL_ASM_PLATFORM = os.environ.get(
'GRPC_BUILD_OVERRIDE_BORING_SSL_ASM_PLATFORM', '')
"GRPC_BUILD_OVERRIDE_BORING_SSL_ASM_PLATFORM", ""
)
# Environment variable to determine whether or not the Cython extension should
# *use* Cython or use the generated C files. Note that this requires the C files
# to have been generated by building first *with* Cython support. Even if this
# is set to false, if the script detects that the generated `.c` file isn't
# present, then it will still attempt to use Cython.
BUILD_WITH_CYTHON = _env_bool_value('GRPC_PYTHON_BUILD_WITH_CYTHON', 'False')
BUILD_WITH_CYTHON = _env_bool_value("GRPC_PYTHON_BUILD_WITH_CYTHON", "False")
# Export this variable to use the system installation of openssl. You need to
# have the header files installed (in /usr/include/openssl) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_OPENSSL = _env_bool_value('GRPC_PYTHON_BUILD_SYSTEM_OPENSSL',
'False')
BUILD_WITH_SYSTEM_OPENSSL = _env_bool_value(
"GRPC_PYTHON_BUILD_SYSTEM_OPENSSL", "False"
)
# Export this variable to use the system installation of zlib. You need to
# have the header files installed (in /usr/include/) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_ZLIB = _env_bool_value('GRPC_PYTHON_BUILD_SYSTEM_ZLIB',
'False')
BUILD_WITH_SYSTEM_ZLIB = _env_bool_value(
"GRPC_PYTHON_BUILD_SYSTEM_ZLIB", "False"
)
# Export this variable to use the system installation of cares. You need to
# have the header files installed (in /usr/include/) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_CARES = _env_bool_value('GRPC_PYTHON_BUILD_SYSTEM_CARES',
'False')
BUILD_WITH_SYSTEM_CARES = _env_bool_value(
"GRPC_PYTHON_BUILD_SYSTEM_CARES", "False"
)
# Export this variable to use the system installation of re2. You need to
# have the header files installed (in /usr/include/re2) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_RE2 = _env_bool_value('GRPC_PYTHON_BUILD_SYSTEM_RE2', 'False')
BUILD_WITH_SYSTEM_RE2 = _env_bool_value("GRPC_PYTHON_BUILD_SYSTEM_RE2", "False")
# Export this variable to use the system installation of abseil. You need to
# have the header files installed (in /usr/include/absl) and during
# runtime, the shared library must be installed
BUILD_WITH_SYSTEM_ABSL = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_ABSL', False)
BUILD_WITH_SYSTEM_ABSL = os.environ.get("GRPC_PYTHON_BUILD_SYSTEM_ABSL", False)
# Export this variable to force building the python extension with a statically linked libstdc++.
# At least on linux, this is normally not needed as we can build manylinux-compatible wheels on linux just fine
@ -170,7 +179,8 @@ BUILD_WITH_SYSTEM_ABSL = os.environ.get('GRPC_PYTHON_BUILD_SYSTEM_ABSL', False)
# of GCC (we require >=5.1) but still uses old-enough libstdc++ symbols.
# TODO(jtattermusch): remove this workaround once issues with crosscompiler version are resolved.
BUILD_WITH_STATIC_LIBSTDCXX = _env_bool_value(
'GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX', 'False')
"GRPC_PYTHON_BUILD_WITH_STATIC_LIBSTDCXX", "False"
)
# For local development use only: This skips building gRPC Core and its
# dependencies, including protobuf and boringssl. This allows "incremental"
@ -183,44 +193,53 @@ BUILD_WITH_STATIC_LIBSTDCXX = _env_bool_value(
# make HAS_SYSTEM_OPENSSL_ALPN=0
#
# TODO(ericgribkoff) Respect the BUILD_WITH_SYSTEM_* flags alongside this option
USE_PREBUILT_GRPC_CORE = _env_bool_value('GRPC_PYTHON_USE_PREBUILT_GRPC_CORE',
'False')
USE_PREBUILT_GRPC_CORE = _env_bool_value(
"GRPC_PYTHON_USE_PREBUILT_GRPC_CORE", "False"
)
# If this environmental variable is set, GRPC will not try to be compatible with
# libc versions old than the one it was compiled against.
DISABLE_LIBC_COMPATIBILITY = _env_bool_value(
'GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY', 'False')
"GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY", "False"
)
# Environment variable to determine whether or not to enable coverage analysis
# in Cython modules.
ENABLE_CYTHON_TRACING = _env_bool_value('GRPC_PYTHON_ENABLE_CYTHON_TRACING',
'False')
ENABLE_CYTHON_TRACING = _env_bool_value(
"GRPC_PYTHON_ENABLE_CYTHON_TRACING", "False"
)
# Environment variable specifying whether or not there's interest in setting up
# documentation building.
ENABLE_DOCUMENTATION_BUILD = _env_bool_value(
'GRPC_PYTHON_ENABLE_DOCUMENTATION_BUILD', 'False')
"GRPC_PYTHON_ENABLE_DOCUMENTATION_BUILD", "False"
)
def check_linker_need_libatomic():
"""Test if linker on system needs libatomic."""
code_test = (b'#include <atomic>\n' +
b'int main() { return std::atomic<int64_t>{}; }')
cxx = shlex.split(os.environ.get('CXX', 'c++'))
cpp_test = subprocess.Popen(cxx + ['-x', 'c++', '-std=c++14', '-'],
stdin=PIPE,
stdout=PIPE,
stderr=PIPE)
code_test = (
b"#include <atomic>\n"
+ b"int main() { return std::atomic<int64_t>{}; }"
)
cxx = shlex.split(os.environ.get("CXX", "c++"))
cpp_test = subprocess.Popen(
cxx + ["-x", "c++", "-std=c++14", "-"],
stdin=PIPE,
stdout=PIPE,
stderr=PIPE,
)
cpp_test.communicate(input=code_test)
if cpp_test.returncode == 0:
return False
# Double-check to see if -latomic actually can solve the problem.
# https://github.com/grpc/grpc/issues/22491
cpp_test = subprocess.Popen(cxx +
['-x', 'c++', '-std=c++14', '-', '-latomic'],
stdin=PIPE,
stdout=PIPE,
stderr=PIPE)
cpp_test = subprocess.Popen(
cxx + ["-x", "c++", "-std=c++14", "-", "-latomic"],
stdin=PIPE,
stdout=PIPE,
stderr=PIPE,
)
cpp_test.communicate(input=code_test)
return cpp_test.returncode == 0
@ -232,119 +251,141 @@ def check_linker_need_libatomic():
# We can also use these variables as a way to inject environment-specific
# compiler/linker flags. We assume GCC-like compilers and/or MinGW as a
# reasonable default.
EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None)
EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None)
EXTRA_ENV_COMPILE_ARGS = os.environ.get("GRPC_PYTHON_CFLAGS", None)
EXTRA_ENV_LINK_ARGS = os.environ.get("GRPC_PYTHON_LDFLAGS", None)
if EXTRA_ENV_COMPILE_ARGS is None:
EXTRA_ENV_COMPILE_ARGS = ' -std=c++14'
if 'win32' in sys.platform:
EXTRA_ENV_COMPILE_ARGS = " -std=c++14"
if "win32" in sys.platform:
if sys.version_info < (3, 5):
EXTRA_ENV_COMPILE_ARGS += ' -D_hypot=hypot'
EXTRA_ENV_COMPILE_ARGS += " -D_hypot=hypot"
# We use define flags here and don't directly add to DEFINE_MACROS below to
# ensure that the expert user/builder has a way of turning it off (via the
# envvars) without adding yet more GRPC-specific envvars.
# See https://sourceforge.net/p/mingw-w64/bugs/363/
if '32' in platform.architecture()[0]:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s'
if "32" in platform.architecture()[0]:
EXTRA_ENV_COMPILE_ARGS += (
" -D_ftime=_ftime32 -D_timeb=__timeb32"
" -D_ftime_s=_ftime32_s"
)
else:
EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64'
EXTRA_ENV_COMPILE_ARGS += (
" -D_ftime=_ftime64 -D_timeb=__timeb64"
)
else:
# We need to statically link the C++ Runtime, only the C runtime is
# available dynamically
EXTRA_ENV_COMPILE_ARGS += ' /MT'
EXTRA_ENV_COMPILE_ARGS += " /MT"
elif "linux" in sys.platform:
EXTRA_ENV_COMPILE_ARGS += ' -fvisibility=hidden -fno-wrapv -fno-exceptions'
EXTRA_ENV_COMPILE_ARGS += (
" -fvisibility=hidden -fno-wrapv -fno-exceptions"
)
elif "darwin" in sys.platform:
EXTRA_ENV_COMPILE_ARGS += ' -stdlib=libc++ -fvisibility=hidden -fno-wrapv -fno-exceptions -DHAVE_UNISTD_H'
EXTRA_ENV_COMPILE_ARGS += (
" -stdlib=libc++ -fvisibility=hidden -fno-wrapv -fno-exceptions"
" -DHAVE_UNISTD_H"
)
if EXTRA_ENV_LINK_ARGS is None:
EXTRA_ENV_LINK_ARGS = ''
EXTRA_ENV_LINK_ARGS = ""
if "linux" in sys.platform or "darwin" in sys.platform:
EXTRA_ENV_LINK_ARGS += ' -lpthread'
EXTRA_ENV_LINK_ARGS += " -lpthread"
if check_linker_need_libatomic():
EXTRA_ENV_LINK_ARGS += ' -latomic'
EXTRA_ENV_LINK_ARGS += " -latomic"
elif "win32" in sys.platform and sys.version_info < (3, 5):
msvcr = cygwinccompiler.get_msvcr()[0]
EXTRA_ENV_LINK_ARGS += (
' -static-libgcc -static-libstdc++ -mcrtdll={msvcr}'
' -static -lshlwapi'.format(msvcr=msvcr))
" -static-libgcc -static-libstdc++ -mcrtdll={msvcr}"
" -static -lshlwapi".format(msvcr=msvcr)
)
if "linux" in sys.platform:
EXTRA_ENV_LINK_ARGS += ' -static-libgcc'
EXTRA_ENV_LINK_ARGS += " -static-libgcc"
EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)
if BUILD_WITH_STATIC_LIBSTDCXX:
EXTRA_LINK_ARGS.append('-static-libstdc++')
EXTRA_LINK_ARGS.append("-static-libstdc++")
CYTHON_EXTENSION_PACKAGE_NAMES = ()
CYTHON_EXTENSION_MODULE_NAMES = ('grpc._cython.cygrpc',)
CYTHON_EXTENSION_MODULE_NAMES = ("grpc._cython.cygrpc",)
CYTHON_HELPER_C_FILES = ()
CORE_C_FILES = tuple(grpc_core_dependencies.CORE_SOURCE_FILES)
if "win32" in sys.platform:
CORE_C_FILES = filter(lambda x: 'third_party/cares' not in x, CORE_C_FILES)
CORE_C_FILES = filter(lambda x: "third_party/cares" not in x, CORE_C_FILES)
if BUILD_WITH_SYSTEM_OPENSSL:
CORE_C_FILES = filter(lambda x: 'third_party/boringssl' not in x,
CORE_C_FILES)
CORE_C_FILES = filter(lambda x: 'src/boringssl' not in x, CORE_C_FILES)
SSL_INCLUDE = (os.path.join('/usr', 'include', 'openssl'),)
CORE_C_FILES = filter(
lambda x: "third_party/boringssl" not in x, CORE_C_FILES
)
CORE_C_FILES = filter(lambda x: "src/boringssl" not in x, CORE_C_FILES)
SSL_INCLUDE = (os.path.join("/usr", "include", "openssl"),)
if BUILD_WITH_SYSTEM_ZLIB:
CORE_C_FILES = filter(lambda x: 'third_party/zlib' not in x, CORE_C_FILES)
ZLIB_INCLUDE = (os.path.join('/usr', 'include'),)
CORE_C_FILES = filter(lambda x: "third_party/zlib" not in x, CORE_C_FILES)
ZLIB_INCLUDE = (os.path.join("/usr", "include"),)
if BUILD_WITH_SYSTEM_CARES:
CORE_C_FILES = filter(lambda x: 'third_party/cares' not in x, CORE_C_FILES)
CARES_INCLUDE = (os.path.join('/usr', 'include'),)
CORE_C_FILES = filter(lambda x: "third_party/cares" not in x, CORE_C_FILES)
CARES_INCLUDE = (os.path.join("/usr", "include"),)
if BUILD_WITH_SYSTEM_RE2:
CORE_C_FILES = filter(lambda x: 'third_party/re2' not in x, CORE_C_FILES)
RE2_INCLUDE = (os.path.join('/usr', 'include', 're2'),)
CORE_C_FILES = filter(lambda x: "third_party/re2" not in x, CORE_C_FILES)
RE2_INCLUDE = (os.path.join("/usr", "include", "re2"),)
if BUILD_WITH_SYSTEM_ABSL:
CORE_C_FILES = filter(lambda x: 'third_party/abseil-cpp' not in x,
CORE_C_FILES)
ABSL_INCLUDE = (os.path.join('/usr', 'include'),)
EXTENSION_INCLUDE_DIRECTORIES = ((PYTHON_STEM,) + CORE_INCLUDE + ABSL_INCLUDE +
ADDRESS_SORTING_INCLUDE + CARES_INCLUDE +
RE2_INCLUDE + SSL_INCLUDE + UPB_INCLUDE +
UPB_GRPC_GENERATED_INCLUDE +
UPBDEFS_GRPC_GENERATED_INCLUDE +
UTF8_RANGE_INCLUDE + XXHASH_INCLUDE +
ZLIB_INCLUDE)
CORE_C_FILES = filter(
lambda x: "third_party/abseil-cpp" not in x, CORE_C_FILES
)
ABSL_INCLUDE = (os.path.join("/usr", "include"),)
EXTENSION_INCLUDE_DIRECTORIES = (
(PYTHON_STEM,)
+ CORE_INCLUDE
+ ABSL_INCLUDE
+ ADDRESS_SORTING_INCLUDE
+ CARES_INCLUDE
+ RE2_INCLUDE
+ SSL_INCLUDE
+ UPB_INCLUDE
+ UPB_GRPC_GENERATED_INCLUDE
+ UPBDEFS_GRPC_GENERATED_INCLUDE
+ UTF8_RANGE_INCLUDE
+ XXHASH_INCLUDE
+ ZLIB_INCLUDE
)
EXTENSION_LIBRARIES = ()
if "linux" in sys.platform:
EXTENSION_LIBRARIES += ('rt',)
EXTENSION_LIBRARIES += ("rt",)
if not "win32" in sys.platform:
EXTENSION_LIBRARIES += ('m',)
EXTENSION_LIBRARIES += ("m",)
if "win32" in sys.platform:
EXTENSION_LIBRARIES += (
'advapi32',
'bcrypt',
'dbghelp',
'ws2_32',
"advapi32",
"bcrypt",
"dbghelp",
"ws2_32",
)
if BUILD_WITH_SYSTEM_OPENSSL:
EXTENSION_LIBRARIES += (
'ssl',
'crypto',
"ssl",
"crypto",
)
if BUILD_WITH_SYSTEM_ZLIB:
EXTENSION_LIBRARIES += ('z',)
EXTENSION_LIBRARIES += ("z",)
if BUILD_WITH_SYSTEM_CARES:
EXTENSION_LIBRARIES += ('cares',)
EXTENSION_LIBRARIES += ("cares",)
if BUILD_WITH_SYSTEM_RE2:
EXTENSION_LIBRARIES += ('re2',)
EXTENSION_LIBRARIES += ("re2",)
if BUILD_WITH_SYSTEM_ABSL:
EXTENSION_LIBRARIES += tuple(
lib.stem[3:] for lib in pathlib.Path('/usr').glob('lib*/libabsl_*.so'))
lib.stem[3:] for lib in pathlib.Path("/usr").glob("lib*/libabsl_*.so")
)
DEFINE_MACROS = (('_WIN32_WINNT', 0x600),)
DEFINE_MACROS = (("_WIN32_WINNT", 0x600),)
asm_files = []
@ -353,69 +394,76 @@ asm_files = []
# the binary.
def _quote_build_define(argument):
if "win32" in sys.platform:
return '"\\\"{}\\\""'.format(argument)
return '"\\"{}\\""'.format(argument)
return '"{}"'.format(argument)
DEFINE_MACROS += (
("GRPC_XDS_USER_AGENT_NAME_SUFFIX", _quote_build_define("Python")),
("GRPC_XDS_USER_AGENT_VERSION_SUFFIX",
_quote_build_define(_metadata.__version__)),
(
"GRPC_XDS_USER_AGENT_VERSION_SUFFIX",
_quote_build_define(_metadata.__version__),
),
)
asm_key = ''
asm_key = ""
if BUILD_WITH_BORING_SSL_ASM and not BUILD_WITH_SYSTEM_OPENSSL:
boringssl_asm_platform = BUILD_OVERRIDE_BORING_SSL_ASM_PLATFORM if BUILD_OVERRIDE_BORING_SSL_ASM_PLATFORM else util.get_platform(
boringssl_asm_platform = (
BUILD_OVERRIDE_BORING_SSL_ASM_PLATFORM
if BUILD_OVERRIDE_BORING_SSL_ASM_PLATFORM
else util.get_platform()
)
LINUX_X86_64 = 'linux-x86_64'
LINUX_ARM = 'linux-arm'
LINUX_AARCH64 = 'linux-aarch64'
LINUX_X86_64 = "linux-x86_64"
LINUX_ARM = "linux-arm"
LINUX_AARCH64 = "linux-aarch64"
if LINUX_X86_64 == boringssl_asm_platform:
asm_key = 'crypto_linux_x86_64'
asm_key = "crypto_linux_x86_64"
elif LINUX_ARM == boringssl_asm_platform:
asm_key = 'crypto_linux_arm'
asm_key = "crypto_linux_arm"
elif LINUX_AARCH64 == boringssl_asm_platform:
asm_key = 'crypto_linux_aarch64'
asm_key = "crypto_linux_aarch64"
elif "mac" in boringssl_asm_platform and "x86_64" in boringssl_asm_platform:
asm_key = 'crypto_apple_x86_64'
asm_key = "crypto_apple_x86_64"
elif "mac" in boringssl_asm_platform and "arm64" in boringssl_asm_platform:
asm_key = 'crypto_apple_aarch64'
asm_key = "crypto_apple_aarch64"
else:
print("ASM Builds for BoringSSL currently not supported on:",
boringssl_asm_platform)
print(
"ASM Builds for BoringSSL currently not supported on:",
boringssl_asm_platform,
)
if asm_key:
asm_files = grpc_core_dependencies.ASM_SOURCE_FILES[asm_key]
else:
DEFINE_MACROS += (('OPENSSL_NO_ASM', 1),)
DEFINE_MACROS += (("OPENSSL_NO_ASM", 1),)
if not DISABLE_LIBC_COMPATIBILITY:
DEFINE_MACROS += (('GPR_BACKWARDS_COMPATIBILITY_MODE', 1),)
DEFINE_MACROS += (("GPR_BACKWARDS_COMPATIBILITY_MODE", 1),)
if "win32" in sys.platform:
# TODO(zyc): Re-enable c-ares on x64 and x86 windows after fixing the
# ares_library_init compilation issue
DEFINE_MACROS += (
('WIN32_LEAN_AND_MEAN', 1),
('CARES_STATICLIB', 1),
('GRPC_ARES', 0),
('NTDDI_VERSION', 0x06000000),
('NOMINMAX', 1),
("WIN32_LEAN_AND_MEAN", 1),
("CARES_STATICLIB", 1),
("GRPC_ARES", 0),
("NTDDI_VERSION", 0x06000000),
("NOMINMAX", 1),
)
if '64bit' in platform.architecture()[0]:
DEFINE_MACROS += (('MS_WIN64', 1),)
if "64bit" in platform.architecture()[0]:
DEFINE_MACROS += (("MS_WIN64", 1),)
elif sys.version_info >= (3, 5):
# For some reason, this is needed to get access to inet_pton/inet_ntop
# on msvc, but only for 32 bits
DEFINE_MACROS += (('NTDDI_VERSION', 0x06000000),)
DEFINE_MACROS += (("NTDDI_VERSION", 0x06000000),)
else:
DEFINE_MACROS += (
('HAVE_CONFIG_H', 1),
('GRPC_ENABLE_FORK_SUPPORT', 1),
("HAVE_CONFIG_H", 1),
("GRPC_ENABLE_FORK_SUPPORT", 1),
)
# Fix for multiprocessing support on Apple devices.
# TODO(vigneshbabu): Remove this once the poll poller gets fork support.
DEFINE_MACROS += (('GRPC_DO_NOT_INSTANTIATE_POSIX_POLLER', 1),)
DEFINE_MACROS += (("GRPC_DO_NOT_INSTANTIATE_POSIX_POLLER", 1),)
# Fix for Cython build issue in aarch64.
# It's required to define this macro before include <inttypes.h>.
@ -424,44 +472,48 @@ DEFINE_MACROS += (('GRPC_DO_NOT_INSTANTIATE_POSIX_POLLER', 1),)
# but we're still having issue in aarch64, so we manually define the macro here.
# TODO(xuanwn): Figure out what's going on in the aarch64 build so we can support
# gcc + Bazel.
DEFINE_MACROS += (('__STDC_FORMAT_MACROS', None),)
DEFINE_MACROS += (("__STDC_FORMAT_MACROS", None),)
LDFLAGS = tuple(EXTRA_LINK_ARGS)
CFLAGS = tuple(EXTRA_COMPILE_ARGS)
if "linux" in sys.platform or "darwin" in sys.platform:
pymodinit_type = 'PyObject*' if PY3 else 'void'
pymodinit_type = "PyObject*" if PY3 else "void"
pymodinit = 'extern "C" __attribute__((visibility ("default"))) {}'.format(
pymodinit_type)
DEFINE_MACROS += (('PyMODINIT_FUNC', pymodinit),)
DEFINE_MACROS += (('GRPC_POSIX_FORK_ALLOW_PTHREAD_ATFORK', 1),)
pymodinit_type
)
DEFINE_MACROS += (("PyMODINIT_FUNC", pymodinit),)
DEFINE_MACROS += (("GRPC_POSIX_FORK_ALLOW_PTHREAD_ATFORK", 1),)
# By default, Python3 distutils enforces compatibility of
# c plugins (.so files) with the OSX version Python was built with.
# We need OSX 10.10, the oldest which supports C++ thread_local.
# Python 3.9: Mac OS Big Sur sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') returns int (11)
if 'darwin' in sys.platform:
mac_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
if "darwin" in sys.platform:
mac_target = sysconfig.get_config_var("MACOSX_DEPLOYMENT_TARGET")
if mac_target:
mac_target = pkg_resources.parse_version(str(mac_target))
if mac_target < pkg_resources.parse_version('10.10.0'):
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.10'
os.environ['_PYTHON_HOST_PLATFORM'] = re.sub(
r'macosx-[0-9]+\.[0-9]+-(.+)', r'macosx-10.10-\1',
util.get_platform())
if mac_target < pkg_resources.parse_version("10.10.0"):
os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.10"
os.environ["_PYTHON_HOST_PLATFORM"] = re.sub(
r"macosx-[0-9]+\.[0-9]+-(.+)",
r"macosx-10.10-\1",
util.get_platform(),
)
def cython_extensions_and_necessity():
cython_module_files = [
os.path.join(PYTHON_STEM,
name.replace('.', '/') + '.pyx')
os.path.join(PYTHON_STEM, name.replace(".", "/") + ".pyx")
for name in CYTHON_EXTENSION_MODULE_NAMES
]
config = os.environ.get('CONFIG', 'opt')
prefix = 'libs/' + config + '/'
config = os.environ.get("CONFIG", "opt")
prefix = "libs/" + config + "/"
if USE_PREBUILT_GRPC_CORE:
extra_objects = [
prefix + 'libares.a', prefix + 'libboringssl.a',
prefix + 'libgpr.a', prefix + 'libgrpc.a'
prefix + "libares.a",
prefix + "libboringssl.a",
prefix + "libgpr.a",
prefix + "libgrpc.a",
]
core_c_files = []
else:
@ -470,42 +522,56 @@ def cython_extensions_and_necessity():
extensions = [
_extension.Extension(
name=module_name,
sources=([module_file] + list(CYTHON_HELPER_C_FILES) +
core_c_files + asm_files),
sources=(
[module_file]
+ list(CYTHON_HELPER_C_FILES)
+ core_c_files
+ asm_files
),
include_dirs=list(EXTENSION_INCLUDE_DIRECTORIES),
libraries=list(EXTENSION_LIBRARIES),
define_macros=list(DEFINE_MACROS),
extra_objects=extra_objects,
extra_compile_args=list(CFLAGS),
extra_link_args=list(LDFLAGS),
) for (module_name, module_file
) in zip(list(CYTHON_EXTENSION_MODULE_NAMES), cython_module_files)
)
for (module_name, module_file) in zip(
list(CYTHON_EXTENSION_MODULE_NAMES), cython_module_files
)
]
need_cython = BUILD_WITH_CYTHON
if not BUILD_WITH_CYTHON:
need_cython = need_cython or not commands.check_and_update_cythonization(
extensions)
need_cython = (
need_cython
or not commands.check_and_update_cythonization(extensions)
)
# TODO: the strategy for conditional compiling and exposing the aio Cython
# dependencies will be revisited by https://github.com/grpc/grpc/issues/19728
return commands.try_cythonize(extensions,
linetracing=ENABLE_CYTHON_TRACING,
mandatory=BUILD_WITH_CYTHON), need_cython
return (
commands.try_cythonize(
extensions,
linetracing=ENABLE_CYTHON_TRACING,
mandatory=BUILD_WITH_CYTHON,
),
need_cython,
)
CYTHON_EXTENSION_MODULES, need_cython = cython_extensions_and_necessity()
PACKAGE_DIRECTORIES = {
'': PYTHON_STEM,
"": PYTHON_STEM,
}
INSTALL_REQUIRES = ()
EXTRAS_REQUIRES = {
'protobuf': 'grpcio-tools>={version}'.format(version=grpc_version.VERSION),
"protobuf": "grpcio-tools>={version}".format(version=grpc_version.VERSION),
}
SETUP_REQUIRES = INSTALL_REQUIRES + (
'Sphinx~=1.8.1',) if ENABLE_DOCUMENTATION_BUILD else ()
SETUP_REQUIRES = (
INSTALL_REQUIRES + ("Sphinx~=1.8.1",) if ENABLE_DOCUMENTATION_BUILD else ()
)
try:
import Cython
@ -514,62 +580,65 @@ except ImportError:
sys.stderr.write(
"You requested a Cython build via GRPC_PYTHON_BUILD_WITH_CYTHON, "
"but do not have Cython installed. We won't stop you from using "
"other commands, but the extension files will fail to build.\n")
"other commands, but the extension files will fail to build.\n"
)
elif need_cython:
sys.stderr.write(
'We could not find Cython. Setup may take 10-20 minutes.\n')
SETUP_REQUIRES += ('cython>=0.23',)
"We could not find Cython. Setup may take 10-20 minutes.\n"
)
SETUP_REQUIRES += ("cython>=0.23",)
COMMAND_CLASS = {
'doc': commands.SphinxDocumentation,
'build_project_metadata': commands.BuildProjectMetadata,
'build_py': commands.BuildPy,
'build_ext': commands.BuildExt,
'gather': commands.Gather,
'clean': commands.Clean,
"doc": commands.SphinxDocumentation,
"build_project_metadata": commands.BuildProjectMetadata,
"build_py": commands.BuildPy,
"build_ext": commands.BuildExt,
"gather": commands.Gather,
"clean": commands.Clean,
}
# Ensure that package data is copied over before any commands have been run:
credentials_dir = os.path.join(PYTHON_STEM, 'grpc', '_cython', '_credentials')
credentials_dir = os.path.join(PYTHON_STEM, "grpc", "_cython", "_credentials")
try:
os.mkdir(credentials_dir)
except OSError:
pass
shutil.copyfile(os.path.join('etc', 'roots.pem'),
os.path.join(credentials_dir, 'roots.pem'))
shutil.copyfile(
os.path.join("etc", "roots.pem"), os.path.join(credentials_dir, "roots.pem")
)
PACKAGE_DATA = {
# Binaries that may or may not be present in the final installation, but are
# mentioned here for completeness.
'grpc._cython': [
'_credentials/roots.pem',
'_windows/grpc_c.32.python',
'_windows/grpc_c.64.python',
"grpc._cython": [
"_credentials/roots.pem",
"_windows/grpc_c.32.python",
"_windows/grpc_c.64.python",
],
}
PACKAGES = setuptools.find_packages(PYTHON_STEM)
setuptools.setup(
name='grpcio',
name="grpcio",
version=grpc_version.VERSION,
description='HTTP/2-based RPC framework',
author='The gRPC Authors',
author_email='grpc-io@googlegroups.com',
url='https://grpc.io',
description="HTTP/2-based RPC framework",
author="The gRPC Authors",
author_email="grpc-io@googlegroups.com",
url="https://grpc.io",
project_urls={
"Source Code": "https://github.com/grpc/grpc",
"Bug Tracker": "https://github.com/grpc/grpc/issues",
'Documentation': 'https://grpc.github.io/grpc/python',
"Documentation": "https://grpc.github.io/grpc/python",
},
license=LICENSE,
classifiers=CLASSIFIERS,
long_description_content_type='text/x-rst',
long_description_content_type="text/x-rst",
long_description=open(README).read(),
ext_modules=CYTHON_EXTENSION_MODULES,
packages=list(PACKAGES),
package_dir=PACKAGE_DIRECTORIES,
package_data=PACKAGE_DATA,
python_requires='>=3.7',
python_requires=">=3.7",
install_requires=INSTALL_REQUIRES,
extras_require=EXTRAS_REQUIRES,
setup_requires=SETUP_REQUIRES,

@ -17,14 +17,15 @@
import os
import yaml
BUILDS_YAML_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'preprocessed_builds.yaml')
BUILDS_YAML_PATH = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "preprocessed_builds.yaml"
)
with open(BUILDS_YAML_PATH) as f:
builds = yaml.safe_load(f)
for build in builds:
build['build'] = 'private'
build['build_system'] = []
build['language'] = 'c'
build['secure'] = False
print(yaml.dump({'libs': builds}))
build["build"] = "private"
build["build_system"] = []
build["language"] = "c"
build["secure"] = False
print(yaml.dump({"libs": builds}))

@ -29,185 +29,201 @@ ABSEIL_CMAKE_RULE_END = re.compile("^\)", re.MULTILINE)
# Rule object representing the rule of Bazel BUILD.
Rule = collections.namedtuple(
"Rule", "type name package srcs hdrs textual_hdrs deps visibility testonly")
"Rule", "type name package srcs hdrs textual_hdrs deps visibility testonly"
)
def get_elem_value(elem, name):
"""Returns the value of XML element with the given name."""
for child in elem:
if child.attrib.get("name") == name:
if child.tag == "string":
return child.attrib.get("value")
elif child.tag == "boolean":
return child.attrib.get("value") == "true"
elif child.tag == "list":
return [nested_child.attrib.get("value") for nested_child in child]
else:
raise "Cannot recognize tag: " + child.tag
return None
"""Returns the value of XML element with the given name."""
for child in elem:
if child.attrib.get("name") == name:
if child.tag == "string":
return child.attrib.get("value")
elif child.tag == "boolean":
return child.attrib.get("value") == "true"
elif child.tag == "list":
return [
nested_child.attrib.get("value") for nested_child in child
]
else:
raise "Cannot recognize tag: " + child.tag
return None
def normalize_paths(paths):
"""Returns the list of normalized path."""
# e.g. ["//absl/strings:dir/header.h"] -> ["absl/strings/dir/header.h"]
return [path.lstrip("/").replace(":", "/") for path in paths]
"""Returns the list of normalized path."""
# e.g. ["//absl/strings:dir/header.h"] -> ["absl/strings/dir/header.h"]
return [path.lstrip("/").replace(":", "/") for path in paths]
def parse_bazel_rule(elem, package):
"""Returns a rule from bazel XML rule."""
return Rule(
type=elem.attrib["class"],
name=get_elem_value(elem, "name"),
package=package,
srcs=normalize_paths(get_elem_value(elem, "srcs") or []),
hdrs=normalize_paths(get_elem_value(elem, "hdrs") or []),
textual_hdrs=normalize_paths(get_elem_value(elem, "textual_hdrs") or []),
deps=get_elem_value(elem, "deps") or [],
visibility=get_elem_value(elem, "visibility") or [],
testonly=get_elem_value(elem, "testonly") or False)
"""Returns a rule from bazel XML rule."""
return Rule(
type=elem.attrib["class"],
name=get_elem_value(elem, "name"),
package=package,
srcs=normalize_paths(get_elem_value(elem, "srcs") or []),
hdrs=normalize_paths(get_elem_value(elem, "hdrs") or []),
textual_hdrs=normalize_paths(
get_elem_value(elem, "textual_hdrs") or []
),
deps=get_elem_value(elem, "deps") or [],
visibility=get_elem_value(elem, "visibility") or [],
testonly=get_elem_value(elem, "testonly") or False,
)
def read_bazel_build(package):
"""Runs bazel query on given package file and returns all cc rules."""
# Use a wrapper version of bazel in gRPC not to use system-wide bazel
# to avoid bazel conflict when running on Kokoro.
BAZEL_BIN = "../../tools/bazel"
result = subprocess.check_output(
[BAZEL_BIN, "query", package + ":all", "--output", "xml"])
root = ET.fromstring(result)
return [
parse_bazel_rule(elem, package)
for elem in root
if elem.tag == "rule" and elem.attrib["class"].startswith("cc_")
]
"""Runs bazel query on given package file and returns all cc rules."""
# Use a wrapper version of bazel in gRPC not to use system-wide bazel
# to avoid bazel conflict when running on Kokoro.
BAZEL_BIN = "../../tools/bazel"
result = subprocess.check_output(
[BAZEL_BIN, "query", package + ":all", "--output", "xml"]
)
root = ET.fromstring(result)
return [
parse_bazel_rule(elem, package)
for elem in root
if elem.tag == "rule" and elem.attrib["class"].startswith("cc_")
]
def collect_bazel_rules(root_path):
"""Collects and returns all bazel rules from root path recursively."""
rules = []
for cur, _, _ in os.walk(root_path):
build_path = os.path.join(cur, "BUILD.bazel")
if os.path.exists(build_path):
rules.extend(read_bazel_build("//" + cur))
return rules
"""Collects and returns all bazel rules from root path recursively."""
rules = []
for cur, _, _ in os.walk(root_path):
build_path = os.path.join(cur, "BUILD.bazel")
if os.path.exists(build_path):
rules.extend(read_bazel_build("//" + cur))
return rules
def parse_cmake_rule(rule, package):
"""Returns a rule from absl cmake rule.
Reference: https://github.com/abseil/abseil-cpp/blob/master/CMake/AbseilHelpers.cmake
"""
kv = {}
bucket = None
lines = rule.splitlines()
for line in lines[1:-1]:
if CAPITAL_WORD.match(line.strip()):
bucket = kv.setdefault(line.strip(), [])
else:
if bucket is not None:
bucket.append(line.strip())
else:
raise ValueError("Illegal syntax: {}".format(rule))
return Rule(
type=lines[0].rstrip("("),
name="absl::" + kv["NAME"][0],
package=package,
srcs=[package + "/" + f.strip('"') for f in kv.get("SRCS", [])],
hdrs=[package + "/" + f.strip('"') for f in kv.get("HDRS", [])],
textual_hdrs=[],
deps=kv.get("DEPS", []),
visibility="PUBLIC" in kv,
testonly="TESTONLY" in kv,
)
"""Returns a rule from absl cmake rule.
Reference: https://github.com/abseil/abseil-cpp/blob/master/CMake/AbseilHelpers.cmake
"""
kv = {}
bucket = None
lines = rule.splitlines()
for line in lines[1:-1]:
if CAPITAL_WORD.match(line.strip()):
bucket = kv.setdefault(line.strip(), [])
else:
if bucket is not None:
bucket.append(line.strip())
else:
raise ValueError("Illegal syntax: {}".format(rule))
return Rule(
type=lines[0].rstrip("("),
name="absl::" + kv["NAME"][0],
package=package,
srcs=[package + "/" + f.strip('"') for f in kv.get("SRCS", [])],
hdrs=[package + "/" + f.strip('"') for f in kv.get("HDRS", [])],
textual_hdrs=[],
deps=kv.get("DEPS", []),
visibility="PUBLIC" in kv,
testonly="TESTONLY" in kv,
)
def read_cmake_build(build_path, package):
"""Parses given CMakeLists.txt file and returns all cc rules."""
rules = []
with open(build_path, "r") as f:
src = f.read()
for begin_mo in ABSEIL_CMAKE_RULE_BEGIN.finditer(src):
end_mo = ABSEIL_CMAKE_RULE_END.search(src[begin_mo.start(0):])
expr = src[begin_mo.start(0):begin_mo.start(0) + end_mo.start(0) + 1]
rules.append(parse_cmake_rule(expr, package))
return rules
"""Parses given CMakeLists.txt file and returns all cc rules."""
rules = []
with open(build_path, "r") as f:
src = f.read()
for begin_mo in ABSEIL_CMAKE_RULE_BEGIN.finditer(src):
end_mo = ABSEIL_CMAKE_RULE_END.search(src[begin_mo.start(0) :])
expr = src[
begin_mo.start(0) : begin_mo.start(0) + end_mo.start(0) + 1
]
rules.append(parse_cmake_rule(expr, package))
return rules
def collect_cmake_rules(root_path):
"""Collects and returns all cmake rules from root path recursively."""
rules = []
for cur, _, _ in os.walk(root_path):
build_path = os.path.join(cur, "CMakeLists.txt")
if os.path.exists(build_path):
rules.extend(read_cmake_build(build_path, cur))
return rules
"""Collects and returns all cmake rules from root path recursively."""
rules = []
for cur, _, _ in os.walk(root_path):
build_path = os.path.join(cur, "CMakeLists.txt")
if os.path.exists(build_path):
rules.extend(read_cmake_build(build_path, cur))
return rules
def pairing_bazel_and_cmake_rules(bazel_rules, cmake_rules):
"""Returns a pair map between bazel rules and cmake rules based on
the similarity of the file list in the rule. This is because
cmake build and bazel build of abseil are not identical.
"""
pair_map = {}
for rule in bazel_rules:
best_crule, best_similarity = None, 0
for crule in cmake_rules:
similarity = len(
set(rule.srcs + rule.hdrs + rule.textual_hdrs).intersection(
set(crule.srcs + crule.hdrs + crule.textual_hdrs)))
if similarity > best_similarity:
best_crule, best_similarity = crule, similarity
if best_crule:
pair_map[(rule.package, rule.name)] = best_crule.name
return pair_map
"""Returns a pair map between bazel rules and cmake rules based on
the similarity of the file list in the rule. This is because
cmake build and bazel build of abseil are not identical.
"""
pair_map = {}
for rule in bazel_rules:
best_crule, best_similarity = None, 0
for crule in cmake_rules:
similarity = len(
set(rule.srcs + rule.hdrs + rule.textual_hdrs).intersection(
set(crule.srcs + crule.hdrs + crule.textual_hdrs)
)
)
if similarity > best_similarity:
best_crule, best_similarity = crule, similarity
if best_crule:
pair_map[(rule.package, rule.name)] = best_crule.name
return pair_map
def resolve_hdrs(files):
return [ABSEIL_PATH + "/" + f for f in files if f.endswith((".h", ".inc"))]
return [ABSEIL_PATH + "/" + f for f in files if f.endswith((".h", ".inc"))]
def resolve_srcs(files):
return [ABSEIL_PATH + "/" + f for f in files if f.endswith(".cc")]
return [ABSEIL_PATH + "/" + f for f in files if f.endswith(".cc")]
def resolve_deps(targets):
return [(t[2:] if t.startswith("//") else t) for t in targets]
return [(t[2:] if t.startswith("//") else t) for t in targets]
def generate_builds(root_path):
"""Generates builds from all BUILD files under absl directory."""
bazel_rules = list(
filter(lambda r: r.type == "cc_library" and not r.testonly,
collect_bazel_rules(root_path)))
cmake_rules = list(
filter(lambda r: r.type == "absl_cc_library" and not r.testonly,
collect_cmake_rules(root_path)))
pair_map = pairing_bazel_and_cmake_rules(bazel_rules, cmake_rules)
builds = []
for rule in sorted(bazel_rules, key=lambda r: r.package[2:] + ":" + r.name):
p = {
"name":
rule.package[2:] + ":" + rule.name,
"cmake_target":
pair_map.get((rule.package, rule.name)) or "",
"headers":
sorted(resolve_hdrs(rule.srcs + rule.hdrs + rule.textual_hdrs)),
"src":
sorted(resolve_srcs(rule.srcs + rule.hdrs + rule.textual_hdrs)),
"deps":
sorted(resolve_deps(rule.deps)),
}
builds.append(p)
return builds
"""Generates builds from all BUILD files under absl directory."""
bazel_rules = list(
filter(
lambda r: r.type == "cc_library" and not r.testonly,
collect_bazel_rules(root_path),
)
)
cmake_rules = list(
filter(
lambda r: r.type == "absl_cc_library" and not r.testonly,
collect_cmake_rules(root_path),
)
)
pair_map = pairing_bazel_and_cmake_rules(bazel_rules, cmake_rules)
builds = []
for rule in sorted(bazel_rules, key=lambda r: r.package[2:] + ":" + r.name):
p = {
"name": rule.package[2:] + ":" + rule.name,
"cmake_target": pair_map.get((rule.package, rule.name)) or "",
"headers": sorted(
resolve_hdrs(rule.srcs + rule.hdrs + rule.textual_hdrs)
),
"src": sorted(
resolve_srcs(rule.srcs + rule.hdrs + rule.textual_hdrs)
),
"deps": sorted(resolve_deps(rule.deps)),
}
builds.append(p)
return builds
def main():
previous_dir = os.getcwd()
os.chdir(ABSEIL_PATH)
builds = generate_builds("absl")
os.chdir(previous_dir)
with open(OUTPUT_PATH, 'w') as outfile:
outfile.write(yaml.dump(builds, indent=2))
previous_dir = os.getcwd()
os.chdir(ABSEIL_PATH)
builds = generate_builds("absl")
os.chdir(previous_dir)
with open(OUTPUT_PATH, "w") as outfile:
outfile.write(yaml.dump(builds, indent=2))
if __name__ == "__main__":
main()
main()

@ -19,27 +19,23 @@ import sys
import glob
import yaml
os.chdir(os.path.dirname(sys.argv[0]) + '/../..')
os.chdir(os.path.dirname(sys.argv[0]) + "/../..")
out = {}
out['libs'] = [{
'name':
'benchmark',
'build':
'private',
'language':
'c++',
'secure':
False,
'defaults':
'benchmark',
'src':
sorted(glob.glob('third_party/benchmark/src/*.cc')),
'headers':
sorted(
glob.glob('third_party/benchmark/src/*.h') +
glob.glob('third_party/benchmark/include/benchmark/*.h')),
}]
out["libs"] = [
{
"name": "benchmark",
"build": "private",
"language": "c++",
"secure": False,
"defaults": "benchmark",
"src": sorted(glob.glob("third_party/benchmark/src/*.cc")),
"headers": sorted(
glob.glob("third_party/benchmark/src/*.h")
+ glob.glob("third_party/benchmark/include/benchmark/*.h")
),
}
]
print(yaml.dump(out))

@ -20,112 +20,114 @@ import yaml
run_dir = os.path.dirname(sys.argv[0])
sources_path = os.path.abspath(
os.path.join(run_dir,
'../../third_party/boringssl-with-bazel/sources.json'))
os.path.join(run_dir, "../../third_party/boringssl-with-bazel/sources.json")
)
try:
with open(sources_path, 'r') as s:
with open(sources_path, "r") as s:
sources = json.load(s)
except IOError:
sources_path = os.path.abspath(
os.path.join(run_dir,
'../../../../third_party/openssl/boringssl/sources.json'))
with open(sources_path, 'r') as s:
os.path.join(
run_dir, "../../../../third_party/openssl/boringssl/sources.json"
)
)
with open(sources_path, "r") as s:
sources = json.load(s)
def map_dir(filename):
return 'third_party/boringssl-with-bazel/' + filename
return "third_party/boringssl-with-bazel/" + filename
class Grpc(object):
"""Adapter for boring-SSL json sources files. """
"""Adapter for boring-SSL json sources files."""
def __init__(self, sources):
self.yaml = None
self.WriteFiles(sources)
def WriteFiles(self, files):
test_binaries = ['ssl_test', 'crypto_test']
test_binaries = ["ssl_test", "crypto_test"]
asm_outputs = {
key: value for key, value in files.items() if any(
f.endswith(".S") or f.endswith(".asm") for f in value)
key: value
for key, value in files.items()
if any(f.endswith(".S") or f.endswith(".asm") for f in value)
}
self.yaml = {
'#':
'generated with src/boringssl/gen_build_yaml.py',
'raw_boringssl_build_output_for_debugging': {
'files': files,
"#": "generated with src/boringssl/gen_build_yaml.py",
"raw_boringssl_build_output_for_debugging": {
"files": files,
},
'libs': [
"libs": [
{
'name':
'boringssl',
'build':
'private',
'language':
'c',
'secure':
False,
'src':
sorted(
map_dir(f) for f in files['ssl'] + files['crypto']),
'asm_src': {
k: [map_dir(f) for f in value
] for k, value in asm_outputs.items()
"name": "boringssl",
"build": "private",
"language": "c",
"secure": False,
"src": sorted(
map_dir(f) for f in files["ssl"] + files["crypto"]
),
"asm_src": {
k: [map_dir(f) for f in value]
for k, value in asm_outputs.items()
},
'headers':
sorted(
map_dir(f)
# We want to include files['fips_fragments'], but not build them as objects.
# See https://boringssl-review.googlesource.com/c/boringssl/+/16946
for f in files['ssl_headers'] +
files['ssl_internal_headers'] +
files['crypto_headers'] +
files['crypto_internal_headers'] +
files['fips_fragments']),
'boringssl':
True,
'defaults':
'boringssl',
"headers": sorted(
map_dir(f)
# We want to include files['fips_fragments'], but not build them as objects.
# See https://boringssl-review.googlesource.com/c/boringssl/+/16946
for f in files["ssl_headers"]
+ files["ssl_internal_headers"]
+ files["crypto_headers"]
+ files["crypto_internal_headers"]
+ files["fips_fragments"]
),
"boringssl": True,
"defaults": "boringssl",
},
{
'name': 'boringssl_test_util',
'build': 'private',
'language': 'c++',
'secure': False,
'boringssl': True,
'defaults': 'boringssl',
'src': [map_dir(f) for f in sorted(files['test_support'])],
"name": "boringssl_test_util",
"build": "private",
"language": "c++",
"secure": False,
"boringssl": True,
"defaults": "boringssl",
"src": [map_dir(f) for f in sorted(files["test_support"])],
},
],
"targets": [
{
"name": "boringssl_%s" % test,
"build": "test",
"run": False,
"secure": False,
"language": "c++",
"src": sorted(map_dir(f) for f in files[test]),
"vs_proj_dir": "test/boringssl",
"boringssl": True,
"defaults": "boringssl",
"deps": [
"boringssl_test_util",
"boringssl",
],
}
for test in test_binaries
],
"tests": [
{
"name": "boringssl_%s" % test,
"args": [],
"exclude_configs": ["asan", "ubsan"],
"ci_platforms": ["linux", "mac", "posix", "windows"],
"platforms": ["linux", "mac", "posix", "windows"],
"flaky": False,
"gtest": True,
"language": "c++",
"boringssl": True,
"defaults": "boringssl",
"cpu_cost": 1.0,
}
for test in test_binaries
],
'targets': [{
'name': 'boringssl_%s' % test,
'build': 'test',
'run': False,
'secure': False,
'language': 'c++',
'src': sorted(map_dir(f) for f in files[test]),
'vs_proj_dir': 'test/boringssl',
'boringssl': True,
'defaults': 'boringssl',
'deps': [
'boringssl_test_util',
'boringssl',
]
} for test in test_binaries],
'tests': [{
'name': 'boringssl_%s' % test,
'args': [],
'exclude_configs': ['asan', 'ubsan'],
'ci_platforms': ['linux', 'mac', 'posix', 'windows'],
'platforms': ['linux', 'mac', 'posix', 'windows'],
'flaky': False,
'gtest': True,
'language': 'c++',
'boringssl': True,
'defaults': 'boringssl',
'cpu_cost': 1.0
} for test in test_binaries]
}

@ -19,7 +19,7 @@ import os
import sys
import yaml
os.chdir(os.path.dirname(sys.argv[0]) + '/../..')
os.chdir(os.path.dirname(sys.argv[0]) + "/../..")
out = {}
@ -30,127 +30,124 @@ try:
subprocess.call("third_party/cares/cares/configure", shell=True)
def config_platform(x):
if 'darwin' in sys.platform:
return 'src/cares/cares/config_darwin/ares_config.h'
if 'freebsd' in sys.platform:
return 'src/cares/cares/config_freebsd/ares_config.h'
if 'linux' in sys.platform:
return 'src/cares/cares/config_linux/ares_config.h'
if 'openbsd' in sys.platform:
return 'src/cares/cares/config_openbsd/ares_config.h'
if not os.path.isfile('third_party/cares/cares/ares_config.h'):
if "darwin" in sys.platform:
return "src/cares/cares/config_darwin/ares_config.h"
if "freebsd" in sys.platform:
return "src/cares/cares/config_freebsd/ares_config.h"
if "linux" in sys.platform:
return "src/cares/cares/config_linux/ares_config.h"
if "openbsd" in sys.platform:
return "src/cares/cares/config_openbsd/ares_config.h"
if not os.path.isfile("third_party/cares/cares/ares_config.h"):
gen_ares_build(x)
return 'third_party/cares/cares/ares_config.h'
return "third_party/cares/cares/ares_config.h"
def ares_build(x):
if os.path.isfile('src/cares/cares/ares_build.h'):
return 'src/cares/cares/ares_build.h'
if not os.path.isfile('third_party/cares/cares/include/ares_build.h'):
if os.path.isfile("src/cares/cares/ares_build.h"):
return "src/cares/cares/ares_build.h"
if not os.path.isfile("third_party/cares/cares/include/ares_build.h"):
gen_ares_build(x)
return 'third_party/cares/cares/include/ares_build.h'
return "third_party/cares/cares/include/ares_build.h"
out['libs'] = [{
'name':
'ares',
'defaults':
'ares',
'build':
'private',
'language':
'c',
'secure':
False,
'src': [
"third_party/cares/cares/src/lib/ares_init.c",
"third_party/cares/cares/src/lib/ares_expand_string.c",
"third_party/cares/cares/src/lib/ares_strcasecmp.c",
"third_party/cares/cares/src/lib/ares_destroy.c",
"third_party/cares/cares/src/lib/ares_free_string.c",
"third_party/cares/cares/src/lib/ares__timeval.c",
"third_party/cares/cares/src/lib/ares_library_init.c",
"third_party/cares/cares/src/lib/ares_getsock.c",
"third_party/cares/cares/src/lib/ares_process.c",
"third_party/cares/cares/src/lib/ares_create_query.c",
"third_party/cares/cares/src/lib/ares_fds.c",
"third_party/cares/cares/src/lib/ares_gethostbyname.c",
"third_party/cares/cares/src/lib/ares_mkquery.c",
"third_party/cares/cares/src/lib/ares_freeaddrinfo.c",
"third_party/cares/cares/src/lib/ares_strdup.c",
"third_party/cares/cares/src/lib/ares_timeout.c",
"third_party/cares/cares/src/lib/ares_getnameinfo.c",
"third_party/cares/cares/src/lib/ares_parse_soa_reply.c",
"third_party/cares/cares/src/lib/ares_parse_naptr_reply.c",
"third_party/cares/cares/src/lib/ares_parse_a_reply.c",
"third_party/cares/cares/src/lib/ares_send.c",
"third_party/cares/cares/src/lib/ares_nowarn.c",
"third_party/cares/cares/src/lib/ares__sortaddrinfo.c",
"third_party/cares/cares/src/lib/ares_android.c",
"third_party/cares/cares/src/lib/ares_strerror.c",
"third_party/cares/cares/src/lib/ares_parse_caa_reply.c",
"third_party/cares/cares/src/lib/ares__close_sockets.c",
"third_party/cares/cares/src/lib/ares_llist.c",
"third_party/cares/cares/src/lib/ares_parse_aaaa_reply.c",
"third_party/cares/cares/src/lib/ares_getaddrinfo.c",
"third_party/cares/cares/src/lib/ares_parse_ns_reply.c",
"third_party/cares/cares/src/lib/windows_port.c",
"third_party/cares/cares/src/lib/bitncmp.c",
"third_party/cares/cares/src/lib/ares_strsplit.c",
"third_party/cares/cares/src/lib/ares_data.c",
"third_party/cares/cares/src/lib/ares_free_hostent.c",
"third_party/cares/cares/src/lib/ares_platform.c",
"third_party/cares/cares/src/lib/ares_parse_txt_reply.c",
"third_party/cares/cares/src/lib/ares__parse_into_addrinfo.c",
"third_party/cares/cares/src/lib/ares_gethostbyaddr.c",
"third_party/cares/cares/src/lib/ares_parse_srv_reply.c",
"third_party/cares/cares/src/lib/ares_version.c",
"third_party/cares/cares/src/lib/ares_getenv.c",
"third_party/cares/cares/src/lib/ares_search.c",
"third_party/cares/cares/src/lib/ares_parse_mx_reply.c",
"third_party/cares/cares/src/lib/ares__get_hostent.c",
"third_party/cares/cares/src/lib/ares__readaddrinfo.c",
"third_party/cares/cares/src/lib/ares_parse_ptr_reply.c",
"third_party/cares/cares/src/lib/ares__read_line.c",
"third_party/cares/cares/src/lib/ares_query.c",
"third_party/cares/cares/src/lib/ares_options.c",
"third_party/cares/cares/src/lib/inet_net_pton.c",
"third_party/cares/cares/src/lib/ares_expand_name.c",
"third_party/cares/cares/src/lib/inet_ntop.c",
"third_party/cares/cares/src/lib/ares_cancel.c",
"third_party/cares/cares/src/lib/ares_writev.c",
],
'headers': [
"third_party/cares/ares_build.h",
"third_party/cares/cares/include/ares_version.h",
"third_party/cares/cares/include/ares.h",
"third_party/cares/cares/include/ares_rules.h",
"third_party/cares/cares/include/ares_dns.h",
"third_party/cares/cares/src/lib/ares_data.h",
"third_party/cares/cares/src/lib/ares_strsplit.h",
"third_party/cares/cares/src/lib/bitncmp.h",
"third_party/cares/cares/src/lib/ares_iphlpapi.h",
"third_party/cares/cares/src/lib/ares_inet_net_pton.h",
"third_party/cares/cares/src/lib/ares_getenv.h",
"third_party/cares/cares/src/lib/ares_platform.h",
"third_party/cares/cares/src/lib/ares_writev.h",
"third_party/cares/cares/src/lib/ares_private.h",
"third_party/cares/cares/src/lib/ares_setup.h",
"third_party/cares/cares/src/lib/config-win32.h",
"third_party/cares/cares/src/lib/ares_strcasecmp.h",
"third_party/cares/cares/src/lib/setup_once.h",
"third_party/cares/cares/src/lib/ares_ipv6.h",
"third_party/cares/cares/src/lib/ares_library_init.h",
"third_party/cares/cares/src/lib/ares_nameser.h",
"third_party/cares/cares/src/lib/ares_strdup.h",
"third_party/cares/cares/src/lib/config-dos.h",
"third_party/cares/cares/src/lib/ares_llist.h",
"third_party/cares/cares/src/lib/ares_nowarn.h",
"third_party/cares/cares/src/lib/ares_android.h",
"third_party/cares/config_darwin/ares_config.h",
"third_party/cares/config_freebsd/ares_config.h",
"third_party/cares/config_linux/ares_config.h",
"third_party/cares/config_openbsd/ares_config.h"
],
}]
out["libs"] = [
{
"name": "ares",
"defaults": "ares",
"build": "private",
"language": "c",
"secure": False,
"src": [
"third_party/cares/cares/src/lib/ares_init.c",
"third_party/cares/cares/src/lib/ares_expand_string.c",
"third_party/cares/cares/src/lib/ares_strcasecmp.c",
"third_party/cares/cares/src/lib/ares_destroy.c",
"third_party/cares/cares/src/lib/ares_free_string.c",
"third_party/cares/cares/src/lib/ares__timeval.c",
"third_party/cares/cares/src/lib/ares_library_init.c",
"third_party/cares/cares/src/lib/ares_getsock.c",
"third_party/cares/cares/src/lib/ares_process.c",
"third_party/cares/cares/src/lib/ares_create_query.c",
"third_party/cares/cares/src/lib/ares_fds.c",
"third_party/cares/cares/src/lib/ares_gethostbyname.c",
"third_party/cares/cares/src/lib/ares_mkquery.c",
"third_party/cares/cares/src/lib/ares_freeaddrinfo.c",
"third_party/cares/cares/src/lib/ares_strdup.c",
"third_party/cares/cares/src/lib/ares_timeout.c",
"third_party/cares/cares/src/lib/ares_getnameinfo.c",
"third_party/cares/cares/src/lib/ares_parse_soa_reply.c",
"third_party/cares/cares/src/lib/ares_parse_naptr_reply.c",
"third_party/cares/cares/src/lib/ares_parse_a_reply.c",
"third_party/cares/cares/src/lib/ares_send.c",
"third_party/cares/cares/src/lib/ares_nowarn.c",
"third_party/cares/cares/src/lib/ares__sortaddrinfo.c",
"third_party/cares/cares/src/lib/ares_android.c",
"third_party/cares/cares/src/lib/ares_strerror.c",
"third_party/cares/cares/src/lib/ares_parse_caa_reply.c",
"third_party/cares/cares/src/lib/ares__close_sockets.c",
"third_party/cares/cares/src/lib/ares_llist.c",
"third_party/cares/cares/src/lib/ares_parse_aaaa_reply.c",
"third_party/cares/cares/src/lib/ares_getaddrinfo.c",
"third_party/cares/cares/src/lib/ares_parse_ns_reply.c",
"third_party/cares/cares/src/lib/windows_port.c",
"third_party/cares/cares/src/lib/bitncmp.c",
"third_party/cares/cares/src/lib/ares_strsplit.c",
"third_party/cares/cares/src/lib/ares_data.c",
"third_party/cares/cares/src/lib/ares_free_hostent.c",
"third_party/cares/cares/src/lib/ares_platform.c",
"third_party/cares/cares/src/lib/ares_parse_txt_reply.c",
"third_party/cares/cares/src/lib/ares__parse_into_addrinfo.c",
"third_party/cares/cares/src/lib/ares_gethostbyaddr.c",
"third_party/cares/cares/src/lib/ares_parse_srv_reply.c",
"third_party/cares/cares/src/lib/ares_version.c",
"third_party/cares/cares/src/lib/ares_getenv.c",
"third_party/cares/cares/src/lib/ares_search.c",
"third_party/cares/cares/src/lib/ares_parse_mx_reply.c",
"third_party/cares/cares/src/lib/ares__get_hostent.c",
"third_party/cares/cares/src/lib/ares__readaddrinfo.c",
"third_party/cares/cares/src/lib/ares_parse_ptr_reply.c",
"third_party/cares/cares/src/lib/ares__read_line.c",
"third_party/cares/cares/src/lib/ares_query.c",
"third_party/cares/cares/src/lib/ares_options.c",
"third_party/cares/cares/src/lib/inet_net_pton.c",
"third_party/cares/cares/src/lib/ares_expand_name.c",
"third_party/cares/cares/src/lib/inet_ntop.c",
"third_party/cares/cares/src/lib/ares_cancel.c",
"third_party/cares/cares/src/lib/ares_writev.c",
],
"headers": [
"third_party/cares/ares_build.h",
"third_party/cares/cares/include/ares_version.h",
"third_party/cares/cares/include/ares.h",
"third_party/cares/cares/include/ares_rules.h",
"third_party/cares/cares/include/ares_dns.h",
"third_party/cares/cares/src/lib/ares_data.h",
"third_party/cares/cares/src/lib/ares_strsplit.h",
"third_party/cares/cares/src/lib/bitncmp.h",
"third_party/cares/cares/src/lib/ares_iphlpapi.h",
"third_party/cares/cares/src/lib/ares_inet_net_pton.h",
"third_party/cares/cares/src/lib/ares_getenv.h",
"third_party/cares/cares/src/lib/ares_platform.h",
"third_party/cares/cares/src/lib/ares_writev.h",
"third_party/cares/cares/src/lib/ares_private.h",
"third_party/cares/cares/src/lib/ares_setup.h",
"third_party/cares/cares/src/lib/config-win32.h",
"third_party/cares/cares/src/lib/ares_strcasecmp.h",
"third_party/cares/cares/src/lib/setup_once.h",
"third_party/cares/cares/src/lib/ares_ipv6.h",
"third_party/cares/cares/src/lib/ares_library_init.h",
"third_party/cares/cares/src/lib/ares_nameser.h",
"third_party/cares/cares/src/lib/ares_strdup.h",
"third_party/cares/cares/src/lib/config-dos.h",
"third_party/cares/cares/src/lib/ares_llist.h",
"third_party/cares/cares/src/lib/ares_nowarn.h",
"third_party/cares/cares/src/lib/ares_android.h",
"third_party/cares/config_darwin/ares_config.h",
"third_party/cares/config_freebsd/ares_config.h",
"third_party/cares/config_linux/ares_config.h",
"third_party/cares/config_openbsd/ares_config.h",
],
}
]
except:
pass

@ -117,7 +117,11 @@ def _parse_protoc_arguments(protoc_args, projectdir):
# msbuild integration uses, but it's not the only way.
(name, value) = arg.split("=", 1)
if name == "--dependency_out" or name == "--grpc_out" or name == "--csharp_out":
if (
name == "--dependency_out"
or name == "--grpc_out"
or name == "--csharp_out"
):
# For args that contain a path, make the path absolute and normalize it
# to make it easier to assert equality in tests.
value = _normalized_absolute_path(value)
@ -152,7 +156,8 @@ def _normalized_relative_to_projectdir(file, projectdir):
"""Convert a file path to one relative to the project directory."""
try:
return _normalize_slashes(
os.path.relpath(os.path.abspath(file), projectdir))
os.path.relpath(os.path.abspath(file), projectdir)
)
except ValueError:
# On Windows if the paths are on different drives then we get this error
# Just return the absolute path
@ -170,7 +175,7 @@ def _normalize_slashes(path):
def _write_or_update_results_json(log_dir, protofile, protoc_arg_dict):
""" Write or update the results JSON file """
"""Write or update the results JSON file"""
# Read existing json.
# Since protoc may be called more than once each build/test if there is
@ -182,9 +187,9 @@ def _write_or_update_results_json(log_dir, protofile, protoc_arg_dict):
results_json = json.load(forig)
else:
results_json = {}
results_json['Files'] = {}
results_json["Files"] = {}
results_json['Files'][protofile] = protoc_arg_dict
results_json["Files"][protofile] = protoc_arg_dict
results_json["Metadata"] = {"timestamp": str(datetime.datetime.now())}
with open(fname, "w") as fout:
@ -227,8 +232,9 @@ def _is_grpc_out_file(csfile):
return csfile.endswith("Grpc.cs")
def _generate_cs_files(protofile, cs_files_to_generate, grpc_out_dir,
csharp_out_dir, projectdir):
def _generate_cs_files(
protofile, cs_files_to_generate, grpc_out_dir, csharp_out_dir, projectdir
):
"""Create expected cs files."""
_write_debug("\ngenerate_cs_files")
@ -262,8 +268,13 @@ def _generate_cs_files(protofile, cs_files_to_generate, grpc_out_dir,
print("// Generated by fake protoc: %s" % timestamp, file=fout)
def _create_dependency_file(protofile, cs_files_to_generate, dependencyfile,
grpc_out_dir, csharp_out_dir):
def _create_dependency_file(
protofile,
cs_files_to_generate,
dependencyfile,
grpc_out_dir,
csharp_out_dir,
):
"""Create the expected dependency file."""
_write_debug("\ncreate_dependency_file")
@ -312,21 +323,21 @@ def _get_argument_last_occurrence_or_none(protoc_arg_dict, name):
def main():
# Check environment variables for the additional arguments used in the tests.
projectdir = _getenv('FAKEPROTOC_PROJECTDIR')
projectdir = _getenv("FAKEPROTOC_PROJECTDIR")
if not projectdir:
print("FAKEPROTOC_PROJECTDIR not set")
sys.exit(1)
projectdir = os.path.abspath(projectdir)
# Output directory for generated files and output file
protoc_outdir = _getenv('FAKEPROTOC_OUTDIR')
protoc_outdir = _getenv("FAKEPROTOC_OUTDIR")
if not protoc_outdir:
print("FAKEPROTOC_OUTDIR not set")
sys.exit(1)
protoc_outdir = os.path.abspath(protoc_outdir)
# Get list of expected generated files from env variable
generate_expected = _getenv('FAKEPROTOC_GENERATE_EXPECTED')
generate_expected = _getenv("FAKEPROTOC_GENERATE_EXPECTED")
if not generate_expected:
print("FAKEPROTOC_GENERATE_EXPECTED not set")
sys.exit(1)
@ -338,9 +349,13 @@ def main():
_open_debug_log("%s/fakeprotoc_log.txt" % log_dir)
_write_debug(
("##### fakeprotoc called at %s\n" + "FAKEPROTOC_PROJECTDIR = %s\n" +
"FAKEPROTOC_GENERATE_EXPECTED = %s\n") %
(datetime.datetime.now(), projectdir, generate_expected))
(
"##### fakeprotoc called at %s\n"
+ "FAKEPROTOC_PROJECTDIR = %s\n"
+ "FAKEPROTOC_GENERATE_EXPECTED = %s\n"
)
% (datetime.datetime.now(), projectdir, generate_expected)
)
proto_to_generated = _parse_generate_expected(generate_expected)
protoc_args = _read_protoc_arguments()
@ -349,43 +364,52 @@ def main():
# If argument was passed multiple times, take the last occurrence of it.
# TODO(jtattermusch): handle multiple occurrences of the same argument
dependencyfile = _get_argument_last_occurrence_or_none(
protoc_arg_dict, '--dependency_out')
grpcout = _get_argument_last_occurrence_or_none(protoc_arg_dict,
'--grpc_out')
csharpout = _get_argument_last_occurrence_or_none(protoc_arg_dict,
'--csharp_out')
protoc_arg_dict, "--dependency_out"
)
grpcout = _get_argument_last_occurrence_or_none(
protoc_arg_dict, "--grpc_out"
)
csharpout = _get_argument_last_occurrence_or_none(
protoc_arg_dict, "--csharp_out"
)
# --grpc_out might not be set in which case use --csharp_out
if grpcout is None:
grpcout = csharpout
if len(protoc_arg_dict.get('protofile')) != 1:
if len(protoc_arg_dict.get("protofile")) != 1:
# regular protoc can process multiple .proto files passed at once, but we know
# the Grpc.Tools msbuild integration only ever passes one .proto file per invocation.
print(
"Expecting to get exactly one .proto file argument per fakeprotoc invocation."
"Expecting to get exactly one .proto file argument per fakeprotoc"
" invocation."
)
sys.exit(1)
protofile = protoc_arg_dict.get('protofile')[0]
protofile = protoc_arg_dict.get("protofile")[0]
cs_files_to_generate = _get_cs_files_to_generate(
protofile=protofile, proto_to_generated=proto_to_generated)
_create_dependency_file(protofile=protofile,
cs_files_to_generate=cs_files_to_generate,
dependencyfile=dependencyfile,
grpc_out_dir=grpcout,
csharp_out_dir=csharpout)
_generate_cs_files(protofile=protofile,
cs_files_to_generate=cs_files_to_generate,
grpc_out_dir=grpcout,
csharp_out_dir=csharpout,
projectdir=projectdir)
_write_or_update_results_json(log_dir=log_dir,
protofile=protofile,
protoc_arg_dict=protoc_arg_dict)
protofile=protofile, proto_to_generated=proto_to_generated
)
_create_dependency_file(
protofile=protofile,
cs_files_to_generate=cs_files_to_generate,
dependencyfile=dependencyfile,
grpc_out_dir=grpcout,
csharp_out_dir=csharpout,
)
_generate_cs_files(
protofile=protofile,
cs_files_to_generate=cs_files_to_generate,
grpc_out_dir=grpcout,
csharp_out_dir=csharpout,
projectdir=projectdir,
)
_write_or_update_results_json(
log_dir=log_dir, protofile=protofile, protoc_arg_dict=protoc_arg_dict
)
_close_debug_log()

@ -22,9 +22,11 @@ if len(sys.argv) < 2:
sys.exit()
for file_name in sys.argv[1:]:
print("Modifying format of {file} comments in place...".format(
file=file_name,))
print(
"Modifying format of {file} comments in place...".format(
file=file_name,
)
)
# Input
@ -54,7 +56,7 @@ for file_name in sys.argv[1:]:
# Pattern matching
comment_regex = r'^(\s*)//\s(.*)$'
comment_regex = r"^(\s*)//\s(.*)$"
def is_comment(line):
return re.search(comment_regex, line)
@ -84,8 +86,11 @@ for file_name in sys.argv[1:]:
if len(comment_block) == 1:
return [indent + "/** " + content(comment_block[0]) + " */\n"]
block = ["/**"] + [" * " + content(line) for line in comment_block
] + [" */"]
block = (
["/**"]
+ [" * " + content(line) for line in comment_block]
+ [" */"]
)
return [indent + line.rstrip() + "\n" for line in block]
# Main algorithm
@ -97,7 +102,7 @@ for file_name in sys.argv[1:]:
comment_block = []
# Get all lines in the same comment block. We could restrict the indentation
# to be the same as the first line of the block, but it's probably ok.
while (next_line(is_comment)):
while next_line(is_comment):
comment_block.append(read_line())
for line in format_as_block(comment_block):

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save